From acd94ff128186089c50828751101cb97e3ca97df Mon Sep 17 00:00:00 2001 From: nikpivkin Date: Fri, 1 Dec 2023 16:40:59 +0700 Subject: [PATCH 1/5] merge trivy-iac --- go.mod | 55 +- go.sum | 81 +- .../aws/000000000000/eu-central-1/data.json | 1 + internal/adapters/arm/adapt.go | 50 + internal/adapters/arm/appservice/adapt.go | 58 + internal/adapters/arm/authorization/adapt.go | 38 + internal/adapters/arm/compute/adapt.go | 85 + internal/adapters/arm/compute/adapt_test.go | 60 + internal/adapters/arm/container/adapt.go | 17 + internal/adapters/arm/database/adapt.go | 35 + internal/adapters/arm/database/firewall.go | 18 + internal/adapters/arm/database/maria.go | 27 + internal/adapters/arm/database/mssql.go | 61 + internal/adapters/arm/database/postgresql.go | 64 + internal/adapters/arm/datafactory/adapt.go | 27 + internal/adapters/arm/datalake/adapt.go | 28 + internal/adapters/arm/keyvault/adapt.go | 64 + internal/adapters/arm/monitor/adapt.go | 45 + internal/adapters/arm/network/adapt.go | 126 + internal/adapters/arm/securitycenter/adapt.go | 43 + internal/adapters/arm/storage/adapt.go | 69 + internal/adapters/arm/storage/adapt_test.go | 59 + internal/adapters/arm/synapse/adapt.go | 34 + internal/adapters/cloudformation/adapt.go | 14 + .../aws/accessanalyzer/accessanalyzer.go | 13 + .../aws/accessanalyzer/analyzer.go | 24 + internal/adapters/cloudformation/aws/adapt.go | 74 + .../aws/apigateway/apigateway.go | 21 + .../cloudformation/aws/apigateway/stage.go | 68 + .../cloudformation/aws/athena/athena.go | 14 + .../cloudformation/aws/athena/workgroup.go | 30 + .../aws/cloudfront/cloudfront.go | 13 + .../aws/cloudfront/distribution.go | 55 + .../aws/cloudtrail/cloudtrail.go | 13 + .../cloudformation/aws/cloudtrail/trails.go | 27 + .../aws/cloudwatch/cloudwatch.go | 14 + .../aws/cloudwatch/log_group.go | 26 + .../cloudformation/aws/codebuild/codebuild.go | 13 + .../cloudformation/aws/codebuild/project.go | 63 + .../cloudformation/aws/config/aggregator.go | 50 + 
.../cloudformation/aws/config/config.go | 13 + .../cloudformation/aws/documentdb/cluster.go | 58 + .../aws/documentdb/documentdb.go | 13 + .../cloudformation/aws/dynamodb/cluster.go | 36 + .../cloudformation/aws/dynamodb/dynamodb.go | 13 + .../adapters/cloudformation/aws/ec2/ec2.go | 20 + .../cloudformation/aws/ec2/instance.go | 70 + .../aws/ec2/launch_configuration.go | 48 + .../cloudformation/aws/ec2/launch_template.go | 56 + .../adapters/cloudformation/aws/ec2/nacl.go | 71 + .../cloudformation/aws/ec2/security_group.go | 68 + .../adapters/cloudformation/aws/ec2/subnet.go | 21 + .../adapters/cloudformation/aws/ec2/volume.go | 25 + .../adapters/cloudformation/aws/ecr/ecr.go | 13 + .../cloudformation/aws/ecr/repository.go | 93 + .../cloudformation/aws/ecs/cluster.go | 57 + .../adapters/cloudformation/aws/ecs/ecs.go | 14 + .../cloudformation/aws/ecs/task_definition.go | 86 + .../adapters/cloudformation/aws/efs/efs.go | 13 + .../cloudformation/aws/efs/filesystem.go | 23 + .../cloudformation/aws/eks/cluster.go | 56 + .../adapters/cloudformation/aws/eks/eks.go | 13 + .../cloudformation/aws/elasticache/cluster.go | 24 + .../aws/elasticache/elasticache.go | 15 + .../aws/elasticache/replication_group.go | 23 + .../aws/elasticache/security_group.go | 22 + .../aws/elasticsearch/domain.go | 84 + .../aws/elasticsearch/elasticsearch.go | 13 + .../adapters/cloudformation/aws/elb/elb.go | 13 + .../cloudformation/aws/elb/loadbalancer.go | 89 + .../adapters/cloudformation/aws/iam/iam.go | 27 + .../adapters/cloudformation/aws/iam/policy.go | 125 + .../cloudformation/aws/kinesis/kinesis.go | 13 + .../cloudformation/aws/kinesis/stream.go | 36 + .../cloudformation/aws/lambda/function.go | 53 + .../cloudformation/aws/lambda/lambda.go | 13 + .../adapters/cloudformation/aws/mq/broker.go | 33 + internal/adapters/cloudformation/aws/mq/mq.go | 13 + .../cloudformation/aws/msk/cluster.go | 80 + .../adapters/cloudformation/aws/msk/msk.go | 13 + .../cloudformation/aws/neptune/cluster.go | 34 + 
.../cloudformation/aws/neptune/neptune.go | 13 + .../cloudformation/aws/rds/cluster.go | 80 + .../cloudformation/aws/rds/instance.go | 128 + .../aws/rds/parameter_groups.go | 42 + .../adapters/cloudformation/aws/rds/rds.go | 18 + .../cloudformation/aws/redshift/cluster.go | 54 + .../cloudformation/aws/redshift/redshift.go | 16 + .../aws/redshift/security_group.go | 17 + .../adapters/cloudformation/aws/s3/bucket.go | 148 + internal/adapters/cloudformation/aws/s3/s3.go | 13 + .../adapters/cloudformation/aws/sam/api.go | 96 + .../cloudformation/aws/sam/function.go | 58 + .../cloudformation/aws/sam/http_api.go | 64 + .../adapters/cloudformation/aws/sam/sam.go | 17 + .../cloudformation/aws/sam/state_machines.go | 80 + .../adapters/cloudformation/aws/sam/tables.go | 42 + .../adapters/cloudformation/aws/sns/sns.go | 13 + .../adapters/cloudformation/aws/sns/topic.go | 24 + .../adapters/cloudformation/aws/sqs/queue.go | 66 + .../adapters/cloudformation/aws/sqs/sqs.go | 13 + .../adapters/cloudformation/aws/ssm/secret.go | 18 + .../adapters/cloudformation/aws/ssm/ssm.go | 13 + .../aws/workspaces/workspace.go | 31 + .../aws/workspaces/workspaces.go | 13 + internal/adapters/terraform/adapt.go | 31 + .../aws/accessanalyzer/accessanalyzer.go | 40 + internal/adapters/terraform/aws/adapt.go | 79 + .../terraform/aws/apigateway/adapt.go | 21 + .../terraform/aws/apigateway/adapt_test.go | 233 + .../terraform/aws/apigateway/apiv1.go | 115 + .../terraform/aws/apigateway/apiv1_test.go | 125 + .../terraform/aws/apigateway/apiv2.go | 69 + .../terraform/aws/apigateway/apiv2_test.go | 103 + .../terraform/aws/apigateway/namesv1.go | 24 + .../terraform/aws/apigateway/namesv1_test.go | 54 + .../terraform/aws/apigateway/namesv2.go | 28 + .../terraform/aws/apigateway/namesv2_test.go | 56 + .../adapters/terraform/aws/athena/adapt.go | 80 + .../terraform/aws/athena/adapt_test.go | 211 + .../terraform/aws/cloudfront/adapt.go | 79 + .../terraform/aws/cloudfront/adapt_test.go | 163 + 
.../terraform/aws/cloudtrail/adapt.go | 67 + .../terraform/aws/cloudtrail/adapt_test.go | 106 + .../terraform/aws/cloudwatch/adapt.go | 47 + .../terraform/aws/cloudwatch/adapt_test.go | 114 + .../adapters/terraform/aws/codebuild/adapt.go | 66 + .../terraform/aws/codebuild/adapt_test.go | 116 + .../adapters/terraform/aws/config/adapt.go | 33 + .../terraform/aws/config/adapt_test.go | 81 + .../terraform/aws/documentdb/adapt.go | 63 + .../terraform/aws/documentdb/adapt_test.go | 125 + .../adapters/terraform/aws/dynamodb/adapt.go | 94 + .../terraform/aws/dynamodb/adapt_test.go | 176 + internal/adapters/terraform/aws/ec2/adapt.go | 74 + .../adapters/terraform/aws/ec2/adapt_test.go | 166 + .../adapters/terraform/aws/ec2/autoscaling.go | 121 + .../terraform/aws/ec2/autoscaling_test.go | 199 + internal/adapters/terraform/aws/ec2/subnet.go | 26 + .../adapters/terraform/aws/ec2/subnet_test.go | 90 + internal/adapters/terraform/aws/ec2/volume.go | 40 + .../adapters/terraform/aws/ec2/volume_test.go | 112 + internal/adapters/terraform/aws/ec2/vpc.go | 229 + .../adapters/terraform/aws/ec2/vpc_test.go | 339 + internal/adapters/terraform/aws/ecr/adapt.go | 113 + .../adapters/terraform/aws/ecr/adapt_test.go | 248 + internal/adapters/terraform/aws/ecs/adapt.go | 107 + .../adapters/terraform/aws/ecs/adapt_test.go | 246 + internal/adapters/terraform/aws/efs/adapt.go | 32 + .../adapters/terraform/aws/efs/adapt_test.go | 78 + internal/adapters/terraform/aws/eks/adapt.go | 91 + .../adapters/terraform/aws/eks/adapt_test.go | 163 + .../terraform/aws/elasticache/adapt.go | 85 + .../terraform/aws/elasticache/adapt_test.go | 233 + .../terraform/aws/elasticsearch/adapt.go | 100 + .../terraform/aws/elasticsearch/adapt_test.go | 173 + internal/adapters/terraform/aws/elb/adapt.go | 120 + .../adapters/terraform/aws/elb/adapt_test.go | 161 + internal/adapters/terraform/aws/emr/adapt.go | 49 + .../adapters/terraform/aws/emr/adapt_test.go | 116 + internal/adapters/terraform/aws/iam/adapt.go | 16 + 
.../adapters/terraform/aws/iam/adapt_test.go | 128 + .../adapters/terraform/aws/iam/convert.go | 240 + internal/adapters/terraform/aws/iam/groups.go | 32 + .../adapters/terraform/aws/iam/groups_test.go | 115 + .../adapters/terraform/aws/iam/passwords.go | 76 + .../terraform/aws/iam/passwords_test.go | 54 + .../adapters/terraform/aws/iam/policies.go | 109 + .../terraform/aws/iam/policies_test.go | 182 + internal/adapters/terraform/aws/iam/roles.go | 38 + .../adapters/terraform/aws/iam/roles_test.go | 220 + internal/adapters/terraform/aws/iam/users.go | 56 + .../adapters/terraform/aws/iam/users_test.go | 173 + .../adapters/terraform/aws/kinesis/adapt.go | 41 + .../terraform/aws/kinesis/adapt_test.go | 87 + internal/adapters/terraform/aws/kms/adapt.go | 36 + .../adapters/terraform/aws/kms/adapt_test.go | 80 + .../adapters/terraform/aws/lambda/adapt.go | 98 + .../terraform/aws/lambda/adapt_test.go | 155 + internal/adapters/terraform/aws/mq/adapt.go | 48 + .../adapters/terraform/aws/mq/adapt_test.go | 119 + internal/adapters/terraform/aws/msk/adapt.go | 97 + .../adapters/terraform/aws/msk/adapt_test.go | 200 + .../adapters/terraform/aws/neptune/adapt.go | 50 + .../terraform/aws/neptune/adapt_test.go | 97 + .../adapters/terraform/aws/provider/adapt.go | 166 + .../terraform/aws/provider/adapt_test.go | 129 + internal/adapters/terraform/aws/rds/adapt.go | 256 + .../adapters/terraform/aws/rds/adapt_test.go | 332 + .../adapters/terraform/aws/redshift/adapt.go | 117 + .../terraform/aws/redshift/adapt_test.go | 230 + internal/adapters/terraform/aws/s3/adapt.go | 18 + .../adapters/terraform/aws/s3/adapt_test.go | 385 + internal/adapters/terraform/aws/s3/bucket.go | 283 + .../adapters/terraform/aws/s3/bucket_test.go | 331 + .../adapters/terraform/aws/s3/policies.go | 53 + .../terraform/aws/s3/public_access_block.go | 41 + internal/adapters/terraform/aws/sns/adapt.go | 38 + .../adapters/terraform/aws/sns/adapt_test.go | 82 + internal/adapters/terraform/aws/sqs/adapt.go | 167 + 
.../adapters/terraform/aws/sqs/adapt_test.go | 140 + internal/adapters/terraform/aws/ssm/adapt.go | 40 + .../adapters/terraform/aws/ssm/adapt_test.go | 110 + .../terraform/aws/workspaces/adapt.go | 57 + .../terraform/aws/workspaces/adapt_test.go | 111 + internal/adapters/terraform/azure/adapt.go | 37 + .../terraform/azure/appservice/adapt.go | 94 + .../terraform/azure/appservice/adapt_test.go | 188 + .../terraform/azure/authorization/adapt.go | 42 + .../azure/authorization/adapt_test.go | 119 + .../adapters/terraform/azure/compute/adapt.go | 135 + .../terraform/azure/compute/adapt_test.go | 238 + .../terraform/azure/container/adapt.go | 106 + .../terraform/azure/container/adapt_test.go | 262 + .../terraform/azure/database/adapt.go | 439 ++ .../terraform/azure/database/adapt_test.go | 454 ++ .../terraform/azure/datafactory/adapt.go | 33 + .../terraform/azure/datafactory/adapt_test.go | 79 + .../terraform/azure/datalake/adapt.go | 38 + .../terraform/azure/datalake/adapt_test.go | 83 + .../terraform/azure/keyvault/adapt.go | 159 + .../terraform/azure/keyvault/adapt_test.go | 271 + .../adapters/terraform/azure/monitor/adapt.go | 56 + .../terraform/azure/monitor/adapt_test.go | 128 + .../adapters/terraform/azure/network/adapt.go | 220 + .../terraform/azure/network/adapt_test.go | 262 + .../terraform/azure/securitycenter/adapt.go | 59 + .../azure/securitycenter/adapt_test.go | 137 + .../adapters/terraform/azure/storage/adapt.go | 173 + .../terraform/azure/storage/adapt_test.go | 252 + .../adapters/terraform/azure/synapse/adapt.go | 32 + .../terraform/azure/synapse/adapt_test.go | 83 + .../adapters/terraform/cloudstack/adapt.go | 13 + .../terraform/cloudstack/compute/adapt.go | 49 + .../cloudstack/compute/adapt_test.go | 91 + .../adapters/terraform/digitalocean/adapt.go | 15 + .../terraform/digitalocean/compute/adapt.go | 106 + .../digitalocean/compute/adapt_test.go | 350 + .../terraform/digitalocean/spaces/adapt.go | 91 + .../digitalocean/spaces/adapt_test.go | 144 + 
internal/adapters/terraform/github/adapt.go | 17 + .../github/branch_protections/adapt.go | 30 + .../github/branch_protections/adapt_test.go | 59 + .../terraform/github/repositories/adapt.go | 48 + .../github/repositories/adapt_test.go | 111 + .../terraform/github/secrets/adapt.go | 32 + .../terraform/github/secrets/adapt_test.go | 69 + internal/adapters/terraform/google/adapt.go | 27 + .../terraform/google/bigquery/adapt.go | 54 + .../terraform/google/bigquery/adapt_test.go | 155 + .../terraform/google/compute/adapt.go | 16 + .../terraform/google/compute/adapt_test.go | 210 + .../terraform/google/compute/disks.go | 38 + .../terraform/google/compute/disks_test.go | 93 + .../terraform/google/compute/instances.go | 124 + .../google/compute/instances_test.go | 169 + .../terraform/google/compute/metadata.go | 26 + .../terraform/google/compute/metadata_test.go | 56 + .../terraform/google/compute/networks.go | 198 + .../terraform/google/compute/networks_test.go | 126 + .../adapters/terraform/google/compute/ssl.go | 19 + .../terraform/google/compute/ssl_test.go | 62 + .../adapters/terraform/google/dns/adapt.go | 64 + .../terraform/google/dns/adapt_test.go | 113 + .../adapters/terraform/google/gke/adapt.go | 328 + .../terraform/google/gke/adapt_test.go | 412 + .../adapters/terraform/google/iam/adapt.go | 108 + .../terraform/google/iam/adapt_test.go | 266 + .../adapters/terraform/google/iam/convert.go | 26 + .../terraform/google/iam/folder_iam.go | 117 + .../adapters/terraform/google/iam/folders.go | 40 + .../adapters/terraform/google/iam/org_iam.go | 113 + .../terraform/google/iam/project_iam.go | 287 + .../terraform/google/iam/project_iam_test.go | 59 + .../adapters/terraform/google/iam/projects.go | 58 + .../iam/workload_identity_pool_providers.go | 18 + .../adapters/terraform/google/kms/adapt.go | 60 + .../terraform/google/kms/adapt_test.go | 126 + .../adapters/terraform/google/sql/adapt.go | 156 + .../terraform/google/sql/adapt_test.go | 278 + 
.../terraform/google/storage/adapt.go | 129 + .../terraform/google/storage/adapt_test.go | 198 + .../adapters/terraform/google/storage/iam.go | 96 + .../adapters/terraform/kubernetes/adapt.go | 94 + .../terraform/nifcloud/computing/adapt.go | 16 + .../nifcloud/computing/adapt_test.go | 61 + .../terraform/nifcloud/computing/instance.go | 35 + .../nifcloud/computing/instance_test.go | 71 + .../nifcloud/computing/security_group.go | 76 + .../nifcloud/computing/security_group_test.go | 86 + .../adapters/terraform/nifcloud/dns/adapt.go | 12 + .../terraform/nifcloud/dns/adapt_test.go | 32 + .../adapters/terraform/nifcloud/dns/record.go | 23 + .../terraform/nifcloud/dns/record_test.go | 56 + .../adapters/terraform/nifcloud/nas/adapt.go | 13 + .../terraform/nifcloud/nas/adapt_test.go | 44 + .../terraform/nifcloud/nas/nas_instance.go | 22 + .../nifcloud/nas/nas_instance_test.go | 54 + .../nifcloud/nas/nas_security_group.go | 30 + .../nifcloud/nas/nas_security_group_test.go | 66 + .../terraform/nifcloud/network/adapt.go | 16 + .../terraform/nifcloud/network/adapt_test.go | 83 + .../nifcloud/network/elastic_load_balancer.go | 50 + .../network/elastic_load_balancer_test.go | 90 + .../nifcloud/network/load_balancer.go | 67 + .../nifcloud/network/load_balancer_test.go | 75 + .../terraform/nifcloud/network/router.go | 37 + .../terraform/nifcloud/network/router_test.go | 70 + .../terraform/nifcloud/network/vpn_gateway.go | 22 + .../nifcloud/network/vpn_gateway_test.go | 53 + .../adapters/terraform/nifcloud/nifcloud.go | 23 + .../adapters/terraform/nifcloud/rdb/adapt.go | 13 + .../terraform/nifcloud/rdb/adapt_test.go | 60 + .../terraform/nifcloud/rdb/db_instance.go | 26 + .../nifcloud/rdb/db_instance_test.go | 66 + .../nifcloud/rdb/db_security_group.go | 30 + .../nifcloud/rdb/db_security_group_test.go | 66 + .../nifcloud/sslcertificate/adapt.go | 12 + .../nifcloud/sslcertificate/adapt_test.go | 28 + .../sslcertificate/server_certificate.go | 41 + 
.../sslcertificate/server_certificate_test.go | 72 + .../adapters/terraform/openstack/adapt.go | 84 + .../terraform/openstack/adapt_test.go | 133 + .../terraform/openstack/networking.go | 77 + .../terraform/openstack/networking_test.go | 72 + internal/adapters/terraform/oracle/adapt.go | 30 + .../adapters/terraform/tftestutil/testutil.go | 26 + internal/rules/register.go | 134 + internal/rules/register_test.go | 139 + .../analyzer/config/terraform/terraform.go | 2 +- pkg/fanal/analyzer/const.go | 2 +- pkg/fanal/secret/builtin-rules.go | 2 +- pkg/iac/detection/detect.go | 296 + pkg/iac/detection/detect_test.go | 410 + pkg/iac/detection/peek.go | 53 + pkg/iac/detection/testdata/big.file | Bin 0 -> 5120 bytes pkg/iac/detection/testdata/small.file | 3 + pkg/iac/extrafs/extrafs.go | 54 + pkg/iac/rego/build.go | 84 + pkg/iac/rego/custom.go | 109 + pkg/iac/rego/embed.go | 107 + pkg/iac/rego/embed_test.go | 123 + pkg/iac/rego/exceptions.go | 33 + pkg/iac/rego/load.go | 210 + pkg/iac/rego/load_test.go | 46 + pkg/iac/rego/metadata.go | 380 + pkg/iac/rego/metadata_test.go | 188 + pkg/iac/rego/result.go | 166 + pkg/iac/rego/result_test.go | 104 + pkg/iac/rego/runtime.go | 28 + pkg/iac/rego/scanner.go | 413 + pkg/iac/rego/scanner_test.go | 978 +++ pkg/iac/rego/schemas/00_schema.go | 22 + pkg/iac/rego/schemas/builder.go | 270 + pkg/iac/rego/schemas/cloud.json | 6818 +++++++++++++++++ pkg/iac/rego/schemas/dockerfile.json | 70 + pkg/iac/rego/schemas/kubernetes.json | 51 + pkg/iac/rego/schemas/rbac.json | 51 + pkg/iac/rego/schemas/schemas.go | 16 + pkg/iac/rego/store.go | 48 + pkg/iac/rego/testdata/policies/._sysfile.rego | 0 pkg/iac/rego/testdata/policies/invalid.rego | 8 + pkg/iac/rego/testdata/policies/valid.rego | 8 + pkg/iac/rules/providers.go | 169 + pkg/iac/rules/register.go | 25 + pkg/iac/rules/rules.go | 82 + .../azure/arm/parser/armjson/bench_test.go | 71 + .../azure/arm/parser/armjson/decode.go | 66 + .../azure/arm/parser/armjson/decode_array.go | 51 + 
.../arm/parser/armjson/decode_boolean.go | 18 + .../arm/parser/armjson/decode_meta_test.go | 40 + .../azure/arm/parser/armjson/decode_null.go | 10 + .../azure/arm/parser/armjson/decode_number.go | 46 + .../azure/arm/parser/armjson/decode_object.go | 122 + .../azure/arm/parser/armjson/decode_string.go | 19 + .../scanners/azure/arm/parser/armjson/kind.go | 14 + .../scanners/azure/arm/parser/armjson/node.go | 59 + .../azure/arm/parser/armjson/parse.go | 150 + .../azure/arm/parser/armjson/parse_array.go | 54 + .../arm/parser/armjson/parse_array_test.go | 46 + .../azure/arm/parser/armjson/parse_boolean.go | 40 + .../arm/parser/armjson/parse_boolean_test.go | 54 + .../azure/arm/parser/armjson/parse_comment.go | 98 + .../arm/parser/armjson/parse_complex_test.go | 131 + .../azure/arm/parser/armjson/parse_null.go | 23 + .../arm/parser/armjson/parse_null_test.go | 18 + .../azure/arm/parser/armjson/parse_number.go | 163 + .../arm/parser/armjson/parse_number_test.go | 178 + .../azure/arm/parser/armjson/parse_object.go | 143 + .../arm/parser/armjson/parse_object_test.go | 115 + .../azure/arm/parser/armjson/parse_string.go | 91 + .../arm/parser/armjson/parse_string_test.go | 37 + .../arm/parser/armjson/parse_whitespace.go | 29 + .../azure/arm/parser/armjson/reader.go | 36 + .../azure/arm/parser/armjson/reader_test.go | 62 + .../azure/arm/parser/armjson/unmarshal.go | 40 + pkg/iac/scanners/azure/arm/parser/parser.go | 194 + .../scanners/azure/arm/parser/parser_test.go | 338 + pkg/iac/scanners/azure/arm/parser/template.go | 78 + .../azure/arm/parser/template_test.go | 60 + .../azure/arm/parser/testdata/example.json | 15 + .../azure/arm/parser/testdata/postgres.json | 73 + pkg/iac/scanners/azure/arm/scanner.go | 187 + pkg/iac/scanners/azure/deployment.go | 179 + pkg/iac/scanners/azure/expressions/lex.go | 203 + pkg/iac/scanners/azure/expressions/node.go | 75 + .../azure/expressions/token_walker.go | 40 + pkg/iac/scanners/azure/functions/add.go | 15 + 
pkg/iac/scanners/azure/functions/add_test.go | 38 + pkg/iac/scanners/azure/functions/and.go | 27 + pkg/iac/scanners/azure/functions/and_test.go | 39 + pkg/iac/scanners/azure/functions/array.go | 29 + .../scanners/azure/functions/array_test.go | 44 + pkg/iac/scanners/azure/functions/base64.go | 52 + .../scanners/azure/functions/base64_test.go | 85 + pkg/iac/scanners/azure/functions/bool.go | 20 + pkg/iac/scanners/azure/functions/bool_test.go | 63 + pkg/iac/scanners/azure/functions/casing.go | 29 + .../scanners/azure/functions/casing_test.go | 71 + pkg/iac/scanners/azure/functions/coalesce.go | 10 + .../scanners/azure/functions/coalesce_test.go | 56 + pkg/iac/scanners/azure/functions/concat.go | 28 + .../scanners/azure/functions/concat_test.go | 94 + pkg/iac/scanners/azure/functions/contains.go | 40 + .../scanners/azure/functions/contains_test.go | 95 + .../scanners/azure/functions/copy_index.go | 25 + .../azure/functions/copy_index_test.go | 52 + .../scanners/azure/functions/create_array.go | 11 + .../azure/functions/create_array_test.go | 68 + .../scanners/azure/functions/create_object.go | 21 + .../azure/functions/create_object_test.go | 60 + pkg/iac/scanners/azure/functions/data_uri.go | 36 + .../scanners/azure/functions/data_uri_test.go | 53 + .../scanners/azure/functions/date_time_add.go | 115 + .../azure/functions/date_time_epoch.go | 38 + .../azure/functions/date_time_epoch_test.go | 51 + .../azure/functions/datetime_add_test.go | 72 + .../scanners/azure/functions/deployment.go | 75 + pkg/iac/scanners/azure/functions/div.go | 15 + pkg/iac/scanners/azure/functions/div_test.go | 38 + pkg/iac/scanners/azure/functions/empty.go | 33 + .../scanners/azure/functions/empty_test.go | 68 + pkg/iac/scanners/azure/functions/ends_with.go | 22 + .../azure/functions/ends_with_test.go | 41 + pkg/iac/scanners/azure/functions/equals.go | 25 + .../scanners/azure/functions/equals_test.go | 111 + pkg/iac/scanners/azure/functions/false.go | 5 + 
pkg/iac/scanners/azure/functions/first.go | 37 + .../scanners/azure/functions/first_test.go | 51 + pkg/iac/scanners/azure/functions/float.go | 20 + .../scanners/azure/functions/float_test.go | 36 + pkg/iac/scanners/azure/functions/format.go | 31 + .../scanners/azure/functions/format_test.go | 42 + pkg/iac/scanners/azure/functions/functions.go | 99 + pkg/iac/scanners/azure/functions/greater.go | 47 + .../scanners/azure/functions/greater_test.go | 119 + pkg/iac/scanners/azure/functions/guid.go | 44 + pkg/iac/scanners/azure/functions/guid_test.go | 35 + pkg/iac/scanners/azure/functions/if.go | 15 + pkg/iac/scanners/azure/functions/if_test.go | 44 + pkg/iac/scanners/azure/functions/index_of.go | 22 + .../scanners/azure/functions/index_of_test.go | 48 + pkg/iac/scanners/azure/functions/int.go | 20 + pkg/iac/scanners/azure/functions/int_test.go | 36 + .../scanners/azure/functions/intersection.go | 76 + .../azure/functions/intersection_test.go | 106 + pkg/iac/scanners/azure/functions/items.go | 6 + pkg/iac/scanners/azure/functions/join.go | 22 + pkg/iac/scanners/azure/functions/join_test.go | 39 + pkg/iac/scanners/azure/functions/json.go | 20 + pkg/iac/scanners/azure/functions/json_test.go | 42 + pkg/iac/scanners/azure/functions/last.go | 37 + .../scanners/azure/functions/last_index_of.go | 22 + .../azure/functions/last_index_of_test.go | 48 + pkg/iac/scanners/azure/functions/last_test.go | 51 + pkg/iac/scanners/azure/functions/length.go | 29 + .../scanners/azure/functions/length_test.go | 53 + pkg/iac/scanners/azure/functions/less.go | 47 + pkg/iac/scanners/azure/functions/less_test.go | 119 + pkg/iac/scanners/azure/functions/max.go | 33 + pkg/iac/scanners/azure/functions/max_test.go | 58 + pkg/iac/scanners/azure/functions/min.go | 33 + pkg/iac/scanners/azure/functions/min_test.go | 58 + pkg/iac/scanners/azure/functions/mod.go | 14 + pkg/iac/scanners/azure/functions/mod_test.go | 41 + pkg/iac/scanners/azure/functions/mul.go | 15 + 
pkg/iac/scanners/azure/functions/mul_test.go | 38 + pkg/iac/scanners/azure/functions/not.go | 13 + pkg/iac/scanners/azure/functions/not_test.go | 33 + pkg/iac/scanners/azure/functions/null.go | 5 + pkg/iac/scanners/azure/functions/null_test.go | 12 + pkg/iac/scanners/azure/functions/or.go | 20 + pkg/iac/scanners/azure/functions/or_test.go | 44 + pkg/iac/scanners/azure/functions/pad.go | 32 + pkg/iac/scanners/azure/functions/pad_test.go | 61 + .../scanners/azure/functions/parameters.go | 1 + .../scanners/azure/functions/pick_zones.go | 23 + .../azure/functions/pick_zones_test.go | 14 + pkg/iac/scanners/azure/functions/range.go | 30 + .../scanners/azure/functions/range_test.go | 47 + pkg/iac/scanners/azure/functions/reference.go | 12 + .../azure/functions/reference_test.go | 12 + pkg/iac/scanners/azure/functions/replace.go | 26 + .../scanners/azure/functions/replace_test.go | 41 + pkg/iac/scanners/azure/functions/resource.go | 48 + .../scanners/azure/functions/resource_test.go | 12 + pkg/iac/scanners/azure/functions/scope.go | 106 + .../scanners/azure/functions/scope_test.go | 34 + pkg/iac/scanners/azure/functions/skip.go | 34 + pkg/iac/scanners/azure/functions/skip_test.go | 65 + pkg/iac/scanners/azure/functions/split.go | 36 + .../scanners/azure/functions/split_test.go | 38 + .../scanners/azure/functions/starts_with.go | 22 + .../azure/functions/starts_with_test.go | 41 + pkg/iac/scanners/azure/functions/string.go | 16 + .../scanners/azure/functions/string_test.go | 44 + pkg/iac/scanners/azure/functions/sub.go | 15 + pkg/iac/scanners/azure/functions/sub_test.go | 43 + pkg/iac/scanners/azure/functions/substring.go | 36 + .../azure/functions/substring_test.go | 49 + pkg/iac/scanners/azure/functions/take.go | 49 + pkg/iac/scanners/azure/functions/take_test.go | 63 + pkg/iac/scanners/azure/functions/trim.go | 16 + pkg/iac/scanners/azure/functions/trim_test.go | 71 + pkg/iac/scanners/azure/functions/true.go | 5 + pkg/iac/scanners/azure/functions/union.go | 60 + 
.../scanners/azure/functions/union_test.go | 110 + .../scanners/azure/functions/unique_string.go | 21 + .../azure/functions/unique_string_test.go | 38 + pkg/iac/scanners/azure/functions/uri.go | 29 + pkg/iac/scanners/azure/functions/uri_test.go | 48 + pkg/iac/scanners/azure/functions/utc_now.go | 47 + .../scanners/azure/functions/utc_now_test.go | 40 + pkg/iac/scanners/azure/resolver/resolver.go | 51 + .../scanners/azure/resolver/resolver_test.go | 101 + pkg/iac/scanners/azure/value.go | 357 + pkg/iac/scanners/azure/value_test.go | 13 + .../scanners/cloudformation/cftypes/types.go | 12 + .../scanners/cloudformation/parser/errors.go | 24 + .../cloudformation/parser/file_context.go | 61 + .../parser/file_context_test.go | 61 + .../scanners/cloudformation/parser/fn_and.go | 38 + .../cloudformation/parser/fn_and_test.go | 186 + .../cloudformation/parser/fn_base64.go | 19 + .../cloudformation/parser/fn_base64_test.go | 35 + .../cloudformation/parser/fn_builtin.go | 65 + .../cloudformation/parser/fn_builtin_test.go | 63 + .../cloudformation/parser/fn_condition.go | 21 + .../parser/fn_condition_test.go | 98 + .../cloudformation/parser/fn_equals.go | 21 + .../cloudformation/parser/fn_equals_test.go | 180 + .../cloudformation/parser/fn_find_in_map.go | 45 + .../parser/fn_find_in_map_test.go | 100 + .../cloudformation/parser/fn_get_attr.go | 46 + .../cloudformation/parser/fn_get_attr_test.go | 50 + .../scanners/cloudformation/parser/fn_if.go | 40 + .../cloudformation/parser/fn_if_test.go | 56 + .../scanners/cloudformation/parser/fn_join.go | 34 + .../cloudformation/parser/fn_join_test.go | 152 + .../cloudformation/parser/fn_length.go | 24 + .../cloudformation/parser/fn_length_test.go | 99 + .../scanners/cloudformation/parser/fn_not.go | 23 + .../cloudformation/parser/fn_not_test.go | 124 + .../scanners/cloudformation/parser/fn_or.go | 39 + .../cloudformation/parser/fn_or_test.go | 184 + .../scanners/cloudformation/parser/fn_ref.go | 54 + 
.../cloudformation/parser/fn_ref_test.go | 89 + .../cloudformation/parser/fn_select.go | 41 + .../cloudformation/parser/fn_select_test.go | 77 + .../cloudformation/parser/fn_split.go | 44 + .../cloudformation/parser/fn_split_test.go | 56 + .../scanners/cloudformation/parser/fn_sub.go | 71 + .../cloudformation/parser/fn_sub_test.go | 103 + .../cloudformation/parser/intrinsics.go | 101 + .../cloudformation/parser/intrinsics_test.go | 45 + .../cloudformation/parser/parameter.go | 129 + .../cloudformation/parser/parameters_test.go | 89 + .../scanners/cloudformation/parser/parser.go | 236 + .../cloudformation/parser/parser_test.go | 374 + .../cloudformation/parser/property.go | 428 ++ .../parser/property_conversion.go | 129 + .../cloudformation/parser/property_helpers.go | 267 + .../parser/property_helpers_test.go | 195 + .../parser/pseudo_parameters.go | 46 + .../parser/pseudo_parameters_test.go | 36 + .../cloudformation/parser/reference.go | 58 + .../cloudformation/parser/resource.go | 211 + .../cloudformation/parser/resource_test.go | 75 + .../scanners/cloudformation/parser/util.go | 139 + pkg/iac/scanners/cloudformation/scanner.go | 263 + .../scanners/cloudformation/scanner_test.go | 103 + .../cloudformation/test/cf_scanning_test.go | 48 + .../test/examples/bucket/bucket.yaml | 24 + .../examples/ignores/bucket_with_ignores.yaml | 24 + .../test/examples/roles/roles.yml | 51 + pkg/iac/scanners/dockerfile/parser/parser.go | 151 + .../scanners/dockerfile/parser/parser_test.go | 56 + pkg/iac/scanners/dockerfile/scanner.go | 182 + pkg/iac/scanners/dockerfile/scanner_test.go | 638 ++ pkg/iac/scanners/helm/options.go | 51 + pkg/iac/scanners/helm/parser/option.go | 52 + pkg/iac/scanners/helm/parser/parser.go | 322 + pkg/iac/scanners/helm/parser/parser_tar.go | 110 + pkg/iac/scanners/helm/parser/parser_test.go | 24 + .../my-chart-0.1.0.tgz | Bin 0 -> 419 bytes .../my-chart/Chart.yaml | 6 + .../my-chart/templates/pod.yaml | 21 + pkg/iac/scanners/helm/parser/vals.go | 114 + 
pkg/iac/scanners/helm/scanner.go | 219 + pkg/iac/scanners/helm/test/mysql/.helmignore | 21 + pkg/iac/scanners/helm/test/mysql/Chart.lock | 6 + pkg/iac/scanners/helm/test/mysql/Chart.yaml | 28 + pkg/iac/scanners/helm/test/mysql/README.md | 491 ++ .../helm/test/mysql/charts/common/.helmignore | 22 + .../helm/test/mysql/charts/common/Chart.yaml | 23 + .../helm/test/mysql/charts/common/README.md | 345 + .../charts/common/templates/_affinities.tpl | 102 + .../charts/common/templates/_capabilities.tpl | 128 + .../mysql/charts/common/templates/_errors.tpl | 23 + .../mysql/charts/common/templates/_images.tpl | 75 + .../charts/common/templates/_ingress.tpl | 68 + .../mysql/charts/common/templates/_labels.tpl | 18 + .../mysql/charts/common/templates/_names.tpl | 52 + .../charts/common/templates/_secrets.tpl | 131 + .../charts/common/templates/_storage.tpl | 23 + .../charts/common/templates/_tplvalues.tpl | 13 + .../mysql/charts/common/templates/_utils.tpl | 62 + .../charts/common/templates/_warnings.tpl | 14 + .../templates/validations/_cassandra.tpl | 72 + .../common/templates/validations/_mariadb.tpl | 103 + .../common/templates/validations/_mongodb.tpl | 108 + .../templates/validations/_postgresql.tpl | 129 + .../common/templates/validations/_redis.tpl | 76 + .../templates/validations/_validations.tpl | 46 + .../helm/test/mysql/charts/common/values.yaml | 5 + .../mysql/ci/values-production-with-rbac.yaml | 30 + .../helm/test/mysql/templates/NOTES.txt | 102 + .../helm/test/mysql/templates/_helpers.tpl | 192 + .../helm/test/mysql/templates/extra-list.yaml | 4 + .../test/mysql/templates/metrics-svc.yaml | 29 + .../test/mysql/templates/networkpolicy.yaml | 38 + .../mysql/templates/primary/configmap.yaml | 18 + .../primary/initialization-configmap.yaml | 14 + .../test/mysql/templates/primary/pdb.yaml | 25 + .../mysql/templates/primary/statefulset.yaml | 368 + .../mysql/templates/primary/svc-headless.yaml | 24 + .../test/mysql/templates/primary/svc.yaml | 41 + 
.../helm/test/mysql/templates/role.yaml | 21 + .../test/mysql/templates/rolebinding.yaml | 21 + .../mysql/templates/secondary/configmap.yaml | 18 + .../test/mysql/templates/secondary/pdb.yaml | 25 + .../templates/secondary/statefulset.yaml | 338 + .../templates/secondary/svc-headless.yaml | 26 + .../test/mysql/templates/secondary/svc.yaml | 43 + .../helm/test/mysql/templates/secrets.yaml | 21 + .../test/mysql/templates/serviceaccount.yaml | 22 + .../test/mysql/templates/servicemonitor.yaml | 42 + .../helm/test/mysql/values.schema.json | 178 + pkg/iac/scanners/helm/test/mysql/values.yaml | 1020 +++ pkg/iac/scanners/helm/test/option_test.go | 167 + pkg/iac/scanners/helm/test/parser_test.go | 199 + pkg/iac/scanners/helm/test/scanner_test.go | 265 + .../aws-cluster-autoscaler-bad.tar.gz | Bin 0 -> 4054 bytes .../mysql/templates/primary/configmap.yaml | 42 + .../mysql/templates/primary/statefulset.yaml | 147 + .../mysql/templates/primary/svc-headless.yaml | 25 + .../expected/mysql/templates/primary/svc.yaml | 25 + .../expected/mysql/templates/secrets.yaml | 15 + .../mysql/templates/serviceaccount.yaml | 14 + .../testchart/templates/deployment.yaml | 46 + .../options/testchart/templates/service.yaml | 21 + .../testchart/templates/serviceaccount.yaml | 11 + .../with-api-version/templates/pdb.yaml | 17 + .../testchart/templates/deployment.yaml | 46 + .../expected/testchart/templates/service.yaml | 21 + .../testchart/templates/serviceaccount.yaml | 11 + .../with-tarred-dep/templates/deployment.yaml | 78 + .../with-tarred-dep/templates/ingress.yaml | 26 + .../with-tarred-dep/templates/service.yaml | 24 + .../helm/test/testdata/mysql-8.8.26.tar | Bin 0 -> 284672 bytes .../helm/test/testdata/mysql-8.8.26.tar.gz | Bin 0 -> 40449 bytes .../helm/test/testdata/mysql-8.8.26.tgz | Bin 0 -> 40449 bytes pkg/iac/scanners/helm/test/testdata/nope.tgz | Bin 0 -> 114 bytes .../helm/test/testdata/numberName/Chart.yaml | 3 + .../test/testdata/templated-name/Chart.yaml | 7 + 
.../helm/test/testdata/testchart/.helmignore | 23 + .../helm/test/testdata/testchart/Chart.yaml | 24 + .../testdata/testchart/templates/NOTES.txt | 22 + .../testdata/testchart/templates/_helpers.tpl | 62 + .../testchart/templates/deployment.yaml | 61 + .../testdata/testchart/templates/hpa.yaml | 28 + .../testdata/testchart/templates/ingress.yaml | 61 + .../testdata/testchart/templates/service.yaml | 15 + .../testchart/templates/serviceaccount.yaml | 12 + .../templates/tests/test-connection.yaml | 15 + .../helm/test/testdata/testchart/values.yaml | 86 + .../testdata/with-api-version/.helmignore | 23 + .../test/testdata/with-api-version/Chart.yaml | 24 + .../with-api-version/templates/_helpers.tpl | 62 + .../with-api-version/templates/pdb.yaml | 11 + .../testdata/with-api-version/values.yaml | 0 .../test/testdata/with-tarred-dep/.helmignore | 22 + .../test/testdata/with-tarred-dep/Chart.yaml | 14 + .../test/testdata/with-tarred-dep/LICENSE | 201 + .../with-tarred-dep/charts/common-1.16.1.tgz | Bin 0 -> 14613 bytes .../testdata/with-tarred-dep/renovate.json | 5 + .../with-tarred-dep/templates/.gitkeep | 0 .../with-tarred-dep/templates/deployment.yaml | 62 + .../with-tarred-dep/templates/ingress.yaml | 36 + .../templates/secrets-crdb-ca.yaml | 17 + .../templates/secrets-dbconn.yaml | 17 + .../with-tarred-dep/templates/service.yaml | 17 + .../test/testdata/with-tarred-dep/values.yaml | 30 + pkg/iac/scanners/helm/test/values/values.yaml | 3 + pkg/iac/scanners/json/parser/parser.go | 89 + pkg/iac/scanners/json/parser/parser_test.go | 51 + pkg/iac/scanners/json/scanner.go | 170 + pkg/iac/scanners/json/scanner_test.go | 77 + .../scanners/kubernetes/parser/manifest.go | 33 + .../kubernetes/parser/manifest_node.go | 140 + pkg/iac/scanners/kubernetes/parser/parser.go | 137 + pkg/iac/scanners/kubernetes/scanner.go | 176 + pkg/iac/scanners/kubernetes/scanner_test.go | 736 ++ pkg/iac/scanners/scanner.go | 21 + .../scanners/terraform/executor/executor.go | 269 + 
.../terraform/executor/executor_test.go | 124 + pkg/iac/scanners/terraform/executor/option.go | 103 + pkg/iac/scanners/terraform/executor/pool.go | 299 + .../scanners/terraform/executor/statistics.go | 91 + pkg/iac/scanners/terraform/options.go | 211 + .../scanners/terraform/parser/evaluator.go | 508 ++ .../terraform/parser/evaluator_test.go | 94 + .../scanners/terraform/parser/funcs/cidr.go | 212 + .../terraform/parser/funcs/collection.go | 711 ++ .../terraform/parser/funcs/conversion.go | 223 + .../scanners/terraform/parser/funcs/crypto.go | 335 + .../terraform/parser/funcs/datetime.go | 71 + .../terraform/parser/funcs/defaults.go | 288 + .../terraform/parser/funcs/encoding.go | 254 + .../terraform/parser/funcs/filesystem.go | 467 ++ .../scanners/terraform/parser/funcs/marks.go | 44 + .../scanners/terraform/parser/funcs/number.go | 170 + .../terraform/parser/funcs/sensitive.go | 67 + .../scanners/terraform/parser/funcs/string.go | 54 + .../scanners/terraform/parser/functions.go | 123 + .../scanners/terraform/parser/load_blocks.go | 130 + .../terraform/parser/load_blocks_test.go | 13 + .../scanners/terraform/parser/load_module.go | 183 + .../terraform/parser/load_module_metadata.go | 33 + .../scanners/terraform/parser/load_vars.go | 83 + .../terraform/parser/load_vars_test.go | 46 + .../terraform/parser/module_retrieval.go | 33 + pkg/iac/scanners/terraform/parser/option.go | 67 + pkg/iac/scanners/terraform/parser/parser.go | 349 + .../parser/parser_integration_test.go | 51 + .../scanners/terraform/parser/parser_test.go | 1026 +++ .../terraform/parser/resolvers/cache.go | 62 + .../terraform/parser/resolvers/local.go | 26 + .../terraform/parser/resolvers/options.go | 28 + .../terraform/parser/resolvers/registry.go | 165 + .../terraform/parser/resolvers/remote.go | 92 + .../terraform/parser/resolvers/writable.go | 36 + .../parser/resolvers/writable_windows.go | 24 + pkg/iac/scanners/terraform/parser/sort.go | 58 + .../parser/testdata/tfvars/terraform.tfvars | 1 + 
.../testdata/tfvars/terraform.tfvars.json | 10 + pkg/iac/scanners/terraform/scanner.go | 379 + .../terraform/scanner_integration_test.go | 132 + pkg/iac/scanners/terraform/scanner_test.go | 1360 ++++ .../scanners/terraformplan/parser/option.go | 17 + .../scanners/terraformplan/parser/parser.go | 219 + .../terraformplan/parser/plan_file.go | 67 + pkg/iac/scanners/terraformplan/scanner.go | 162 + .../scanners/terraformplan/scanner_test.go | 120 + .../terraformplan/test/parser_test.go | 21 + .../terraformplan/test/scanner_test.go | 39 + .../terraformplan/test/testdata/plan.json | 1 + pkg/iac/scanners/toml/parser/parser.go | 89 + pkg/iac/scanners/toml/parser/parser_test.go | 55 + pkg/iac/scanners/toml/scanner.go | 164 + pkg/iac/scanners/toml/scanner_test.go | 82 + pkg/iac/scanners/universal/scanner.go | 63 + pkg/iac/scanners/yaml/parser/parser.go | 109 + pkg/iac/scanners/yaml/parser/parser_test.go | 150 + pkg/iac/scanners/yaml/scanner.go | 165 + pkg/iac/scanners/yaml/scanner_test.go | 85 + pkg/iac/types/rule.go | 18 + pkg/misconf/scanner.go | 20 +- test/testutil/util.go | 113 + 789 files changed, 75986 insertions(+), 84 deletions(-) create mode 100644 integration/cloud/aws/000000000000/eu-central-1/data.json create mode 100644 internal/adapters/arm/adapt.go create mode 100644 internal/adapters/arm/appservice/adapt.go create mode 100644 internal/adapters/arm/authorization/adapt.go create mode 100644 internal/adapters/arm/compute/adapt.go create mode 100644 internal/adapters/arm/compute/adapt_test.go create mode 100644 internal/adapters/arm/container/adapt.go create mode 100644 internal/adapters/arm/database/adapt.go create mode 100644 internal/adapters/arm/database/firewall.go create mode 100644 internal/adapters/arm/database/maria.go create mode 100644 internal/adapters/arm/database/mssql.go create mode 100644 internal/adapters/arm/database/postgresql.go create mode 100644 internal/adapters/arm/datafactory/adapt.go create mode 100644 
internal/adapters/arm/datalake/adapt.go create mode 100644 internal/adapters/arm/keyvault/adapt.go create mode 100644 internal/adapters/arm/monitor/adapt.go create mode 100644 internal/adapters/arm/network/adapt.go create mode 100644 internal/adapters/arm/securitycenter/adapt.go create mode 100644 internal/adapters/arm/storage/adapt.go create mode 100644 internal/adapters/arm/storage/adapt_test.go create mode 100644 internal/adapters/arm/synapse/adapt.go create mode 100644 internal/adapters/cloudformation/adapt.go create mode 100644 internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go create mode 100644 internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go create mode 100644 internal/adapters/cloudformation/aws/adapt.go create mode 100644 internal/adapters/cloudformation/aws/apigateway/apigateway.go create mode 100644 internal/adapters/cloudformation/aws/apigateway/stage.go create mode 100644 internal/adapters/cloudformation/aws/athena/athena.go create mode 100644 internal/adapters/cloudformation/aws/athena/workgroup.go create mode 100644 internal/adapters/cloudformation/aws/cloudfront/cloudfront.go create mode 100644 internal/adapters/cloudformation/aws/cloudfront/distribution.go create mode 100644 internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go create mode 100644 internal/adapters/cloudformation/aws/cloudtrail/trails.go create mode 100644 internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go create mode 100644 internal/adapters/cloudformation/aws/cloudwatch/log_group.go create mode 100644 internal/adapters/cloudformation/aws/codebuild/codebuild.go create mode 100644 internal/adapters/cloudformation/aws/codebuild/project.go create mode 100644 internal/adapters/cloudformation/aws/config/aggregator.go create mode 100644 internal/adapters/cloudformation/aws/config/config.go create mode 100644 internal/adapters/cloudformation/aws/documentdb/cluster.go create mode 100644 
internal/adapters/cloudformation/aws/documentdb/documentdb.go create mode 100644 internal/adapters/cloudformation/aws/dynamodb/cluster.go create mode 100644 internal/adapters/cloudformation/aws/dynamodb/dynamodb.go create mode 100644 internal/adapters/cloudformation/aws/ec2/ec2.go create mode 100644 internal/adapters/cloudformation/aws/ec2/instance.go create mode 100644 internal/adapters/cloudformation/aws/ec2/launch_configuration.go create mode 100644 internal/adapters/cloudformation/aws/ec2/launch_template.go create mode 100644 internal/adapters/cloudformation/aws/ec2/nacl.go create mode 100644 internal/adapters/cloudformation/aws/ec2/security_group.go create mode 100644 internal/adapters/cloudformation/aws/ec2/subnet.go create mode 100644 internal/adapters/cloudformation/aws/ec2/volume.go create mode 100644 internal/adapters/cloudformation/aws/ecr/ecr.go create mode 100644 internal/adapters/cloudformation/aws/ecr/repository.go create mode 100644 internal/adapters/cloudformation/aws/ecs/cluster.go create mode 100644 internal/adapters/cloudformation/aws/ecs/ecs.go create mode 100644 internal/adapters/cloudformation/aws/ecs/task_definition.go create mode 100644 internal/adapters/cloudformation/aws/efs/efs.go create mode 100644 internal/adapters/cloudformation/aws/efs/filesystem.go create mode 100644 internal/adapters/cloudformation/aws/eks/cluster.go create mode 100644 internal/adapters/cloudformation/aws/eks/eks.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/cluster.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/elasticache.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/replication_group.go create mode 100644 internal/adapters/cloudformation/aws/elasticache/security_group.go create mode 100644 internal/adapters/cloudformation/aws/elasticsearch/domain.go create mode 100644 internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go create mode 100644 
internal/adapters/cloudformation/aws/elb/elb.go create mode 100644 internal/adapters/cloudformation/aws/elb/loadbalancer.go create mode 100644 internal/adapters/cloudformation/aws/iam/iam.go create mode 100644 internal/adapters/cloudformation/aws/iam/policy.go create mode 100644 internal/adapters/cloudformation/aws/kinesis/kinesis.go create mode 100644 internal/adapters/cloudformation/aws/kinesis/stream.go create mode 100644 internal/adapters/cloudformation/aws/lambda/function.go create mode 100644 internal/adapters/cloudformation/aws/lambda/lambda.go create mode 100644 internal/adapters/cloudformation/aws/mq/broker.go create mode 100644 internal/adapters/cloudformation/aws/mq/mq.go create mode 100644 internal/adapters/cloudformation/aws/msk/cluster.go create mode 100644 internal/adapters/cloudformation/aws/msk/msk.go create mode 100644 internal/adapters/cloudformation/aws/neptune/cluster.go create mode 100644 internal/adapters/cloudformation/aws/neptune/neptune.go create mode 100644 internal/adapters/cloudformation/aws/rds/cluster.go create mode 100644 internal/adapters/cloudformation/aws/rds/instance.go create mode 100644 internal/adapters/cloudformation/aws/rds/parameter_groups.go create mode 100644 internal/adapters/cloudformation/aws/rds/rds.go create mode 100644 internal/adapters/cloudformation/aws/redshift/cluster.go create mode 100644 internal/adapters/cloudformation/aws/redshift/redshift.go create mode 100644 internal/adapters/cloudformation/aws/redshift/security_group.go create mode 100644 internal/adapters/cloudformation/aws/s3/bucket.go create mode 100644 internal/adapters/cloudformation/aws/s3/s3.go create mode 100644 internal/adapters/cloudformation/aws/sam/api.go create mode 100644 internal/adapters/cloudformation/aws/sam/function.go create mode 100644 internal/adapters/cloudformation/aws/sam/http_api.go create mode 100644 internal/adapters/cloudformation/aws/sam/sam.go create mode 100644 internal/adapters/cloudformation/aws/sam/state_machines.go 
create mode 100644 internal/adapters/cloudformation/aws/sam/tables.go create mode 100644 internal/adapters/cloudformation/aws/sns/sns.go create mode 100644 internal/adapters/cloudformation/aws/sns/topic.go create mode 100644 internal/adapters/cloudformation/aws/sqs/queue.go create mode 100644 internal/adapters/cloudformation/aws/sqs/sqs.go create mode 100644 internal/adapters/cloudformation/aws/ssm/secret.go create mode 100644 internal/adapters/cloudformation/aws/ssm/ssm.go create mode 100644 internal/adapters/cloudformation/aws/workspaces/workspace.go create mode 100644 internal/adapters/cloudformation/aws/workspaces/workspaces.go create mode 100644 internal/adapters/terraform/adapt.go create mode 100644 internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go create mode 100644 internal/adapters/terraform/aws/adapt.go create mode 100644 internal/adapters/terraform/aws/apigateway/adapt.go create mode 100644 internal/adapters/terraform/aws/apigateway/adapt_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv1.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv1_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv2.go create mode 100644 internal/adapters/terraform/aws/apigateway/apiv2_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv1.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv1_test.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv2.go create mode 100644 internal/adapters/terraform/aws/apigateway/namesv2_test.go create mode 100644 internal/adapters/terraform/aws/athena/adapt.go create mode 100644 internal/adapters/terraform/aws/athena/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudfront/adapt.go create mode 100644 internal/adapters/terraform/aws/cloudfront/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudtrail/adapt.go create mode 100644 
internal/adapters/terraform/aws/cloudtrail/adapt_test.go create mode 100644 internal/adapters/terraform/aws/cloudwatch/adapt.go create mode 100644 internal/adapters/terraform/aws/cloudwatch/adapt_test.go create mode 100644 internal/adapters/terraform/aws/codebuild/adapt.go create mode 100644 internal/adapters/terraform/aws/codebuild/adapt_test.go create mode 100644 internal/adapters/terraform/aws/config/adapt.go create mode 100644 internal/adapters/terraform/aws/config/adapt_test.go create mode 100644 internal/adapters/terraform/aws/documentdb/adapt.go create mode 100644 internal/adapters/terraform/aws/documentdb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/dynamodb/adapt.go create mode 100644 internal/adapters/terraform/aws/dynamodb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ec2/adapt.go create mode 100644 internal/adapters/terraform/aws/ec2/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ec2/autoscaling.go create mode 100644 internal/adapters/terraform/aws/ec2/autoscaling_test.go create mode 100644 internal/adapters/terraform/aws/ec2/subnet.go create mode 100644 internal/adapters/terraform/aws/ec2/subnet_test.go create mode 100644 internal/adapters/terraform/aws/ec2/volume.go create mode 100644 internal/adapters/terraform/aws/ec2/volume_test.go create mode 100644 internal/adapters/terraform/aws/ec2/vpc.go create mode 100644 internal/adapters/terraform/aws/ec2/vpc_test.go create mode 100644 internal/adapters/terraform/aws/ecr/adapt.go create mode 100644 internal/adapters/terraform/aws/ecr/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ecs/adapt.go create mode 100644 internal/adapters/terraform/aws/ecs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/efs/adapt.go create mode 100644 internal/adapters/terraform/aws/efs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/eks/adapt.go create mode 100644 internal/adapters/terraform/aws/eks/adapt_test.go create mode 
100644 internal/adapters/terraform/aws/elasticache/adapt.go create mode 100644 internal/adapters/terraform/aws/elasticache/adapt_test.go create mode 100644 internal/adapters/terraform/aws/elasticsearch/adapt.go create mode 100644 internal/adapters/terraform/aws/elasticsearch/adapt_test.go create mode 100644 internal/adapters/terraform/aws/elb/adapt.go create mode 100644 internal/adapters/terraform/aws/elb/adapt_test.go create mode 100644 internal/adapters/terraform/aws/emr/adapt.go create mode 100644 internal/adapters/terraform/aws/emr/adapt_test.go create mode 100644 internal/adapters/terraform/aws/iam/adapt.go create mode 100644 internal/adapters/terraform/aws/iam/adapt_test.go create mode 100644 internal/adapters/terraform/aws/iam/convert.go create mode 100644 internal/adapters/terraform/aws/iam/groups.go create mode 100644 internal/adapters/terraform/aws/iam/groups_test.go create mode 100644 internal/adapters/terraform/aws/iam/passwords.go create mode 100644 internal/adapters/terraform/aws/iam/passwords_test.go create mode 100644 internal/adapters/terraform/aws/iam/policies.go create mode 100644 internal/adapters/terraform/aws/iam/policies_test.go create mode 100644 internal/adapters/terraform/aws/iam/roles.go create mode 100644 internal/adapters/terraform/aws/iam/roles_test.go create mode 100644 internal/adapters/terraform/aws/iam/users.go create mode 100644 internal/adapters/terraform/aws/iam/users_test.go create mode 100644 internal/adapters/terraform/aws/kinesis/adapt.go create mode 100644 internal/adapters/terraform/aws/kinesis/adapt_test.go create mode 100644 internal/adapters/terraform/aws/kms/adapt.go create mode 100644 internal/adapters/terraform/aws/kms/adapt_test.go create mode 100644 internal/adapters/terraform/aws/lambda/adapt.go create mode 100644 internal/adapters/terraform/aws/lambda/adapt_test.go create mode 100644 internal/adapters/terraform/aws/mq/adapt.go create mode 100644 internal/adapters/terraform/aws/mq/adapt_test.go create mode 100644 
internal/adapters/terraform/aws/msk/adapt.go create mode 100644 internal/adapters/terraform/aws/msk/adapt_test.go create mode 100644 internal/adapters/terraform/aws/neptune/adapt.go create mode 100644 internal/adapters/terraform/aws/neptune/adapt_test.go create mode 100644 internal/adapters/terraform/aws/provider/adapt.go create mode 100644 internal/adapters/terraform/aws/provider/adapt_test.go create mode 100644 internal/adapters/terraform/aws/rds/adapt.go create mode 100644 internal/adapters/terraform/aws/rds/adapt_test.go create mode 100644 internal/adapters/terraform/aws/redshift/adapt.go create mode 100644 internal/adapters/terraform/aws/redshift/adapt_test.go create mode 100644 internal/adapters/terraform/aws/s3/adapt.go create mode 100644 internal/adapters/terraform/aws/s3/adapt_test.go create mode 100644 internal/adapters/terraform/aws/s3/bucket.go create mode 100644 internal/adapters/terraform/aws/s3/bucket_test.go create mode 100644 internal/adapters/terraform/aws/s3/policies.go create mode 100644 internal/adapters/terraform/aws/s3/public_access_block.go create mode 100644 internal/adapters/terraform/aws/sns/adapt.go create mode 100644 internal/adapters/terraform/aws/sns/adapt_test.go create mode 100644 internal/adapters/terraform/aws/sqs/adapt.go create mode 100644 internal/adapters/terraform/aws/sqs/adapt_test.go create mode 100644 internal/adapters/terraform/aws/ssm/adapt.go create mode 100644 internal/adapters/terraform/aws/ssm/adapt_test.go create mode 100644 internal/adapters/terraform/aws/workspaces/adapt.go create mode 100644 internal/adapters/terraform/aws/workspaces/adapt_test.go create mode 100644 internal/adapters/terraform/azure/adapt.go create mode 100644 internal/adapters/terraform/azure/appservice/adapt.go create mode 100644 internal/adapters/terraform/azure/appservice/adapt_test.go create mode 100644 internal/adapters/terraform/azure/authorization/adapt.go create mode 100644 internal/adapters/terraform/azure/authorization/adapt_test.go 
create mode 100644 internal/adapters/terraform/azure/compute/adapt.go create mode 100644 internal/adapters/terraform/azure/compute/adapt_test.go create mode 100644 internal/adapters/terraform/azure/container/adapt.go create mode 100644 internal/adapters/terraform/azure/container/adapt_test.go create mode 100644 internal/adapters/terraform/azure/database/adapt.go create mode 100644 internal/adapters/terraform/azure/database/adapt_test.go create mode 100644 internal/adapters/terraform/azure/datafactory/adapt.go create mode 100644 internal/adapters/terraform/azure/datafactory/adapt_test.go create mode 100644 internal/adapters/terraform/azure/datalake/adapt.go create mode 100644 internal/adapters/terraform/azure/datalake/adapt_test.go create mode 100644 internal/adapters/terraform/azure/keyvault/adapt.go create mode 100644 internal/adapters/terraform/azure/keyvault/adapt_test.go create mode 100644 internal/adapters/terraform/azure/monitor/adapt.go create mode 100644 internal/adapters/terraform/azure/monitor/adapt_test.go create mode 100644 internal/adapters/terraform/azure/network/adapt.go create mode 100644 internal/adapters/terraform/azure/network/adapt_test.go create mode 100644 internal/adapters/terraform/azure/securitycenter/adapt.go create mode 100644 internal/adapters/terraform/azure/securitycenter/adapt_test.go create mode 100644 internal/adapters/terraform/azure/storage/adapt.go create mode 100644 internal/adapters/terraform/azure/storage/adapt_test.go create mode 100644 internal/adapters/terraform/azure/synapse/adapt.go create mode 100644 internal/adapters/terraform/azure/synapse/adapt_test.go create mode 100644 internal/adapters/terraform/cloudstack/adapt.go create mode 100644 internal/adapters/terraform/cloudstack/compute/adapt.go create mode 100644 internal/adapters/terraform/cloudstack/compute/adapt_test.go create mode 100644 internal/adapters/terraform/digitalocean/adapt.go create mode 100644 internal/adapters/terraform/digitalocean/compute/adapt.go 
create mode 100644 internal/adapters/terraform/digitalocean/compute/adapt_test.go create mode 100644 internal/adapters/terraform/digitalocean/spaces/adapt.go create mode 100644 internal/adapters/terraform/digitalocean/spaces/adapt_test.go create mode 100644 internal/adapters/terraform/github/adapt.go create mode 100644 internal/adapters/terraform/github/branch_protections/adapt.go create mode 100644 internal/adapters/terraform/github/branch_protections/adapt_test.go create mode 100644 internal/adapters/terraform/github/repositories/adapt.go create mode 100644 internal/adapters/terraform/github/repositories/adapt_test.go create mode 100644 internal/adapters/terraform/github/secrets/adapt.go create mode 100644 internal/adapters/terraform/github/secrets/adapt_test.go create mode 100644 internal/adapters/terraform/google/adapt.go create mode 100644 internal/adapters/terraform/google/bigquery/adapt.go create mode 100644 internal/adapters/terraform/google/bigquery/adapt_test.go create mode 100644 internal/adapters/terraform/google/compute/adapt.go create mode 100644 internal/adapters/terraform/google/compute/adapt_test.go create mode 100644 internal/adapters/terraform/google/compute/disks.go create mode 100644 internal/adapters/terraform/google/compute/disks_test.go create mode 100644 internal/adapters/terraform/google/compute/instances.go create mode 100644 internal/adapters/terraform/google/compute/instances_test.go create mode 100644 internal/adapters/terraform/google/compute/metadata.go create mode 100644 internal/adapters/terraform/google/compute/metadata_test.go create mode 100644 internal/adapters/terraform/google/compute/networks.go create mode 100644 internal/adapters/terraform/google/compute/networks_test.go create mode 100644 internal/adapters/terraform/google/compute/ssl.go create mode 100644 internal/adapters/terraform/google/compute/ssl_test.go create mode 100644 internal/adapters/terraform/google/dns/adapt.go create mode 100644 
internal/adapters/terraform/google/dns/adapt_test.go create mode 100644 internal/adapters/terraform/google/gke/adapt.go create mode 100644 internal/adapters/terraform/google/gke/adapt_test.go create mode 100644 internal/adapters/terraform/google/iam/adapt.go create mode 100644 internal/adapters/terraform/google/iam/adapt_test.go create mode 100644 internal/adapters/terraform/google/iam/convert.go create mode 100644 internal/adapters/terraform/google/iam/folder_iam.go create mode 100644 internal/adapters/terraform/google/iam/folders.go create mode 100644 internal/adapters/terraform/google/iam/org_iam.go create mode 100644 internal/adapters/terraform/google/iam/project_iam.go create mode 100644 internal/adapters/terraform/google/iam/project_iam_test.go create mode 100644 internal/adapters/terraform/google/iam/projects.go create mode 100644 internal/adapters/terraform/google/iam/workload_identity_pool_providers.go create mode 100644 internal/adapters/terraform/google/kms/adapt.go create mode 100644 internal/adapters/terraform/google/kms/adapt_test.go create mode 100644 internal/adapters/terraform/google/sql/adapt.go create mode 100644 internal/adapters/terraform/google/sql/adapt_test.go create mode 100644 internal/adapters/terraform/google/storage/adapt.go create mode 100644 internal/adapters/terraform/google/storage/adapt_test.go create mode 100644 internal/adapters/terraform/google/storage/iam.go create mode 100644 internal/adapters/terraform/kubernetes/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/computing/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/computing/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/computing/instance.go create mode 100644 internal/adapters/terraform/nifcloud/computing/instance_test.go create mode 100644 internal/adapters/terraform/nifcloud/computing/security_group.go create mode 100644 internal/adapters/terraform/nifcloud/computing/security_group_test.go create mode 100644 
internal/adapters/terraform/nifcloud/dns/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/dns/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/dns/record.go create mode 100644 internal/adapters/terraform/nifcloud/dns/record_test.go create mode 100644 internal/adapters/terraform/nifcloud/nas/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/nas/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_instance.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_instance_test.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_security_group.go create mode 100644 internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/network/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go create mode 100644 internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/load_balancer.go create mode 100644 internal/adapters/terraform/nifcloud/network/load_balancer_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/router.go create mode 100644 internal/adapters/terraform/nifcloud/network/router_test.go create mode 100644 internal/adapters/terraform/nifcloud/network/vpn_gateway.go create mode 100644 internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go create mode 100644 internal/adapters/terraform/nifcloud/nifcloud.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_instance.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_instance_test.go create mode 100644 
internal/adapters/terraform/nifcloud/rdb/db_security_group.go create mode 100644 internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/adapt.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go create mode 100644 internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go create mode 100644 internal/adapters/terraform/openstack/adapt.go create mode 100644 internal/adapters/terraform/openstack/adapt_test.go create mode 100644 internal/adapters/terraform/openstack/networking.go create mode 100644 internal/adapters/terraform/openstack/networking_test.go create mode 100644 internal/adapters/terraform/oracle/adapt.go create mode 100644 internal/adapters/terraform/tftestutil/testutil.go create mode 100755 internal/rules/register.go create mode 100644 internal/rules/register_test.go create mode 100644 pkg/iac/detection/detect.go create mode 100644 pkg/iac/detection/detect_test.go create mode 100644 pkg/iac/detection/peek.go create mode 100644 pkg/iac/detection/testdata/big.file create mode 100644 pkg/iac/detection/testdata/small.file create mode 100644 pkg/iac/extrafs/extrafs.go create mode 100644 pkg/iac/rego/build.go create mode 100644 pkg/iac/rego/custom.go create mode 100644 pkg/iac/rego/embed.go create mode 100644 pkg/iac/rego/embed_test.go create mode 100644 pkg/iac/rego/exceptions.go create mode 100644 pkg/iac/rego/load.go create mode 100644 pkg/iac/rego/load_test.go create mode 100644 pkg/iac/rego/metadata.go create mode 100644 pkg/iac/rego/metadata_test.go create mode 100644 pkg/iac/rego/result.go create mode 100644 pkg/iac/rego/result_test.go create mode 100644 pkg/iac/rego/runtime.go create mode 100644 pkg/iac/rego/scanner.go create mode 100644 pkg/iac/rego/scanner_test.go create mode 100644 pkg/iac/rego/schemas/00_schema.go create 
mode 100644 pkg/iac/rego/schemas/builder.go create mode 100644 pkg/iac/rego/schemas/cloud.json create mode 100644 pkg/iac/rego/schemas/dockerfile.json create mode 100644 pkg/iac/rego/schemas/kubernetes.json create mode 100644 pkg/iac/rego/schemas/rbac.json create mode 100644 pkg/iac/rego/schemas/schemas.go create mode 100644 pkg/iac/rego/store.go create mode 100644 pkg/iac/rego/testdata/policies/._sysfile.rego create mode 100644 pkg/iac/rego/testdata/policies/invalid.rego create mode 100644 pkg/iac/rego/testdata/policies/valid.rego create mode 100644 pkg/iac/rules/providers.go create mode 100644 pkg/iac/rules/register.go create mode 100644 pkg/iac/rules/rules.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/kind.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/node.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_array.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go create mode 100644 
pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/reader.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go create mode 100644 pkg/iac/scanners/azure/arm/parser/parser.go create mode 100644 pkg/iac/scanners/azure/arm/parser/parser_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/template.go create mode 100644 pkg/iac/scanners/azure/arm/parser/template_test.go create mode 100644 pkg/iac/scanners/azure/arm/parser/testdata/example.json create mode 100644 pkg/iac/scanners/azure/arm/parser/testdata/postgres.json create mode 100644 pkg/iac/scanners/azure/arm/scanner.go create mode 100644 pkg/iac/scanners/azure/deployment.go create mode 100644 pkg/iac/scanners/azure/expressions/lex.go create mode 100644 pkg/iac/scanners/azure/expressions/node.go create mode 100644 pkg/iac/scanners/azure/expressions/token_walker.go create mode 100644 pkg/iac/scanners/azure/functions/add.go create mode 100644 pkg/iac/scanners/azure/functions/add_test.go create mode 100644 pkg/iac/scanners/azure/functions/and.go create mode 100644 pkg/iac/scanners/azure/functions/and_test.go create mode 100644 
pkg/iac/scanners/azure/functions/array.go create mode 100644 pkg/iac/scanners/azure/functions/array_test.go create mode 100644 pkg/iac/scanners/azure/functions/base64.go create mode 100644 pkg/iac/scanners/azure/functions/base64_test.go create mode 100644 pkg/iac/scanners/azure/functions/bool.go create mode 100644 pkg/iac/scanners/azure/functions/bool_test.go create mode 100644 pkg/iac/scanners/azure/functions/casing.go create mode 100644 pkg/iac/scanners/azure/functions/casing_test.go create mode 100644 pkg/iac/scanners/azure/functions/coalesce.go create mode 100644 pkg/iac/scanners/azure/functions/coalesce_test.go create mode 100644 pkg/iac/scanners/azure/functions/concat.go create mode 100644 pkg/iac/scanners/azure/functions/concat_test.go create mode 100644 pkg/iac/scanners/azure/functions/contains.go create mode 100644 pkg/iac/scanners/azure/functions/contains_test.go create mode 100644 pkg/iac/scanners/azure/functions/copy_index.go create mode 100644 pkg/iac/scanners/azure/functions/copy_index_test.go create mode 100644 pkg/iac/scanners/azure/functions/create_array.go create mode 100644 pkg/iac/scanners/azure/functions/create_array_test.go create mode 100644 pkg/iac/scanners/azure/functions/create_object.go create mode 100644 pkg/iac/scanners/azure/functions/create_object_test.go create mode 100644 pkg/iac/scanners/azure/functions/data_uri.go create mode 100644 pkg/iac/scanners/azure/functions/data_uri_test.go create mode 100644 pkg/iac/scanners/azure/functions/date_time_add.go create mode 100644 pkg/iac/scanners/azure/functions/date_time_epoch.go create mode 100644 pkg/iac/scanners/azure/functions/date_time_epoch_test.go create mode 100644 pkg/iac/scanners/azure/functions/datetime_add_test.go create mode 100644 pkg/iac/scanners/azure/functions/deployment.go create mode 100644 pkg/iac/scanners/azure/functions/div.go create mode 100644 pkg/iac/scanners/azure/functions/div_test.go create mode 100644 pkg/iac/scanners/azure/functions/empty.go create mode 100644 
pkg/iac/scanners/azure/functions/empty_test.go create mode 100644 pkg/iac/scanners/azure/functions/ends_with.go create mode 100644 pkg/iac/scanners/azure/functions/ends_with_test.go create mode 100644 pkg/iac/scanners/azure/functions/equals.go create mode 100644 pkg/iac/scanners/azure/functions/equals_test.go create mode 100644 pkg/iac/scanners/azure/functions/false.go create mode 100644 pkg/iac/scanners/azure/functions/first.go create mode 100644 pkg/iac/scanners/azure/functions/first_test.go create mode 100644 pkg/iac/scanners/azure/functions/float.go create mode 100644 pkg/iac/scanners/azure/functions/float_test.go create mode 100644 pkg/iac/scanners/azure/functions/format.go create mode 100644 pkg/iac/scanners/azure/functions/format_test.go create mode 100644 pkg/iac/scanners/azure/functions/functions.go create mode 100644 pkg/iac/scanners/azure/functions/greater.go create mode 100644 pkg/iac/scanners/azure/functions/greater_test.go create mode 100644 pkg/iac/scanners/azure/functions/guid.go create mode 100644 pkg/iac/scanners/azure/functions/guid_test.go create mode 100644 pkg/iac/scanners/azure/functions/if.go create mode 100644 pkg/iac/scanners/azure/functions/if_test.go create mode 100644 pkg/iac/scanners/azure/functions/index_of.go create mode 100644 pkg/iac/scanners/azure/functions/index_of_test.go create mode 100644 pkg/iac/scanners/azure/functions/int.go create mode 100644 pkg/iac/scanners/azure/functions/int_test.go create mode 100644 pkg/iac/scanners/azure/functions/intersection.go create mode 100644 pkg/iac/scanners/azure/functions/intersection_test.go create mode 100644 pkg/iac/scanners/azure/functions/items.go create mode 100644 pkg/iac/scanners/azure/functions/join.go create mode 100644 pkg/iac/scanners/azure/functions/join_test.go create mode 100644 pkg/iac/scanners/azure/functions/json.go create mode 100644 pkg/iac/scanners/azure/functions/json_test.go create mode 100644 pkg/iac/scanners/azure/functions/last.go create mode 100644 
pkg/iac/scanners/azure/functions/last_index_of.go create mode 100644 pkg/iac/scanners/azure/functions/last_index_of_test.go create mode 100644 pkg/iac/scanners/azure/functions/last_test.go create mode 100644 pkg/iac/scanners/azure/functions/length.go create mode 100644 pkg/iac/scanners/azure/functions/length_test.go create mode 100644 pkg/iac/scanners/azure/functions/less.go create mode 100644 pkg/iac/scanners/azure/functions/less_test.go create mode 100644 pkg/iac/scanners/azure/functions/max.go create mode 100644 pkg/iac/scanners/azure/functions/max_test.go create mode 100644 pkg/iac/scanners/azure/functions/min.go create mode 100644 pkg/iac/scanners/azure/functions/min_test.go create mode 100644 pkg/iac/scanners/azure/functions/mod.go create mode 100644 pkg/iac/scanners/azure/functions/mod_test.go create mode 100644 pkg/iac/scanners/azure/functions/mul.go create mode 100644 pkg/iac/scanners/azure/functions/mul_test.go create mode 100644 pkg/iac/scanners/azure/functions/not.go create mode 100644 pkg/iac/scanners/azure/functions/not_test.go create mode 100644 pkg/iac/scanners/azure/functions/null.go create mode 100644 pkg/iac/scanners/azure/functions/null_test.go create mode 100644 pkg/iac/scanners/azure/functions/or.go create mode 100644 pkg/iac/scanners/azure/functions/or_test.go create mode 100644 pkg/iac/scanners/azure/functions/pad.go create mode 100644 pkg/iac/scanners/azure/functions/pad_test.go create mode 100644 pkg/iac/scanners/azure/functions/parameters.go create mode 100644 pkg/iac/scanners/azure/functions/pick_zones.go create mode 100644 pkg/iac/scanners/azure/functions/pick_zones_test.go create mode 100644 pkg/iac/scanners/azure/functions/range.go create mode 100644 pkg/iac/scanners/azure/functions/range_test.go create mode 100644 pkg/iac/scanners/azure/functions/reference.go create mode 100644 pkg/iac/scanners/azure/functions/reference_test.go create mode 100644 pkg/iac/scanners/azure/functions/replace.go create mode 100644 
pkg/iac/scanners/azure/functions/replace_test.go create mode 100644 pkg/iac/scanners/azure/functions/resource.go create mode 100644 pkg/iac/scanners/azure/functions/resource_test.go create mode 100644 pkg/iac/scanners/azure/functions/scope.go create mode 100644 pkg/iac/scanners/azure/functions/scope_test.go create mode 100644 pkg/iac/scanners/azure/functions/skip.go create mode 100644 pkg/iac/scanners/azure/functions/skip_test.go create mode 100644 pkg/iac/scanners/azure/functions/split.go create mode 100644 pkg/iac/scanners/azure/functions/split_test.go create mode 100644 pkg/iac/scanners/azure/functions/starts_with.go create mode 100644 pkg/iac/scanners/azure/functions/starts_with_test.go create mode 100644 pkg/iac/scanners/azure/functions/string.go create mode 100644 pkg/iac/scanners/azure/functions/string_test.go create mode 100644 pkg/iac/scanners/azure/functions/sub.go create mode 100644 pkg/iac/scanners/azure/functions/sub_test.go create mode 100644 pkg/iac/scanners/azure/functions/substring.go create mode 100644 pkg/iac/scanners/azure/functions/substring_test.go create mode 100644 pkg/iac/scanners/azure/functions/take.go create mode 100644 pkg/iac/scanners/azure/functions/take_test.go create mode 100644 pkg/iac/scanners/azure/functions/trim.go create mode 100644 pkg/iac/scanners/azure/functions/trim_test.go create mode 100644 pkg/iac/scanners/azure/functions/true.go create mode 100644 pkg/iac/scanners/azure/functions/union.go create mode 100644 pkg/iac/scanners/azure/functions/union_test.go create mode 100644 pkg/iac/scanners/azure/functions/unique_string.go create mode 100644 pkg/iac/scanners/azure/functions/unique_string_test.go create mode 100644 pkg/iac/scanners/azure/functions/uri.go create mode 100644 pkg/iac/scanners/azure/functions/uri_test.go create mode 100644 pkg/iac/scanners/azure/functions/utc_now.go create mode 100644 pkg/iac/scanners/azure/functions/utc_now_test.go create mode 100644 pkg/iac/scanners/azure/resolver/resolver.go create mode 
100644 pkg/iac/scanners/azure/resolver/resolver_test.go create mode 100644 pkg/iac/scanners/azure/value.go create mode 100644 pkg/iac/scanners/azure/value_test.go create mode 100644 pkg/iac/scanners/cloudformation/cftypes/types.go create mode 100644 pkg/iac/scanners/cloudformation/parser/errors.go create mode 100644 pkg/iac/scanners/cloudformation/parser/file_context.go create mode 100644 pkg/iac/scanners/cloudformation/parser/file_context_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_and.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_and_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_base64.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_base64_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_builtin.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_condition.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_condition_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_equals.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_equals_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_get_attr.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_if.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_if_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_join.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_join_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_length.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_length_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_not.go 
create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_not_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_or.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_or_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_ref.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_ref_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_select.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_select_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_split.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_split_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_sub.go create mode 100644 pkg/iac/scanners/cloudformation/parser/fn_sub_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/intrinsics.go create mode 100644 pkg/iac/scanners/cloudformation/parser/intrinsics_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/parameter.go create mode 100644 pkg/iac/scanners/cloudformation/parser/parameters_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/parser.go create mode 100644 pkg/iac/scanners/cloudformation/parser/parser_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/property.go create mode 100644 pkg/iac/scanners/cloudformation/parser/property_conversion.go create mode 100644 pkg/iac/scanners/cloudformation/parser/property_helpers.go create mode 100644 pkg/iac/scanners/cloudformation/parser/property_helpers_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go create mode 100644 pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/reference.go create mode 100644 pkg/iac/scanners/cloudformation/parser/resource.go create mode 100644 pkg/iac/scanners/cloudformation/parser/resource_test.go create mode 100644 pkg/iac/scanners/cloudformation/parser/util.go create mode 100644 
pkg/iac/scanners/cloudformation/scanner.go create mode 100644 pkg/iac/scanners/cloudformation/scanner_test.go create mode 100644 pkg/iac/scanners/cloudformation/test/cf_scanning_test.go create mode 100644 pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml create mode 100644 pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml create mode 100644 pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml create mode 100644 pkg/iac/scanners/dockerfile/parser/parser.go create mode 100644 pkg/iac/scanners/dockerfile/parser/parser_test.go create mode 100644 pkg/iac/scanners/dockerfile/scanner.go create mode 100644 pkg/iac/scanners/dockerfile/scanner_test.go create mode 100644 pkg/iac/scanners/helm/options.go create mode 100644 pkg/iac/scanners/helm/parser/option.go create mode 100644 pkg/iac/scanners/helm/parser/parser.go create mode 100644 pkg/iac/scanners/helm/parser/parser_tar.go create mode 100644 pkg/iac/scanners/helm/parser/parser_test.go create mode 100644 pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart-0.1.0.tgz create mode 100644 pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml create mode 100644 pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml create mode 100644 pkg/iac/scanners/helm/parser/vals.go create mode 100644 pkg/iac/scanners/helm/scanner.go create mode 100644 pkg/iac/scanners/helm/test/mysql/.helmignore create mode 100644 pkg/iac/scanners/helm/test/mysql/Chart.lock create mode 100644 pkg/iac/scanners/helm/test/mysql/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/README.md create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/README.md create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl create 
mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml create mode 100644 
pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/role.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml create mode 100644 pkg/iac/scanners/helm/test/mysql/values.schema.json create mode 100644 pkg/iac/scanners/helm/test/mysql/values.yaml create mode 100644 pkg/iac/scanners/helm/test/option_test.go create mode 100644 pkg/iac/scanners/helm/test/parser_test.go create mode 100644 pkg/iac/scanners/helm/test/scanner_test.go create mode 100644 pkg/iac/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml create mode 
100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar create mode 100644 pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar.gz create mode 100644 pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tgz create mode 100644 pkg/iac/scanners/helm/test/testdata/nope.tgz create mode 100644 pkg/iac/scanners/helm/test/testdata/numberName/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/templated-name/Chart.yaml create mode 100644 
pkg/iac/scanners/helm/test/testdata/testchart/.helmignore create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/NOTES.txt create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/_helpers.tpl create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/deployment.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/hpa.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/ingress.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/testchart/values.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore create mode 100644 pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl create mode 100644 pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-api-version/values.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml create mode 100644 
pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml create mode 100644 pkg/iac/scanners/helm/test/values/values.yaml create mode 100644 pkg/iac/scanners/json/parser/parser.go create mode 100644 pkg/iac/scanners/json/parser/parser_test.go create mode 100644 pkg/iac/scanners/json/scanner.go create mode 100644 pkg/iac/scanners/json/scanner_test.go create mode 100644 pkg/iac/scanners/kubernetes/parser/manifest.go create mode 100644 pkg/iac/scanners/kubernetes/parser/manifest_node.go create mode 100644 pkg/iac/scanners/kubernetes/parser/parser.go create mode 100644 pkg/iac/scanners/kubernetes/scanner.go create mode 100644 pkg/iac/scanners/kubernetes/scanner_test.go create mode 100644 pkg/iac/scanners/scanner.go create mode 100644 pkg/iac/scanners/terraform/executor/executor.go create mode 100644 pkg/iac/scanners/terraform/executor/executor_test.go create mode 100644 pkg/iac/scanners/terraform/executor/option.go create mode 100644 pkg/iac/scanners/terraform/executor/pool.go create mode 100644 pkg/iac/scanners/terraform/executor/statistics.go create mode 100644 pkg/iac/scanners/terraform/options.go create mode 100644 pkg/iac/scanners/terraform/parser/evaluator.go create mode 100644 pkg/iac/scanners/terraform/parser/evaluator_test.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/cidr.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/collection.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/conversion.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/crypto.go create mode 100644 
pkg/iac/scanners/terraform/parser/funcs/datetime.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/defaults.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/encoding.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/filesystem.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/marks.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/number.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/sensitive.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/string.go create mode 100644 pkg/iac/scanners/terraform/parser/functions.go create mode 100644 pkg/iac/scanners/terraform/parser/load_blocks.go create mode 100644 pkg/iac/scanners/terraform/parser/load_blocks_test.go create mode 100644 pkg/iac/scanners/terraform/parser/load_module.go create mode 100644 pkg/iac/scanners/terraform/parser/load_module_metadata.go create mode 100644 pkg/iac/scanners/terraform/parser/load_vars.go create mode 100644 pkg/iac/scanners/terraform/parser/load_vars_test.go create mode 100644 pkg/iac/scanners/terraform/parser/module_retrieval.go create mode 100644 pkg/iac/scanners/terraform/parser/option.go create mode 100644 pkg/iac/scanners/terraform/parser/parser.go create mode 100644 pkg/iac/scanners/terraform/parser/parser_integration_test.go create mode 100644 pkg/iac/scanners/terraform/parser/parser_test.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/cache.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/local.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/options.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/registry.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/remote.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/writable.go create mode 100644 pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go create mode 100644 pkg/iac/scanners/terraform/parser/sort.go create mode 100644 
pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars create mode 100644 pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json create mode 100644 pkg/iac/scanners/terraform/scanner.go create mode 100644 pkg/iac/scanners/terraform/scanner_integration_test.go create mode 100644 pkg/iac/scanners/terraform/scanner_test.go create mode 100644 pkg/iac/scanners/terraformplan/parser/option.go create mode 100644 pkg/iac/scanners/terraformplan/parser/parser.go create mode 100644 pkg/iac/scanners/terraformplan/parser/plan_file.go create mode 100644 pkg/iac/scanners/terraformplan/scanner.go create mode 100644 pkg/iac/scanners/terraformplan/scanner_test.go create mode 100644 pkg/iac/scanners/terraformplan/test/parser_test.go create mode 100644 pkg/iac/scanners/terraformplan/test/scanner_test.go create mode 100644 pkg/iac/scanners/terraformplan/test/testdata/plan.json create mode 100644 pkg/iac/scanners/toml/parser/parser.go create mode 100644 pkg/iac/scanners/toml/parser/parser_test.go create mode 100644 pkg/iac/scanners/toml/scanner.go create mode 100644 pkg/iac/scanners/toml/scanner_test.go create mode 100644 pkg/iac/scanners/universal/scanner.go create mode 100644 pkg/iac/scanners/yaml/parser/parser.go create mode 100644 pkg/iac/scanners/yaml/parser/parser_test.go create mode 100644 pkg/iac/scanners/yaml/scanner.go create mode 100644 pkg/iac/scanners/yaml/scanner_test.go create mode 100644 pkg/iac/types/rule.go create mode 100644 test/testutil/util.go diff --git a/go.mod b/go.mod index df4ae1efd7f1..17c3168920d1 100644 --- a/go.mod +++ b/go.mod @@ -9,11 +9,13 @@ require ( github.com/BurntSushi/toml v1.3.2 github.com/CycloneDX/cyclonedx-go v0.7.2 github.com/GoogleCloudPlatform/docker-credential-gcr v2.0.5+incompatible + github.com/Masterminds/semver v1.5.0 github.com/Masterminds/sprig/v3 v3.2.3 github.com/NYTimes/gziphandler v1.1.1 github.com/alicebob/miniredis/v2 v2.30.4 + github.com/apparentlymart/go-cidr v1.1.0 
github.com/aquasecurity/bolt-fixtures v0.0.0-20200903104109-d34e7f983986 - github.com/aquasecurity/defsec v0.93.2-0.20231120220217-6818261529c8 + github.com/aquasecurity/defsec v0.93.2-0.20231121210951-9b3cc255faff github.com/aquasecurity/go-dep-parser v0.0.0-20231120074854-8322cc2242bf github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 @@ -23,9 +25,8 @@ require ( github.com/aquasecurity/table v1.8.0 github.com/aquasecurity/testdocker v0.0.0-20230111101738-e741bda259da github.com/aquasecurity/tml v0.6.1 - github.com/aquasecurity/trivy-aws v0.5.0 + github.com/aquasecurity/trivy-aws v0.6.0 github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d - github.com/aquasecurity/trivy-iac v0.7.0 github.com/aquasecurity/trivy-java-db v0.0.0-20230209231723-7cddb1406728 github.com/aquasecurity/trivy-kubernetes v0.5.9-0.20231115100645-921512b4d163 github.com/aquasecurity/trivy-policies v0.6.1-0.20231120231532-f6f2330bf842 @@ -37,6 +38,7 @@ require ( github.com/aws/aws-sdk-go-v2/service/ecr v1.21.0 github.com/aws/aws-sdk-go-v2/service/s3 v1.40.2 github.com/aws/aws-sdk-go-v2/service/sts v1.25.0 + github.com/aws/smithy-go v1.16.0 github.com/bmatcuk/doublestar/v4 v4.6.0 github.com/cenkalti/backoff v2.2.1+incompatible github.com/cheggaaa/pb/v3 v3.1.4 @@ -56,7 +58,9 @@ require ( github.com/google/wire v0.5.0 github.com/hashicorp/go-getter v1.7.2 github.com/hashicorp/go-multierror v1.1.1 + github.com/hashicorp/go-uuid v1.0.3 github.com/hashicorp/golang-lru/v2 v2.0.6 + github.com/hashicorp/hcl/v2 v2.18.1 github.com/in-toto/in-toto-golang v0.9.0 github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f github.com/knqyf263/go-deb-version v0.0.0-20230223133812-3ed183d23422 @@ -64,6 +68,9 @@ require ( github.com/knqyf263/go-rpmdb v0.0.0-20231008124120-ac49267ab4e1 
github.com/knqyf263/nested v0.0.1 github.com/kylelemons/godebug v1.1.0 + github.com/liamg/iamgo v0.0.9 + github.com/liamg/jfather v0.0.7 + github.com/liamg/memoryfs v1.6.0 github.com/magefile/mage v1.15.0 github.com/mailru/easyjson v0.7.7 github.com/masahiro331/go-disk v0.0.0-20220919035250-c8da316f91ac @@ -72,10 +79,12 @@ require ( github.com/masahiro331/go-mvn-version v0.0.0-20210429150710-d3157d602a08 github.com/masahiro331/go-vmdk-parser v0.0.0-20221225061455-612096e4bbbd github.com/masahiro331/go-xfs-filesystem v0.0.0-20230608043311-a335f4599b70 + github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/hashstructure/v2 v2.0.2 github.com/mitchellh/mapstructure v1.5.0 github.com/moby/buildkit v0.11.6 - github.com/open-policy-agent/opa v0.57.0 + github.com/olekukonko/tablewriter v0.0.5 + github.com/open-policy-agent/opa v0.58.0 github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/image-spec v1.1.0-rc5 github.com/openvex/go-vex v0.2.5 @@ -100,16 +109,20 @@ require ( github.com/twitchtv/twirp v8.1.2+incompatible github.com/xeipuuv/gojsonschema v1.2.0 github.com/xlab/treeprint v1.2.0 + github.com/zclconf/go-cty v1.13.0 + github.com/zclconf/go-cty-yaml v1.0.3 go.etcd.io/bbolt v1.3.7 go.uber.org/zap v1.26.0 + golang.org/x/crypto v0.15.0 golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 golang.org/x/mod v0.14.0 - golang.org/x/sync v0.3.0 + golang.org/x/sync v0.4.0 golang.org/x/term v0.14.0 golang.org/x/text v0.14.0 golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 google.golang.org/protobuf v1.31.0 gopkg.in/yaml.v3 v3.0.1 + helm.sh/helm/v3 v3.13.0 k8s.io/api v0.28.3 k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 modernc.org/sqlite v1.23.1 @@ -135,7 +148,6 @@ require ( github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 // indirect github.com/MakeNowJust/heredoc v1.0.0 // indirect github.com/Masterminds/goutils v1.1.1 // indirect - 
github.com/Masterminds/semver v1.5.0 // indirect github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Masterminds/squirrel v1.5.4 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect @@ -149,7 +161,6 @@ require ( github.com/alecthomas/chroma v0.10.0 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect - github.com/apparentlymart/go-cidr v1.1.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect @@ -199,7 +210,6 @@ require ( github.com/aws/aws-sdk-go-v2/service/sso v1.15.2 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3 // indirect github.com/aws/aws-sdk-go-v2/service/workspaces v1.31.1 // indirect - github.com/aws/smithy-go v1.16.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/briandowns/spinner v1.23.0 // indirect @@ -233,7 +243,7 @@ require ( github.com/emirpasic/gods v1.18.1 // indirect github.com/evanphx/json-patch v5.6.0+incompatible // indirect github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-errors/errors v1.4.2 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.4.1 // indirect @@ -270,10 +280,8 @@ require ( github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect - github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.6.0 // 
indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/hashicorp/hcl/v2 v2.18.1 // indirect github.com/huandu/xstrings v1.4.0 // indirect github.com/imdario/mergo v0.3.15 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -287,9 +295,6 @@ require ( github.com/klauspost/compress v1.16.6 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect - github.com/liamg/iamgo v0.0.9 // indirect - github.com/liamg/jfather v0.0.7 // indirect - github.com/liamg/memoryfs v1.6.0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect @@ -301,7 +306,6 @@ require ( github.com/microsoft/go-rustaudit v0.0.0-20220808201409-204dfee52032 // indirect github.com/miekg/dns v1.1.50 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect - github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect @@ -318,7 +322,6 @@ require ( github.com/morikuni/aec v1.0.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/oklog/ulid v1.3.1 // indirect - github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/opencontainers/runc v1.1.5 // indirect github.com/opencontainers/runtime-spec v1.1.0-rc.1 // indirect github.com/opencontainers/selinux v1.11.0 // indirect @@ -355,17 +358,14 @@ require ( github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect - github.com/zclconf/go-cty v1.13.0 // indirect - 
github.com/zclconf/go-cty-yaml v1.0.3 // indirect go.mongodb.org/mongo-driver v1.11.3 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.16.0 // indirect - go.opentelemetry.io/otel/metric v1.16.0 // indirect - go.opentelemetry.io/otel/sdk v1.16.0 // indirect - go.opentelemetry.io/otel/trace v1.16.0 // indirect + go.opentelemetry.io/otel v1.19.0 // indirect + go.opentelemetry.io/otel/metric v1.19.0 // indirect + go.opentelemetry.io/otel/sdk v1.19.0 // indirect + go.opentelemetry.io/otel/trace v1.19.0 // indirect go.starlark.net v0.0.0-20230525235612-a134d8f9ddca // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.15.0 // indirect golang.org/x/net v0.18.0 // indirect golang.org/x/oauth2 v0.11.0 // indirect golang.org/x/sys v0.14.0 // indirect @@ -373,16 +373,15 @@ require ( golang.org/x/tools v0.13.0 // indirect google.golang.org/api v0.138.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect - google.golang.org/grpc v1.58.3 // indirect + google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect + google.golang.org/grpc v1.59.0 // indirect gopkg.in/cheggaaa/pb.v1 v1.0.28 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - helm.sh/helm/v3 v3.13.0 // indirect k8s.io/apiextensions-apiserver v0.28.2 // indirect k8s.io/apimachinery v0.28.3 // indirect k8s.io/apiserver v0.28.2 // indirect diff --git a/go.sum b/go.sum index 8f7d341932c4..03406b30210b 
100644 --- a/go.sum +++ b/go.sum @@ -322,8 +322,8 @@ github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/aquasecurity/bolt-fixtures v0.0.0-20200903104109-d34e7f983986 h1:2a30xLN2sUZcMXl50hg+PJCIDdJgIvIbVcKqLJ/ZrtM= github.com/aquasecurity/bolt-fixtures v0.0.0-20200903104109-d34e7f983986/go.mod h1:NT+jyeCzXk6vXR5MTkdn4z64TgGfE5HMLC8qfj5unl8= -github.com/aquasecurity/defsec v0.93.2-0.20231120220217-6818261529c8 h1:w/Sm2fVtb0Rv1bcLLwsW9j37mNUya8MwzKMcjG9OW/Q= -github.com/aquasecurity/defsec v0.93.2-0.20231120220217-6818261529c8/go.mod h1:J30VViSgmoW2Ic/6aqVJO2qvuADsmZ3MYuNxPcU6Vt0= +github.com/aquasecurity/defsec v0.93.2-0.20231121210951-9b3cc255faff h1:P9ISna6RaiMyoxDcROR4v68/OGnnrGC1AE60l/c6Y8M= +github.com/aquasecurity/defsec v0.93.2-0.20231121210951-9b3cc255faff/go.mod h1:djPPxDAf6seSulvNiZn7jelIddA9wdWRvfWarso3U3c= github.com/aquasecurity/go-dep-parser v0.0.0-20231120074854-8322cc2242bf h1:kweQrNMfarPfjZGI1537GtuujhpzhsuT/MvmW2FwaBE= github.com/aquasecurity/go-dep-parser v0.0.0-20231120074854-8322cc2242bf/go.mod h1:7+xrs6AWD5+onpmX8f7qIkAhUgkPP0mhUdBjxJBcfas= github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce h1:QgBRgJvtEOBtUXilDb1MLi1p1MWoyFDXAu5DEUl5nwM= @@ -345,12 +345,10 @@ github.com/aquasecurity/testdocker v0.0.0-20230111101738-e741bda259da h1:pj/adfN github.com/aquasecurity/testdocker v0.0.0-20230111101738-e741bda259da/go.mod h1:852lbQLpK2nCwlR4ZLYIccxYCfoQao6q9Nl6tjz54v8= github.com/aquasecurity/tml v0.6.1 h1:y2ZlGSfrhnn7t4ZJ/0rotuH+v5Jgv6BDDO5jB6A9gwo= github.com/aquasecurity/tml v0.6.1/go.mod h1:OnYMWY5lvI9ejU7yH9LCberWaaTBW7hBFsITiIMY2yY= -github.com/aquasecurity/trivy-aws v0.5.0 h1:6RJrw+QHeVn2MH7bI7bsVIiqRyhDCPvdEqkNn54Ui4I= -github.com/aquasecurity/trivy-aws v0.5.0/go.mod h1:dPx0xRElmFrVXBxeYqEAl5NejJ2kHb51ybFPzBMxWow= 
+github.com/aquasecurity/trivy-aws v0.6.0 h1:+ggTdikr7ON09r/MlR2YDCSRjVFn04SlhQmL2+59F10= +github.com/aquasecurity/trivy-aws v0.6.0/go.mod h1:C7N/OpmPZIWO6G26djauF8rOFRb8oVUkdKt4eP796Rs= github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d h1:fjI9mkoTUAkbGqpzt9nJsO24RAdfG+ZSiLFj0G2jO8c= github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d/go.mod h1:cj9/QmD9N3OZnKQMp+/DvdV+ym3HyIkd4e+F0ZM3ZGs= -github.com/aquasecurity/trivy-iac v0.7.0 h1:L2/mqQJD1iwY4xOr1un5Prg51epYBQgM34JVZtkp4Gg= -github.com/aquasecurity/trivy-iac v0.7.0/go.mod h1:GG9Y2YylH3e16PoJ0RUZ+C0Xw93Gic/5fwdkKjKwwqU= github.com/aquasecurity/trivy-java-db v0.0.0-20230209231723-7cddb1406728 h1:0eS+V7SXHgqoT99tV1mtMW6HL4HdoB9qGLMCb1fZp8A= github.com/aquasecurity/trivy-java-db v0.0.0-20230209231723-7cddb1406728/go.mod h1:Ldya37FLi0e/5Cjq2T5Bty7cFkzUDwTcPeQua+2M8i8= github.com/aquasecurity/trivy-kubernetes v0.5.9-0.20231115100645-921512b4d163 h1:6TsI0lQN7H/d3pM5vK1/taYbWMgnNYEOk+V2ydBdg0s= @@ -803,8 +801,8 @@ github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0X github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= +github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod 
h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -961,8 +959,8 @@ github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE= -github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= +github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= +github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -1120,8 +1118,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0 h1:BZHcxBETFHIdVyhyEfOvn/RdU/QGdLI4y34qQGjGWO0= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= 
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -1437,8 +1435,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= -github.com/open-policy-agent/opa v0.57.0 h1:DftxYfOEHOheXvO2Q6HCIM2ZVdKrvnF4cZlU9C64MIQ= -github.com/open-policy-agent/opa v0.57.0/go.mod h1:3FY6GNSbUqOhjCdvTXCBJ2rNuh66p/XrIc2owr/hSwo= +github.com/open-policy-agent/opa v0.58.0 h1:S5qvevW8JoFizU7Hp66R/Y1SOXol0aCdFYVkzIqIpUo= +github.com/open-policy-agent/opa v0.58.0/go.mod h1:EGWBwvmyt50YURNvL8X4W5hXdlKeNhAHn3QXsetmYcc= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -1774,25 +1772,23 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM= 
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0/go.mod h1:XiYsayHc36K3EByOO6nbAXnAWbrUxdjUROCEeeROOH8= -go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s= -go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 h1:t4ZwRPU+emrcvM2e9DHd0Fsf0JTPVcbfa/BhTDF03d0= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0/go.mod h1:vLarbg68dH2Wa77g71zmKQqlQ8+8Rq3GRG31uc0WcWI= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 h1:cbsD4cUcviQGXdw8+bo5x2wazq10SKz8hEbtCRPcU78= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0/go.mod h1:JgXSGah17croqhJfhByOLVY719k1emAXC8MVhCIJlRs= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 h1:TVQp/bboR4mhZSav+MdgXB8FaRho1RC8UwVn3T0vjVc= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0/go.mod h1:I33vtIe0sR96wfrUcilIzLoA3mLHhRmz9S9Te0S3gDo= -go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= -go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4= -go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= -go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4= -go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= -go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0 h1:x8Z78aZx8cOF0+Kkazoc7lwUNMGy0LrzEMxTm4BbTxg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0/go.mod h1:62CPTSry9QZtOaSsE3tOzhx6LzDhHnXJ6xHeMNNiM6Q= +go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= +go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= 
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0 h1:3d+S281UTjM+AbF31XSOYn1qXn3BgIdWl8HNEpx08Jk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0/go.mod h1:0+KuTDyKL4gjKCF75pHOX4wuzYDUZYfAQdSu43o+Z2I= +go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= +go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= +go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= +go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= +go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= +go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.opentelemetry.io/proto/otlp v0.19.0 h1:IVN6GR+mhC4s5yfcTbmzHYODqvWAp3ZedA2SJPI1Nnw= -go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.starlark.net v0.0.0-20230525235612-a134d8f9ddca h1:VdD38733bfYv5tUZwEIskMM93VanwNIi5bIKnDrJdEY= go.starlark.net v0.0.0-20230525235612-a134d8f9ddca/go.mod h1:jxU+3+j+71eXOW14274+SmmuW82qJzl6iZSeqEtTGds= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= @@ -1992,8 +1988,8 @@ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2115,7 +2111,6 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220906165534-d0df966e6959/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2400,12 +2395,12 @@ google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqw google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71/go.mod 
h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= -google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 h1:L6iMMGrtzgHsWofoFcihmDEMYeDR9KN/ThbPWGrh++g= -google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5/go.mod h1:oH/ZOT02u4kWEp7oYBGYFFkCdKS/uYR9Z7+0/xuuFp8= -google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 h1:nIgk/EEq3/YlnmVVXVnm14rC2oxgs1o0ong4sD/rd44= -google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5/go.mod h1:5DZzOUPCLYL3mNkQ0ms0F3EuUNZ7py1Bqeq6sxzI7/Q= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 h1:wukfNtZmZUurLN/atp2hiIeTKn7QJWIQdHzqmsOnAOk= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= +google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d h1:VBu5YqKPv6XiJ199exd8Br+Aetz+o08F+PLMnwJQHAY= +google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d/go.mod h1:yZTlhN0tQnXo3h00fuXNCxJdLdIdnVFVBaRJ5LWBbw4= +google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d h1:DoPTO70H+bcDXcd39vOqb2viZxgqeBeSGtZ55yZU4/Q= +google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d/go.mod h1:KjSP20unUpOx5kyQUFa7k4OJg0qeJ7DEZflGDu2p6Bk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d h1:uvYuEyMHKNt+lT4K3bN6fGswmK8qSvcreM3BwjDh+y4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -2445,8 +2440,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.49.0/go.mod 
h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.58.3 h1:BjnpXut1btbtgN/6sp+brB2Kbm2LjNXnidYujAVbSoQ= -google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= +google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= +google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/integration/cloud/aws/000000000000/eu-central-1/data.json b/integration/cloud/aws/000000000000/eu-central-1/data.json new file mode 100644 index 000000000000..eb6c8a6500de --- /dev/null +++ b/integration/cloud/aws/000000000000/eu-central-1/data.json @@ -0,0 +1 @@ 
+{"schema_version":2,"state":{"AWS":{"Meta":{"TFProviders":null},"AccessAnalyzer":{"Analyzers":null},"APIGateway":{"V1":{"APIs":null,"DomainNames":null},"V2":{"APIs":null,"DomainNames":null}},"Athena":{"Databases":null,"Workgroups":null},"Cloudfront":{"Distributions":null},"CloudTrail":{"Trails":null},"CloudWatch":{"LogGroups":null,"Alarms":null},"CodeBuild":{"Projects":null},"Config":{"ConfigurationAggregrator":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"SourceAllRegions":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false}}},"DocumentDB":{"Clusters":null},"DynamoDB":{"DAXClusters":null,"Tables":null},"EC2":{"Instances":null,"LaunchConfigurations":null,"LaunchTemplates":null,"VPCs":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"ID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"value":"vpc-ff423fb8"},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000
:vpc/vpc-ff423fb8","unresolvable":false},"value":true},"SecurityGroups":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":true},"Description":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"default VPC security 
group"},"IngressRules":null,"EgressRules":null,"VPCID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"vpc-ff423fb8"}}],"FlowLogsEnabled":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"value":false}}],"SecurityGroups":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":true},"Description":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"default VPC security 
group"},"IngressRules":null,"EgressRules":null,"VPCID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"vpc-ff423fb8"}}],"NetworkACLs":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Rules":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"egress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"allow"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:
ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"egress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"deny"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36
d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"ingress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"allow"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:ne
twork-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"ingress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"deny"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value
":"0.0.0.0/0"}]}],"IsDefaultRule":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":false}}],"Subnets":null,"Volumes":null},"ECR":{"Repositories":null},"ECS":{"Clusters":null,"TaskDefinitions":null},"EFS":{"FileSystems":null},"EKS":{"Clusters":null},"ElastiCache":{"Clusters":null,"ReplicationGroups":null,"SecurityGroups":null},"Elasticsearch":{"Domains":null},"ELB":{"LoadBalancers":null},"EMR":{"Clusters":null,"SecurityConfiguration":null},"IAM":{"PasswordPolicy":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"ReusePreventionCount":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0},"RequireLowercase":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireUppercase":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireNumbers":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireSymbols":{"metadata":{"default":false,"expli
cit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"MaxAgeDays":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0},"MinimumLength":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0}},"Policies":null,"Groups":null,"Users":null,"Roles":null,"ServerCertificates":null},"Kinesis":{"Streams":null},"KMS":{"Keys":null},"Lambda":{"Functions":null},"MQ":{"Brokers":null},"MSK":{"Clusters":null},"Neptune":{"Clusters":null},"RDS":{"Instances":null,"Clusters":null,"Classic":{"DBSecurityGroups":null},"Snapshots":null,"ParameterGroups":null},"Redshift":{"Clusters":null,"ReservedNodes":null,"ClusterParameters":null,"SecurityGroups":null},"SAM":{"APIs":null,"Applications":null,"Functions":null,"HttpAPIs":null,"SimpleTables":null,"StateMachines":null},"S3":{"Buckets":null},"SNS":{"Topics":null},"SQS":{"Queues":null},"SSM":{"Secrets":null},"WorkSpaces":{"WorkSpaces":null}},"Azure":{"AppService":{"Services":null,"FunctionApps":null},"Authorization":{"RoleDefinitions":null},"Compute":{"LinuxVirtualMachines":null,"WindowsVirtualMachines":null,"ManagedDisks":null},"Container":{"KubernetesClusters":null},"Database":{"MSSQLServers":null,"MariaDBServers":null,"MySQLServers":null,"PostgreSQLServers":null},"DataFactory":{"DataFactories":null},"DataLake":{"Stores":null},"KeyVault":{"Vaults":null},"Monitor":{"LogProfiles":null},"Network":{"SecurityGroups":null,"NetworkWatcherFlowLogs":null},"SecurityCenter":{"Contacts":null,"Subscriptions":null},"Storage":{"Accounts":null},"Synapse":{"Workspaces":null}},"CloudStack":{"Compute"
:{"Instances":null}},"DigitalOcean":{"Compute":{"Firewalls":null,"LoadBalancers":null,"Droplets":null,"KubernetesClusters":null},"Spaces":{"Buckets":null}},"GitHub":{"Repositories":null,"EnvironmentSecrets":null,"BranchProtections":null},"Google":{"BigQuery":{"Datasets":null},"Compute":{"Disks":null,"Networks":null,"SSLPolicies":null,"ProjectMetadata":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"EnableOSLogin":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false}},"Instances":null},"DNS":{"ManagedZones":null},"GKE":{"Clusters":null},"KMS":{"KeyRings":null},"IAM":{"Organizations":null,"WorkloadIdentityPoolProviders":null},"SQL":{"Instances":null},"Storage":{"Buckets":null}},"Kubernetes":{"NetworkPolicies":null},"OpenStack":{"Compute":{"Instances":null,"Firewall":{"AllowRules":null,"DenyRules":null}},"Networking":{"SecurityGroups":null}},"Oracle":{"Compute":{"AddressReservations":null}},"Nifcloud":{"Computing":{"SecurityGroups":null,"Instances":null},"DNS":{"Records":null},"NAS":{"NASSecurityGroups":null,"NASInstances":null},"Network":{"ElasticLoadBalancers":null,"LoadBalancers":null,"Routers":null,"VpnGateways":null},"RDB":{"DBSecurityGroups":null,"DBInstances":null},"SSLCertificate":{"ServerCertificates":null}}},"service_metadata":{"accessanalyzer":{"name":"accessanalyzer","updated":"2023-12-01T16:34:23.965109+07:00"},"api-gateway":{"name":"api-gateway","updated":"2023-12-01T16:34:23.965129+07:00"},"athena":{"name":"athena","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudfront":{"name":"cloudfront","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudtrail":{"name":"cloudtrail","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudwat
ch":{"name":"cloudwatch","updated":"2023-12-01T16:34:23.96513+07:00"},"codebuild":{"name":"codebuild","updated":"2023-12-01T16:34:23.96513+07:00"},"documentdb":{"name":"documentdb","updated":"2023-12-01T16:34:23.96513+07:00"},"dynamodb":{"name":"dynamodb","updated":"2023-12-01T16:34:23.96513+07:00"},"ec2":{"name":"ec2","updated":"2023-12-01T16:34:23.965154+07:00"},"ecr":{"name":"ecr","updated":"2023-12-01T16:34:23.965154+07:00"},"ecs":{"name":"ecs","updated":"2023-12-01T16:34:23.965154+07:00"},"efs":{"name":"efs","updated":"2023-12-01T16:34:23.965154+07:00"},"eks":{"name":"eks","updated":"2023-12-01T16:34:23.965171+07:00"},"elasticache":{"name":"elasticache","updated":"2023-12-01T16:34:23.965171+07:00"},"elasticsearch":{"name":"elasticsearch","updated":"2023-12-01T16:34:23.965171+07:00"},"elb":{"name":"elb","updated":"2023-12-01T16:34:23.965172+07:00"},"emr":{"name":"emr","updated":"2023-12-01T16:34:23.965172+07:00"},"iam":{"name":"iam","updated":"2023-12-01T16:34:23.965172+07:00"},"kinesis":{"name":"kinesis","updated":"2023-12-01T16:34:23.965181+07:00"},"kms":{"name":"kms","updated":"2023-12-01T16:34:23.965172+07:00"},"lambda":{"name":"lambda","updated":"2023-12-01T16:34:23.965172+07:00"},"mq":{"name":"mq","updated":"2023-12-01T16:34:23.965173+07:00"},"msk":{"name":"msk","updated":"2023-12-01T16:34:23.965173+07:00"},"neptune":{"name":"neptune","updated":"2023-12-01T16:34:23.965173+07:00"},"rds":{"name":"rds","updated":"2023-12-01T16:34:23.965173+07:00"},"redshift":{"name":"redshift","updated":"2023-12-01T16:34:23.96518+07:00"},"s3":{"name":"s3","updated":"2023-12-01T16:34:23.965181+07:00"},"sns":{"name":"sns","updated":"2023-12-01T16:34:23.96518+07:00"},"sqs":{"name":"sqs","updated":"2023-12-01T16:34:23.965181+07:00"},"ssm":{"name":"ssm","updated":"2023-12-01T16:34:23.965181+07:00"},"workspaces":{"name":"workspaces","updated":"2023-12-01T16:34:23.965181+07:00"}},"updated":"2023-12-01T16:34:23.965105+07:00"} diff --git a/internal/adapters/arm/adapt.go 
b/internal/adapters/arm/adapt.go new file mode 100644 index 000000000000..44f012146cd3 --- /dev/null +++ b/internal/adapters/arm/adapt.go @@ -0,0 +1,50 @@ +package arm + +import ( + "context" + + "github.com/aquasecurity/trivy/internal/adapters/arm/appservice" + "github.com/aquasecurity/trivy/internal/adapters/arm/authorization" + "github.com/aquasecurity/trivy/internal/adapters/arm/compute" + "github.com/aquasecurity/trivy/internal/adapters/arm/container" + "github.com/aquasecurity/trivy/internal/adapters/arm/database" + "github.com/aquasecurity/trivy/internal/adapters/arm/datafactory" + "github.com/aquasecurity/trivy/internal/adapters/arm/datalake" + "github.com/aquasecurity/trivy/internal/adapters/arm/keyvault" + "github.com/aquasecurity/trivy/internal/adapters/arm/monitor" + "github.com/aquasecurity/trivy/internal/adapters/arm/network" + "github.com/aquasecurity/trivy/internal/adapters/arm/securitycenter" + "github.com/aquasecurity/trivy/internal/adapters/arm/storage" + "github.com/aquasecurity/trivy/internal/adapters/arm/synapse" + + "github.com/aquasecurity/defsec/pkg/providers/azure" + "github.com/aquasecurity/defsec/pkg/state" + scanner "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +// Adapt ... 
+func Adapt(ctx context.Context, deployment scanner.Deployment) *state.State { + return &state.State{ + Azure: adaptAzure(deployment), + } +} + +func adaptAzure(deployment scanner.Deployment) azure.Azure { + + return azure.Azure{ + AppService: appservice.Adapt(deployment), + Authorization: authorization.Adapt(deployment), + Compute: compute.Adapt(deployment), + Container: container.Adapt(deployment), + Database: database.Adapt(deployment), + DataFactory: datafactory.Adapt(deployment), + DataLake: datalake.Adapt(deployment), + KeyVault: keyvault.Adapt(deployment), + Monitor: monitor.Adapt(deployment), + Network: network.Adapt(deployment), + SecurityCenter: securitycenter.Adapt(deployment), + Storage: storage.Adapt(deployment), + Synapse: synapse.Adapt(deployment), + } + +} diff --git a/internal/adapters/arm/appservice/adapt.go b/internal/adapters/arm/appservice/adapt.go new file mode 100644 index 000000000000..78922d30f281 --- /dev/null +++ b/internal/adapters/arm/appservice/adapt.go @@ -0,0 +1,58 @@ +package appservice + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/appservice" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) appservice.AppService { + return appservice.AppService{ + Services: adaptServices(deployment), + FunctionApps: adaptFunctionApps(deployment), + } +} + +func adaptFunctionApps(deployment azure.Deployment) []appservice.FunctionApp { + var functionApps []appservice.FunctionApp + + for _, resource := range deployment.GetResourcesByType("Microsoft.Web/sites") { + functionApps = append(functionApps, adaptFunctionApp(resource)) + } + return functionApps +} + +func adaptServices(deployment azure.Deployment) []appservice.Service { + var services []appservice.Service + for _, resource := range deployment.GetResourcesByType("Microsoft.Web/sites") { + services = append(services, adaptService(resource)) + } + return 
services +} + +func adaptFunctionApp(resource azure.Resource) appservice.FunctionApp { + return appservice.FunctionApp{ + Metadata: resource.Metadata, + HTTPSOnly: resource.Properties.GetMapValue("httpsOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + } +} + +func adaptService(resource azure.Resource) appservice.Service { + return appservice.Service{ + Metadata: resource.Metadata, + EnableClientCert: resource.Properties.GetMapValue("clientCertEnabled").AsBoolValue(false, resource.Properties.GetMetadata()), + Identity: struct{ Type defsecTypes.StringValue }{ + Type: resource.Properties.GetMapValue("identity").GetMapValue("type").AsStringValue("", resource.Properties.GetMetadata()), + }, + Authentication: struct{ Enabled defsecTypes.BoolValue }{ + Enabled: resource.Properties.GetMapValue("siteAuthSettings").GetMapValue("enabled").AsBoolValue(false, resource.Properties.GetMetadata()), + }, + Site: struct { + EnableHTTP2 defsecTypes.BoolValue + MinimumTLSVersion defsecTypes.StringValue + }{ + EnableHTTP2: resource.Properties.GetMapValue("httpsOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + MinimumTLSVersion: resource.Properties.GetMapValue("minTlsVersion").AsStringValue("", resource.Properties.GetMetadata()), + }, + } +} diff --git a/internal/adapters/arm/authorization/adapt.go b/internal/adapters/arm/authorization/adapt.go new file mode 100644 index 000000000000..aa5a2e80d642 --- /dev/null +++ b/internal/adapters/arm/authorization/adapt.go @@ -0,0 +1,38 @@ +package authorization + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/authorization" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) authorization.Authorization { + return authorization.Authorization{ + RoleDefinitions: adaptRoleDefinitions(deployment), + } +} + +func adaptRoleDefinitions(deployment azure.Deployment) (roleDefinitions []authorization.RoleDefinition) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.Authorization/roleDefinitions") { + roleDefinitions = append(roleDefinitions, adaptRoleDefinition(resource)) + } + return roleDefinitions +} + +func adaptRoleDefinition(resource azure.Resource) authorization.RoleDefinition { + + return authorization.RoleDefinition{ + Metadata: resource.Metadata, + Permissions: adaptPermissions(resource), + AssignableScopes: resource.Properties.GetMapValue("assignableScopes").AsStringValuesList(""), + } +} + +func adaptPermissions(resource azure.Resource) (permissions []authorization.Permission) { + for _, permission := range resource.Properties.GetMapValue("permissions").AsList() { + permissions = append(permissions, authorization.Permission{ + Metadata: resource.Metadata, + Actions: permission.GetMapValue("actions").AsStringValuesList(""), + }) + } + return permissions +} diff --git a/internal/adapters/arm/compute/adapt.go b/internal/adapters/arm/compute/adapt.go new file mode 100644 index 000000000000..bc072571c7f3 --- /dev/null +++ b/internal/adapters/arm/compute/adapt.go @@ -0,0 +1,85 @@ +package compute + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/compute" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) compute.Compute { + return compute.Compute{ + LinuxVirtualMachines: adaptLinuxVirtualMachines(deployment), + WindowsVirtualMachines: adaptWindowsVirtualMachines(deployment), + ManagedDisks: adaptManagedDisks(deployment), + } +} + +func adaptManagedDisks(deployment azure.Deployment) (managedDisks []compute.ManagedDisk) { + + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/disks") { + managedDisks = append(managedDisks, adaptManagedDisk(resource)) + } + + return managedDisks +} + +func adaptManagedDisk(resource azure.Resource) compute.ManagedDisk { + hasEncryption := resource.Properties.HasKey("encryption") + + return 
compute.ManagedDisk{ + Metadata: resource.Metadata, + Encryption: compute.Encryption{ + Metadata: resource.Metadata, + Enabled: defsecTypes.Bool(hasEncryption, resource.Metadata), + }, + } +} + +func adaptWindowsVirtualMachines(deployment azure.Deployment) (windowsVirtualMachines []compute.WindowsVirtualMachine) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/virtualMachines") { + if resource.Properties.GetMapValue("osProfile").GetMapValue("windowsConfiguration").AsMap() != nil { + windowsVirtualMachines = append(windowsVirtualMachines, adaptWindowsVirtualMachine(resource)) + } + } + + return windowsVirtualMachines +} + +func adaptWindowsVirtualMachine(resource azure.Resource) compute.WindowsVirtualMachine { + return compute.WindowsVirtualMachine{ + Metadata: resource.Metadata, + VirtualMachine: compute.VirtualMachine{ + Metadata: resource.Metadata, + CustomData: resource.Properties.GetMapValue("osProfile"). + GetMapValue("customData").AsStringValue("", resource.Metadata), + }, + } +} + +func adaptLinuxVirtualMachines(deployment azure.Deployment) (linuxVirtualMachines []compute.LinuxVirtualMachine) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Compute/virtualMachines") { + if resource.Properties.GetMapValue("osProfile").GetMapValue("linuxConfiguration").AsMap() != nil { + linuxVirtualMachines = append(linuxVirtualMachines, adaptLinuxVirtualMachine(resource)) + } + } + + return linuxVirtualMachines +} + +func adaptLinuxVirtualMachine(resource azure.Resource) compute.LinuxVirtualMachine { + return compute.LinuxVirtualMachine{ + Metadata: resource.Metadata, + VirtualMachine: compute.VirtualMachine{ + Metadata: resource.Metadata, + CustomData: resource.Properties.GetMapValue("osProfile"). 
+ GetMapValue("customData").AsStringValue("", resource.Metadata), + }, + OSProfileLinuxConfig: compute.OSProfileLinuxConfig{ + Metadata: resource.Metadata, + DisablePasswordAuthentication: resource.Properties.GetMapValue("osProfile"). + GetMapValue("linuxConfiguration"). + GetMapValue("disablePasswordAuthentication").AsBoolValue(false, resource.Metadata), + }, + } + +} diff --git a/internal/adapters/arm/compute/adapt_test.go b/internal/adapters/arm/compute/adapt_test.go new file mode 100644 index 000000000000..4f06ead53cad --- /dev/null +++ b/internal/adapters/arm/compute/adapt_test.go @@ -0,0 +1,60 @@ +package compute + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + + "github.com/stretchr/testify/assert" + + "github.com/stretchr/testify/require" +) + +func Test_AdaptLinuxVM(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{ + "osProfile": azure.NewValue(map[string]azure.Value{ + "linuxConfiguration": azure.NewValue(map[string]azure.Value{ + "disablePasswordAuthentication": azure.NewValue(true, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.LinuxVirtualMachines, 1) + require.Len(t, output.WindowsVirtualMachines, 0) + + linuxVM := output.LinuxVirtualMachines[0] + assert.True(t, linuxVM.OSProfileLinuxConfig.DisablePasswordAuthentication.IsTrue()) + +} + +func Test_AdaptWindowsVM(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Compute/virtualMachines", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{ + "osProfile": azure.NewValue(map[string]azure.Value{ + 
"windowsConfiguration": azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.LinuxVirtualMachines, 0) + require.Len(t, output.WindowsVirtualMachines, 1) +} diff --git a/internal/adapters/arm/container/adapt.go b/internal/adapters/arm/container/adapt.go new file mode 100644 index 000000000000..0748411ad282 --- /dev/null +++ b/internal/adapters/arm/container/adapt.go @@ -0,0 +1,17 @@ +package container + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/container" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) container.Container { + return container.Container{ + KubernetesClusters: adaptKubernetesClusters(deployment), + } +} + +func adaptKubernetesClusters(deployment azure.Deployment) []container.KubernetesCluster { + + return nil +} diff --git a/internal/adapters/arm/database/adapt.go b/internal/adapters/arm/database/adapt.go new file mode 100644 index 000000000000..7c32428847a6 --- /dev/null +++ b/internal/adapters/arm/database/adapt.go @@ -0,0 +1,35 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) database.Database { + return database.Database{ + MSSQLServers: adaptMSSQLServers(deployment), + MariaDBServers: adaptMariaDBServers(deployment), + MySQLServers: adaptMySQLServers(deployment), + PostgreSQLServers: adaptPostgreSQLServers(deployment), + } +} + +func adaptMySQLServers(deployment azure.Deployment) (mysqlDbServers []database.MySQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DBforMySQL/servers") { + mysqlDbServers = append(mysqlDbServers, adaptMySQLServer(resource, deployment)) + } + return mysqlDbServers +} + +func adaptMySQLServer(resource azure.Resource, 
deployment azure.Deployment) database.MySQLServer { + return database.MySQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + } +} diff --git a/internal/adapters/arm/database/firewall.go b/internal/adapters/arm/database/firewall.go new file mode 100644 index 000000000000..c25412871a30 --- /dev/null +++ b/internal/adapters/arm/database/firewall.go @@ -0,0 +1,18 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func addFirewallRule(resource azure.Resource) []database.FirewallRule { + var rules []database.FirewallRule + for _, rule := range resource.Properties.GetMapValue("firewallRules").AsMap() { + rules = append(rules, database.FirewallRule{ + Metadata: rule.Metadata, + StartIP: rule.GetMapValue("startIpAddress").AsStringValue("", rule.Metadata), + EndIP: rule.GetMapValue("endIpAddress").AsStringValue("", rule.Metadata), + }) + } + return rules +} diff --git a/internal/adapters/arm/database/maria.go b/internal/adapters/arm/database/maria.go new file mode 100644 index 000000000000..853426ad84bd --- /dev/null +++ b/internal/adapters/arm/database/maria.go @@ -0,0 +1,27 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func adaptMariaDBServers(deployment azure.Deployment) (mariaDbServers []database.MariaDBServer) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.DBforMariaDB/servers") { + mariaDbServers = append(mariaDbServers, adaptMariaDBServer(resource, deployment)) + } + return mariaDbServers + +} + +func adaptMariaDBServer(resource azure.Resource, deployment azure.Deployment) database.MariaDBServer { + return database.MariaDBServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + } +} diff --git a/internal/adapters/arm/database/mssql.go b/internal/adapters/arm/database/mssql.go new file mode 100644 index 000000000000..c03fb0a5321f --- /dev/null +++ b/internal/adapters/arm/database/mssql.go @@ -0,0 +1,61 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func adaptMSSQLServers(deployment azure.Deployment) (msSQlServers []database.MSSQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Sql/servers") { + msSQlServers = append(msSQlServers, adaptMSSQLServer(resource, deployment)) + } + return msSQlServers +} + +func adaptMSSQLServer(resource azure.Resource, deployment azure.Deployment) database.MSSQLServer { + return database.MSSQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: 
resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + ExtendedAuditingPolicies: adaptExtendedAuditingPolicies(resource, deployment), + SecurityAlertPolicies: adaptSecurityAlertPolicies(resource, deployment), + } +} + +func adaptExtendedAuditingPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.ExtendedAuditingPolicy) { + + for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/extendedAuditingSettings") { + policies = append(policies, database.ExtendedAuditingPolicy{ + Metadata: policy.Metadata, + RetentionInDays: policy.Properties.GetMapValue("retentionDays").AsIntValue(0, policy.Metadata), + }) + } + + return policies +} + +func adaptSecurityAlertPolicies(resource azure.Resource, deployment azure.Deployment) (policies []database.SecurityAlertPolicy) { + for _, policy := range deployment.GetResourcesByType("Microsoft.Sql/servers/securityAlertPolicies") { + policies = append(policies, database.SecurityAlertPolicy{ + Metadata: policy.Metadata, + EmailAddresses: adaptStringList(policy.Properties.GetMapValue("emailAddresses")), + DisabledAlerts: adaptStringList(policy.Properties.GetMapValue("disabledAlerts")), + EmailAccountAdmins: policy.Properties.GetMapValue("emailAccountAdmins").AsBoolValue(false, policy.Metadata), + }) + } + return policies +} + +func adaptStringList(value azure.Value) []defsecTypes.StringValue { + var list []defsecTypes.StringValue + for _, v := range value.AsList() { + list = append(list, v.AsStringValue("", value.Metadata)) + } + return list +} diff --git a/internal/adapters/arm/database/postgresql.go b/internal/adapters/arm/database/postgresql.go new file mode 100644 index 000000000000..b4e37e7c43c2 --- /dev/null +++ 
b/internal/adapters/arm/database/postgresql.go @@ -0,0 +1,64 @@ +package database + +import ( + "fmt" + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func adaptPostgreSQLServers(deployment azure.Deployment) (databases []database.PostgreSQLServer) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DBforPostgreSQL/servers") { + databases = append(databases, adaptPostgreSQLServer(resource, deployment)) + } + + return databases +} + +func adaptPostgreSQLServer(resource azure.Resource, deployment azure.Deployment) database.PostgreSQLServer { + return database.PostgreSQLServer{ + Metadata: resource.Metadata, + Server: database.Server{ + Metadata: resource.Metadata, + EnableSSLEnforcement: resource.Properties.GetMapValue("sslEnforcement").AsBoolValue(false, resource.Metadata), + MinimumTLSVersion: resource.Properties.GetMapValue("minimalTlsVersion").AsStringValue("TLSEnforcementDisabled", resource.Metadata), + EnablePublicNetworkAccess: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(false, resource.Metadata), + FirewallRules: addFirewallRule(resource), + }, + Config: adaptPostgreSQLConfiguration(resource, deployment), + } +} + +func adaptPostgreSQLConfiguration(resource azure.Resource, deployment azure.Deployment) database.PostgresSQLConfig { + + parent := fmt.Sprintf("%s/", resource.Name.AsString()) + + config := database.PostgresSQLConfig{ + Metadata: resource.Metadata, + LogCheckpoints: defsecTypes.BoolDefault(false, resource.Metadata), + ConnectionThrottling: defsecTypes.BoolDefault(false, resource.Metadata), + LogConnections: defsecTypes.BoolDefault(false, resource.Metadata), + } + + for _, configuration := range deployment.GetResourcesByType("Microsoft.DBforPostgreSQL/servers/configurations") { + if strings.HasPrefix(configuration.Name.AsString(), parent) { + val := 
configuration.Properties.GetMapValue("value") + if strings.HasSuffix(configuration.Name.AsString(), "log_checkpoints") { + config.LogCheckpoints = val.AsBoolValue(false, configuration.Metadata) + continue + } + if strings.HasSuffix(configuration.Name.AsString(), "log_connections") { + config.LogConnections = val.AsBoolValue(false, configuration.Metadata) + continue + } + if strings.HasSuffix(configuration.Name.AsString(), "connection_throttling") { + config.ConnectionThrottling = val.AsBoolValue(false, configuration.Metadata) + continue + } + } + } + + return config +} diff --git a/internal/adapters/arm/datafactory/adapt.go b/internal/adapters/arm/datafactory/adapt.go new file mode 100644 index 000000000000..dfc94b537311 --- /dev/null +++ b/internal/adapters/arm/datafactory/adapt.go @@ -0,0 +1,27 @@ +package datafactory + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) datafactory.DataFactory { + + return datafactory.DataFactory{ + DataFactories: adaptDataFactories(deployment), + } +} + +func adaptDataFactories(deployment azure.Deployment) (factories []datafactory.Factory) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DataFactory/factories") { + factories = append(factories, adaptDataFactory(resource)) + } + return factories +} + +func adaptDataFactory(resource azure.Resource) datafactory.Factory { + return datafactory.Factory{ + Metadata: resource.Metadata, + EnablePublicNetwork: resource.Properties.GetMapValue("publicNetworkAccess").AsBoolValue(true, resource.Metadata), + } +} diff --git a/internal/adapters/arm/datalake/adapt.go b/internal/adapters/arm/datalake/adapt.go new file mode 100644 index 000000000000..b11d43618f25 --- /dev/null +++ b/internal/adapters/arm/datalake/adapt.go @@ -0,0 +1,28 @@ +package datalake + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) datalake.DataLake { + + return datalake.DataLake{ + Stores: adaptStores(deployment), + } +} + +func adaptStores(deployment azure.Deployment) (stores []datalake.Store) { + for _, resource := range deployment.GetResourcesByType("Microsoft.DataLakeStore/accounts") { + stores = append(stores, adaptStore(resource)) + } + + return stores +} + +func adaptStore(resource azure.Resource) datalake.Store { + return datalake.Store{ + Metadata: resource.Metadata, + EnableEncryption: resource.Properties.GetMapValue("encryptionState").AsBoolValue(false, resource.Metadata), + } +} diff --git a/internal/adapters/arm/keyvault/adapt.go b/internal/adapters/arm/keyvault/adapt.go new file mode 100644 index 000000000000..b1eef216cf85 --- /dev/null +++ b/internal/adapters/arm/keyvault/adapt.go @@ -0,0 +1,64 @@ +package keyvault + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) keyvault.KeyVault { + return keyvault.KeyVault{ + Vaults: adaptVaults(deployment), + } +} + +func adaptVaults(deployment azure.Deployment) (vaults []keyvault.Vault) { + for _, resource := range deployment.GetResourcesByType("Microsoft.KeyVault/vaults") { + vaults = append(vaults, adaptVault(resource, deployment)) + } + + return vaults +} + +func adaptVault(resource azure.Resource, deployment azure.Deployment) keyvault.Vault { + return keyvault.Vault{ + Metadata: resource.Metadata, + Secrets: adaptSecrets(resource, deployment), + Keys: adaptKeys(resource, deployment), + EnablePurgeProtection: resource.Properties.GetMapValue("enablePurgeProtection").AsBoolValue(false, resource.Metadata), + SoftDeleteRetentionDays: resource.Properties.GetMapValue("softDeleteRetentionInDays").AsIntValue(7, resource.Metadata), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: resource.Metadata, + 
DefaultAction: resource.Properties.GetMapValue("properties").GetMapValue("networkAcls").GetMapValue("defaultAction").AsStringValue("", resource.Metadata), + }, + } +} + +func adaptKeys(resource azure.Resource, deployment azure.Deployment) (keys []keyvault.Key) { + for _, resource := range deployment.GetResourcesByType("Microsoft.KeyVault/vaults/keys") { + keys = append(keys, adaptKey(resource)) + } + + return keys +} + +func adaptKey(resource azure.Resource) keyvault.Key { + return keyvault.Key{ + Metadata: resource.Metadata, + ExpiryDate: resource.Properties.GetMapValue("attributes").GetMapValue("exp").AsTimeValue(resource.Metadata), + } +} + +func adaptSecrets(resource azure.Resource, deployment azure.Deployment) (secrets []keyvault.Secret) { + for _, resource := range deployment.GetResourcesByType("Microsoft.KeyVault/vaults/secrets") { + secrets = append(secrets, adaptSecret(resource)) + } + return secrets +} + +func adaptSecret(resource azure.Resource) keyvault.Secret { + return keyvault.Secret{ + Metadata: resource.Metadata, + ContentType: resource.Properties.GetMapValue("contentType").AsStringValue("", resource.Metadata), + ExpiryDate: resource.Properties.GetMapValue("attributes").GetMapValue("exp").AsTimeValue(resource.Metadata), + } +} diff --git a/internal/adapters/arm/monitor/adapt.go b/internal/adapters/arm/monitor/adapt.go new file mode 100644 index 000000000000..271bc2ea58d8 --- /dev/null +++ b/internal/adapters/arm/monitor/adapt.go @@ -0,0 +1,45 @@ +package monitor + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) monitor.Monitor { + return monitor.Monitor{ + LogProfiles: adaptLogProfiles(deployment), + } +} + +func adaptLogProfiles(deployment azure.Deployment) (logProfiles []monitor.LogProfile) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.Insights/logProfiles") { + logProfiles = append(logProfiles, adaptLogProfile(resource)) + } + return logProfiles +} + +func adaptLogProfile(resource azure.Resource) monitor.LogProfile { + categories := resource.Properties.GetMapValue("categories").AsList() + var categoriesList []types.StringValue + for _, category := range categories { + categoriesList = append(categoriesList, category.AsStringValue("", category.Metadata)) + } + + locations := resource.Properties.GetMapValue("locations").AsList() + var locationsList []types.StringValue + for _, location := range locations { + locationsList = append(locationsList, location.AsStringValue("", location.Metadata)) + } + + return monitor.LogProfile{ + Metadata: resource.Metadata, + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: resource.Metadata, + Enabled: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("enabled").AsBoolValue(false, resource.Metadata), + Days: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("days").AsIntValue(0, resource.Metadata), + }, + Categories: categoriesList, + Locations: locationsList, + } +} diff --git a/internal/adapters/arm/network/adapt.go b/internal/adapters/arm/network/adapt.go new file mode 100644 index 000000000000..2ed036c193d2 --- /dev/null +++ b/internal/adapters/arm/network/adapt.go @@ -0,0 +1,126 @@ +package network + +import ( + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) network.Network { + return network.Network{ + SecurityGroups: adaptSecurityGroups(deployment), + NetworkWatcherFlowLogs: adaptNetworkWatcherFlowLogs(deployment), + } +} + +func adaptSecurityGroups(deployment azure.Deployment) (sgs []network.SecurityGroup) { + for _, resource := range 
deployment.GetResourcesByType("Microsoft.Network/networkSecurityGroups") { + sgs = append(sgs, adaptSecurityGroup(resource, deployment)) + } + return sgs + +} + +func adaptSecurityGroup(resource azure.Resource, deployment azure.Deployment) network.SecurityGroup { + return network.SecurityGroup{ + Metadata: resource.Metadata, + Rules: adaptSecurityGroupRules(resource, deployment), + } +} + +func adaptSecurityGroupRules(resource azure.Resource, deployment azure.Deployment) (rules []network.SecurityGroupRule) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Network/networkSecurityGroups/securityRules") { + rules = append(rules, adaptSecurityGroupRule(resource)) + } + return rules +} + +func adaptSecurityGroupRule(resource azure.Resource) network.SecurityGroupRule { + sourceAddressPrefixes := resource.Properties.GetMapValue("sourceAddressPrefixes").AsStringValuesList("") + sourceAddressPrefixes = append(sourceAddressPrefixes, resource.Properties.GetMapValue("sourceAddressPrefix").AsStringValue("", resource.Metadata)) + + var sourcePortRanges []network.PortRange + for _, portRange := range resource.Properties.GetMapValue("sourcePortRanges").AsList() { + sourcePortRanges = append(sourcePortRanges, expandRange(portRange.AsString(), resource.Metadata)) + } + sourcePortRanges = append(sourcePortRanges, expandRange(resource.Properties.GetMapValue("sourcePortRange").AsString(), resource.Metadata)) + + destinationAddressPrefixes := resource.Properties.GetMapValue("destinationAddressPrefixes").AsStringValuesList("") + destinationAddressPrefixes = append(destinationAddressPrefixes, resource.Properties.GetMapValue("destinationAddressPrefix").AsStringValue("", resource.Metadata)) + + var destinationPortRanges []network.PortRange + for _, portRange := range resource.Properties.GetMapValue("destinationPortRanges").AsList() { + destinationPortRanges = append(destinationPortRanges, expandRange(portRange.AsString(), resource.Metadata)) + } + destinationPortRanges = 
append(destinationPortRanges, expandRange(resource.Properties.GetMapValue("destinationPortRange").AsString(), resource.Metadata)) + + allow := defsecTypes.BoolDefault(false, resource.Metadata) + if resource.Properties.GetMapValue("access").AsString() == "Allow" { + allow = defsecTypes.Bool(true, resource.Metadata) + } + + outbound := defsecTypes.BoolDefault(false, resource.Metadata) + if resource.Properties.GetMapValue("direction").AsString() == "Outbound" { + outbound = defsecTypes.Bool(true, resource.Metadata) + } + + return network.SecurityGroupRule{ + Metadata: resource.Metadata, + Outbound: outbound, + Allow: allow, + SourceAddresses: sourceAddressPrefixes, + SourcePorts: sourcePortRanges, + DestinationAddresses: destinationAddressPrefixes, + DestinationPorts: destinationPortRanges, + Protocol: resource.Properties.GetMapValue("protocol").AsStringValue("", resource.Metadata), + } +} + +func adaptNetworkWatcherFlowLogs(deployment azure.Deployment) (flowLogs []network.NetworkWatcherFlowLog) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Network/networkWatchers/flowLogs") { + flowLogs = append(flowLogs, adaptNetworkWatcherFlowLog(resource)) + } + return flowLogs +} + +func adaptNetworkWatcherFlowLog(resource azure.Resource) network.NetworkWatcherFlowLog { + return network.NetworkWatcherFlowLog{ + Metadata: resource.Metadata, + RetentionPolicy: network.RetentionPolicy{ + Metadata: resource.Metadata, + Enabled: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("enabled").AsBoolValue(false, resource.Metadata), + Days: resource.Properties.GetMapValue("retentionPolicy").GetMapValue("days").AsIntValue(0, resource.Metadata), + }, + } +} + +func expandRange(r string, m defsecTypes.Metadata) network.PortRange { + start := 0 + end := 65535 + switch { + case r == "*": + case strings.Contains(r, "-"): + if parts := strings.Split(r, "-"); len(parts) == 2 { + if p1, err := strconv.ParseInt(parts[0], 10, 32); err == nil { + start = int(p1) + } 
+ if p2, err := strconv.ParseInt(parts[1], 10, 32); err == nil { + end = int(p2) + } + } + default: + if val, err := strconv.ParseInt(r, 10, 32); err == nil { + start = int(val) + end = int(val) + } + } + + return network.PortRange{ + Metadata: m, + Start: start, + End: end, + } +} diff --git a/internal/adapters/arm/securitycenter/adapt.go b/internal/adapters/arm/securitycenter/adapt.go new file mode 100644 index 000000000000..dfa44e943cf8 --- /dev/null +++ b/internal/adapters/arm/securitycenter/adapt.go @@ -0,0 +1,43 @@ +package securitycenter + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) securitycenter.SecurityCenter { + return securitycenter.SecurityCenter{ + Contacts: adaptContacts(deployment), + Subscriptions: adaptSubscriptions(deployment), + } +} + +func adaptContacts(deployment azure.Deployment) (contacts []securitycenter.Contact) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Security/securityContacts") { + contacts = append(contacts, adaptContact(resource)) + } + + return contacts +} + +func adaptContact(resource azure.Resource) securitycenter.Contact { + return securitycenter.Contact{ + Metadata: resource.Metadata, + EnableAlertNotifications: resource.Properties.GetMapValue("email").AsBoolValue(false, resource.Metadata), + Phone: resource.Properties.GetMapValue("phone").AsStringValue("", resource.Metadata), + } +} + +func adaptSubscriptions(deployment azure.Deployment) (subscriptions []securitycenter.SubscriptionPricing) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Security/pricings") { + subscriptions = append(subscriptions, adaptSubscription(resource)) + } + return subscriptions +} + +func adaptSubscription(resource azure.Resource) securitycenter.SubscriptionPricing { + return securitycenter.SubscriptionPricing{ + Metadata: resource.Metadata, + Tier: 
resource.Properties.GetMapValue("pricingTier").AsStringValue("Free", resource.Metadata), + } +} diff --git a/internal/adapters/arm/storage/adapt.go b/internal/adapters/arm/storage/adapt.go new file mode 100644 index 000000000000..31dd0bec0dd8 --- /dev/null +++ b/internal/adapters/arm/storage/adapt.go @@ -0,0 +1,69 @@ +package storage + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(deployment azure.Deployment) storage.Storage { + return storage.Storage{ + Accounts: adaptAccounts(deployment), + } +} + +func adaptAccounts(deployment azure.Deployment) []storage.Account { + var accounts []storage.Account + for _, resource := range deployment.GetResourcesByType("Microsoft.Storage/storageAccounts") { + + var networkRules []storage.NetworkRule + for _, acl := range resource.Properties.GetMapValue("networkAcls").AsList() { + + var bypasses []types.StringValue + bypassProp := acl.GetMapValue("bypass") + for _, bypass := range strings.Split(bypassProp.AsString(), ",") { + bypasses = append(bypasses, types.String(bypass, bypassProp.GetMetadata())) + } + + networkRules = append(networkRules, storage.NetworkRule{ + Metadata: acl.GetMetadata(), + Bypass: bypasses, + AllowByDefault: types.Bool(acl.GetMapValue("defaultAction").EqualTo("Allow"), acl.GetMetadata()), + }) + } + + var queues []storage.Queue + for _, queueResource := range resource.GetResourcesByType("queueServices/queues") { + queues = append(queues, storage.Queue{ + Metadata: queueResource.Metadata, + Name: queueResource.Name.AsStringValue("", queueResource.Metadata), + }) + } + + var containers []storage.Container + for _, containerResource := range resource.GetResourcesByType("containerServices/containers") { + containers = append(containers, storage.Container{ + Metadata: containerResource.Metadata, + PublicAccess: 
containerResource.Properties.GetMapValue("publicAccess").AsStringValue("None", containerResource.Metadata), + }) + } + + account := storage.Account{ + Metadata: resource.Metadata, + NetworkRules: networkRules, + EnforceHTTPS: resource.Properties.GetMapValue("supportsHttpsTrafficOnly").AsBoolValue(false, resource.Properties.GetMetadata()), + Containers: containers, + QueueProperties: storage.QueueProperties{ + Metadata: resource.Properties.GetMetadata(), + EnableLogging: types.BoolDefault(false, resource.Properties.GetMetadata()), + }, + MinimumTLSVersion: resource.Properties.GetMapValue("minimumTlsVersion").AsStringValue("TLS1_0", resource.Properties.GetMetadata()), + Queues: queues, + } + accounts = append(accounts, account) + } + return accounts +} diff --git a/internal/adapters/arm/storage/adapt_test.go b/internal/adapters/arm/storage/adapt_test.go new file mode 100644 index 000000000000..d921e23993e7 --- /dev/null +++ b/internal/adapters/arm/storage/adapt_test.go @@ -0,0 +1,59 @@ +package storage + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + + "github.com/stretchr/testify/assert" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_AdaptStorageDefaults(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Properties: azure.NewValue(map[string]azure.Value{}, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.Accounts, 1) + + account := output.Accounts[0] + assert.Equal(t, "TLS1_0", account.MinimumTLSVersion.Value()) + assert.Equal(t, false, account.EnforceHTTPS.Value()) + +} + +func Test_AdaptStorage(t *testing.T) { + + input := azure.Deployment{ + Resources: []azure.Resource{ + { + Type: azure.NewValue("Microsoft.Storage/storageAccounts", types.NewTestMetadata()), + Name: azure.Value{}, + Properties: 
azure.NewValue(map[string]azure.Value{ + "minimumTlsVersion": azure.NewValue("TLS1_2", types.NewTestMetadata()), + "supportsHttpsTrafficOnly": azure.NewValue(true, types.NewTestMetadata()), + }, types.NewTestMetadata()), + }, + }, + } + + output := Adapt(input) + + require.Len(t, output.Accounts, 1) + + account := output.Accounts[0] + assert.Equal(t, "TLS1_2", account.MinimumTLSVersion.Value()) + assert.Equal(t, true, account.EnforceHTTPS.Value()) + +} diff --git a/internal/adapters/arm/synapse/adapt.go b/internal/adapters/arm/synapse/adapt.go new file mode 100644 index 000000000000..e295772091bc --- /dev/null +++ b/internal/adapters/arm/synapse/adapt.go @@ -0,0 +1,34 @@ +package synapse + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" +) + +func Adapt(deployment azure.Deployment) synapse.Synapse { + return synapse.Synapse{ + Workspaces: adaptWorkspaces(deployment), + } +} + +func adaptWorkspaces(deployment azure.Deployment) (workspaces []synapse.Workspace) { + for _, resource := range deployment.GetResourcesByType("Microsoft.Synapse/workspaces") { + workspaces = append(workspaces, adaptWorkspace(resource)) + } + return workspaces +} + +func adaptWorkspace(resource azure.Resource) synapse.Workspace { + + managedVirtualNetwork := resource.Properties.GetMapValue("managedVirtualNetwork").AsString() + enableManagedVirtualNetwork := types.BoolDefault(false, resource.Metadata) + if managedVirtualNetwork == "default" { + enableManagedVirtualNetwork = types.Bool(true, resource.Metadata) + } + + return synapse.Workspace{ + Metadata: resource.Metadata, + EnableManagedVirtualNetwork: enableManagedVirtualNetwork, + } +} diff --git a/internal/adapters/cloudformation/adapt.go b/internal/adapters/cloudformation/adapt.go new file mode 100644 index 000000000000..5efe2612524e --- /dev/null +++ b/internal/adapters/cloudformation/adapt.go @@ -0,0 
+1,14 @@ +package cloudformation + +import ( + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) *state.State { + return &state.State{ + AWS: aws.Adapt(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go new file mode 100644 index 000000000000..687e657ed49d --- /dev/null +++ b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go @@ -0,0 +1,13 @@ +package accessanalyzer + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) accessanalyzer.AccessAnalyzer { + return accessanalyzer.AccessAnalyzer{ + Analyzers: getAccessAnalyzer(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go new file mode 100644 index 000000000000..c592f1348ad7 --- /dev/null +++ b/internal/adapters/cloudformation/aws/accessanalyzer/analyzer.go @@ -0,0 +1,24 @@ +package accessanalyzer + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getAccessAnalyzer(ctx parser.FileContext) (analyzers []accessanalyzer.Analyzer) { + + analyzersList := ctx.GetResourcesByType("AWS::AccessAnalyzer::Analyzer") + + for _, r := range analyzersList { + aa := accessanalyzer.Analyzer{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("AnalyzerName"), + ARN: r.StringDefault(""), + Active: types.BoolDefault(false, r.Metadata()), + } + + analyzers = 
append(analyzers, aa) + } + return analyzers +} diff --git a/internal/adapters/cloudformation/aws/adapt.go b/internal/adapters/cloudformation/aws/adapt.go new file mode 100644 index 000000000000..03746988ec16 --- /dev/null +++ b/internal/adapters/cloudformation/aws/adapt.go @@ -0,0 +1,74 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/apigateway" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/athena" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudfront" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudtrail" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/cloudwatch" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/codebuild" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/config" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/documentdb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/dynamodb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ec2" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecr" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ecs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/efs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/eks" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticache" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elasticsearch" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/elb" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/iam" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/kinesis" + 
"github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/lambda" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/mq" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/msk" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/neptune" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/rds" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/redshift" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/s3" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sam" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sns" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/sqs" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/ssm" + "github.com/aquasecurity/trivy/internal/adapters/cloudformation/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) aws.AWS { + return aws.AWS{ + APIGateway: apigateway.Adapt(cfFile), + Athena: athena.Adapt(cfFile), + Cloudfront: cloudfront.Adapt(cfFile), + CloudTrail: cloudtrail.Adapt(cfFile), + CloudWatch: cloudwatch.Adapt(cfFile), + CodeBuild: codebuild.Adapt(cfFile), + Config: config.Adapt(cfFile), + DocumentDB: documentdb.Adapt(cfFile), + DynamoDB: dynamodb.Adapt(cfFile), + EC2: ec2.Adapt(cfFile), + ECR: ecr.Adapt(cfFile), + ECS: ecs.Adapt(cfFile), + EFS: efs.Adapt(cfFile), + IAM: iam.Adapt(cfFile), + EKS: eks.Adapt(cfFile), + ElastiCache: elasticache.Adapt(cfFile), + Elasticsearch: elasticsearch.Adapt(cfFile), + ELB: elb.Adapt(cfFile), + MSK: msk.Adapt(cfFile), + MQ: mq.Adapt(cfFile), + Kinesis: kinesis.Adapt(cfFile), + Lambda: lambda.Adapt(cfFile), + Neptune: neptune.Adapt(cfFile), + RDS: rds.Adapt(cfFile), + Redshift: redshift.Adapt(cfFile), + S3: s3.Adapt(cfFile), + SAM: sam.Adapt(cfFile), + SNS: sns.Adapt(cfFile), + SQS: sqs.Adapt(cfFile), + SSM: ssm.Adapt(cfFile), + WorkSpaces: workspaces.Adapt(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/apigateway/apigateway.go b/internal/adapters/cloudformation/aws/apigateway/apigateway.go new file mode 100644 index 000000000000..e71444059838 --- /dev/null +++ b/internal/adapters/cloudformation/aws/apigateway/apigateway.go @@ -0,0 +1,21 @@ +package apigateway + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) apigateway.APIGateway { + return apigateway.APIGateway{ + V1: v1.APIGateway{ + APIs: nil, + DomainNames: nil, + }, + V2: v2.APIGateway{ + APIs: getApis(cfFile), + }, + } +} diff --git a/internal/adapters/cloudformation/aws/apigateway/stage.go b/internal/adapters/cloudformation/aws/apigateway/stage.go new file mode 100644 index 000000000000..a25dad75bfee --- /dev/null +++ b/internal/adapters/cloudformation/aws/apigateway/stage.go @@ -0,0 +1,68 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getApis(cfFile parser.FileContext) (apis []v2.API) { + + apiResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Api") + for _, apiRes := range apiResources { + api := v2.API{ + Metadata: apiRes.Metadata(), + Name: types.StringDefault("", apiRes.Metadata()), + ProtocolType: types.StringDefault("", apiRes.Metadata()), + Stages: getStages(apiRes.ID(), cfFile), + } + apis = append(apis, api) + } + + return apis +} + +func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { + var apiStages []v2.Stage + + stageResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Stage") + for _, r := range stageResources { + stageApiId := r.GetStringProperty("ApiId") + if stageApiId.Value() != apiId { + continue + } + + s := v2.Stage{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("StageName"), + AccessLogging: getAccessLogging(r), + } + apiStages = append(apiStages, s) + } + + return apiStages +} + +func getAccessLogging(r *parser.Resource) v2.AccessLogging { + + loggingProp := r.GetProperty("AccessLogSettings") + if loggingProp.IsNil() { + return v2.AccessLogging{ + Metadata: r.Metadata(), + CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()), + } + } + + destinationProp := r.GetProperty("AccessLogSettings.DestinationArn") + + if 
destinationProp.IsNil() { + return v2.AccessLogging{ + Metadata: loggingProp.Metadata(), + CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()), + } + } + return v2.AccessLogging{ + Metadata: destinationProp.Metadata(), + CloudwatchLogGroupARN: destinationProp.AsStringValue(), + } +} diff --git a/internal/adapters/cloudformation/aws/athena/athena.go b/internal/adapters/cloudformation/aws/athena/athena.go new file mode 100644 index 000000000000..5408c4660301 --- /dev/null +++ b/internal/adapters/cloudformation/aws/athena/athena.go @@ -0,0 +1,14 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) athena.Athena { + return athena.Athena{ + Databases: nil, + Workgroups: getWorkGroups(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/athena/workgroup.go b/internal/adapters/cloudformation/aws/athena/workgroup.go new file mode 100644 index 000000000000..b62eef37566a --- /dev/null +++ b/internal/adapters/cloudformation/aws/athena/workgroup.go @@ -0,0 +1,30 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getWorkGroups(cfFile parser.FileContext) []athena.Workgroup { + + var workgroups []athena.Workgroup + + workgroupResources := cfFile.GetResourcesByType("AWS::Athena::WorkGroup") + + for _, r := range workgroupResources { + + wg := athena.Workgroup{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + Encryption: athena.EncryptionConfiguration{ + Metadata: r.Metadata(), + Type: r.GetStringProperty("WorkGroupConfiguration.ResultConfiguration.EncryptionConfiguration.EncryptionOption"), + }, + EnforceConfiguration: r.GetBoolProperty("WorkGroupConfiguration.EnforceWorkGroupConfiguration"), + } + + workgroups = append(workgroups, wg) + } + + 
return workgroups +} diff --git a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go new file mode 100644 index 000000000000..e12dd2529036 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go @@ -0,0 +1,13 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) cloudfront.Cloudfront { + return cloudfront.Cloudfront{ + Distributions: getDistributions(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudfront/distribution.go b/internal/adapters/cloudformation/aws/cloudfront/distribution.go new file mode 100644 index 000000000000..c63aa365fe3c --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudfront/distribution.go @@ -0,0 +1,55 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distribution) { + + distributionResources := ctx.GetResourcesByType("AWS::CloudFront::Distribution") + + for _, r := range distributionResources { + distribution := cloudfront.Distribution{ + Metadata: r.Metadata(), + WAFID: r.GetStringProperty("DistributionConfig.WebACLId"), + Logging: cloudfront.Logging{ + Metadata: r.Metadata(), + Bucket: r.GetStringProperty("DistributionConfig.Logging.Bucket"), + }, + DefaultCacheBehaviour: getDefaultCacheBehaviour(r), + OrdererCacheBehaviours: nil, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: r.Metadata(), + MinimumProtocolVersion: r.GetStringProperty("DistributionConfig.ViewerCertificate.MinimumProtocolVersion"), + }, + } + + distributions = append(distributions, distribution) + 
} + + return distributions +} + +func getDefaultCacheBehaviour(r *parser.Resource) cloudfront.CacheBehaviour { + defaultCache := r.GetProperty("DistributionConfig.DefaultCacheBehavior") + if defaultCache.IsNil() { + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + } + } + protoProp := r.GetProperty("DistributionConfig.DefaultCacheBehavior.ViewerProtocolPolicy") + if protoProp.IsNotString() { + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + } + } + + return cloudfront.CacheBehaviour{ + Metadata: r.Metadata(), + ViewerProtocolPolicy: protoProp.AsStringValue(), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go new file mode 100644 index 000000000000..848b124de9bf --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go @@ -0,0 +1,13 @@ +package cloudtrail + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) cloudtrail.CloudTrail { + return cloudtrail.CloudTrail{ + Trails: getCloudTrails(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/cloudtrail/trails.go b/internal/adapters/cloudformation/aws/cloudtrail/trails.go new file mode 100644 index 000000000000..60c8f4417187 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudtrail/trails.go @@ -0,0 +1,27 @@ +package cloudtrail + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getCloudTrails(ctx parser.FileContext) (trails []cloudtrail.Trail) { + + cloudtrailResources := ctx.GetResourcesByType("AWS::CloudTrail::Trail") + + for _, r := range cloudtrailResources { + ct := cloudtrail.Trail{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("TrailName"), + EnableLogFileValidation: r.GetBoolProperty("EnableLogFileValidation"), + IsMultiRegion: r.GetBoolProperty("IsMultiRegionTrail"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + CloudWatchLogsLogGroupArn: r.GetStringProperty("CloudWatchLogsLogGroupArn"), + IsLogging: r.GetBoolProperty("IsLogging"), + BucketName: r.GetStringProperty("S3BucketName"), + } + + trails = append(trails, ct) + } + return trails +} diff --git a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go new file mode 100644 index 000000000000..0acfede18143 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -0,0 +1,14 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { + return cloudwatch.CloudWatch{ + LogGroups: getLogGroups(cfFile), + Alarms: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/cloudwatch/log_group.go b/internal/adapters/cloudformation/aws/cloudwatch/log_group.go new file mode 100644 index 000000000000..0f513a314006 --- /dev/null +++ b/internal/adapters/cloudformation/aws/cloudwatch/log_group.go @@ -0,0 +1,26 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { + + logGroupResources := ctx.GetResourcesByType("AWS::Logs::LogGroup") + + for _, r := range logGroupResources { + group := cloudwatch.LogGroup{ + Metadata: r.Metadata(), + Arn: types.StringDefault("", r.Metadata()), + Name: r.GetStringProperty("LogGroupName"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + RetentionInDays: r.GetIntProperty("RetentionInDays", 0), + MetricFilters: nil, + } + logGroups = append(logGroups, group) + } + + return logGroups +} diff --git a/internal/adapters/cloudformation/aws/codebuild/codebuild.go b/internal/adapters/cloudformation/aws/codebuild/codebuild.go new file mode 100644 index 000000000000..e9aa90180bd1 --- /dev/null +++ b/internal/adapters/cloudformation/aws/codebuild/codebuild.go @@ -0,0 +1,13 @@ +package codebuild + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) codebuild.CodeBuild { + return codebuild.CodeBuild{ + Projects: getProjects(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/codebuild/project.go b/internal/adapters/cloudformation/aws/codebuild/project.go new file mode 100644 index 000000000000..d24ffe030626 --- /dev/null +++ b/internal/adapters/cloudformation/aws/codebuild/project.go @@ -0,0 +1,63 @@ +package codebuild + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { + + projectResources := ctx.GetResourcesByType("AWS::CodeBuild::Project") + + for _, r := range projectResources { + project := codebuild.Project{ + Metadata: r.Metadata(), + ArtifactSettings: getArtifactSettings(r), + SecondaryArtifactSettings: getSecondaryArtifactSettings(r), + } + + projects = append(projects, project) + } + + return projects +} + +func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { + secondaryArtifactsList := r.GetProperty("SecondaryArtifacts") + if secondaryArtifactsList.IsNil() || !secondaryArtifactsList.IsList() { + return + } + + for _, a := range secondaryArtifactsList.AsList() { + settings := codebuild.ArtifactSettings{ + Metadata: secondaryArtifactsList.Metadata(), + EncryptionEnabled: types.BoolDefault(true, secondaryArtifactsList.Metadata()), + } + encryptionDisabled := a.GetProperty("EncryptionDisabled") + if encryptionDisabled.IsBool() { + settings.EncryptionEnabled = types.Bool(!encryptionDisabled.AsBool(), encryptionDisabled.Metadata()) + } + secondaryArtifacts = append(secondaryArtifacts, settings) + } + + return secondaryArtifacts +} + +func getArtifactSettings(r *parser.Resource) codebuild.ArtifactSettings { + + settings := codebuild.ArtifactSettings{ + Metadata: r.Metadata(), 
+		EncryptionEnabled: types.BoolDefault(true, r.Metadata()),
+	}
+
+	// Encryption is on by default; only an explicit EncryptionDisabled=true
+	// in the Artifacts block turns it off.
+	artifactsProperty := r.GetProperty("Artifacts")
+	if artifactsProperty.IsNotNil() {
+		encryptionDisabled := artifactsProperty.GetProperty("EncryptionDisabled")
+		if encryptionDisabled.IsBool() {
+			settings.EncryptionEnabled = types.Bool(!encryptionDisabled.AsBool(), encryptionDisabled.Metadata())
+		}
+	}
+
+	return settings
+}
diff --git a/internal/adapters/cloudformation/aws/config/aggregator.go b/internal/adapters/cloudformation/aws/config/aggregator.go
new file mode 100644
index 000000000000..6fa7c5a21d99
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/config/aggregator.go
@@ -0,0 +1,50 @@
+package config
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/config"
+	defsecTypes "github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// getConfigurationAggregator adapts the first AWS::Config::ConfigurationAggregator
+// resource in the template; when none is declared it returns an unmanaged
+// placeholder with SourceAllRegions defaulting to false.
+func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggregrator {
+
+	aggregator := config.ConfigurationAggregrator{
+		Metadata:         defsecTypes.NewUnmanagedMetadata(),
+		SourceAllRegions: defsecTypes.BoolDefault(false, ctx.Metadata()),
+	}
+
+	aggregatorResources := ctx.GetResourcesByType("AWS::Config::ConfigurationAggregator")
+
+	if len(aggregatorResources) == 0 {
+		return aggregator
+	}
+
+	// Only the first aggregator resource is considered.
+	return config.ConfigurationAggregrator{
+		Metadata:         aggregatorResources[0].Metadata(),
+		SourceAllRegions: isSourcingAllRegions(aggregatorResources[0]),
+	}
+}
+
+// isSourcingAllRegions resolves the AllAwsRegions flag from either the
+// account aggregation sources or the organization aggregation source.
+func isSourcingAllRegions(r *parser.Resource) defsecTypes.BoolValue {
+	accountProp := r.GetProperty("AccountAggregationSources")
+	orgProp := r.GetProperty("OrganizationAggregationSource")
+
+	if accountProp.IsNotNil() && accountProp.IsList() {
+		for _, a := range accountProp.AsList() {
+			regionsProp := a.GetProperty("AllAwsRegions")
+			// NOTE(review): when AllAwsRegions is absent (IsNil) this still
+			// returns regionsProp.AsBoolValue() for the first account source —
+			// presumably the property's zero value — rather than falling through
+			// to later sources or the organization source; confirm this is
+			// intentional.
+			if regionsProp.IsNil() || regionsProp.IsBool() {
+				return regionsProp.AsBoolValue()
+			}
+		}
+	}
+
+	if orgProp.IsNotNil() {
+		regionsProp := 
orgProp.GetProperty("AllAwsRegions") + if regionsProp.IsBool() { + return regionsProp.AsBoolValue() + } + } + + // nothing is set or resolvable so its got to be false + return defsecTypes.BoolDefault(false, r.Metadata()) +} diff --git a/internal/adapters/cloudformation/aws/config/config.go b/internal/adapters/cloudformation/aws/config/config.go new file mode 100644 index 000000000000..819b20250266 --- /dev/null +++ b/internal/adapters/cloudformation/aws/config/config.go @@ -0,0 +1,13 @@ +package config + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) config.Config { + return config.Config{ + ConfigurationAggregrator: getConfigurationAggregator(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/documentdb/cluster.go b/internal/adapters/cloudformation/aws/documentdb/cluster.go new file mode 100644 index 000000000000..8c19aee1650a --- /dev/null +++ b/internal/adapters/cloudformation/aws/documentdb/cluster.go @@ -0,0 +1,58 @@ +package documentdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::DocDB::DBCluster") + + for _, r := range clusterResources { + cluster := documentdb.Cluster{ + Metadata: r.Metadata(), + Identifier: r.GetStringProperty("DBClusterIdentifier"), + EnabledLogExports: getLogExports(r), + Instances: nil, + BackupRetentionPeriod: r.GetIntProperty("BackupRetentionPeriod"), + StorageEncrypted: r.GetBoolProperty("StorageEncrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + } + + updateInstancesOnCluster(&cluster, ctx) + + clusters = append(clusters, cluster) + } + return clusters +} + +func 
updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContext) {
+
+	instanceResources := ctx.GetResourcesByType("AWS::DocDB::DBInstance")
+
+	for _, r := range instanceResources {
+		clusterIdentifier := r.GetStringProperty("DBClusterIdentifier")
+		// NOTE(review): this compares two defsec StringValue structs with ==,
+		// which also compares their metadata rather than just the underlying
+		// string — presumably it should compare values (e.g. EqualTo); confirm
+		// against the defsec types package.
+		if clusterIdentifier == cluster.Identifier {
+			cluster.Instances = append(cluster.Instances, documentdb.Instance{
+				Metadata: r.Metadata(),
+				// instances inherit the cluster's KMS key rather than reading
+				// their own property
+				KMSKeyID: cluster.KMSKeyID,
+			})
+		}
+	}
+}
+
+// getLogExports collects the EnableCloudwatchLogsExports entries as string values.
+func getLogExports(r *parser.Resource) (logExports []types.StringValue) {
+
+	exportsList := r.GetProperty("EnableCloudwatchLogsExports")
+
+	if exportsList.IsNil() || exportsList.IsNotList() {
+		return logExports
+	}
+
+	for _, export := range exportsList.AsList() {
+		logExports = append(logExports, export.AsStringValue())
+	}
+	return logExports
+}
diff --git a/internal/adapters/cloudformation/aws/documentdb/documentdb.go b/internal/adapters/cloudformation/aws/documentdb/documentdb.go
new file mode 100644
index 000000000000..91439139d3f8
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/documentdb/documentdb.go
@@ -0,0 +1,13 @@
+package documentdb
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/documentdb"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) documentdb.DocumentDB { + return documentdb.DocumentDB{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/dynamodb/cluster.go b/internal/adapters/cloudformation/aws/dynamodb/cluster.go new file mode 100644 index 000000000000..76e28c971939 --- /dev/null +++ b/internal/adapters/cloudformation/aws/dynamodb/cluster.go @@ -0,0 +1,36 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(file parser.FileContext) (clusters []dynamodb.DAXCluster) { + + clusterResources := file.GetResourcesByType("AWS::DAX::Cluster") + + for _, r := range clusterResources { + cluster := dynamodb.DAXCluster{ + Metadata: r.Metadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + }, + PointInTimeRecovery: defsecTypes.BoolUnresolvable(r.Metadata()), + } + + if sseProp := r.GetProperty("SSESpecification"); sseProp.IsNotNil() { + cluster.ServerSideEncryption = dynamodb.ServerSideEncryption{ + Metadata: sseProp.Metadata(), + Enabled: r.GetBoolProperty("SSESpecification.SSEEnabled"), + KMSKeyID: defsecTypes.StringUnresolvable(sseProp.Metadata()), + } + } + + clusters = append(clusters, cluster) + } + + return clusters +} diff --git a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go new file mode 100644 index 000000000000..270aadc02176 --- /dev/null +++ b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go @@ -0,0 +1,13 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// 
Adapt ... +func Adapt(cfFile parser.FileContext) dynamodb.DynamoDB { + return dynamodb.DynamoDB{ + DAXClusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ec2/ec2.go b/internal/adapters/cloudformation/aws/ec2/ec2.go new file mode 100644 index 000000000000..19eb2080c449 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/ec2.go @@ -0,0 +1,20 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) ec2.EC2 { + return ec2.EC2{ + LaunchConfigurations: getLaunchConfigurations(cfFile), + LaunchTemplates: getLaunchTemplates(cfFile), + Instances: getInstances(cfFile), + VPCs: nil, + NetworkACLs: getNetworkACLs(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + Subnets: getSubnets(cfFile), + Volumes: getVolumes(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ec2/instance.go b/internal/adapters/cloudformation/aws/ec2/instance.go new file mode 100644 index 000000000000..ccb74c221288 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/instance.go @@ -0,0 +1,70 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getInstances(ctx parser.FileContext) (instances []ec2.Instance) { + + instanceResources := ctx.GetResourcesByType("AWS::EC2::Instance") + + for _, r := range instanceResources { + instance := ec2.Instance{ + Metadata: r.Metadata(), + // metadata not supported by CloudFormation at the moment - + // https://github.com/aws-cloudformation/cloudformation-coverage-roadmap/issues/655 + MetadataOptions: ec2.MetadataOptions{ + Metadata: r.Metadata(), + HttpTokens: defsecTypes.StringDefault("optional", r.Metadata()), + HttpEndpoint: 
defsecTypes.StringDefault("enabled", r.Metadata()), + }, + UserData: r.GetStringProperty("UserData"), + SecurityGroups: nil, + RootBlockDevice: nil, + EBSBlockDevices: nil, + } + blockDevices := getBlockDevices(r) + for i, device := range blockDevices { + copyDevice := device + if i == 0 { + instance.RootBlockDevice = copyDevice + continue + } + instance.EBSBlockDevices = append(instance.EBSBlockDevices, device) + } + instances = append(instances, instance) + } + + return instances +} + +func getBlockDevices(r *parser.Resource) []*ec2.BlockDevice { + var blockDevices []*ec2.BlockDevice + + devicesProp := r.GetProperty("BlockDeviceMappings") + + if devicesProp.IsNil() { + return blockDevices + } + + for _, d := range devicesProp.AsList() { + encrypted := d.GetProperty("Ebs.Encrypted") + var result defsecTypes.BoolValue + if encrypted.IsNil() { + result = defsecTypes.BoolDefault(false, d.Metadata()) + } else { + result = encrypted.AsBoolValue() + } + + device := &ec2.BlockDevice{ + Metadata: d.Metadata(), + Encrypted: result, + } + + blockDevices = append(blockDevices, device) + } + + return blockDevices +} diff --git a/internal/adapters/cloudformation/aws/ec2/launch_configuration.go b/internal/adapters/cloudformation/aws/ec2/launch_configuration.go new file mode 100644 index 000000000000..21051ad1e7e2 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/launch_configuration.go @@ -0,0 +1,48 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getLaunchConfigurations(file parser.FileContext) (launchConfigurations []ec2.LaunchConfiguration) { + launchConfigResources := file.GetResourcesByType("AWS::AutoScaling::LaunchConfiguration") + + for _, r := range launchConfigResources { + + launchConfig := ec2.LaunchConfiguration{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + 
AssociatePublicIP: r.GetBoolProperty("AssociatePublicIpAddress"),
+			MetadataOptions: ec2.MetadataOptions{
+				Metadata:     r.Metadata(),
+				HttpTokens:   types.StringDefault("optional", r.Metadata()),
+				HttpEndpoint: types.StringDefault("enabled", r.Metadata()),
+			},
+			UserData: r.GetStringProperty("UserData", ""),
+		}
+
+		// Explicit MetadataOptions on the resource override the
+		// IMDSv1-compatible defaults set above.
+		if opts := r.GetProperty("MetadataOptions"); opts.IsNotNil() {
+			launchConfig.MetadataOptions = ec2.MetadataOptions{
+				Metadata:     opts.Metadata(),
+				HttpTokens:   opts.GetStringProperty("HttpTokens", "optional"),
+				HttpEndpoint: opts.GetStringProperty("HttpEndpoint", "enabled"),
+			}
+		}
+
+		// The first block device mapping is treated as the root volume and the
+		// rest as additional EBS volumes. NOTE(review): CloudFormation does not
+		// mark which mapping is the root device — confirm this convention holds.
+		blockDevices := getBlockDevices(r)
+		for i, device := range blockDevices {
+			copyDevice := device
+			if i == 0 {
+				launchConfig.RootBlockDevice = copyDevice
+				continue
+			}
+			launchConfig.EBSBlockDevices = append(launchConfig.EBSBlockDevices, device)
+		}
+
+		launchConfigurations = append(launchConfigurations, launchConfig)
+
+	}
+	return launchConfigurations
+}
diff --git a/internal/adapters/cloudformation/aws/ec2/launch_template.go b/internal/adapters/cloudformation/aws/ec2/launch_template.go
new file mode 100644
index 000000000000..589c984b1049
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/ec2/launch_template.go
@@ -0,0 +1,56 @@
+package ec2
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/ec2"
+	"github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// getLaunchTemplates adapts every AWS::EC2::LaunchTemplate resource in the file.
+func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate) {
+	launchConfigResources := file.GetResourcesByType("AWS::EC2::LaunchTemplate")
+
+	for _, r := range launchConfigResources {
+
+		launchTemplate := ec2.LaunchTemplate{
+			Metadata: r.Metadata(),
+			Instance: ec2.Instance{
+				Metadata: r.Metadata(),
+				MetadataOptions: ec2.MetadataOptions{
+					Metadata:     r.Metadata(),
+					HttpTokens:   types.StringDefault("optional", r.Metadata()),
+					HttpEndpoint: types.StringDefault("enabled", r.Metadata()),
+				},
+
UserData: types.StringDefault("", r.Metadata()),
+				SecurityGroups:  nil,
+				RootBlockDevice: nil,
+				EBSBlockDevices: nil,
+			},
+		}
+
+		if data := r.GetProperty("LaunchTemplateData"); data.IsNotNil() {
+			if opts := data.GetProperty("MetadataOptions"); opts.IsNotNil() {
+				launchTemplate.MetadataOptions = ec2.MetadataOptions{
+					Metadata:     opts.Metadata(),
+					HttpTokens:   opts.GetStringProperty("HttpTokens", "optional"),
+					HttpEndpoint: opts.GetStringProperty("HttpEndpoint", "enabled"),
+				}
+			}
+
+			launchTemplate.Instance.UserData = data.GetStringProperty("UserData", "")
+
+			// NOTE(review): getBlockDevices reads the resource's top-level
+			// "BlockDeviceMappings", but for AWS::EC2::LaunchTemplate the
+			// mappings live under LaunchTemplateData.BlockDeviceMappings —
+			// presumably this should read from `data` instead; confirm against
+			// the CloudFormation schema.
+			blockDevices := getBlockDevices(r)
+			for i, device := range blockDevices {
+				copyDevice := device
+				if i == 0 {
+					launchTemplate.RootBlockDevice = copyDevice
+					continue
+				}
+				launchTemplate.EBSBlockDevices = append(launchTemplate.EBSBlockDevices, device)
+			}
+		}
+
+		templates = append(templates, launchTemplate)
+
+	}
+	return templates
+}
diff --git a/internal/adapters/cloudformation/aws/ec2/nacl.go b/internal/adapters/cloudformation/aws/ec2/nacl.go
new file mode 100644
index 000000000000..db90e02d12a6
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/ec2/nacl.go
@@ -0,0 +1,71 @@
+package ec2
+
+import (
+	"strconv"
+
+	defsecTypes "github.com/aquasecurity/defsec/pkg/types"
+
+	"github.com/aquasecurity/defsec/pkg/providers/aws/ec2"
+
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// getNetworkACLs adapts every AWS::EC2::NetworkAcl resource, attaching the
+// NetworkAclEntry rules that reference it.
+func getNetworkACLs(ctx parser.FileContext) (acls []ec2.NetworkACL) {
+	for _, aclResource := range ctx.GetResourcesByType("AWS::EC2::NetworkAcl") {
+		acl := ec2.NetworkACL{
+			Metadata:      aclResource.Metadata(),
+			Rules:         getRules(aclResource.ID(), ctx),
+			IsDefaultRule: defsecTypes.BoolDefault(false, aclResource.Metadata()),
+		}
+		acls = append(acls, acl)
+	}
+	return acls
+}
+
+// getRules collects the AWS::EC2::NetworkAclEntry resources whose NetworkAclId
+// matches the given ACL's logical ID.
+func getRules(id string, ctx parser.FileContext) (rules []ec2.NetworkACLRule) {
+	for _, ruleResource := range ctx.GetResourcesByType("AWS::EC2::NetworkAclEntry") {
+		aclID := 
ruleResource.GetProperty("NetworkAclId") + if aclID.IsString() && aclID.AsString() == id { + + rule := ec2.NetworkACLRule{ + Metadata: ruleResource.Metadata(), + Type: defsecTypes.StringDefault(ec2.TypeIngress, ruleResource.Metadata()), + Action: defsecTypes.StringDefault(ec2.ActionAllow, ruleResource.Metadata()), + Protocol: defsecTypes.String("-1", ruleResource.Metadata()), + CIDRs: nil, + } + + if egressProperty := ruleResource.GetProperty("Egress"); egressProperty.IsBool() { + if egressProperty.AsBool() { + rule.Type = defsecTypes.String(ec2.TypeEgress, egressProperty.Metadata()) + } else { + rule.Type = defsecTypes.String(ec2.TypeIngress, egressProperty.Metadata()) + } + } + + if actionProperty := ruleResource.GetProperty("RuleAction"); actionProperty.IsString() { + if actionProperty.AsString() == ec2.ActionAllow { + rule.Action = defsecTypes.String(ec2.ActionAllow, actionProperty.Metadata()) + } else { + rule.Action = defsecTypes.String(ec2.ActionDeny, actionProperty.Metadata()) + } + } + + if protocolProperty := ruleResource.GetProperty("Protocol"); protocolProperty.IsInt() { + protocol := protocolProperty.AsIntValue().Value() + rule.Protocol = defsecTypes.String(strconv.Itoa(protocol), protocolProperty.Metadata()) + } + + if ipv4Cidr := ruleResource.GetProperty("CidrBlock"); ipv4Cidr.IsString() { + rule.CIDRs = append(rule.CIDRs, ipv4Cidr.AsStringValue()) + } + + if ipv6Cidr := ruleResource.GetProperty("Ipv6CidrBlock"); ipv6Cidr.IsString() { + rule.CIDRs = append(rule.CIDRs, ipv6Cidr.AsStringValue()) + } + + rules = append(rules, rule) + } + } + return rules +} diff --git a/internal/adapters/cloudformation/aws/ec2/security_group.go b/internal/adapters/cloudformation/aws/ec2/security_group.go new file mode 100644 index 000000000000..9b1eceb6dcbc --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/security_group.go @@ -0,0 +1,68 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + 
"github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { + for _, r := range ctx.GetResourcesByType("AWS::EC2::SecurityGroup") { + group := ec2.SecurityGroup{ + Metadata: r.Metadata(), + Description: r.GetStringProperty("GroupDescription"), + IngressRules: getIngressRules(r), + EgressRules: getEgressRules(r), + IsDefault: types.Bool(r.GetStringProperty("GroupName").EqualTo("default"), r.Metadata()), + VPCID: r.GetStringProperty("VpcId"), + } + + groups = append(groups, group) + } + return groups +} + +func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { + if ingressProp := r.GetProperty("SecurityGroupIngress"); ingressProp.IsList() { + for _, ingress := range ingressProp.AsList() { + rule := ec2.SecurityGroupRule{ + Metadata: ingress.Metadata(), + Description: ingress.GetStringProperty("Description"), + CIDRs: nil, + } + v4Cidr := ingress.GetProperty("CidrIp") + if v4Cidr.IsString() && v4Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v4Cidr.AsString(), v4Cidr.Metadata())) + } + v6Cidr := ingress.GetProperty("CidrIpv6") + if v6Cidr.IsString() && v6Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v6Cidr.AsString(), v6Cidr.Metadata())) + } + + sgRules = append(sgRules, rule) + } + } + return sgRules +} + +func getEgressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { + if egressProp := r.GetProperty("SecurityGroupEgress"); egressProp.IsList() { + for _, egress := range egressProp.AsList() { + rule := ec2.SecurityGroupRule{ + Metadata: egress.Metadata(), + Description: egress.GetStringProperty("Description"), + } + v4Cidr := egress.GetProperty("CidrIp") + if v4Cidr.IsString() && v4Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v4Cidr.AsString(), 
v4Cidr.Metadata())) + } + v6Cidr := egress.GetProperty("CidrIpv6") + if v6Cidr.IsString() && v6Cidr.AsStringValue().IsNotEmpty() { + rule.CIDRs = append(rule.CIDRs, types.StringExplicit(v6Cidr.AsString(), v6Cidr.Metadata())) + } + + sgRules = append(sgRules, rule) + } + } + return sgRules +} diff --git a/internal/adapters/cloudformation/aws/ec2/subnet.go b/internal/adapters/cloudformation/aws/ec2/subnet.go new file mode 100644 index 000000000000..be75af836593 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/subnet.go @@ -0,0 +1,21 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSubnets(ctx parser.FileContext) (subnets []ec2.Subnet) { + + subnetResources := ctx.GetResourcesByType("AWS::EC2::Subnet") + for _, r := range subnetResources { + + subnet := ec2.Subnet{ + Metadata: r.Metadata(), + MapPublicIpOnLaunch: r.GetBoolProperty("MapPublicIpOnLaunch"), + } + + subnets = append(subnets, subnet) + } + return subnets +} diff --git a/internal/adapters/cloudformation/aws/ec2/volume.go b/internal/adapters/cloudformation/aws/ec2/volume.go new file mode 100644 index 000000000000..b1e48835a0a6 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ec2/volume.go @@ -0,0 +1,25 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getVolumes(ctx parser.FileContext) (volumes []ec2.Volume) { + + volumeResources := ctx.GetResourcesByType("AWS::EC2::Volume") + for _, r := range volumeResources { + + volume := ec2.Volume{ + Metadata: r.Metadata(), + Encryption: ec2.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("Encrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + } + + volumes = append(volumes, volume) + } + return volumes +} diff --git a/internal/adapters/cloudformation/aws/ecr/ecr.go 
b/internal/adapters/cloudformation/aws/ecr/ecr.go new file mode 100644 index 000000000000..1b15e98eef09 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecr/ecr.go @@ -0,0 +1,13 @@ +package ecr + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) ecr.ECR { + return ecr.ECR{ + Repositories: getRepositories(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ecr/repository.go b/internal/adapters/cloudformation/aws/ecr/repository.go new file mode 100644 index 000000000000..c6ccf3c2becb --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecr/repository.go @@ -0,0 +1,93 @@ +package ecr + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/liamg/iamgo" +) + +func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { + + repositoryResources := ctx.GetResourcesByType("AWS::ECR::Repository") + + for _, r := range repositoryResources { + + repository := ecr.Repository{ + Metadata: r.Metadata(), + ImageScanning: ecr.ImageScanning{ + Metadata: r.Metadata(), + ScanOnPush: defsecTypes.BoolDefault(false, r.Metadata()), + }, + ImageTagsImmutable: hasImmutableImageTags(r), + Policies: nil, + Encryption: ecr.Encryption{ + Metadata: r.Metadata(), + Type: defsecTypes.StringDefault(ecr.EncryptionTypeAES256, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + }, + } + + if imageScanningProp := r.GetProperty("ImageScanningConfiguration"); imageScanningProp.IsNotNil() { + repository.ImageScanning = ecr.ImageScanning{ + Metadata: imageScanningProp.Metadata(), + ScanOnPush: imageScanningProp.GetBoolProperty("ScanOnPush", false), + 
} + } + + if encProp := r.GetProperty("EncryptionConfiguration"); encProp.IsNotNil() { + repository.Encryption = ecr.Encryption{ + Metadata: encProp.Metadata(), + Type: encProp.GetStringProperty("EncryptionType", ecr.EncryptionTypeAES256), + KMSKeyID: encProp.GetStringProperty("KmsKey", ""), + } + } + + if policy, err := getPolicy(r); err == nil { + repository.Policies = append(repository.Policies, *policy) + } + + repositories = append(repositories, repository) + } + + return repositories +} + +func getPolicy(r *parser.Resource) (*iam.Policy, error) { + policyProp := r.GetProperty("RepositoryPolicyText") + if policyProp.IsNil() { + return nil, fmt.Errorf("missing policy") + } + + parsed, err := iamgo.Parse(policyProp.GetJsonBytes()) + if err != nil { + return nil, err + } + + return &iam.Policy{ + Metadata: policyProp.Metadata(), + Name: defsecTypes.StringDefault("", policyProp.Metadata()), + Document: iam.Document{ + Metadata: policyProp.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, policyProp.Metadata()), + }, nil +} + +func hasImmutableImageTags(r *parser.Resource) defsecTypes.BoolValue { + mutabilityProp := r.GetProperty("ImageTagMutability") + if mutabilityProp.IsNil() { + return defsecTypes.BoolDefault(false, r.Metadata()) + } + if !mutabilityProp.EqualTo("IMMUTABLE") { + return defsecTypes.Bool(false, mutabilityProp.Metadata()) + } + return defsecTypes.Bool(true, mutabilityProp.Metadata()) +} diff --git a/internal/adapters/cloudformation/aws/ecs/cluster.go b/internal/adapters/cloudformation/aws/ecs/cluster.go new file mode 100644 index 000000000000..1572c0f95110 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecs/cluster.go @@ -0,0 +1,57 @@ +package ecs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { + + 
clusterResources := ctx.GetResourcesByType("AWS::ECS::Cluster") + + for _, r := range clusterResources { + + cluster := ecs.Cluster{ + Metadata: r.Metadata(), + Settings: getClusterSettings(r), + } + + clusters = append(clusters, cluster) + + } + + return clusters +} + +func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { + + clusterSettings := ecs.ClusterSettings{ + Metadata: r.Metadata(), + ContainerInsightsEnabled: types.BoolDefault(false, r.Metadata()), + } + + clusterSettingMap := r.GetProperty("ClusterSettings") + if clusterSettingMap.IsNil() || clusterSettingMap.IsNotList() { + return clusterSettings + } + + clusterSettings.Metadata = clusterSettingMap.Metadata() + + for _, setting := range clusterSettingMap.AsList() { + checkProperty(setting, &clusterSettings) + } + + return clusterSettings +} + +func checkProperty(setting *parser.Property, clusterSettings *ecs.ClusterSettings) { + settingMap := setting.AsMap() + name := settingMap["Name"] + if name.IsNotNil() && name.EqualTo("containerInsights") { + value := settingMap["Value"] + if value.IsNotNil() && value.EqualTo("enabled") { + clusterSettings.ContainerInsightsEnabled = types.Bool(true, value.Metadata()) + } + } +} diff --git a/internal/adapters/cloudformation/aws/ecs/ecs.go b/internal/adapters/cloudformation/aws/ecs/ecs.go new file mode 100644 index 000000000000..b9aa6ea9c755 --- /dev/null +++ b/internal/adapters/cloudformation/aws/ecs/ecs.go @@ -0,0 +1,14 @@ +package ecs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecs" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ecs.ECS {
+	return ecs.ECS{
+		Clusters:        getClusters(cfFile),
+		TaskDefinitions: getTaskDefinitions(cfFile),
+	}
+}
diff --git a/internal/adapters/cloudformation/aws/ecs/task_definition.go b/internal/adapters/cloudformation/aws/ecs/task_definition.go
new file mode 100644
index 000000000000..7f89e0116be7
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/ecs/task_definition.go
@@ -0,0 +1,96 @@
+package ecs
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/ecs"
+	"github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// getTaskDefinitions adapts every AWS::ECS::TaskDefinition in the template.
+func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefinition) {
+
+	taskDefResources := ctx.GetResourcesByType("AWS::ECS::TaskDefinition")
+
+	for _, r := range taskDefResources {
+		definitions, _ := getContainerDefinitions(r)
+		taskDef := ecs.TaskDefinition{
+			Metadata:             r.Metadata(),
+			Volumes:              getVolumes(r),
+			ContainerDefinitions: definitions,
+		}
+		taskDefinitions = append(taskDefinitions, taskDef)
+	}
+
+	return taskDefinitions
+}
+
+// getContainerDefinitions reads the ContainerDefinitions property, which may
+// be either a list of definition maps or a raw JSON string.
+func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, error) {
+	var definitions []ecs.ContainerDefinition
+	containerDefs := r.GetProperty("ContainerDefinitions")
+	if containerDefs.IsNil() {
+		return definitions, nil
+	}
+	// Handle the JSON-string form before the list guard: previously the
+	// IsNotList() guard returned early for strings, making the
+	// CreateDefinitionsFromString branch unreachable dead code.
+	if containerDefs.IsString() {
+		return ecs.CreateDefinitionsFromString(r.Metadata(), containerDefs.AsString())
+	}
+	if containerDefs.IsNotList() {
+		return definitions, nil
+	}
+	for _, containerDef := range containerDefs.AsList() {
+
+		var envVars []ecs.EnvVar
+		envVarsList := containerDef.GetProperty("Environment")
+		if envVarsList.IsNotNil() && envVarsList.IsList() {
+			for _, envVar := range envVarsList.AsList() {
+				envVars = append(envVars, ecs.EnvVar{
+					Name:  envVar.GetStringProperty("Name", "").Value(),
+					Value: envVar.GetStringProperty("Value", "").Value(),
+				})
+			}
+		}
+		definition := ecs.ContainerDefinition{
+			Metadata:     containerDef.Metadata(),
+			Name:         containerDef.GetStringProperty("Name", ""),
+			Image:        containerDef.GetStringProperty("Image", ""),
+			CPU:          containerDef.GetIntProperty("CPU", 1),
+			Memory:       containerDef.GetIntProperty("Memory", 128),
+			Essential:    containerDef.GetBoolProperty("Essential", false),
+			Privileged:   containerDef.GetBoolProperty("Privileged", false),
+			Environment:  envVars,
+			PortMappings: nil,
+		}
+		definitions = append(definitions, definition)
+	}
+	return definitions, nil
+}
+
+// getVolumes adapts the task definition's Volumes, defaulting EFS transit
+// encryption to disabled unless explicitly set to "enabled".
+func getVolumes(r *parser.Resource) (volumes []ecs.Volume) {
+
+	volumesList := r.GetProperty("Volumes")
+	if volumesList.IsNil() || volumesList.IsNotList() {
+		return volumes
+	}
+
+	for _, v := range volumesList.AsList() {
+		volume := ecs.Volume{
+			Metadata: r.Metadata(),
+			EFSVolumeConfiguration: ecs.EFSVolumeConfiguration{
+				Metadata:                 r.Metadata(),
+				TransitEncryptionEnabled: types.BoolDefault(false, r.Metadata()),
+			},
+		}
+		transitProp := v.GetProperty("EFSVolumeConfiguration.TransitEncryption")
+		if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser.IgnoreCase) {
+			volume.EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(true, transitProp.Metadata())
+		}
+
+		volumes = append(volumes, volume)
+	}
+	return volumes
+}
diff --git a/internal/adapters/cloudformation/aws/efs/efs.go b/internal/adapters/cloudformation/aws/efs/efs.go
new file mode 100644
index 000000000000..0be8584b70d2
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/efs/efs.go
@@ -0,0 +1,13 @@
+package efs
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/efs"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) efs.EFS {
+	return efs.EFS{
+		FileSystems: getFileSystems(cfFile),
+	}
+}
diff --git a/internal/adapters/cloudformation/aws/efs/filesystem.go b/internal/adapters/cloudformation/aws/efs/filesystem.go
new file mode 100644
index 000000000000..9ef3c6d13fb1
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/efs/filesystem.go
@@ -0,0 +1,23 @@
+package efs
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/efs"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+func getFileSystems(ctx parser.FileContext) (filesystems []efs.FileSystem) { // adapts every AWS::EFS::FileSystem in the template
+
+	filesystemResources := ctx.GetResourcesByType("AWS::EFS::FileSystem")
+
+	for _, r := range filesystemResources {
+
+		filesystem := efs.FileSystem{
+			Metadata:  r.Metadata(),
+			Encrypted: r.GetBoolProperty("Encrypted"), // no default given; resolution semantics live in GetBoolProperty
+		}
+
+		filesystems = append(filesystems, filesystem)
+	}
+
+	return filesystems
+}
diff --git a/internal/adapters/cloudformation/aws/eks/cluster.go b/internal/adapters/cloudformation/aws/eks/cluster.go
new file mode 100644
index 000000000000..5d2036895fdf
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/eks/cluster.go
@@ -0,0 +1,56 @@
+package eks
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/eks"
+	defsecTypes "github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { // adapts every AWS::EKS::Cluster in the template
+
+	clusterResources := ctx.GetResourcesByType("AWS::EKS::Cluster")
+
+	for _, r := range clusterResources {
+		cluster := eks.Cluster{
+			Metadata: r.Metadata(),
+			// Logging not supported for cloudformation https://github.com/aws/containers-roadmap/issues/242
+			Logging: eks.Logging{
+				Metadata:          r.Metadata(),
+				API:               defsecTypes.BoolUnresolvable(r.Metadata()),
+				Audit:             defsecTypes.BoolUnresolvable(r.Metadata()),
+				Authenticator:     defsecTypes.BoolUnresolvable(r.Metadata()),
+				ControllerManager:
defsecTypes.BoolUnresolvable(r.Metadata()), + Scheduler: defsecTypes.BoolUnresolvable(r.Metadata()), + }, + Encryption: getEncryptionConfig(r), + // endpoint protection not supported - https://github.com/aws/containers-roadmap/issues/242 + PublicAccessEnabled: defsecTypes.BoolUnresolvable(r.Metadata()), + PublicAccessCIDRs: nil, + } + + clusters = append(clusters, cluster) + } + return clusters +} + +func getEncryptionConfig(r *parser.Resource) eks.Encryption { + + encryption := eks.Encryption{ + Metadata: r.Metadata(), + Secrets: defsecTypes.BoolDefault(false, r.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", r.Metadata()), + } + + if encProp := r.GetProperty("EncryptionConfig"); encProp.IsNotNil() { + encryption.Metadata = encProp.Metadata() + encryption.KMSKeyID = encProp.GetStringProperty("Provider.KeyArn") + resourcesProp := encProp.GetProperty("Resources") + if resourcesProp.IsList() { + if resourcesProp.Contains("secrets") { + encryption.Secrets = defsecTypes.Bool(true, resourcesProp.Metadata()) + } + } + } + + return encryption +} diff --git a/internal/adapters/cloudformation/aws/eks/eks.go b/internal/adapters/cloudformation/aws/eks/eks.go new file mode 100644 index 000000000000..c43c613c2f73 --- /dev/null +++ b/internal/adapters/cloudformation/aws/eks/eks.go @@ -0,0 +1,13 @@ +package eks + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/eks" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) eks.EKS { + return eks.EKS{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elasticache/cluster.go b/internal/adapters/cloudformation/aws/elasticache/cluster.go new file mode 100644 index 000000000000..28414a62348a --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/cluster.go @@ -0,0 +1,24 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusterGroups(ctx parser.FileContext) (clusters []elasticache.Cluster) { + + clusterResources := ctx.GetResourcesByType("AWS::ElastiCache::CacheCluster") + + for _, r := range clusterResources { + cluster := elasticache.Cluster{ + Metadata: r.Metadata(), + Engine: r.GetStringProperty("Engine"), + NodeType: r.GetStringProperty("CacheNodeType"), + SnapshotRetentionLimit: r.GetIntProperty("SnapshotRetentionLimit"), + } + + clusters = append(clusters, cluster) + } + + return clusters +} diff --git a/internal/adapters/cloudformation/aws/elasticache/elasticache.go b/internal/adapters/cloudformation/aws/elasticache/elasticache.go new file mode 100644 index 000000000000..bc382616d3b3 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/elasticache.go @@ -0,0 +1,15 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) elasticache.ElastiCache { + return elasticache.ElastiCache{ + Clusters: getClusterGroups(cfFile), + ReplicationGroups: getReplicationGroups(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elasticache/replication_group.go b/internal/adapters/cloudformation/aws/elasticache/replication_group.go new file mode 100644 index 000000000000..3910a377ebf4 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/replication_group.go @@ -0,0 +1,23 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getReplicationGroups(ctx parser.FileContext) (replicationGroups []elasticache.ReplicationGroup) { + + replicationGroupResources := ctx.GetResourcesByType("AWS::ElastiCache::ReplicationGroup") + + for _, r := range replicationGroupResources { + replicationGroup := elasticache.ReplicationGroup{ + Metadata: r.Metadata(), + TransitEncryptionEnabled: r.GetBoolProperty("TransitEncryptionEnabled"), + AtRestEncryptionEnabled: r.GetBoolProperty("AtRestEncryptionEnabled"), + } + + replicationGroups = append(replicationGroups, replicationGroup) + } + + return replicationGroups +} diff --git a/internal/adapters/cloudformation/aws/elasticache/security_group.go b/internal/adapters/cloudformation/aws/elasticache/security_group.go new file mode 100644 index 000000000000..6e51796ff935 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticache/security_group.go @@ -0,0 +1,22 @@ +package elasticache + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (securityGroups []elasticache.SecurityGroup) { + + sgResources := ctx.GetResourcesByType("AWS::ElastiCache::SecurityGroup") + + for _, r := 
range sgResources { + + sg := elasticache.SecurityGroup{ + Metadata: r.Metadata(), + Description: r.GetStringProperty("Description"), + } + securityGroups = append(securityGroups, sg) + } + + return securityGroups +} diff --git a/internal/adapters/cloudformation/aws/elasticsearch/domain.go b/internal/adapters/cloudformation/aws/elasticsearch/domain.go new file mode 100644 index 000000000000..93b0300744c0 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticsearch/domain.go @@ -0,0 +1,84 @@ +package elasticsearch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { + + domainResources := ctx.GetResourcesByType("AWS::Elasticsearch::Domain", "AWS::OpenSearchService::Domain") + + for _, r := range domainResources { + + domain := elasticsearch.Domain{ + Metadata: r.Metadata(), + DomainName: r.GetStringProperty("DomainName"), + AccessPolicies: r.GetStringProperty("AccessPolicies"), + DedicatedMasterEnabled: r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled"), + VpcId: defsecTypes.String("", r.Metadata()), + LogPublishing: elasticsearch.LogPublishing{ + Metadata: r.Metadata(), + AuditEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + CloudWatchLogGroupArn: defsecTypes.String("", r.Metadata()), + }, + TransitEncryption: elasticsearch.TransitEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + AtRestEncryption: elasticsearch.AtRestEncryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KmsKeyId: defsecTypes.String("", r.Metadata()), + }, + Endpoint: elasticsearch.Endpoint{ + Metadata: r.Metadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, r.Metadata()), + TLSPolicy: 
defsecTypes.StringDefault("Policy-Min-TLS-1-0-2019-07", r.Metadata()), + }, + ServiceSoftwareOptions: elasticsearch.ServiceSoftwareOptions{ + Metadata: r.Metadata(), + CurrentVersion: defsecTypes.String("", r.Metadata()), + NewVersion: defsecTypes.String("", r.Metadata()), + UpdateStatus: defsecTypes.String("", r.Metadata()), + UpdateAvailable: defsecTypes.Bool(false, r.Metadata()), + }, + } + + if prop := r.GetProperty("LogPublishingOptions"); prop.IsNotNil() { + domain.LogPublishing = elasticsearch.LogPublishing{ + Metadata: prop.Metadata(), + AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled", false), + CloudWatchLogGroupArn: prop.GetStringProperty("CloudWatchLogsLogGroupArn"), + } + } + + if prop := r.GetProperty("NodeToNodeEncryptionOptions"); prop.IsNotNil() { + domain.TransitEncryption = elasticsearch.TransitEncryption{ + Metadata: prop.Metadata(), + Enabled: prop.GetBoolProperty("Enabled", false), + } + } + + if prop := r.GetProperty("EncryptionAtRestOptions"); prop.IsNotNil() { + domain.AtRestEncryption = elasticsearch.AtRestEncryption{ + Metadata: prop.Metadata(), + Enabled: prop.GetBoolProperty("Enabled", false), + KmsKeyId: prop.GetStringProperty("KmsKeyId"), + } + } + + if prop := r.GetProperty("DomainEndpointOptions"); prop.IsNotNil() { + domain.Endpoint = elasticsearch.Endpoint{ + Metadata: prop.Metadata(), + EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS", false), + TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy", "Policy-Min-TLS-1-0-2019-07"), + } + } + + domains = append(domains, domain) + } + + return domains +} diff --git a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go new file mode 100644 index 000000000000..e7f5bc700916 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go @@ -0,0 +1,13 @@ +package elasticsearch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elasticsearch" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) elasticsearch.Elasticsearch { + return elasticsearch.Elasticsearch{ + Domains: getDomains(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elb/elb.go b/internal/adapters/cloudformation/aws/elb/elb.go new file mode 100644 index 000000000000..0bf80ed98424 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elb/elb.go @@ -0,0 +1,13 @@ +package elb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) elb.ELB { + return elb.ELB{ + LoadBalancers: getLoadBalancers(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/elb/loadbalancer.go b/internal/adapters/cloudformation/aws/elb/loadbalancer.go new file mode 100644 index 000000000000..ea5c61bc7c15 --- /dev/null +++ b/internal/adapters/cloudformation/aws/elb/loadbalancer.go @@ -0,0 +1,89 @@ +package elb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/elb" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) { + + loadBalanacerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::LoadBalancer") + + for _, r := range loadBalanacerResources { + lb := elb.LoadBalancer{ + Metadata: r.Metadata(), + Type: r.GetStringProperty("Type", "application"), + DropInvalidHeaderFields: checkForDropInvalidHeaders(r), + Internal: isInternal(r), + Listeners: getListeners(r, ctx), + } + loadbalancers = append(loadbalancers, lb) + } + + return loadbalancers +} + +func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb.Listener) { + + listenerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::Listener") + + 
for _, r := range listenerResources { + if r.GetStringProperty("LoadBalancerArn").Value() == lbr.ID() { + listener := elb.Listener{ + Metadata: r.Metadata(), + Protocol: r.GetStringProperty("Protocol", "HTTP"), + TLSPolicy: r.GetStringProperty("SslPolicy", ""), + DefaultActions: getDefaultListenerActions(r), + } + + listeners = append(listeners, listener) + } + } + return listeners +} + +func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { + defaultActionsProp := r.GetProperty("DefaultActions") + if defaultActionsProp.IsNotList() { + return actions + } + for _, action := range defaultActionsProp.AsList() { + actions = append(actions, elb.Action{ + Metadata: action.Metadata(), + Type: action.GetProperty("Type").AsStringValue(), + }) + } + return actions +} + +func isInternal(r *parser.Resource) types.BoolValue { + schemeProp := r.GetProperty("Scheme") + if schemeProp.IsNotString() { + return r.BoolDefault(false) + } + return types.Bool(schemeProp.EqualTo("internal", parser.IgnoreCase), schemeProp.Metadata()) +} + +func checkForDropInvalidHeaders(r *parser.Resource) types.BoolValue { + attributesProp := r.GetProperty("LoadBalancerAttributes") + if attributesProp.IsNotList() { + return types.BoolDefault(false, r.Metadata()) + } + + for _, attr := range attributesProp.AsList() { + if attr.IsNotMap() { + continue + } + + if attr.AsMap()["Key"].AsString() == "routing.http.drop_invalid_header_fields.enabled" { + val := attr.AsMap()["Value"] + if val.IsBool() { + return val.AsBoolValue() + } + + } + } + + return r.BoolDefault(false) +} diff --git a/internal/adapters/cloudformation/aws/iam/iam.go b/internal/adapters/cloudformation/aws/iam/iam.go new file mode 100644 index 000000000000..c87cf5c04425 --- /dev/null +++ b/internal/adapters/cloudformation/aws/iam/iam.go @@ -0,0 +1,27 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) iam.IAM { + return iam.IAM{ + PasswordPolicy: iam.PasswordPolicy{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ReusePreventionCount: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + RequireLowercase: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireUppercase: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireNumbers: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + RequireSymbols: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MaxAgeDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + MinimumLength: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + }, + Policies: getPolicies(cfFile), + Groups: getGroups(cfFile), + Users: getUsers(cfFile), + Roles: getRoles(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/iam/policy.go b/internal/adapters/cloudformation/aws/iam/policy.go new file mode 100644 index 000000000000..586a008fe564 --- /dev/null +++ b/internal/adapters/cloudformation/aws/iam/policy.go @@ -0,0 +1,125 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { + for _, policyResource := range ctx.GetResourcesByType("AWS::IAM::Policy") { + + policy := iam.Policy{ + Metadata: policyResource.Metadata(), + Name: policyResource.GetStringProperty("PolicyName"), + Document: iam.Document{ + Metadata: policyResource.Metadata(), + Parsed: iamgo.Document{}, + }, + Builtin: defsecTypes.Bool(false, policyResource.Metadata()), + } + + if policyProp := policyResource.GetProperty("PolicyDocument"); 
policyProp.IsNotNil() {
+			doc, err := iamgo.Parse(policyProp.GetJsonBytes())
+			if err != nil {
+				continue // unparsable inline document: skip the policy rather than abort adaptation
+			}
+			policy.Document.Parsed = *doc
+		}
+
+		policies = append(policies, policy)
+	}
+	return policies
+}
+
+func getRoles(ctx parser.FileContext) (roles []iam.Role) { // adapts every AWS::IAM::Role with its inline policies
+	for _, roleResource := range ctx.GetResourcesByType("AWS::IAM::Role") {
+		policyProp := roleResource.GetProperty("Policies")
+		roleName := roleResource.GetStringProperty("RoleName")
+
+		roles = append(roles, iam.Role{
+			Metadata: roleResource.Metadata(),
+			Name:     roleName,
+			Policies: getPoliciesDocs(policyProp),
+		})
+	}
+	return roles
+}
+
+func getUsers(ctx parser.FileContext) (users []iam.User) { // adapts every AWS::IAM::User with inline policies and access keys
+	for _, userResource := range ctx.GetResourcesByType("AWS::IAM::User") {
+		policyProp := userResource.GetProperty("Policies")
+		userName := userResource.GetStringProperty("UserName") // fixed: was "GroupName", a copy-paste from getGroups; AWS::IAM::User declares "UserName"
+
+		users = append(users, iam.User{
+			Metadata:   userResource.Metadata(),
+			Name:       userName,
+			LastAccess: defsecTypes.TimeUnresolvable(userResource.Metadata()),
+			Policies:   getPoliciesDocs(policyProp),
+			AccessKeys: getAccessKeys(ctx, userName.Value()),
+		})
+	}
+	return users
+}
+
+func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.AccessKey) { // collects AWS::IAM::AccessKey resources whose UserName matches
+	for _, keyResource := range ctx.GetResourcesByType("AWS::IAM::AccessKey") {
+		keyUsername := keyResource.GetStringProperty("UserName")
+		if !keyUsername.EqualTo(username) {
+			continue
+		}
+		active := defsecTypes.BoolDefault(false, keyResource.Metadata())
+		if statusProp := keyResource.GetProperty("Status"); statusProp.IsString() {
+			active = defsecTypes.Bool(statusProp.AsString() == "Active", statusProp.Metadata())
+		}
+
+		accessKeys = append(accessKeys, iam.AccessKey{
+			Metadata:     keyResource.Metadata(),
+			AccessKeyId:  defsecTypes.StringUnresolvable(keyResource.Metadata()),
+			CreationDate: defsecTypes.TimeUnresolvable(keyResource.Metadata()),
+			LastAccess:   defsecTypes.TimeUnresolvable(keyResource.Metadata()),
+			Active:       active,
+		})
+	}
+	return accessKeys
+}
+
+func getGroups(ctx parser.FileContext) (groups []iam.Group) { // adapts every AWS::IAM::Group with its inline policies
+	for _, groupResource := range ctx.GetResourcesByType("AWS::IAM::Group") {
+		policyProp := groupResource.GetProperty("Policies")
+		groupName := groupResource.GetStringProperty("GroupName")
+
+		groups = append(groups, iam.Group{
+			Metadata: groupResource.Metadata(),
+			Name:     groupName,
+			Policies: getPoliciesDocs(policyProp),
+		})
+	}
+	return groups
+}
+
+func getPoliciesDocs(policiesProp *parser.Property) []iam.Policy { // parses a Policies list property into iam.Policy values, skipping unparsable documents
+	var policies []iam.Policy
+
+	for _, policy := range policiesProp.AsList() {
+		policyProp := policy.GetProperty("PolicyDocument")
+		policyName := policy.GetStringProperty("PolicyName")
+
+		doc, err := iamgo.Parse(policyProp.GetJsonBytes())
+		if err != nil {
+			continue
+		}
+
+		policies = append(policies, iam.Policy{
+			Metadata: policyProp.Metadata(),
+			Name:     policyName,
+			Document: iam.Document{
+				Metadata: policyProp.Metadata(),
+				Parsed:   *doc,
+			},
+			Builtin: defsecTypes.Bool(false, policyProp.Metadata()),
+		})
+	}
+	return policies
+}
diff --git a/internal/adapters/cloudformation/aws/kinesis/kinesis.go b/internal/adapters/cloudformation/aws/kinesis/kinesis.go
new file mode 100644
index 000000000000..8b30ee219ccf
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/kinesis/kinesis.go
@@ -0,0 +1,13 @@
+package kinesis
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/kinesis"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) kinesis.Kinesis { + return kinesis.Kinesis{ + Streams: getStreams(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/kinesis/stream.go b/internal/adapters/cloudformation/aws/kinesis/stream.go new file mode 100644 index 000000000000..57c16dec985c --- /dev/null +++ b/internal/adapters/cloudformation/aws/kinesis/stream.go @@ -0,0 +1,36 @@ +package kinesis + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/kinesis" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { + + streamResources := ctx.GetResourcesByType("AWS::Kinesis::Stream") + + for _, r := range streamResources { + + stream := kinesis.Stream{ + Metadata: r.Metadata(), + Encryption: kinesis.Encryption{ + Metadata: r.Metadata(), + Type: types.StringDefault("KMS", r.Metadata()), + KMSKeyID: types.StringDefault("", r.Metadata()), + }, + } + + if prop := r.GetProperty("StreamEncryption"); prop.IsNotNil() { + stream.Encryption = kinesis.Encryption{ + Metadata: prop.Metadata(), + Type: prop.GetStringProperty("EncryptionType", "KMS"), + KMSKeyID: prop.GetStringProperty("KeyId"), + } + } + + streams = append(streams, stream) + } + + return streams +} diff --git a/internal/adapters/cloudformation/aws/lambda/function.go b/internal/adapters/cloudformation/aws/lambda/function.go new file mode 100644 index 000000000000..f0522992c821 --- /dev/null +++ b/internal/adapters/cloudformation/aws/lambda/function.go @@ -0,0 +1,53 @@ +package lambda + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { + + functionResources := ctx.GetResourcesByType("AWS::Lambda::Function") + + for _, r := range 
functionResources { + + function := lambda.Function{ + Metadata: r.Metadata(), + Tracing: lambda.Tracing{ + Metadata: r.Metadata(), + Mode: types.StringDefault("PassThrough", r.Metadata()), + }, + Permissions: getPermissions(r, ctx), + } + + if prop := r.GetProperty("TracingConfig"); prop.IsNotNil() { + function.Tracing = lambda.Tracing{ + Metadata: prop.Metadata(), + Mode: prop.GetStringProperty("Mode", "PassThrough"), + } + } + + functions = append(functions, function) + } + + return functions +} + +func getPermissions(funcR *parser.Resource, ctx parser.FileContext) (perms []lambda.Permission) { + + permissionResources := ctx.GetResourcesByType("AWS::Lambda::Permission") + + for _, r := range permissionResources { + if prop := r.GetStringProperty("FunctionName"); prop.EqualTo(funcR.ID()) { + perm := lambda.Permission{ + Metadata: r.Metadata(), + Principal: r.GetStringProperty("Principal"), + SourceARN: r.GetStringProperty("SourceArn"), + } + perms = append(perms, perm) + } + } + + return perms +} diff --git a/internal/adapters/cloudformation/aws/lambda/lambda.go b/internal/adapters/cloudformation/aws/lambda/lambda.go new file mode 100644 index 000000000000..f1ca0aef6206 --- /dev/null +++ b/internal/adapters/cloudformation/aws/lambda/lambda.go @@ -0,0 +1,13 @@ +package lambda + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) lambda.Lambda { + return lambda.Lambda{ + Functions: getFunctions(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/mq/broker.go b/internal/adapters/cloudformation/aws/mq/broker.go new file mode 100644 index 000000000000..a25944780572 --- /dev/null +++ b/internal/adapters/cloudformation/aws/mq/broker.go @@ -0,0 +1,33 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getBrokers(ctx parser.FileContext) (brokers []mq.Broker) { + for _, r := range ctx.GetResourcesByType("AWS::AmazonMQ::Broker") { + + broker := mq.Broker{ + Metadata: r.Metadata(), + PublicAccess: r.GetBoolProperty("PubliclyAccessible"), + Logging: mq.Logging{ + Metadata: r.Metadata(), + General: types.BoolDefault(false, r.Metadata()), + Audit: types.BoolDefault(false, r.Metadata()), + }, + } + + if prop := r.GetProperty("Logs"); prop.IsNotNil() { + broker.Logging = mq.Logging{ + Metadata: prop.Metadata(), + General: prop.GetBoolProperty("General"), + Audit: prop.GetBoolProperty("Audit"), + } + } + + brokers = append(brokers, broker) + } + return brokers +} diff --git a/internal/adapters/cloudformation/aws/mq/mq.go b/internal/adapters/cloudformation/aws/mq/mq.go new file mode 100644 index 000000000000..d6fe7470e875 --- /dev/null +++ b/internal/adapters/cloudformation/aws/mq/mq.go @@ -0,0 +1,13 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) mq.MQ { + return mq.MQ{ + Brokers: getBrokers(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/msk/cluster.go b/internal/adapters/cloudformation/aws/msk/cluster.go new file mode 100644 index 000000000000..d2b7a192d478 --- /dev/null +++ b/internal/adapters/cloudformation/aws/msk/cluster.go @@ -0,0 +1,80 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []msk.Cluster) { + for _, r := range ctx.GetResourcesByType("AWS::MSK::Cluster") { + + cluster := msk.Cluster{ + Metadata: r.Metadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: r.Metadata(), + ClientBroker: defsecTypes.StringDefault("TLS", r.Metadata()), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + Metadata: r.Metadata(), + KMSKeyARN: defsecTypes.StringDefault("", r.Metadata()), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Logging: msk.Logging{ + Metadata: r.Metadata(), + Broker: msk.BrokerLogging{ + Metadata: r.Metadata(), + S3: msk.S3Logging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + }, + }, + } + + if encProp := r.GetProperty("EncryptionInfo.EncryptionInTransit"); encProp.IsNotNil() { + cluster.EncryptionInTransit = msk.EncryptionInTransit{ + Metadata: encProp.Metadata(), + ClientBroker: encProp.GetStringProperty("ClientBroker", "TLS"), + } + } + + if encAtRestProp := r.GetProperty("EncryptionInfo.EncryptionAtRest"); encAtRestProp.IsNotNil() { + cluster.EncryptionAtRest = msk.EncryptionAtRest{ + 
Metadata: encAtRestProp.Metadata(), + KMSKeyARN: encAtRestProp.GetStringProperty("DataVolumeKMSKeyId", ""), + Enabled: defsecTypes.BoolDefault(true, encAtRestProp.Metadata()), + } + } + + if loggingProp := r.GetProperty("LoggingInfo"); loggingProp.IsNotNil() { + cluster.Logging.Metadata = loggingProp.Metadata() + if brokerLoggingProp := loggingProp.GetProperty("BrokerLogs"); brokerLoggingProp.IsNotNil() { + cluster.Logging.Broker.Metadata = brokerLoggingProp.Metadata() + if s3Prop := brokerLoggingProp.GetProperty("S3"); s3Prop.IsNotNil() { + cluster.Logging.Broker.S3.Metadata = s3Prop.Metadata() + cluster.Logging.Broker.S3.Enabled = s3Prop.GetBoolProperty("Enabled", false) + } + if cwProp := brokerLoggingProp.GetProperty("CloudWatchLogs"); cwProp.IsNotNil() { + cluster.Logging.Broker.Cloudwatch.Metadata = cwProp.Metadata() + cluster.Logging.Broker.Cloudwatch.Enabled = cwProp.GetBoolProperty("Enabled", false) + } + if fhProp := brokerLoggingProp.GetProperty("Firehose"); fhProp.IsNotNil() { + cluster.Logging.Broker.Firehose.Metadata = fhProp.Metadata() + cluster.Logging.Broker.Firehose.Enabled = fhProp.GetBoolProperty("Enabled", false) + } + } + } + + clusters = append(clusters, cluster) + } + return clusters +} diff --git a/internal/adapters/cloudformation/aws/msk/msk.go b/internal/adapters/cloudformation/aws/msk/msk.go new file mode 100644 index 000000000000..4462f7c6f8ca --- /dev/null +++ b/internal/adapters/cloudformation/aws/msk/msk.go @@ -0,0 +1,13 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) msk.MSK { // Adapt builds the MSK provider model from the CloudFormation template
+	return msk.MSK{
+		Clusters: getClusters(cfFile),
+	}
+}
diff --git a/internal/adapters/cloudformation/aws/neptune/cluster.go b/internal/adapters/cloudformation/aws/neptune/cluster.go
new file mode 100644
index 000000000000..1157caa0bd99
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/neptune/cluster.go
@@ -0,0 +1,34 @@
+package neptune
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/neptune"
+	"github.com/aquasecurity/defsec/pkg/types"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { // adapts every AWS::Neptune::DBCluster in the template
+	for _, r := range ctx.GetResourcesByType("AWS::Neptune::DBCluster") {
+
+		cluster := neptune.Cluster{
+			Metadata: r.Metadata(),
+			Logging: neptune.Logging{
+				Metadata: r.Metadata(),
+				Audit:    getAuditLog(r),
+			},
+			StorageEncrypted: r.GetBoolProperty("StorageEncrypted"),
+			KMSKeyID:         r.GetStringProperty("KmsKeyId"),
+		}
+		clusters = append(clusters, cluster)
+	}
+	return clusters
+}
+
+func getAuditLog(r *parser.Resource) types.BoolValue { // true only when EnableCloudwatchLogsExports is a list containing "audit"
+	if logsProp := r.GetProperty("EnableCloudwatchLogsExports"); logsProp.IsList() {
+		if logsProp.Contains("audit") {
+			return types.Bool(true, logsProp.Metadata())
+		}
+	}
+
+	return types.BoolDefault(false, r.Metadata()) // default: audit logging not enabled
+}
diff --git a/internal/adapters/cloudformation/aws/neptune/neptune.go b/internal/adapters/cloudformation/aws/neptune/neptune.go
new file mode 100644
index 000000000000..d9fc9e49faf6
--- /dev/null
+++ b/internal/adapters/cloudformation/aws/neptune/neptune.go
@@ -0,0 +1,13 @@
+package neptune
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/neptune"
+	"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
+)
+
+// Adapt ...
+func Adapt(cfFile parser.FileContext) neptune.Neptune { + return neptune.Neptune{ + Clusters: getClusters(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/rds/cluster.go b/internal/adapters/cloudformation/aws/rds/cluster.go new file mode 100644 index 000000000000..47a9780f8032 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/cluster.go @@ -0,0 +1,80 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters map[string]rds.Cluster) { + clusters = make(map[string]rds.Cluster) + for _, clusterResource := range ctx.GetResourcesByType("AWS::RDS::DBCluster") { + cluster := rds.Cluster{ + Metadata: clusterResource.Metadata(), + BackupRetentionPeriodDays: defsecTypes.IntDefault(1, clusterResource.Metadata()), + ReplicationSourceARN: defsecTypes.StringDefault("", clusterResource.Metadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: clusterResource.Metadata(), + Enabled: defsecTypes.BoolDefault(false, clusterResource.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", clusterResource.Metadata()), + }, + Instances: nil, + Encryption: rds.Encryption{ + Metadata: clusterResource.Metadata(), + EncryptStorage: defsecTypes.BoolDefault(false, clusterResource.Metadata()), + KMSKeyID: defsecTypes.StringDefault("", clusterResource.Metadata()), + }, + PublicAccess: defsecTypes.BoolDefault(false, clusterResource.Metadata()), + Engine: defsecTypes.StringDefault(rds.EngineAurora, clusterResource.Metadata()), + LatestRestorableTime: defsecTypes.TimeUnresolvable(clusterResource.Metadata()), + DeletionProtection: defsecTypes.BoolDefault(false, clusterResource.Metadata()), + } + + if engineProp := clusterResource.GetProperty("Engine"); engineProp.IsString() { + cluster.Engine = engineProp.AsStringValue() + } + + if 
backupProp := clusterResource.GetProperty("BackupRetentionPeriod"); backupProp.IsInt() { + cluster.BackupRetentionPeriodDays = backupProp.AsIntValue() + } + + if replicaProp := clusterResource.GetProperty("SourceDBInstanceIdentifier"); replicaProp.IsString() { + cluster.ReplicationSourceARN = replicaProp.AsStringValue() + } + + if piProp := clusterResource.GetProperty("EnablePerformanceInsights"); piProp.IsBool() { + cluster.PerformanceInsights.Enabled = piProp.AsBoolValue() + } + + if insightsKeyProp := clusterResource.GetProperty("PerformanceInsightsKMSKeyId"); insightsKeyProp.IsString() { + cluster.PerformanceInsights.KMSKeyID = insightsKeyProp.AsStringValue() + } + + if encryptedProp := clusterResource.GetProperty("StorageEncrypted"); encryptedProp.IsBool() { + cluster.Encryption.EncryptStorage = encryptedProp.AsBoolValue() + } + + if keyProp := clusterResource.GetProperty("KmsKeyId"); keyProp.IsString() { + cluster.Encryption.KMSKeyID = keyProp.AsStringValue() + } + + clusters[clusterResource.ID()] = cluster + } + return clusters +} + +func getClassic(ctx parser.FileContext) rds.Classic { + return rds.Classic{ + DBSecurityGroups: getClassicSecurityGroups(ctx), + } +} + +func getClassicSecurityGroups(ctx parser.FileContext) (groups []rds.DBSecurityGroup) { + for _, dbsgResource := range ctx.GetResourcesByType("AWS::RDS::DBSecurityGroup") { + group := rds.DBSecurityGroup{ + Metadata: dbsgResource.Metadata(), + } + groups = append(groups, group) + } + return groups +} diff --git a/internal/adapters/cloudformation/aws/rds/instance.go b/internal/adapters/cloudformation/aws/rds/instance.go new file mode 100644 index 000000000000..b3265839831a --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/instance.go @@ -0,0 +1,128 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func 
getClustersAndInstances(ctx parser.FileContext) (clusters []rds.Cluster, orphans []rds.Instance) { + + clusterMap := getClusters(ctx) + + for _, r := range ctx.GetResourcesByType("AWS::RDS::DBInstance") { + + instance := rds.Instance{ + Metadata: r.Metadata(), + BackupRetentionPeriodDays: r.GetIntProperty("BackupRetentionPeriod", 1), + ReplicationSourceARN: r.GetStringProperty("SourceDBInstanceIdentifier"), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("EnablePerformanceInsights"), + KMSKeyID: r.GetStringProperty("PerformanceInsightsKMSKeyId"), + }, + Encryption: rds.Encryption{ + Metadata: r.Metadata(), + EncryptStorage: r.GetBoolProperty("StorageEncrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + PublicAccess: r.GetBoolProperty("PubliclyAccessible", true), + Engine: r.GetStringProperty("Engine"), + IAMAuthEnabled: r.GetBoolProperty("EnableIAMDatabaseAuthentication"), + DeletionProtection: r.GetBoolProperty("DeletionProtection", false), + DBInstanceArn: r.GetStringProperty("DBInstanceArn"), + StorageEncrypted: r.GetBoolProperty("StorageEncrypted", false), + DBInstanceIdentifier: r.GetStringProperty("DBInstanceIdentifier"), + DBParameterGroups: getDBParameterGroups(ctx, r), + TagList: getTagList(r), + EnabledCloudwatchLogsExports: getEnabledCloudwatchLogsExports(r), + EngineVersion: r.GetStringProperty("EngineVersion"), + AutoMinorVersionUpgrade: r.GetBoolProperty("AutoMinorVersionUpgrade"), + MultiAZ: r.GetBoolProperty("MultiAZ"), + PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), + LatestRestorableTime: types.TimeUnresolvable(r.Metadata()), + ReadReplicaDBInstanceIdentifiers: getReadReplicaDBInstanceIdentifiers(r), + } + + if clusterID := r.GetProperty("DBClusterIdentifier"); clusterID.IsString() { + var found bool + for key, cluster := range clusterMap { + if key == clusterID.AsString() { + cluster.Instances = append(cluster.Instances, rds.ClusterInstance{ + Instance: instance, + 
ClusterIdentifier: clusterID.AsStringValue(), + }) + clusterMap[key] = cluster + found = true + break + } + } + if found { + continue + } + } + + orphans = append(orphans, instance) + } + + for _, cluster := range clusterMap { + clusters = append(clusters, cluster) + } + + return clusters, orphans +} + +func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { + + for _, r := range ctx.GetResourcesByType("DBParameterGroups") { + dbpmgl := rds.DBParameterGroupsList{ + Metadata: r.Metadata(), + DBParameterGroupName: r.GetStringProperty("DBParameterGroupName"), + KMSKeyID: types.StringUnresolvable(r.Metadata()), + } + dbParameterGroup = append(dbParameterGroup, dbpmgl) + } + + return dbParameterGroup +} + +func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogexportslist []types.StringValue) { + enabledCloudwatchLogExportList := r.GetProperty("EnableCloudwatchLogsExports") + + if enabledCloudwatchLogExportList.IsNil() || enabledCloudwatchLogExportList.IsNotList() { + return enabledcloudwatchlogexportslist + } + + for _, ecle := range enabledCloudwatchLogExportList.AsList() { + enabledcloudwatchlogexportslist = append(enabledcloudwatchlogexportslist, ecle.AsStringValue()) + } + return enabledcloudwatchlogexportslist +} + +func getTagList(r *parser.Resource) (taglist []rds.TagList) { + tagLists := r.GetProperty("tags") + + if tagLists.IsNil() || tagLists.IsNotList() { + return taglist + } + + for _, tl := range tagLists.AsList() { + taglist = append(taglist, rds.TagList{ + Metadata: tl.Metadata(), + }) + } + return taglist +} + +func getReadReplicaDBInstanceIdentifiers(r *parser.Resource) (readreplicadbidentifier []types.StringValue) { + readReplicaDBIdentifier := r.GetProperty("EnableCloudwatchLogsExports") + + if readReplicaDBIdentifier.IsNil() || readReplicaDBIdentifier.IsNotList() { + return readreplicadbidentifier + } + + for _, rr := range readReplicaDBIdentifier.AsList() { + 
readreplicadbidentifier = append(readreplicadbidentifier, rr.AsStringValue()) + } + return readreplicadbidentifier +} diff --git a/internal/adapters/cloudformation/aws/rds/parameter_groups.go b/internal/adapters/cloudformation/aws/rds/parameter_groups.go new file mode 100644 index 000000000000..9c233aca8f83 --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/parameter_groups.go @@ -0,0 +1,42 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getParameterGroups(ctx parser.FileContext) (parametergroups []rds.ParameterGroups) { + + for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { + + paramgroup := rds.ParameterGroups{ + Metadata: r.Metadata(), + DBParameterGroupName: r.GetStringProperty("DBParameterGroupName"), + DBParameterGroupFamily: r.GetStringProperty("DBParameterGroupFamily"), + Parameters: getParameters(r), + } + + parametergroups = append(parametergroups, paramgroup) + } + + return parametergroups +} + +func getParameters(r *parser.Resource) (parameters []rds.Parameters) { + + dBParam := r.GetProperty("Parameters") + + if dBParam.IsNil() || dBParam.IsNotList() { + return parameters + } + + for _, dbp := range dBParam.AsList() { + parameters = append(parameters, rds.Parameters{ + Metadata: dbp.Metadata(), + ParameterName: types.StringDefault("", dbp.Metadata()), + ParameterValue: types.StringDefault("", dbp.Metadata()), + }) + } + return parameters +} diff --git a/internal/adapters/cloudformation/aws/rds/rds.go b/internal/adapters/cloudformation/aws/rds/rds.go new file mode 100644 index 000000000000..7bcf26716d5d --- /dev/null +++ b/internal/adapters/cloudformation/aws/rds/rds.go @@ -0,0 +1,18 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + 
+// Adapt ... +func Adapt(cfFile parser.FileContext) rds.RDS { + clusters, orphans := getClustersAndInstances(cfFile) + return rds.RDS{ + Instances: orphans, + Clusters: clusters, + Classic: getClassic(cfFile), + ParameterGroups: getParameterGroups(cfFile), + Snapshots: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/redshift/cluster.go b/internal/adapters/cloudformation/aws/redshift/cluster.go new file mode 100644 index 000000000000..ebdee435b262 --- /dev/null +++ b/internal/adapters/cloudformation/aws/redshift/cluster.go @@ -0,0 +1,54 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { + for _, r := range ctx.GetResourcesByType("AWS::Redshift::Cluster") { + + cluster := redshift.Cluster{ + Metadata: r.Metadata(), + ClusterIdentifier: r.GetStringProperty("ClusterIdentifier"), + AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade"), + NodeType: r.GetStringProperty("NodeType"), + NumberOfNodes: r.GetIntProperty("NumberOfNodes"), + PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), + MasterUsername: r.GetStringProperty("MasterUsername"), + VpcId: types.String("", r.Metadata()), + LoggingEnabled: types.Bool(false, r.Metadata()), + AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod"), + Encryption: redshift.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("Encrypted"), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + }, + EndPoint: redshift.EndPoint{ + Metadata: r.Metadata(), + Port: r.GetIntProperty("Endpoint.Port"), + }, + SubnetGroupName: r.GetStringProperty("ClusterSubnetGroupName", ""), + } + + clusters = append(clusters, cluster) + } + return clusters +} + +func getParameters(ctx parser.FileContext) (parameter []redshift.ClusterParameter) 
{ + + paraRes := ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup") + var parameters []redshift.ClusterParameter + for _, r := range paraRes { + for _, par := range r.GetProperty("Parameters").AsList() { + parameters = append(parameters, redshift.ClusterParameter{ + Metadata: par.Metadata(), + ParameterName: par.GetStringProperty("ParameterName"), + ParameterValue: par.GetStringProperty("ParameterValue"), + }) + } + } + return parameters +} diff --git a/internal/adapters/cloudformation/aws/redshift/redshift.go b/internal/adapters/cloudformation/aws/redshift/redshift.go new file mode 100644 index 000000000000..fccd7b20f60b --- /dev/null +++ b/internal/adapters/cloudformation/aws/redshift/redshift.go @@ -0,0 +1,16 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) redshift.Redshift { + return redshift.Redshift{ + Clusters: getClusters(cfFile), + SecurityGroups: getSecurityGroups(cfFile), + ClusterParameters: getParameters(cfFile), + ReservedNodes: nil, + } +} diff --git a/internal/adapters/cloudformation/aws/redshift/security_group.go b/internal/adapters/cloudformation/aws/redshift/security_group.go new file mode 100644 index 000000000000..bdd069044e78 --- /dev/null +++ b/internal/adapters/cloudformation/aws/redshift/security_group.go @@ -0,0 +1,17 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSecurityGroups(ctx parser.FileContext) (groups []redshift.SecurityGroup) { + for _, groupResource := range ctx.GetResourcesByType("AWS::Redshift::ClusterSecurityGroup") { + group := redshift.SecurityGroup{ + Metadata: groupResource.Metadata(), + Description: groupResource.GetProperty("Description").AsStringValue(), + } + groups = append(groups, 
group) + } + return groups +} diff --git a/internal/adapters/cloudformation/aws/s3/bucket.go b/internal/adapters/cloudformation/aws/s3/bucket.go new file mode 100644 index 000000000000..d22f25c338fe --- /dev/null +++ b/internal/adapters/cloudformation/aws/s3/bucket.go @@ -0,0 +1,148 @@ +package s3 + +import ( + "regexp" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +var aclConvertRegex = regexp.MustCompile(`[A-Z][^A-Z]*`) + +func getBuckets(cfFile parser.FileContext) []s3.Bucket { + var buckets []s3.Bucket + bucketResources := cfFile.GetResourcesByType("AWS::S3::Bucket") + + for _, r := range bucketResources { + s3b := s3.Bucket{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("BucketName"), + PublicAccessBlock: getPublicAccessBlock(r), + Encryption: getEncryption(r, cfFile), + Versioning: s3.Versioning{ + Metadata: r.Metadata(), + Enabled: hasVersioning(r), + MFADelete: defsecTypes.BoolUnresolvable(r.Metadata()), + }, + Logging: getLogging(r), + ACL: convertAclValue(r.GetStringProperty("AccessControl", "private")), + LifecycleConfiguration: getLifecycle(r), + AccelerateConfigurationStatus: r.GetStringProperty("AccelerateConfiguration.AccelerationStatus"), + Website: getWebsite(r), + BucketLocation: defsecTypes.String("", r.Metadata()), + Objects: nil, + } + + buckets = append(buckets, s3b) + } + return buckets +} + +func getPublicAccessBlock(r *parser.Resource) *s3.PublicAccessBlock { + if block := r.GetProperty("PublicAccessBlockConfiguration"); block.IsNil() { + return nil + } + + return &s3.PublicAccessBlock{ + Metadata: r.Metadata(), + BlockPublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicAcls"), + BlockPublicPolicy: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicPolicy"), + IgnorePublicACLs: 
r.GetBoolProperty("PublicAccessBlockConfiguration.IgnorePublicAcls"), + RestrictPublicBuckets: r.GetBoolProperty("PublicAccessBlockConfiguration.RestrictPublicBuckets"), + } +} + +func convertAclValue(aclValue defsecTypes.StringValue) defsecTypes.StringValue { + matches := aclConvertRegex.FindAllString(aclValue.Value(), -1) + + return defsecTypes.String(strings.ToLower(strings.Join(matches, "-")), aclValue.GetMetadata()) +} + +func getLogging(r *parser.Resource) s3.Logging { + + logging := s3.Logging{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + TargetBucket: defsecTypes.StringDefault("", r.Metadata()), + } + + if config := r.GetProperty("LoggingConfiguration"); config.IsNotNil() { + logging.TargetBucket = config.GetStringProperty("DestinationBucketName") + if logging.TargetBucket.IsNotEmpty() || !logging.TargetBucket.GetMetadata().IsResolvable() { + logging.Enabled = defsecTypes.Bool(true, config.Metadata()) + } + } + return logging +} + +func hasVersioning(r *parser.Resource) defsecTypes.BoolValue { + versioningProp := r.GetProperty("VersioningConfiguration.Status") + + if versioningProp.IsNil() { + return defsecTypes.BoolDefault(false, r.Metadata()) + } + + versioningEnabled := false + if versioningProp.EqualTo("Enabled") { + versioningEnabled = true + + } + return defsecTypes.Bool(versioningEnabled, versioningProp.Metadata()) +} + +func getEncryption(r *parser.Resource, _ parser.FileContext) s3.Encryption { + + encryption := s3.Encryption{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + Algorithm: defsecTypes.StringDefault("", r.Metadata()), + KMSKeyId: defsecTypes.StringDefault("", r.Metadata()), + } + + if encryptProps := r.GetProperty("BucketEncryption.ServerSideEncryptionConfiguration"); encryptProps.IsNotNil() { + for _, rule := range encryptProps.AsList() { + if algo := rule.GetProperty("ServerSideEncryptionByDefault.SSEAlgorithm"); algo.EqualTo("AES256") { + encryption.Enabled = 
defsecTypes.Bool(true, algo.Metadata()) + } else if kmsKeyProp := rule.GetProperty("ServerSideEncryptionByDefault.KMSMasterKeyID"); !kmsKeyProp.IsEmpty() && kmsKeyProp.IsString() { + encryption.KMSKeyId = kmsKeyProp.AsStringValue() + } + if encryption.Enabled.IsFalse() { + encryption.Enabled = rule.GetBoolProperty("BucketKeyEnabled", false) + } + } + } + + return encryption +} + +func getLifecycle(resource *parser.Resource) []s3.Rules { + LifecycleProp := resource.GetProperty("LifecycleConfiguration") + RuleProp := LifecycleProp.GetProperty("Rules") + + var rule []s3.Rules + + if RuleProp.IsNil() || RuleProp.IsNotList() { + return rule + } + + for _, r := range RuleProp.AsList() { + rule = append(rule, s3.Rules{ + Metadata: r.Metadata(), + Status: r.GetStringProperty("Status"), + }) + } + return rule +} + +func getWebsite(r *parser.Resource) *s3.Website { + if block := r.GetProperty("WebsiteConfiguration"); block.IsNil() { + return nil + } else { + return &s3.Website{ + Metadata: block.Metadata(), + } + } +} diff --git a/internal/adapters/cloudformation/aws/s3/s3.go b/internal/adapters/cloudformation/aws/s3/s3.go new file mode 100644 index 000000000000..6074cd4bc912 --- /dev/null +++ b/internal/adapters/cloudformation/aws/s3/s3.go @@ -0,0 +1,13 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) s3.S3 { + return s3.S3{ + Buckets: getBuckets(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sam/api.go b/internal/adapters/cloudformation/aws/sam/api.go new file mode 100644 index 000000000000..c33c870ea076 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/api.go @@ -0,0 +1,96 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getApis(cfFile parser.FileContext) (apis []sam.API) { + + apiResources := cfFile.GetResourcesByType("AWS::Serverless::Api") + for _, r := range apiResources { + api := sam.API{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name", ""), + TracingEnabled: r.GetBoolProperty("TracingEnabled"), + DomainConfiguration: getDomainConfiguration(r), + AccessLogging: getAccessLogging(r), + RESTMethodSettings: getRestMethodSettings(r), + } + + apis = append(apis, api) + } + + return apis +} + +func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { + + settings := sam.RESTMethodSettings{ + Metadata: r.Metadata(), + CacheDataEncrypted: defsecTypes.BoolDefault(false, r.Metadata()), + LoggingEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + DataTraceEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + MetricsEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + } + + settingsProp := r.GetProperty("MethodSettings") + if settingsProp.IsNotNil() { + + settings = sam.RESTMethodSettings{ + Metadata: settingsProp.Metadata(), + CacheDataEncrypted: settingsProp.GetBoolProperty("CacheDataEncrypted"), + LoggingEnabled: defsecTypes.BoolDefault(false, settingsProp.Metadata()), + DataTraceEnabled: settingsProp.GetBoolProperty("DataTraceEnabled"), + MetricsEnabled: settingsProp.GetBoolProperty("MetricsEnabled"), + } + + if loggingLevel := settingsProp.GetProperty("LoggingLevel"); 
loggingLevel.IsNotNil() { + if loggingLevel.EqualTo("OFF", parser.IgnoreCase) { + settings.LoggingEnabled = defsecTypes.Bool(false, loggingLevel.Metadata()) + } else { + settings.LoggingEnabled = defsecTypes.Bool(true, loggingLevel.Metadata()) + } + } + } + + return settings +} + +func getAccessLogging(r *parser.Resource) sam.AccessLogging { + + logging := sam.AccessLogging{ + Metadata: r.Metadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", r.Metadata()), + } + + if access := r.GetProperty("AccessLogSetting"); access.IsNotNil() { + logging = sam.AccessLogging{ + Metadata: access.Metadata(), + CloudwatchLogGroupARN: access.GetStringProperty("DestinationArn", ""), + } + } + + return logging +} + +func getDomainConfiguration(r *parser.Resource) sam.DomainConfiguration { + + domainConfig := sam.DomainConfiguration{ + Metadata: r.Metadata(), + Name: defsecTypes.StringDefault("", r.Metadata()), + SecurityPolicy: defsecTypes.StringDefault("TLS_1_0", r.Metadata()), + } + + if domain := r.GetProperty("Domain"); domain.IsNotNil() { + domainConfig = sam.DomainConfiguration{ + Metadata: domain.Metadata(), + Name: domain.GetStringProperty("DomainName", ""), + SecurityPolicy: domain.GetStringProperty("SecurityPolicy", "TLS_1_0"), + } + } + + return domainConfig + +} diff --git a/internal/adapters/cloudformation/aws/sam/function.go b/internal/adapters/cloudformation/aws/sam/function.go new file mode 100644 index 000000000000..e4f0e8901d3c --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/function.go @@ -0,0 +1,58 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { + + functionResources := 
cfFile.GetResourcesByType("AWS::Serverless::Function") + for _, r := range functionResources { + function := sam.Function{ + Metadata: r.Metadata(), + FunctionName: r.GetStringProperty("FunctionName"), + Tracing: r.GetStringProperty("Tracing", sam.TracingModePassThrough), + ManagedPolicies: nil, + Policies: nil, + } + + setFunctionPolicies(r, &function) + functions = append(functions, function) + } + + return functions +} + +func setFunctionPolicies(r *parser.Resource, function *sam.Function) { + policies := r.GetProperty("Policies") + if policies.IsNotNil() { + if policies.IsString() { + function.ManagedPolicies = append(function.ManagedPolicies, policies.AsStringValue()) + } else if policies.IsList() { + for _, property := range policies.AsList() { + if property.IsMap() { + parsed, err := iamgo.Parse(property.GetJsonBytes(true)) + if err != nil { + continue + } + policy := iam.Policy{ + Metadata: property.Metadata(), + Name: defsecTypes.StringDefault("", property.Metadata()), + Document: iam.Document{ + Metadata: property.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, property.Metadata()), + } + function.Policies = append(function.Policies, policy) + } else if property.IsString() { + function.ManagedPolicies = append(function.ManagedPolicies, property.AsStringValue()) + } + } + } + } +} diff --git a/internal/adapters/cloudformation/aws/sam/http_api.go b/internal/adapters/cloudformation/aws/sam/http_api.go new file mode 100644 index 000000000000..bc12aec18ba8 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/http_api.go @@ -0,0 +1,64 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) { + + apiResources := cfFile.GetResourcesByType("AWS::Serverless::HttpApi") + for _, r := range apiResources { + api := 
sam.HttpAPI{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name", ""), + DomainConfiguration: getDomainConfiguration(r), + AccessLogging: getAccessLoggingV2(r), + DefaultRouteSettings: getRouteSettings(r), + } + + apis = append(apis, api) + } + + return apis +} + +func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging { + + logging := sam.AccessLogging{ + Metadata: r.Metadata(), + CloudwatchLogGroupARN: types.StringDefault("", r.Metadata()), + } + + if access := r.GetProperty("AccessLogSettings"); access.IsNotNil() { + logging = sam.AccessLogging{ + Metadata: access.Metadata(), + CloudwatchLogGroupARN: access.GetStringProperty("DestinationArn", ""), + } + } + + return logging +} + +func getRouteSettings(r *parser.Resource) sam.RouteSettings { + + routeSettings := sam.RouteSettings{ + Metadata: r.Metadata(), + LoggingEnabled: types.BoolDefault(false, r.Metadata()), + DataTraceEnabled: types.BoolDefault(false, r.Metadata()), + DetailedMetricsEnabled: types.BoolDefault(false, r.Metadata()), + } + + if route := r.GetProperty("DefaultRouteSettings"); route.IsNotNil() { + routeSettings = sam.RouteSettings{ + Metadata: route.Metadata(), + LoggingEnabled: route.GetBoolProperty("LoggingLevel"), + DataTraceEnabled: route.GetBoolProperty("DataTraceEnabled"), + DetailedMetricsEnabled: route.GetBoolProperty("DetailedMetricsEnabled"), + } + } + + return routeSettings + +} diff --git a/internal/adapters/cloudformation/aws/sam/sam.go b/internal/adapters/cloudformation/aws/sam/sam.go new file mode 100644 index 000000000000..a5fbe0eea81a --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/sam.go @@ -0,0 +1,17 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) sam.SAM { + return sam.SAM{ + APIs: getApis(cfFile), + HttpAPIs: getHttpApis(cfFile), + Functions: getFunctions(cfFile), + StateMachines: getStateMachines(cfFile), + SimpleTables: getSimpleTables(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sam/state_machines.go b/internal/adapters/cloudformation/aws/sam/state_machines.go new file mode 100644 index 000000000000..a591d4418e95 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/state_machines.go @@ -0,0 +1,80 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/liamg/iamgo" +) + +func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { + + stateMachineResources := cfFile.GetResourcesByType("AWS::Serverless::StateMachine") + for _, r := range stateMachineResources { + stateMachine := sam.StateMachine{ + Metadata: r.Metadata(), + Name: r.GetStringProperty("Name"), + LoggingConfiguration: sam.LoggingConfiguration{ + Metadata: r.Metadata(), + LoggingEnabled: defsecTypes.BoolDefault(false, r.Metadata()), + }, + ManagedPolicies: nil, + Policies: nil, + Tracing: getTracingConfiguration(r), + } + + if logging := r.GetProperty("Logging"); logging.IsNotNil() { + stateMachine.LoggingConfiguration.Metadata = logging.Metadata() + if level := logging.GetProperty("Level"); level.IsNotNil() { + stateMachine.LoggingConfiguration.LoggingEnabled = defsecTypes.Bool(!level.EqualTo("OFF"), level.Metadata()) + } + } + + setStateMachinePolicies(r, &stateMachine) + stateMachines = append(stateMachines, stateMachine) + } + + return stateMachines +} + +func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { + tracing := r.GetProperty("Tracing") + if tracing.IsNil() { + return 
sam.TracingConfiguration{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + } + } + + return sam.TracingConfiguration{ + Metadata: tracing.Metadata(), + Enabled: tracing.GetBoolProperty("Enabled"), + } +} + +func setStateMachinePolicies(r *parser.Resource, stateMachine *sam.StateMachine) { + policies := r.GetProperty("Policies") + if policies.IsNotNil() { + if policies.IsString() { + stateMachine.ManagedPolicies = append(stateMachine.ManagedPolicies, policies.AsStringValue()) + } else if policies.IsList() { + for _, property := range policies.AsList() { + parsed, err := iamgo.Parse(property.GetJsonBytes(true)) + if err != nil { + continue + } + policy := iam.Policy{ + Metadata: property.Metadata(), + Name: defsecTypes.StringDefault("", property.Metadata()), + Document: iam.Document{ + Metadata: property.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, property.Metadata()), + } + stateMachine.Policies = append(stateMachine.Policies, policy) + } + } + } +} diff --git a/internal/adapters/cloudformation/aws/sam/tables.go b/internal/adapters/cloudformation/aws/sam/tables.go new file mode 100644 index 000000000000..afb7bf6fe7a1 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sam/tables.go @@ -0,0 +1,42 @@ +package sam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { + + tableResources := cfFile.GetResourcesByType("AWS::Serverless::SimpleTable") + for _, r := range tableResources { + table := sam.SimpleTable{ + Metadata: r.Metadata(), + TableName: r.GetStringProperty("TableName"), + SSESpecification: getSSESpecification(r), + } + + tables = append(tables, table) + } + + return tables +} + +func getSSESpecification(r *parser.Resource) sam.SSESpecification { + + spec := 
sam.SSESpecification{ + Metadata: r.Metadata(), + Enabled: defsecTypes.BoolDefault(false, r.Metadata()), + KMSMasterKeyID: defsecTypes.StringDefault("", r.Metadata()), + } + + if sse := r.GetProperty("SSESpecification"); sse.IsNotNil() { + spec = sam.SSESpecification{ + Metadata: sse.Metadata(), + Enabled: sse.GetBoolProperty("SSEEnabled"), + KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyID"), + } + } + + return spec +} diff --git a/internal/adapters/cloudformation/aws/sns/sns.go b/internal/adapters/cloudformation/aws/sns/sns.go new file mode 100644 index 000000000000..fc89a1f1ccb6 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sns/sns.go @@ -0,0 +1,13 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) sns.SNS { + return sns.SNS{ + Topics: getTopics(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/sns/topic.go b/internal/adapters/cloudformation/aws/sns/topic.go new file mode 100644 index 000000000000..738248ccfd88 --- /dev/null +++ b/internal/adapters/cloudformation/aws/sns/topic.go @@ -0,0 +1,24 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getTopics(ctx parser.FileContext) (topics []sns.Topic) { + for _, r := range ctx.GetResourcesByType("AWS::SNS::Topic") { + + topic := sns.Topic{ + Metadata: r.Metadata(), + ARN: types.StringDefault("", r.Metadata()), + Encryption: sns.Encryption{ + Metadata: r.Metadata(), + KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), + }, + } + + topics = append(topics, topic) + } + return topics +} diff --git a/internal/adapters/cloudformation/aws/sqs/queue.go b/internal/adapters/cloudformation/aws/sqs/queue.go new file mode 100644 index 000000000000..45e241153d1b 
--- /dev/null +++ b/internal/adapters/cloudformation/aws/sqs/queue.go @@ -0,0 +1,66 @@ +package sqs + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + + "github.com/liamg/iamgo" +) + +func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { + for _, r := range ctx.GetResourcesByType("AWS::SQS::Queue") { + queue := sqs.Queue{ + Metadata: r.Metadata(), + QueueURL: defsecTypes.StringDefault("", r.Metadata()), + Encryption: sqs.Encryption{ + Metadata: r.Metadata(), + ManagedEncryption: defsecTypes.Bool(false, r.Metadata()), + KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), + }, + Policies: []iam.Policy{}, + } + if policy, err := getPolicy(r.ID(), ctx); err == nil { + queue.Policies = append(queue.Policies, *policy) + } + queues = append(queues, queue) + } + return queues +} + +func getPolicy(id string, ctx parser.FileContext) (*iam.Policy, error) { + for _, policyResource := range ctx.GetResourcesByType("AWS::SQS::QueuePolicy") { + documentProp := policyResource.GetProperty("PolicyDocument") + if documentProp.IsNil() { + continue + } + queuesProp := policyResource.GetProperty("Queues") + if queuesProp.IsNil() { + continue + } + for _, queueRef := range queuesProp.AsList() { + if queueRef.IsString() && queueRef.AsString() == id { + raw := documentProp.GetJsonBytes() + parsed, err := iamgo.Parse(raw) + if err != nil { + continue + } + return &iam.Policy{ + Metadata: documentProp.Metadata(), + Name: defsecTypes.StringDefault("", documentProp.Metadata()), + Document: iam.Document{ + Metadata: documentProp.Metadata(), + Parsed: *parsed, + }, + Builtin: defsecTypes.Bool(false, documentProp.Metadata()), + }, nil + } + } + } + return nil, fmt.Errorf("no matching policy found") +} diff --git a/internal/adapters/cloudformation/aws/sqs/sqs.go 
b/internal/adapters/cloudformation/aws/sqs/sqs.go new file mode 100644 index 000000000000..1a1b3f94931d --- /dev/null +++ b/internal/adapters/cloudformation/aws/sqs/sqs.go @@ -0,0 +1,13 @@ +package sqs + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... +func Adapt(cfFile parser.FileContext) sqs.SQS { + return sqs.SQS{ + Queues: getQueues(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/ssm/secret.go b/internal/adapters/cloudformation/aws/ssm/secret.go new file mode 100644 index 000000000000..181799c68f8e --- /dev/null +++ b/internal/adapters/cloudformation/aws/ssm/secret.go @@ -0,0 +1,18 @@ +package ssm + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getSecrets(ctx parser.FileContext) (secrets []ssm.Secret) { + for _, r := range ctx.GetResourcesByType("AWS::SecretsManager::Secret") { + secret := ssm.Secret{ + Metadata: r.Metadata(), + KMSKeyID: r.GetStringProperty("KmsKeyId"), + } + + secrets = append(secrets, secret) + } + return secrets +} diff --git a/internal/adapters/cloudformation/aws/ssm/ssm.go b/internal/adapters/cloudformation/aws/ssm/ssm.go new file mode 100644 index 000000000000..e85bcfeabbaf --- /dev/null +++ b/internal/adapters/cloudformation/aws/ssm/ssm.go @@ -0,0 +1,13 @@ +package ssm + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) ssm.SSM { + return ssm.SSM{ + Secrets: getSecrets(cfFile), + } +} diff --git a/internal/adapters/cloudformation/aws/workspaces/workspace.go b/internal/adapters/cloudformation/aws/workspaces/workspace.go new file mode 100644 index 000000000000..8a896513a740 --- /dev/null +++ b/internal/adapters/cloudformation/aws/workspaces/workspace.go @@ -0,0 +1,31 @@ +package workspaces + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func getWorkSpaces(ctx parser.FileContext) (workSpaces []workspaces.WorkSpace) { + for _, r := range ctx.GetResourcesByType("AWS::WorkSpaces::Workspace") { + workspace := workspaces.WorkSpace{ + Metadata: r.Metadata(), + RootVolume: workspaces.Volume{ + Metadata: r.Metadata(), + Encryption: workspaces.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("RootVolumeEncryptionEnabled"), + }, + }, + UserVolume: workspaces.Volume{ + Metadata: r.Metadata(), + Encryption: workspaces.Encryption{ + Metadata: r.Metadata(), + Enabled: r.GetBoolProperty("UserVolumeEncryptionEnabled"), + }, + }, + } + + workSpaces = append(workSpaces, workspace) + } + return workSpaces +} diff --git a/internal/adapters/cloudformation/aws/workspaces/workspaces.go b/internal/adapters/cloudformation/aws/workspaces/workspaces.go new file mode 100644 index 000000000000..6b563257d056 --- /dev/null +++ b/internal/adapters/cloudformation/aws/workspaces/workspaces.go @@ -0,0 +1,13 @@ +package workspaces + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +// Adapt ... 
+func Adapt(cfFile parser.FileContext) workspaces.WorkSpaces { + return workspaces.WorkSpaces{ + WorkSpaces: getWorkSpaces(cfFile), + } +} diff --git a/internal/adapters/terraform/adapt.go b/internal/adapters/terraform/adapt.go new file mode 100644 index 000000000000..bbd27e88279d --- /dev/null +++ b/internal/adapters/terraform/adapt.go @@ -0,0 +1,31 @@ +package terraform + +import ( + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws" + "github.com/aquasecurity/trivy/internal/adapters/terraform/azure" + "github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack" + "github.com/aquasecurity/trivy/internal/adapters/terraform/digitalocean" + "github.com/aquasecurity/trivy/internal/adapters/terraform/github" + "github.com/aquasecurity/trivy/internal/adapters/terraform/google" + "github.com/aquasecurity/trivy/internal/adapters/terraform/kubernetes" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud" + "github.com/aquasecurity/trivy/internal/adapters/terraform/openstack" + "github.com/aquasecurity/trivy/internal/adapters/terraform/oracle" +) + +func Adapt(modules terraform.Modules) *state.State { + return &state.State{ + AWS: aws.Adapt(modules), + Azure: azure.Adapt(modules), + CloudStack: cloudstack.Adapt(modules), + DigitalOcean: digitalocean.Adapt(modules), + GitHub: github.Adapt(modules), + Google: google.Adapt(modules), + Kubernetes: kubernetes.Adapt(modules), + Nifcloud: nifcloud.Adapt(modules), + OpenStack: openstack.Adapt(modules), + Oracle: oracle.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go new file mode 100644 index 000000000000..97fcf38713c6 --- /dev/null +++ b/internal/adapters/terraform/aws/accessanalyzer/accessanalyzer.go @@ -0,0 +1,40 @@ +package accessanalyzer + 
+import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/accessanalyzer" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) accessanalyzer.AccessAnalyzer { + return accessanalyzer.AccessAnalyzer{ + Analyzers: adaptTrails(modules), + } +} + +func adaptTrails(modules terraform.Modules) []accessanalyzer.Analyzer { + var analyzer []accessanalyzer.Analyzer + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_accessanalyzer_analyzer") { + analyzer = append(analyzer, adaptAnalyzers(resource)) + } + } + return analyzer +} + +func adaptAnalyzers(resource *terraform.Block) accessanalyzer.Analyzer { + + analyzerName := resource.GetAttribute("analyzer_name") + analyzerNameAttr := analyzerName.AsStringValueOrDefault("", resource) + + arnAnalyzer := resource.GetAttribute("arn") + arnAnalyzerAttr := arnAnalyzer.AsStringValueOrDefault("", resource) + + return accessanalyzer.Analyzer{ + Metadata: resource.GetMetadata(), + Name: analyzerNameAttr, + ARN: arnAnalyzerAttr, + Active: types.BoolDefault(false, resource.GetMetadata()), + } +} diff --git a/internal/adapters/terraform/aws/adapt.go b/internal/adapters/terraform/aws/adapt.go new file mode 100644 index 000000000000..e18ec4dc1633 --- /dev/null +++ b/internal/adapters/terraform/aws/adapt.go @@ -0,0 +1,79 @@ +package aws + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/apigateway" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/athena" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudfront" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudtrail" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/cloudwatch" + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/codebuild" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/config" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/documentdb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/dynamodb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ec2" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecr" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ecs" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/efs" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/eks" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticache" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elasticsearch" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/elb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/emr" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kinesis" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/kms" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/lambda" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/mq" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/msk" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/neptune" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/provider" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/rds" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/redshift" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/s3" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sns" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/sqs" + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/ssm" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/workspaces" +) + +func Adapt(modules terraform.Modules) aws.AWS { + return aws.AWS{ + Meta: aws.Meta{ + TFProviders: provider.Adapt(modules), + }, + APIGateway: apigateway.Adapt(modules), + Athena: athena.Adapt(modules), + Cloudfront: cloudfront.Adapt(modules), + CloudTrail: cloudtrail.Adapt(modules), + CloudWatch: cloudwatch.Adapt(modules), + CodeBuild: codebuild.Adapt(modules), + Config: config.Adapt(modules), + DocumentDB: documentdb.Adapt(modules), + DynamoDB: dynamodb.Adapt(modules), + EC2: ec2.Adapt(modules), + ECR: ecr.Adapt(modules), + ECS: ecs.Adapt(modules), + EFS: efs.Adapt(modules), + EKS: eks.Adapt(modules), + ElastiCache: elasticache.Adapt(modules), + Elasticsearch: elasticsearch.Adapt(modules), + ELB: elb.Adapt(modules), + EMR: emr.Adapt(modules), + IAM: iam.Adapt(modules), + Kinesis: kinesis.Adapt(modules), + KMS: kms.Adapt(modules), + Lambda: lambda.Adapt(modules), + MQ: mq.Adapt(modules), + MSK: msk.Adapt(modules), + Neptune: neptune.Adapt(modules), + RDS: rds.Adapt(modules), + Redshift: redshift.Adapt(modules), + S3: s3.Adapt(modules), + SNS: sns.Adapt(modules), + SQS: sqs.Adapt(modules), + SSM: ssm.Adapt(modules), + WorkSpaces: workspaces.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/aws/apigateway/adapt.go b/internal/adapters/terraform/aws/apigateway/adapt.go new file mode 100644 index 000000000000..2c6b2cb8d6a6 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/adapt.go @@ -0,0 +1,21 @@ +package apigateway + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) apigateway.APIGateway { + return apigateway.APIGateway{ + 
V1: v1.APIGateway{ + APIs: adaptAPIsV1(modules), + DomainNames: adaptDomainNamesV1(modules), + }, + V2: v2.APIGateway{ + APIs: adaptAPIsV2(modules), + DomainNames: adaptDomainNamesV2(modules), + }, + } +} diff --git a/internal/adapters/terraform/aws/apigateway/adapt_test.go b/internal/adapters/terraform/aws/apigateway/adapt_test.go new file mode 100644 index 000000000000..1b116d0df8e5 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/adapt_test.go @@ -0,0 +1,233 @@ +package apigateway + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway" + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected apigateway.APIGateway + }{ + { + name: "basic", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} +resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" +} +resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" +} + + +resource "aws_apigatewayv2_stage" "example" { + api_id = aws_apigatewayv2_api.example.id + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } +} + +resource "aws_api_gateway_domain_name" "example" { + domain_name = "v1.com" + security_policy = "TLS_1_0" +} 
+ +resource "aws_apigatewayv2_domain_name" "example" { + domain_name = "v2.com" + domain_name_configuration { + security_policy = "TLS_1_2" + } +} +`, + expected: apigateway.APIGateway{ + V1: v1.APIGateway{ + APIs: []v1.API{ + { + Metadata: defsecTypes.Metadata{}, + Name: String("MyDemoAPI"), + Resources: []v1.Resource{ + { + Methods: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(false), + }, + }, + }, + }, + }, + }, + DomainNames: []v1.DomainName{ + { + Name: String("v1.com"), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Name: String("tfsec"), + ProtocolType: String("HTTP"), + Stages: []v2.Stage{ + { + Name: String("tfsec"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String("arn:123"), + }, + }, + }, + }, + }, + DomainNames: []v2.DomainName{ + { + Name: String("v2.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Int(i int) defsecTypes.IntValue { + return defsecTypes.Int(i, defsecTypes.NewTestMetadata()) +} + +func Bool(b bool) defsecTypes.BoolValue { + return defsecTypes.Bool(b, defsecTypes.NewTestMetadata()) +} + +func String(s string) defsecTypes.StringValue { + return defsecTypes.String(s, defsecTypes.NewTestMetadata()) +} +func TestLines(t *testing.T) { + src := ` + resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" + } + + resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + } + + resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + 
http_method = "GET" + authorization = "NONE" + api_key_required = true + } + + resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" + } + + resource "aws_apigatewayv2_stage" "example" { + api_id = aws_apigatewayv2_api.example.id + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } + } + + resource "aws_api_gateway_domain_name" "example" { + domain_name = "v1.com" + security_policy = "TLS_1_0" + } + + ` + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.V1.APIs, 1) + require.Len(t, adapted.V2.APIs, 1) + require.Len(t, adapted.V1.DomainNames, 1) + + apiV1 := adapted.V1.APIs[0] + apiV2 := adapted.V2.APIs[0] + domainName := adapted.V1.DomainNames[0] + + assert.Equal(t, 2, apiV1.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, apiV1.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, apiV1.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, apiV1.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, apiV1.Resources[0].Methods[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 17, apiV1.Resources[0].Methods[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, apiV1.Resources[0].Methods[0].HTTPMethod.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, apiV1.Resources[0].Methods[0].HTTPMethod.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, apiV1.Resources[0].Methods[0].AuthorizationType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, apiV1.Resources[0].Methods[0].AuthorizationType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, apiV1.Resources[0].Methods[0].APIKeyRequired.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, apiV1.Resources[0].Methods[0].APIKeyRequired.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, apiV2.Metadata.Range().GetStartLine()) + assert.Equal(t, 22, apiV2.Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, 
apiV2.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, apiV2.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, apiV2.ProtocolType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 21, apiV2.ProtocolType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 24, apiV2.Stages[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 30, apiV2.Stages[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 26, apiV2.Stages[0].Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, apiV2.Stages[0].Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 27, apiV2.Stages[0].AccessLogging.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, apiV2.Stages[0].AccessLogging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 28, apiV2.Stages[0].AccessLogging.CloudwatchLogGroupARN.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, apiV2.Stages[0].AccessLogging.CloudwatchLogGroupARN.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, domainName.Metadata.Range().GetStartLine()) + assert.Equal(t, 35, domainName.Metadata.Range().GetEndLine()) + + assert.Equal(t, 33, domainName.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 33, domainName.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 34, domainName.SecurityPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, domainName.SecurityPolicy.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv1.go b/internal/adapters/terraform/aws/apigateway/apiv1.go new file mode 100644 index 000000000000..e01c9cbdbc94 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv1.go @@ -0,0 +1,115 @@ +package apigateway + +import ( + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptAPIResourcesV1(modules terraform.Modules, apiBlock *terraform.Block) 
[]v1.Resource { + var resources []v1.Resource + for _, resourceBlock := range modules.GetReferencingResources(apiBlock, "aws_api_gateway_resource", "rest_api_id") { + method := v1.Resource{ + Metadata: resourceBlock.GetMetadata(), + Methods: adaptAPIMethodsV1(modules, resourceBlock), + } + resources = append(resources, method) + } + return resources +} + +func adaptAPIMethodsV1(modules terraform.Modules, resourceBlock *terraform.Block) []v1.Method { + var methods []v1.Method + for _, methodBlock := range modules.GetReferencingResources(resourceBlock, "aws_api_gateway_method", "resource_id") { + method := v1.Method{ + Metadata: methodBlock.GetMetadata(), + HTTPMethod: methodBlock.GetAttribute("http_method").AsStringValueOrDefault("", methodBlock), + AuthorizationType: methodBlock.GetAttribute("authorization").AsStringValueOrDefault("", methodBlock), + APIKeyRequired: methodBlock.GetAttribute("api_key_required").AsBoolValueOrDefault(false, methodBlock), + } + methods = append(methods, method) + } + return methods +} + +func adaptAPIsV1(modules terraform.Modules) []v1.API { + + var apis []v1.API + apiStageIDs := modules.GetChildResourceIDMapByType("aws_api_gateway_stage") + + for _, apiBlock := range modules.GetResourcesByType("aws_api_gateway_rest_api") { + api := v1.API{ + Metadata: apiBlock.GetMetadata(), + Name: apiBlock.GetAttribute("name").AsStringValueOrDefault("", apiBlock), + Stages: nil, + Resources: adaptAPIResourcesV1(modules, apiBlock), + } + + for _, stageBlock := range modules.GetReferencingResources(apiBlock, "aws_api_gateway_stage", "rest_api_id") { + apiStageIDs.Resolve(stageBlock.ID()) + stage := adaptStageV1(stageBlock, modules) + + api.Stages = append(api.Stages, stage) + } + + apis = append(apis, api) + } + + orphanResources := modules.GetResourceByIDs(apiStageIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := v1.API{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Name: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + } + for _, stage := range orphanResources { + orphanage.Stages = append(orphanage.Stages, adaptStageV1(stage, modules)) + } + apis = append(apis, orphanage) + } + + return apis +} + +func adaptStageV1(stageBlock *terraform.Block, modules terraform.Modules) v1.Stage { + stage := v1.Stage{ + Metadata: stageBlock.GetMetadata(), + Name: stageBlock.GetAttribute("name").AsStringValueOrDefault("", stageBlock), + AccessLogging: v1.AccessLogging{ + Metadata: stageBlock.GetMetadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", stageBlock.GetMetadata()), + }, + XRayTracingEnabled: stageBlock.GetAttribute("xray_tracing_enabled").AsBoolValueOrDefault(false, stageBlock), + } + for _, methodSettings := range modules.GetReferencingResources(stageBlock, "aws_api_gateway_method_settings", "stage_name") { + + restMethodSettings := v1.RESTMethodSettings{ + Metadata: methodSettings.GetMetadata(), + Method: defsecTypes.String("", methodSettings.GetMetadata()), + CacheDataEncrypted: defsecTypes.BoolDefault(false, methodSettings.GetMetadata()), + CacheEnabled: defsecTypes.BoolDefault(false, methodSettings.GetMetadata()), + } + + if settings := methodSettings.GetBlock("settings"); settings.IsNotNil() { + if encrypted := settings.GetAttribute("cache_data_encrypted"); encrypted.IsNotNil() { + restMethodSettings.CacheDataEncrypted = settings.GetAttribute("cache_data_encrypted").AsBoolValueOrDefault(false, settings) + } + if encrypted := settings.GetAttribute("caching_enabled"); encrypted.IsNotNil() { + restMethodSettings.CacheEnabled = settings.GetAttribute("caching_enabled").AsBoolValueOrDefault(false, settings) + } + } + + stage.RESTMethodSettings = append(stage.RESTMethodSettings, restMethodSettings) + } + + stage.Name = stageBlock.GetAttribute("stage_name").AsStringValueOrDefault("", stageBlock) + 
if accessLogging := stageBlock.GetBlock("access_log_settings"); accessLogging.IsNotNil() { + stage.AccessLogging.Metadata = accessLogging.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = accessLogging.GetAttribute("destination_arn").AsStringValueOrDefault("", accessLogging) + } else { + stage.AccessLogging.Metadata = stageBlock.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = defsecTypes.StringDefault("", stageBlock.GetMetadata()) + } + + return stage +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv1_test.go b/internal/adapters/terraform/aws/apigateway/apiv1_test.go new file mode 100644 index 000000000000..1d1ae52dbd85 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv1_test.go @@ -0,0 +1,125 @@ +package apigateway + +import ( + "testing" + + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptAPIMethodsV1(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v1.Method + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} + +resource "aws_api_gateway_resource" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} + +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" +} +`, + expected: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(false), + }, + }, + }, + { + name: "basic", + terraform: ` +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} + +resource "aws_api_gateway_resource" "example" { + 
rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id +} + +resource "aws_api_gateway_method" "example" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.example.id + http_method = "GET" + authorization = "NONE" + api_key_required = true +} +`, + expected: []v1.Method{ + { + HTTPMethod: String("GET"), + AuthorizationType: String("NONE"), + APIKeyRequired: Bool(true), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + restApiBlock := modules.GetBlocks()[1] + adapted := adaptAPIMethodsV1(modules, restApiBlock) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptAPIsV1(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v1.API + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_rest_api" "example" { + +} +`, + expected: []v1.API{ + { + Name: String(""), + }, + }, + }, + { + name: "full", + terraform: ` +resource "aws_api_gateway_rest_api" "example" { + name = "tfsec" +} +`, + expected: []v1.API{ + { + Name: String("tfsec"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptAPIsV1(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv2.go b/internal/adapters/terraform/aws/apigateway/apiv2.go new file mode 100644 index 000000000000..811abbc1b81c --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv2.go @@ -0,0 +1,69 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptAPIsV2(modules terraform.Modules) []v2.API { + + var 
apis []v2.API + apiStageIDs := modules.GetChildResourceIDMapByType("aws_apigatewayv2_stage") + + for _, module := range modules { + for _, apiBlock := range module.GetResourcesByType("aws_apigatewayv2_api") { + api := v2.API{ + Metadata: apiBlock.GetMetadata(), + Name: apiBlock.GetAttribute("name").AsStringValueOrDefault("", apiBlock), + ProtocolType: apiBlock.GetAttribute("protocol_type").AsStringValueOrDefault("", apiBlock), + Stages: nil, + } + + for _, stageBlock := range module.GetReferencingResources(apiBlock, "aws_apigatewayv2_stage", "api_id") { + apiStageIDs.Resolve(stageBlock.ID()) + + stage := adaptStageV2(stageBlock) + + api.Stages = append(api.Stages, stage) + } + + apis = append(apis, api) + } + } + + orphanResources := modules.GetResourceByIDs(apiStageIDs.Orphans()...) + if len(orphanResources) > 0 { + orphanage := v2.API{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Name: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + ProtocolType: defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()), + Stages: nil, + } + for _, stage := range orphanResources { + orphanage.Stages = append(orphanage.Stages, adaptStageV2(stage)) + } + apis = append(apis, orphanage) + } + + return apis +} + +func adaptStageV2(stageBlock *terraform.Block) v2.Stage { + stage := v2.Stage{ + Metadata: stageBlock.GetMetadata(), + Name: stageBlock.GetAttribute("name").AsStringValueOrDefault("", stageBlock), + AccessLogging: v2.AccessLogging{ + Metadata: stageBlock.GetMetadata(), + CloudwatchLogGroupARN: defsecTypes.StringDefault("", stageBlock.GetMetadata()), + }, + } + if accessLogging := stageBlock.GetBlock("access_log_settings"); accessLogging.IsNotNil() { + stage.AccessLogging.Metadata = accessLogging.GetMetadata() + stage.AccessLogging.CloudwatchLogGroupARN = accessLogging.GetAttribute("destination_arn").AsStringValueOrDefault("", accessLogging) + } else { + stage.AccessLogging.Metadata = stageBlock.GetMetadata() + 
stage.AccessLogging.CloudwatchLogGroupARN = defsecTypes.StringDefault("", stageBlock.GetMetadata()) + } + return stage +} diff --git a/internal/adapters/terraform/aws/apigateway/apiv2_test.go b/internal/adapters/terraform/aws/apigateway/apiv2_test.go new file mode 100644 index 000000000000..f5c728996746 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/apiv2_test.go @@ -0,0 +1,103 @@ +package apigateway + +import ( + "testing" + + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptAPIsV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v2.API + }{ + { + name: "defaults", + terraform: ` +resource "aws_apigatewayv2_api" "example" { + protocol_type = "HTTP" +} +`, + expected: []v2.API{ + { + Name: String(""), + ProtocolType: String("HTTP"), + }, + }, + }, + { + name: "full", + terraform: ` +resource "aws_apigatewayv2_api" "example" { + name = "tfsec" + protocol_type = "HTTP" +} +`, + expected: []v2.API{ + { + Name: String("tfsec"), + ProtocolType: String("HTTP"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptAPIsV2(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptStageV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected v2.Stage + }{ + { + name: "defaults", + terraform: ` +resource "aws_apigatewayv2_stage" "example" { + +} +`, + expected: v2.Stage{ + Name: String(""), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String(""), + }, + }, + }, + { + name: "basics", + terraform: ` +resource "aws_apigatewayv2_stage" "example" { + name = "tfsec" + access_log_settings { + destination_arn = "arn:123" + } +} +`, + expected: v2.Stage{ + 
Name: String("tfsec"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: String("arn:123"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptStageV2(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv1.go b/internal/adapters/terraform/aws/apigateway/namesv1.go new file mode 100644 index 000000000000..bec491d6e8a2 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv1.go @@ -0,0 +1,24 @@ +package apigateway + +import ( + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptDomainNamesV1(modules terraform.Modules) []v1.DomainName { + + var domainNames []v1.DomainName + + for _, module := range modules { + for _, nameBlock := range module.GetResourcesByType("aws_api_gateway_domain_name") { + domainName := v1.DomainName{ + Metadata: nameBlock.GetMetadata(), + Name: nameBlock.GetAttribute("domain_name").AsStringValueOrDefault("", nameBlock), + SecurityPolicy: nameBlock.GetAttribute("security_policy").AsStringValueOrDefault("TLS_1_0", nameBlock), + } + domainNames = append(domainNames, domainName) + } + } + + return domainNames +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv1_test.go b/internal/adapters/terraform/aws/apigateway/namesv1_test.go new file mode 100644 index 000000000000..8232d1271aea --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv1_test.go @@ -0,0 +1,54 @@ +package apigateway + +import ( + "testing" + + v1 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v1" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptDomainNamesV1(t *testing.T) { + tests := []struct { + name 
string + terraform string + expected []v1.DomainName + }{ + { + name: "defaults", + terraform: ` +resource "aws_api_gateway_domain_name" "example" { +} +`, + expected: []v1.DomainName{ + { + Name: String(""), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + { + name: "basic", + terraform: ` +resource "aws_api_gateway_domain_name" "example" { + domain_name = "testing.com" + security_policy = "TLS_1_2" +} +`, + expected: []v1.DomainName{ + { + Name: String("testing.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDomainNamesV1(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv2.go b/internal/adapters/terraform/aws/apigateway/namesv2.go new file mode 100644 index 000000000000..f526ba793850 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv2.go @@ -0,0 +1,28 @@ +package apigateway + +import ( + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptDomainNamesV2(modules terraform.Modules) []v2.DomainName { + + var domainNames []v2.DomainName + + for _, module := range modules { + for _, nameBlock := range module.GetResourcesByType("aws_apigatewayv2_domain_name") { + domainName := v2.DomainName{ + Metadata: nameBlock.GetMetadata(), + Name: nameBlock.GetAttribute("domain_name").AsStringValueOrDefault("", nameBlock), + SecurityPolicy: types.StringDefault("TLS_1_0", nameBlock.GetMetadata()), + } + if config := nameBlock.GetBlock("domain_name_configuration"); config.IsNotNil() { + domainName.SecurityPolicy = config.GetAttribute("security_policy").AsStringValueOrDefault("TLS_1_0", config) + } + domainNames = append(domainNames, domainName) + } + } + + return 
domainNames +} diff --git a/internal/adapters/terraform/aws/apigateway/namesv2_test.go b/internal/adapters/terraform/aws/apigateway/namesv2_test.go new file mode 100644 index 000000000000..c2c1c8da4c78 --- /dev/null +++ b/internal/adapters/terraform/aws/apigateway/namesv2_test.go @@ -0,0 +1,56 @@ +package apigateway + +import ( + "testing" + + v2 "github.com/aquasecurity/defsec/pkg/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptDomainNamesV2(t *testing.T) { + tests := []struct { + name string + terraform string + expected []v2.DomainName + }{ + { + name: "defaults", + terraform: ` +resource "aws_apigatewayv2_domain_name" "example" { +} +`, + expected: []v2.DomainName{ + { + Name: String(""), + SecurityPolicy: String("TLS_1_0"), + }, + }, + }, + { + name: "fully populated", + terraform: ` +resource "aws_apigatewayv2_domain_name" "example" { + domain_name = "testing.com" + domain_name_configuration { + security_policy = "TLS_1_2" + } +} +`, + expected: []v2.DomainName{ + { + Name: String("testing.com"), + SecurityPolicy: String("TLS_1_2"), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDomainNamesV2(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/aws/athena/adapt.go b/internal/adapters/terraform/aws/athena/adapt.go new file mode 100644 index 000000000000..0f72c457551b --- /dev/null +++ b/internal/adapters/terraform/aws/athena/adapt.go @@ -0,0 +1,80 @@ +package athena + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) athena.Athena { + return athena.Athena{ 
+ Databases: adaptDatabases(modules), + Workgroups: adaptWorkgroups(modules), + } +} + +func adaptDatabases(modules terraform.Modules) []athena.Database { + var databases []athena.Database + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_athena_database") { + databases = append(databases, adaptDatabase(resource)) + } + } + return databases +} + +func adaptWorkgroups(modules terraform.Modules) []athena.Workgroup { + var workgroups []athena.Workgroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_athena_workgroup") { + workgroups = append(workgroups, adaptWorkgroup(resource)) + } + } + return workgroups +} + +func adaptDatabase(resource *terraform.Block) athena.Database { + database := athena.Database{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Encryption: athena.EncryptionConfiguration{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + } + if encryptionConfigBlock := resource.GetBlock("encryption_configuration"); encryptionConfigBlock.IsNotNil() { + database.Encryption.Metadata = encryptionConfigBlock.GetMetadata() + encryptionOptionAttr := encryptionConfigBlock.GetAttribute("encryption_option") + database.Encryption.Type = encryptionOptionAttr.AsStringValueOrDefault("", encryptionConfigBlock) + } + + return database +} + +func adaptWorkgroup(resource *terraform.Block) athena.Workgroup { + workgroup := athena.Workgroup{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Encryption: athena.EncryptionConfiguration{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + EnforceConfiguration: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if configBlock := resource.GetBlock("configuration"); configBlock.IsNotNil() { + + 
enforceWGConfigAttr := configBlock.GetAttribute("enforce_workgroup_configuration") + workgroup.EnforceConfiguration = enforceWGConfigAttr.AsBoolValueOrDefault(true, configBlock) + + if resultConfigBlock := configBlock.GetBlock("result_configuration"); configBlock.IsNotNil() { + if encryptionConfigBlock := resultConfigBlock.GetBlock("encryption_configuration"); encryptionConfigBlock.IsNotNil() { + encryptionOptionAttr := encryptionConfigBlock.GetAttribute("encryption_option") + workgroup.Encryption.Metadata = encryptionConfigBlock.GetMetadata() + workgroup.Encryption.Type = encryptionOptionAttr.AsStringValueOrDefault("", encryptionConfigBlock) + } + } + } + + return workgroup +} diff --git a/internal/adapters/terraform/aws/athena/adapt_test.go b/internal/adapters/terraform/aws/athena/adapt_test.go new file mode 100644 index 000000000000..c4daaf9c9284 --- /dev/null +++ b/internal/adapters/terraform/aws/athena/adapt_test.go @@ -0,0 +1,211 @@ +package athena + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/athena" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptDatabase(t *testing.T) { + tests := []struct { + name string + terraform string + expected athena.Database + }{ + { + name: "athena database", + terraform: ` + resource "aws_athena_database" "my_wg" { + name = "database_name" + + encryption_configuration { + encryption_option = "SSE_KMS" + } + } +`, + expected: athena.Database{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("database_name", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + }, + }, + 
} + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDatabase(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptWorkgroup(t *testing.T) { + tests := []struct { + name string + terraform string + expected athena.Workgroup + }{ + { + name: "encryption type SSE KMS", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + enforce_workgroup_configuration = true + + result_configuration { + encryption_configuration { + encryption_option = "SSE_KMS" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "configuration not enforced", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + enforce_workgroup_configuration = false + + result_configuration { + encryption_configuration { + encryption_option = "SSE_KMS" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeSSEKMS, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "enforce configuration defaults to true", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + + configuration { + result_configuration { + 
encryption_configuration { + encryption_option = "" + } + } + } + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeNone, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "missing configuration block", + terraform: ` + resource "aws_athena_workgroup" "my_wg" { + name = "example" + } +`, + expected: athena.Workgroup{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + Encryption: athena.EncryptionConfiguration{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String(athena.EncryptionTypeNone, defsecTypes.NewTestMetadata()), + }, + EnforceConfiguration: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWorkgroup(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_athena_database" "good_example" { + name = "database_name" + bucket = aws_s3_bucket.hoge.bucket + + encryption_configuration { + encryption_option = "SSE_KMS" + kms_key_arn = aws_kms_key.example.arn + } + } + + resource "aws_athena_workgroup" "good_example" { + name = "example" + + configuration { + enforce_workgroup_configuration = true + publish_cloudwatch_metrics_enabled = true + + result_configuration { + output_location = "s3://${aws_s3_bucket.example.bucket}/output/" + + encryption_configuration { + encryption_option = "SSE_KMS" + kms_key_arn = aws_kms_key.example.arn + } + } + } + }` + + modules := 
tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Databases, 1) + require.Len(t, adapted.Workgroups, 1) + + assert.Equal(t, 7, adapted.Databases[0].Encryption.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, adapted.Databases[0].Encryption.Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, adapted.Workgroups[0].EnforceConfiguration.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, adapted.Workgroups[0].EnforceConfiguration.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, adapted.Workgroups[0].Encryption.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, adapted.Workgroups[0].Encryption.Type.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudfront/adapt.go b/internal/adapters/terraform/aws/cloudfront/adapt.go new file mode 100644 index 000000000000..dba2662f9b9c --- /dev/null +++ b/internal/adapters/terraform/aws/cloudfront/adapt.go @@ -0,0 +1,79 @@ +package cloudfront + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) cloudfront.Cloudfront { + return cloudfront.Cloudfront{ + Distributions: adaptDistributions(modules), + } +} + +func adaptDistributions(modules terraform.Modules) []cloudfront.Distribution { + var distributions []cloudfront.Distribution + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudfront_distribution") { + distributions = append(distributions, adaptDistribution(resource)) + } + } + return distributions +} + +func adaptDistribution(resource *terraform.Block) cloudfront.Distribution { + + distribution := cloudfront.Distribution{ + Metadata: resource.GetMetadata(), + WAFID: types.StringDefault("", resource.GetMetadata()), + Logging: cloudfront.Logging{ + Metadata: 
resource.GetMetadata(), + Bucket: types.StringDefault("", resource.GetMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: resource.GetMetadata(), + ViewerProtocolPolicy: types.String("allow-all", resource.GetMetadata()), + }, + OrdererCacheBehaviours: nil, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: resource.GetMetadata(), + MinimumProtocolVersion: types.StringDefault("TLSv1", resource.GetMetadata()), + }, + } + + distribution.WAFID = resource.GetAttribute("web_acl_id").AsStringValueOrDefault("", resource) + + if loggingBlock := resource.GetBlock("logging_config"); loggingBlock.IsNotNil() { + distribution.Logging.Metadata = loggingBlock.GetMetadata() + bucketAttr := loggingBlock.GetAttribute("bucket") + distribution.Logging.Bucket = bucketAttr.AsStringValueOrDefault("", loggingBlock) + } + + if defaultCacheBlock := resource.GetBlock("default_cache_behavior"); defaultCacheBlock.IsNotNil() { + distribution.DefaultCacheBehaviour.Metadata = defaultCacheBlock.GetMetadata() + viewerProtocolPolicyAttr := defaultCacheBlock.GetAttribute("viewer_protocol_policy") + distribution.DefaultCacheBehaviour.ViewerProtocolPolicy = viewerProtocolPolicyAttr.AsStringValueOrDefault("allow-all", defaultCacheBlock) + } + + orderedCacheBlocks := resource.GetBlocks("ordered_cache_behavior") + for _, orderedCacheBlock := range orderedCacheBlocks { + viewerProtocolPolicyAttr := orderedCacheBlock.GetAttribute("viewer_protocol_policy") + viewerProtocolPolicyVal := viewerProtocolPolicyAttr.AsStringValueOrDefault("allow-all", orderedCacheBlock) + distribution.OrdererCacheBehaviours = append(distribution.OrdererCacheBehaviours, cloudfront.CacheBehaviour{ + Metadata: orderedCacheBlock.GetMetadata(), + ViewerProtocolPolicy: viewerProtocolPolicyVal, + }) + } + + if viewerCertBlock := resource.GetBlock("viewer_certificate"); viewerCertBlock.IsNotNil() { + distribution.ViewerCertificate = cloudfront.ViewerCertificate{ + Metadata: 
viewerCertBlock.GetMetadata(), + MinimumProtocolVersion: viewerCertBlock.GetAttribute("minimum_protocol_version").AsStringValueOrDefault("TLSv1", viewerCertBlock), + SSLSupportMethod: viewerCertBlock.GetAttribute("ssl_support_method").AsStringValueOrDefault("", viewerCertBlock), + CloudfrontDefaultCertificate: viewerCertBlock.GetAttribute("cloudfront_default_certificate").AsBoolValueOrDefault(false, viewerCertBlock), + } + } + + return distribution +} diff --git a/internal/adapters/terraform/aws/cloudfront/adapt_test.go b/internal/adapters/terraform/aws/cloudfront/adapt_test.go new file mode 100644 index 000000000000..9131bd1a36d0 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudfront/adapt_test.go @@ -0,0 +1,163 @@ +package cloudfront + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudfront" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptDistribution(t *testing.T) { + tests := []struct { + name string + terraform string + expected cloudfront.Distribution + }{ + { + name: "configured", + terraform: ` + resource "aws_cloudfront_distribution" "example" { + logging_config { + bucket = "mylogs.s3.amazonaws.com" + } + + web_acl_id = "waf_id" + + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + ordered_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1.2_2021" + ssl_support_method = "sni-only" + } + } +`, + expected: cloudfront.Distribution{ + Metadata: defsecTypes.NewTestMetadata(), + WAFID: defsecTypes.String("waf_id", defsecTypes.NewTestMetadata()), + Logging: cloudfront.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Bucket: 
defsecTypes.String("mylogs.s3.amazonaws.com", defsecTypes.NewTestMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("redirect-to-https", defsecTypes.NewTestMetadata()), + }, + OrdererCacheBehaviours: []cloudfront.CacheBehaviour{ + { + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("redirect-to-https", defsecTypes.NewTestMetadata()), + }, + }, + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumProtocolVersion: defsecTypes.String("TLSv1.2_2021", defsecTypes.NewTestMetadata()), + CloudfrontDefaultCertificate: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SSLSupportMethod: defsecTypes.String("sni-only", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_cloudfront_distribution" "example" { + } +`, + expected: cloudfront.Distribution{ + Metadata: defsecTypes.NewTestMetadata(), + WAFID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Logging: cloudfront.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Bucket: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + Metadata: defsecTypes.NewTestMetadata(), + ViewerProtocolPolicy: defsecTypes.String("allow-all", defsecTypes.NewTestMetadata()), + }, + + ViewerCertificate: cloudfront.ViewerCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumProtocolVersion: defsecTypes.String("TLSv1", defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDistribution(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_cloudfront_distribution" 
"example" { + logging_config { + bucket = "mylogs.s3.amazonaws.com" + } + + web_acl_id = "waf_id" + + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + ordered_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1.2_2021" + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Distributions, 1) + distribution := adapted.Distributions[0] + + assert.Equal(t, 2, distribution.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, distribution.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, distribution.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, distribution.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, distribution.WAFID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, distribution.WAFID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 9, distribution.DefaultCacheBehaviour.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, distribution.DefaultCacheBehaviour.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, distribution.DefaultCacheBehaviour.ViewerProtocolPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, distribution.DefaultCacheBehaviour.ViewerProtocolPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, distribution.OrdererCacheBehaviours[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 15, distribution.OrdererCacheBehaviours[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, distribution.OrdererCacheBehaviours[0].ViewerProtocolPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, distribution.OrdererCacheBehaviours[0].ViewerProtocolPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, distribution.ViewerCertificate.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, 
distribution.ViewerCertificate.Metadata.Range().GetEndLine()) + + assert.Equal(t, 19, distribution.ViewerCertificate.MinimumProtocolVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, distribution.ViewerCertificate.MinimumProtocolVersion.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt.go b/internal/adapters/terraform/aws/cloudtrail/adapt.go new file mode 100644 index 000000000000..0a9e8b0d5828 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudtrail/adapt.go @@ -0,0 +1,67 @@ +package cloudtrail + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) cloudtrail.CloudTrail { + return cloudtrail.CloudTrail{ + Trails: adaptTrails(modules), + } +} + +func adaptTrails(modules terraform.Modules) []cloudtrail.Trail { + var trails []cloudtrail.Trail + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudtrail") { + trails = append(trails, adaptTrail(resource)) + } + } + return trails +} + +func adaptTrail(resource *terraform.Block) cloudtrail.Trail { + nameAttr := resource.GetAttribute("name") + nameVal := nameAttr.AsStringValueOrDefault("", resource) + + enableLogFileValidationAttr := resource.GetAttribute("enable_log_file_validation") + enableLogFileValidationVal := enableLogFileValidationAttr.AsBoolValueOrDefault(false, resource) + + isMultiRegionAttr := resource.GetAttribute("is_multi_region_trail") + isMultiRegionVal := isMultiRegionAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + var selectors []cloudtrail.EventSelector + for _, selBlock := range resource.GetBlocks("event_selector") { + var resources []cloudtrail.DataResource + for _, resBlock := range selBlock.GetBlocks("data_resource") { + resources = 
append(resources, cloudtrail.DataResource{ + Metadata: resBlock.GetMetadata(), + Type: resBlock.GetAttribute("type").AsStringValueOrDefault("", resBlock), + Values: resBlock.GetAttribute("values").AsStringValues(), + }) + } + selector := cloudtrail.EventSelector{ + Metadata: selBlock.GetMetadata(), + DataResources: resources, + ReadWriteType: selBlock.GetAttribute("read_write_type").AsStringValueOrDefault("All", selBlock), + } + selectors = append(selectors, selector) + } + + return cloudtrail.Trail{ + Metadata: resource.GetMetadata(), + Name: nameVal, + EnableLogFileValidation: enableLogFileValidationVal, + IsMultiRegion: isMultiRegionVal, + KMSKeyID: KMSKeyIDVal, + CloudWatchLogsLogGroupArn: resource.GetAttribute("cloud_watch_logs_group_arn").AsStringValueOrDefault("", resource), + IsLogging: resource.GetAttribute("enable_logging").AsBoolValueOrDefault(true, resource), + BucketName: resource.GetAttribute("s3_bucket_name").AsStringValueOrDefault("", resource), + EventSelectors: selectors, + } +} diff --git a/internal/adapters/terraform/aws/cloudtrail/adapt_test.go b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go new file mode 100644 index 000000000000..c669d96f0010 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudtrail/adapt_test.go @@ -0,0 +1,106 @@ +package cloudtrail + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudtrail" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptTrail(t *testing.T) { + tests := []struct { + name string + terraform string + expected cloudtrail.Trail + }{ + { + name: "configured", + terraform: ` + resource "aws_cloudtrail" "example" { + name = "example" + is_multi_region_trail = true + + enable_log_file_validation = true + kms_key_id = 
"kms-key" + s3_bucket_name = "abcdefgh" + cloud_watch_logs_group_arn = "abc" + enable_logging = false + } +`, + expected: cloudtrail.Trail{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("example", defsecTypes.NewTestMetadata()), + EnableLogFileValidation: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + IsMultiRegion: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + CloudWatchLogsLogGroupArn: defsecTypes.String("abc", defsecTypes.NewTestMetadata()), + IsLogging: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + BucketName: defsecTypes.String("abcdefgh", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_cloudtrail" "example" { + } +`, + expected: cloudtrail.Trail{ + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLogFileValidation: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + IsMultiRegion: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + BucketName: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CloudWatchLogsLogGroupArn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IsLogging: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTrail(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_cloudtrail" "example" { + name = "example" + is_multi_region_trail = true + + enable_log_file_validation = true + kms_key_id = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Trails, 
1) + trail := adapted.Trails[0] + + assert.Equal(t, 2, trail.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, trail.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, trail.Name.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, trail.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, trail.IsMultiRegion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, trail.IsMultiRegion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, trail.EnableLogFileValidation.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, trail.EnableLogFileValidation.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, trail.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, trail.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt.go b/internal/adapters/terraform/aws/cloudwatch/adapt.go new file mode 100644 index 000000000000..3e3a378e0b8d --- /dev/null +++ b/internal/adapters/terraform/aws/cloudwatch/adapt.go @@ -0,0 +1,47 @@ +package cloudwatch + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) cloudwatch.CloudWatch { + return cloudwatch.CloudWatch{ + LogGroups: adaptLogGroups(modules), + } +} + +func adaptLogGroups(modules terraform.Modules) []cloudwatch.LogGroup { + var logGroups []cloudwatch.LogGroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_cloudwatch_log_group") { + logGroups = append(logGroups, adaptLogGroup(resource, module)) + } + } + return logGroups +} + +func adaptLogGroup(resource *terraform.Block, module *terraform.Module) cloudwatch.LogGroup { + nameAttr := resource.GetAttribute("name") + nameVal := nameAttr.AsStringValueOrDefault("", resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := 
KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + if keyBlock, err := module.GetReferencedBlock(KMSKeyIDAttr, resource); err == nil { + KMSKeyIDVal = types.String(keyBlock.FullName(), keyBlock.GetMetadata()) + } + + retentionInDaysAttr := resource.GetAttribute("retention_in_days") + retentionInDaysVal := retentionInDaysAttr.AsIntValueOrDefault(0, resource) + + return cloudwatch.LogGroup{ + Metadata: resource.GetMetadata(), + Arn: types.StringDefault("", resource.GetMetadata()), + Name: nameVal, + KMSKeyID: KMSKeyIDVal, + RetentionInDays: retentionInDaysVal, + MetricFilters: nil, + } +} diff --git a/internal/adapters/terraform/aws/cloudwatch/adapt_test.go b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go new file mode 100644 index 000000000000..1486cfeb7d08 --- /dev/null +++ b/internal/adapters/terraform/aws/cloudwatch/adapt_test.go @@ -0,0 +1,114 @@ +package cloudwatch + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/cloudwatch" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptLogGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []cloudwatch.LogGroup + }{ + { + name: "key referencing block", + terraform: ` + resource "aws_cloudwatch_log_group" "my-group" { + name = "my-group" + kms_key_id = aws_kms_key.log_key.arn + } + + resource "aws_kms_key" "log_key" { + } +`, + expected: []cloudwatch.LogGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Arn: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Name: defsecTypes.String("my-group", defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("aws_kms_key.log_key", defsecTypes.NewTestMetadata()), + RetentionInDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + 
MetricFilters:   nil,
+				},
+			},
+		},
+		{
+			name: "key as string",
+			terraform: `
+			resource "aws_cloudwatch_log_group" "my-group" {
+				name = "my-group"
+				kms_key_id = "key-as-string"
+			}
+`,
+			expected: []cloudwatch.LogGroup{
+				{
+					Metadata:        defsecTypes.NewTestMetadata(),
+					Arn:             defsecTypes.String("", defsecTypes.NewTestMetadata()),
+					Name:            defsecTypes.String("my-group", defsecTypes.NewTestMetadata()),
+					KMSKeyID:        defsecTypes.String("key-as-string", defsecTypes.NewTestMetadata()),
+					RetentionInDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()),
+				},
+			},
+		},
+		{
+			name: "missing key",
+			terraform: `
+			resource "aws_cloudwatch_log_group" "my-group" {
+				name = "my-group"
+				retention_in_days = 3
+			}
+`,
+			expected: []cloudwatch.LogGroup{
+				{
+					Metadata:        defsecTypes.NewTestMetadata(),
+					Arn:             defsecTypes.String("", defsecTypes.NewTestMetadata()),
+					Name:            defsecTypes.String("my-group", defsecTypes.NewTestMetadata()),
+					KMSKeyID:        defsecTypes.String("", defsecTypes.NewTestMetadata()),
+					RetentionInDays: defsecTypes.Int(3, defsecTypes.NewTestMetadata()),
+				},
+			},
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
+			adapted := adaptLogGroups(modules)
+			testutil.AssertDefsecEqual(t, test.expected, adapted)
+		})
+	}
+}
+
+func TestLines(t *testing.T) {
+	src := `
+	resource "aws_cloudwatch_log_group" "my-group" {
+		name = "my-group"
+		kms_key_id = aws_kms_key.log_key.arn
+		retention_in_days = 3
+
+	}`
+
+	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
+	adapted := Adapt(modules)
+	require.Len(t, adapted.LogGroups, 1)
+	logGroup := adapted.LogGroups[0]
+
+	assert.Equal(t, 3, logGroup.Name.GetMetadata().Range().GetStartLine())
+	assert.Equal(t, 3, logGroup.Name.GetMetadata().Range().GetEndLine())
+
+	assert.Equal(t, 4, logGroup.KMSKeyID.GetMetadata().Range().GetStartLine())
+	assert.Equal(t, 4, logGroup.KMSKeyID.GetMetadata().Range().GetEndLine())
+
+	assert.Equal(t, 5, logGroup.RetentionInDays.GetMetadata().Range().GetStartLine())
+	assert.Equal(t, 5, logGroup.RetentionInDays.GetMetadata().Range().GetEndLine())
+}
diff --git a/internal/adapters/terraform/aws/codebuild/adapt.go b/internal/adapters/terraform/aws/codebuild/adapt.go
new file mode 100644
index 000000000000..7870ff26eae0
--- /dev/null
+++ b/internal/adapters/terraform/aws/codebuild/adapt.go
@@ -0,0 +1,66 @@
+package codebuild
+
+import (
+	"github.com/aquasecurity/defsec/pkg/providers/aws/codebuild"
+	"github.com/aquasecurity/defsec/pkg/terraform"
+	"github.com/aquasecurity/defsec/pkg/types"
+)
+
+func Adapt(modules terraform.Modules) codebuild.CodeBuild {
+	return codebuild.CodeBuild{
+		Projects: adaptProjects(modules),
+	}
+}
+
+func adaptProjects(modules terraform.Modules) []codebuild.Project {
+	var projects []codebuild.Project
+	for _, module := range modules {
+		for _, resource := range module.GetResourcesByType("aws_codebuild_project") {
+			projects = append(projects, adaptProject(resource))
+		}
+	}
+	return projects
+}
+
+func adaptProject(resource *terraform.Block) codebuild.Project {
+
+	project := codebuild.Project{
+		Metadata: resource.GetMetadata(),
+		ArtifactSettings: codebuild.ArtifactSettings{
+			Metadata:          resource.GetMetadata(),
+			EncryptionEnabled: types.BoolDefault(true, resource.GetMetadata()),
+		},
+		SecondaryArtifactSettings: nil,
+	}
+
+	var hasArtifacts bool
+
+	if artifactsBlock := resource.GetBlock("artifacts"); artifactsBlock.IsNotNil() {
+		project.ArtifactSettings.Metadata = artifactsBlock.GetMetadata()
+		typeAttr := artifactsBlock.GetAttribute("type")
+		encryptionDisabledAttr := artifactsBlock.GetAttribute("encryption_disabled")
+		hasArtifacts = typeAttr.NotEqual("NO_ARTIFACTS")
+		if encryptionDisabledAttr.IsTrue() && hasArtifacts {
+			project.ArtifactSettings.EncryptionEnabled = types.Bool(false, artifactsBlock.GetMetadata())
+		} else {
+			project.ArtifactSettings.EncryptionEnabled = types.Bool(true, 
artifactsBlock.GetMetadata()) + } + } + + secondaryArtifactBlocks := resource.GetBlocks("secondary_artifacts") + for _, secondaryArtifactBlock := range secondaryArtifactBlocks { + + secondaryEncryptionEnabled := types.BoolDefault(true, secondaryArtifactBlock.GetMetadata()) + secondaryEncryptionDisabledAttr := secondaryArtifactBlock.GetAttribute("encryption_disabled") + if secondaryEncryptionDisabledAttr.IsTrue() && hasArtifacts { + secondaryEncryptionEnabled = types.Bool(false, secondaryArtifactBlock.GetMetadata()) + } + + project.SecondaryArtifactSettings = append(project.SecondaryArtifactSettings, codebuild.ArtifactSettings{ + Metadata: secondaryArtifactBlock.GetMetadata(), + EncryptionEnabled: secondaryEncryptionEnabled, + }) + } + + return project +} diff --git a/internal/adapters/terraform/aws/codebuild/adapt_test.go b/internal/adapters/terraform/aws/codebuild/adapt_test.go new file mode 100644 index 000000000000..0488a1441831 --- /dev/null +++ b/internal/adapters/terraform/aws/codebuild/adapt_test.go @@ -0,0 +1,116 @@ +package codebuild + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/codebuild" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptProject(t *testing.T) { + tests := []struct { + name string + terraform string + expected codebuild.Project + }{ + { + name: "configured", + terraform: ` + resource "aws_codebuild_project" "codebuild" { + + artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = false + } + secondary_artifacts { + encryption_disabled = true + } + } +`, + expected: codebuild.Project{ + Metadata: defsecTypes.NewTestMetadata(), + ArtifactSettings: codebuild.ArtifactSettings{ + Metadata: defsecTypes.NewTestMetadata(), + 
EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + SecondaryArtifactSettings: []codebuild.ArtifactSettings{ + { + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "defaults - encryption enabled", + terraform: ` + resource "aws_codebuild_project" "codebuild" { + } +`, + expected: codebuild.Project{ + Metadata: defsecTypes.NewTestMetadata(), + ArtifactSettings: codebuild.ArtifactSettings{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptionEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptProject(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_codebuild_project" "codebuild" { + artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = false + } + + secondary_artifacts { + encryption_disabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Projects, 1) + project := adapted.Projects[0] + + assert.Equal(t, 2, project.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, project.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, project.ArtifactSettings.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, project.ArtifactSettings.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, project.SecondaryArtifactSettings[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 9, project.SecondaryArtifactSettings[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 11, 
project.SecondaryArtifactSettings[1].Metadata.Range().GetStartLine()) + assert.Equal(t, 13, project.SecondaryArtifactSettings[1].Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/config/adapt.go b/internal/adapters/terraform/aws/config/adapt.go new file mode 100644 index 000000000000..608f6f741f81 --- /dev/null +++ b/internal/adapters/terraform/aws/config/adapt.go @@ -0,0 +1,33 @@ +package config + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) config.Config { + return config.Config{ + ConfigurationAggregrator: adaptConfigurationAggregrator(modules), + } +} + +func adaptConfigurationAggregrator(modules terraform.Modules) config.ConfigurationAggregrator { + configurationAggregrator := config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + SourceAllRegions: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } + + for _, resource := range modules.GetResourcesByType("aws_config_configuration_aggregator") { + configurationAggregrator.Metadata = resource.GetMetadata() + aggregationBlock := resource.GetFirstMatchingBlock("account_aggregation_source", "organization_aggregation_source") + if aggregationBlock.IsNil() { + configurationAggregrator.SourceAllRegions = defsecTypes.Bool(false, resource.GetMetadata()) + } else { + allRegionsAttr := aggregationBlock.GetAttribute("all_regions") + allRegionsVal := allRegionsAttr.AsBoolValueOrDefault(false, aggregationBlock) + configurationAggregrator.SourceAllRegions = allRegionsVal + } + } + return configurationAggregrator +} diff --git a/internal/adapters/terraform/aws/config/adapt_test.go b/internal/adapters/terraform/aws/config/adapt_test.go new file mode 100644 index 000000000000..5fd929b70689 --- /dev/null +++ b/internal/adapters/terraform/aws/config/adapt_test.go @@ -0,0 
+1,81 @@ +package config + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/config" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" +) + +func Test_adaptConfigurationAggregrator(t *testing.T) { + tests := []struct { + name string + terraform string + expected config.ConfigurationAggregrator + }{ + { + name: "configured", + terraform: ` + resource "aws_config_configuration_aggregator" "example" { + name = "example" + + account_aggregation_source { + account_ids = ["123456789012"] + all_regions = true + } + } +`, + expected: config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewTestMetadata(), + SourceAllRegions: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_config_configuration_aggregator" "example" { + } +`, + expected: config.ConfigurationAggregrator{ + Metadata: defsecTypes.NewTestMetadata(), + SourceAllRegions: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptConfigurationAggregrator(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_config_configuration_aggregator" "example" { + name = "example" + + account_aggregation_source { + account_ids = ["123456789012"] + all_regions = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + aggregator := adapted.ConfigurationAggregrator + + assert.Equal(t, 2, aggregator.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, aggregator.Metadata.Range().GetEndLine()) + + assert.Equal(t, 7, 
aggregator.SourceAllRegions.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, aggregator.SourceAllRegions.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/documentdb/adapt.go b/internal/adapters/terraform/aws/documentdb/adapt.go new file mode 100644 index 000000000000..104ef836d498 --- /dev/null +++ b/internal/adapters/terraform/aws/documentdb/adapt.go @@ -0,0 +1,63 @@ +package documentdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) documentdb.DocumentDB { + return documentdb.DocumentDB{ + Clusters: adaptClusters(modules), + } +} + +func adaptClusters(modules terraform.Modules) []documentdb.Cluster { + var clusters []documentdb.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_docdb_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) documentdb.Cluster { + identifierAttr := resource.GetAttribute("cluster_identifier") + identifierVal := identifierAttr.AsStringValueOrDefault("", resource) + + var enabledLogExports []types.StringValue + var instances []documentdb.Instance + + enabledLogExportsAttr := resource.GetAttribute("enabled_cloudwatch_logs_exports") + for _, logExport := range enabledLogExportsAttr.AsStringValues() { + enabledLogExports = append(enabledLogExports, logExport) + } + + instancesRes := module.GetReferencingResources(resource, "aws_docdb_cluster_instance", "cluster_identifier") + for _, instanceRes := range instancesRes { + keyIDAttr := instanceRes.GetAttribute("kms_key_id") + keyIDVal := keyIDAttr.AsStringValueOrDefault("", instanceRes) + + instances = append(instances, documentdb.Instance{ + Metadata: instanceRes.GetMetadata(), + KMSKeyID: keyIDVal, + }) + 
} + + storageEncryptedAttr := resource.GetAttribute("storage_encrypted") + storageEncryptedVal := storageEncryptedAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyIDAttr := resource.GetAttribute("kms_key_id") + KMSKeyIDVal := KMSKeyIDAttr.AsStringValueOrDefault("", resource) + + return documentdb.Cluster{ + Metadata: resource.GetMetadata(), + Identifier: identifierVal, + EnabledLogExports: enabledLogExports, + BackupRetentionPeriod: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + Instances: instances, + StorageEncrypted: storageEncryptedVal, + KMSKeyID: KMSKeyIDVal, + } +} diff --git a/internal/adapters/terraform/aws/documentdb/adapt_test.go b/internal/adapters/terraform/aws/documentdb/adapt_test.go new file mode 100644 index 000000000000..470d2992c189 --- /dev/null +++ b/internal/adapters/terraform/aws/documentdb/adapt_test.go @@ -0,0 +1,125 @@ +package documentdb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/documentdb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected documentdb.Cluster + }{ + { + name: "configured", + terraform: ` + resource "aws_docdb_cluster" "docdb" { + cluster_identifier = "my-docdb-cluster" + kms_key_id = "kms-key" + enabled_cloudwatch_logs_exports = "audit" + storage_encrypted = true + } + + resource "aws_docdb_cluster_instance" "cluster_instances" { + count = 1 + identifier = "my-docdb-cluster" + cluster_identifier = aws_docdb_cluster.docdb.id + kms_key_id = "kms-key#1" + } +`, + expected: documentdb.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Identifier: defsecTypes.String("my-docdb-cluster", 
defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + EnabledLogExports: []defsecTypes.StringValue{ + defsecTypes.String("audit", defsecTypes.NewTestMetadata()), + }, + Instances: []documentdb.Instance{ + { + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyID: defsecTypes.String("kms-key#1", defsecTypes.NewTestMetadata()), + }, + }, + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_docdb_cluster" "docdb" { + } +`, + expected: documentdb.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Identifier: defsecTypes.String("", defsecTypes.NewTestMetadata()), + StorageEncrypted: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_docdb_cluster" "docdb" { + cluster_identifier = "my-docdb-cluster" + kms_key_id = "kms-key" + enabled_cloudwatch_logs_exports = "audit" + storage_encrypted = true + } + + resource "aws_docdb_cluster_instance" "cluster_instances" { + count = 1 + identifier = "my-docdb-cluster" + cluster_identifier = aws_docdb_cluster.docdb.id + kms_key_id = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + require.Len(t, adapted.Clusters[0].Instances, 1) + + cluster := adapted.Clusters[0] + instance := cluster.Instances[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 7, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, 
cluster.Identifier.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.Identifier.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, cluster.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.EnabledLogExports[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, cluster.EnabledLogExports[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, cluster.StorageEncrypted.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, cluster.StorageEncrypted.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 9, instance.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, instance.Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, instance.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, instance.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/dynamodb/adapt.go b/internal/adapters/terraform/aws/dynamodb/adapt.go new file mode 100644 index 000000000000..a09ffcfb742d --- /dev/null +++ b/internal/adapters/terraform/aws/dynamodb/adapt.go @@ -0,0 +1,94 @@ +package dynamodb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) dynamodb.DynamoDB { + return dynamodb.DynamoDB{ + DAXClusters: adaptClusters(modules), + Tables: adaptTables(modules), + } +} + +func adaptClusters(modules terraform.Modules) []dynamodb.DAXCluster { + var clusters []dynamodb.DAXCluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_dax_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptTables(modules terraform.Modules) []dynamodb.Table { + var tables []dynamodb.Table + for _, module := range modules { + for _, 
resource := range module.GetResourcesByType("aws_dynamodb_table") { + tables = append(tables, adaptTable(resource, module)) + } + } + return tables +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) dynamodb.DAXCluster { + + cluster := dynamodb.DAXCluster{ + Metadata: resource.GetMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + PointInTimeRecovery: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if ssEncryptionBlock := resource.GetBlock("server_side_encryption"); ssEncryptionBlock.IsNotNil() { + cluster.ServerSideEncryption.Metadata = ssEncryptionBlock.GetMetadata() + enabledAttr := ssEncryptionBlock.GetAttribute("enabled") + cluster.ServerSideEncryption.Enabled = enabledAttr.AsBoolValueOrDefault(false, ssEncryptionBlock) + } + + if recoveryBlock := resource.GetBlock("point_in_time_recovery"); recoveryBlock.IsNotNil() { + recoveryEnabledAttr := recoveryBlock.GetAttribute("enabled") + cluster.PointInTimeRecovery = recoveryEnabledAttr.AsBoolValueOrDefault(false, recoveryBlock) + } + + return cluster +} + +func adaptTable(resource *terraform.Block, module *terraform.Module) dynamodb.Table { + + table := dynamodb.Table{ + Metadata: resource.GetMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + PointInTimeRecovery: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if ssEncryptionBlock := resource.GetBlock("server_side_encryption"); ssEncryptionBlock.IsNotNil() { + table.ServerSideEncryption.Metadata = ssEncryptionBlock.GetMetadata() + enabledAttr := ssEncryptionBlock.GetAttribute("enabled") + table.ServerSideEncryption.Enabled = 
enabledAttr.AsBoolValueOrDefault(false, ssEncryptionBlock) + + kmsKeyIdAttr := ssEncryptionBlock.GetAttribute("kms_key_arn") + table.ServerSideEncryption.KMSKeyID = kmsKeyIdAttr.AsStringValueOrDefault("alias/aws/dynamodb", ssEncryptionBlock) + + kmsBlock, err := module.GetReferencedBlock(kmsKeyIdAttr, resource) + if err == nil && kmsBlock.IsNotNil() { + table.ServerSideEncryption.KMSKeyID = defsecTypes.String(kmsBlock.FullName(), kmsBlock.GetMetadata()) + } + } + + if recoveryBlock := resource.GetBlock("point_in_time_recovery"); recoveryBlock.IsNotNil() { + recoveryEnabledAttr := recoveryBlock.GetAttribute("enabled") + table.PointInTimeRecovery = recoveryEnabledAttr.AsBoolValueOrDefault(false, recoveryBlock) + } + + return table +} diff --git a/internal/adapters/terraform/aws/dynamodb/adapt_test.go b/internal/adapters/terraform/aws/dynamodb/adapt_test.go new file mode 100644 index 000000000000..5c233021083d --- /dev/null +++ b/internal/adapters/terraform/aws/dynamodb/adapt_test.go @@ -0,0 +1,176 @@ +package dynamodb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/dynamodb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected dynamodb.DAXCluster + }{ + { + name: "cluster", + terraform: ` + resource "aws_dax_cluster" "example" { + server_side_encryption { + enabled = true + } + } +`, + expected: dynamodb.DAXCluster{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + 
PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptTable(t *testing.T) { + tests := []struct { + name string + terraform string + expected dynamodb.Table + }{ + { + name: "table", + terraform: ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = "key-string" + } + + point_in_time_recovery { + enabled = true + } + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("key-string", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "table no kms", + terraform: ` + resource "aws_dax_cluster" "example" { + server_side_encryption { + enabled = true + } + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("alias/aws/dynamodb", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "reference key", + terraform: ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = aws_kms_key.a.arn + } + } + + resource "aws_kms_key" "a" { + } +`, + expected: dynamodb.Table{ + Metadata: defsecTypes.NewTestMetadata(), + 
ServerSideEncryption: dynamodb.ServerSideEncryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("aws_kms_key.a", defsecTypes.NewTestMetadata()), + }, + PointInTimeRecovery: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTable(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_dynamodb_table" "example" { + name = "example" + + server_side_encryption { + enabled = true + kms_key_arn = "key-string" + } + + point_in_time_recovery { + enabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DAXClusters, 0) + require.Len(t, adapted.Tables, 1) + table := adapted.Tables[0] + + assert.Equal(t, 2, table.Metadata.Range().GetStartLine()) + assert.Equal(t, 13, table.Metadata.Range().GetEndLine()) + + assert.Equal(t, 5, table.ServerSideEncryption.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, table.ServerSideEncryption.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, table.ServerSideEncryption.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, table.ServerSideEncryption.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, table.ServerSideEncryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, table.ServerSideEncryption.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, table.PointInTimeRecovery.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, table.PointInTimeRecovery.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/ec2/adapt.go b/internal/adapters/terraform/aws/ec2/adapt.go 
new file mode 100644 index 000000000000..b0bd052e7f4e --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/adapt.go @@ -0,0 +1,74 @@ +package ec2 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) ec2.EC2 { + + naclAdapter := naclAdapter{naclRuleIDs: modules.GetChildResourceIDMapByType("aws_network_acl_rule")} + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("aws_security_group_rule")} + + return ec2.EC2{ + Instances: getInstances(modules), + VPCs: adaptVPCs(modules), + SecurityGroups: sgAdapter.adaptSecurityGroups(modules), + Subnets: adaptSubnets(modules), + NetworkACLs: naclAdapter.adaptNetworkACLs(modules), + LaunchConfigurations: adaptLaunchConfigurations(modules), + LaunchTemplates: adaptLaunchTemplates(modules), + Volumes: adaptVolumes(modules), + } +} + +func getInstances(modules terraform.Modules) []ec2.Instance { + var instances []ec2.Instance + + blocks := modules.GetResourcesByType("aws_instance") + + for _, b := range blocks { + + metadataOptions := getMetadataOptions(b) + userData := b.GetAttribute("user_data").AsStringValueOrDefault("", b) + + instance := ec2.Instance{ + Metadata: b.GetMetadata(), + MetadataOptions: metadataOptions, + UserData: userData, + SecurityGroups: nil, + RootBlockDevice: &ec2.BlockDevice{ + Metadata: b.GetMetadata(), + Encrypted: types.BoolDefault(false, b.GetMetadata()), + }, + EBSBlockDevices: nil, + } + + if rootBlockDevice := b.GetBlock("root_block_device"); rootBlockDevice.IsNotNil() { + instance.RootBlockDevice.Metadata = rootBlockDevice.GetMetadata() + instance.RootBlockDevice.Encrypted = rootBlockDevice.GetAttribute("encrypted").AsBoolValueOrDefault(false, b) + } + + for _, ebsBlock := range b.GetBlocks("ebs_block_device") { + instance.EBSBlockDevices = append(instance.EBSBlockDevices, &ec2.BlockDevice{ + Metadata: 
ebsBlock.GetMetadata(), + Encrypted: ebsBlock.GetAttribute("encrypted").AsBoolValueOrDefault(false, b), + }) + } + + for _, resource := range modules.GetResourcesByType("aws_ebs_encryption_by_default") { + if resource.GetAttribute("enabled").NotEqual(false) { + instance.RootBlockDevice.Encrypted = types.BoolDefault(true, resource.GetMetadata()) + for i := 0; i < len(instance.EBSBlockDevices); i++ { + ebs := instance.EBSBlockDevices[i] + ebs.Encrypted = types.BoolDefault(true, resource.GetMetadata()) + } + } + } + + instances = append(instances, instance) + } + + return instances +} diff --git a/internal/adapters/terraform/aws/ec2/adapt_test.go b/internal/adapters/terraform/aws/ec2/adapt_test.go new file mode 100644 index 000000000000..fb5761260956 --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/adapt_test.go @@ -0,0 +1,166 @@ +package ec2 + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected ec2.EC2 + }{ + { + name: "configured", + terraform: ` + resource "aws_instance" "example" { + ami = "ami-7f89a64f" + instance_type = "t1.micro" + + root_block_device { + encrypted = true + } + + metadata_options { + http_tokens = "required" + http_endpoint = "disabled" + } + + ebs_block_device { + encrypted = true + } + + user_data = < 0 { + orphanage := ec2.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Description: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + IngressRules: nil, + EgressRules: nil, + IsDefault: defsecTypes.BoolUnresolvable(defsecTypes.NewUnmanagedMetadata()), + VPCID: 
defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()),
		}
		for _, sgRule := range orphanResources {
			if sgRule.GetAttribute("type").Equals("ingress") {
				orphanage.IngressRules = append(orphanage.IngressRules, adaptSGRule(sgRule, modules))
			} else if sgRule.GetAttribute("type").Equals("egress") {
				orphanage.EgressRules = append(orphanage.EgressRules, adaptSGRule(sgRule, modules))
			}
		}
		securityGroups = append(securityGroups, orphanage)
	}

	return securityGroups
}

// adaptNetworkACLs adapts every aws_network_acl resource across all modules.
// Any aws_network_acl_rule resources that were never resolved against a parent
// ACL are gathered into a synthetic "orphan" ACL with unmanaged metadata so
// they still get scanned.
func (a *naclAdapter) adaptNetworkACLs(modules terraform.Modules) []ec2.NetworkACL {
	var networkACLs []ec2.NetworkACL
	for _, module := range modules {
		for _, resource := range module.GetResourcesByType("aws_network_acl") {
			networkACLs = append(networkACLs, a.adaptNetworkACL(resource, module))
		}
	}

	orphanResources := modules.GetResourceByIDs(a.naclRuleIDs.Orphans()...)
	if len(orphanResources) > 0 {
		orphanage := ec2.NetworkACL{
			Metadata:      defsecTypes.NewUnmanagedMetadata(),
			Rules:         nil,
			IsDefaultRule: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()),
		}
		for _, naclRule := range orphanResources {
			orphanage.Rules = append(orphanage.Rules, adaptNetworkACLRule(naclRule))
		}
		networkACLs = append(networkACLs, orphanage)
	}

	return networkACLs
}

// adaptSecurityGroup adapts a single aws_security_group resource, merging in
// both its inline ingress/egress blocks and any standalone
// aws_security_group_rule resources that reference it. Standalone rules are
// marked resolved so they are not also reported as orphans.
// NOTE: parameter renamed module -> modules; its type is terraform.Modules.
func (a *sgAdapter) adaptSecurityGroup(resource *terraform.Block, modules terraform.Modules) ec2.SecurityGroup {
	var ingressRules []ec2.SecurityGroupRule
	var egressRules []ec2.SecurityGroupRule

	descriptionAttr := resource.GetAttribute("description")
	// "Managed by Terraform" mirrors the provider's own default description.
	descriptionVal := descriptionAttr.AsStringValueOrDefault("Managed by Terraform", resource)

	ingressBlocks := resource.GetBlocks("ingress")
	for _, ingressBlock := range ingressBlocks {
		ingressRules = append(ingressRules, adaptSGRule(ingressBlock, modules))
	}

	egressBlocks := resource.GetBlocks("egress")
	for _, egressBlock := range egressBlocks {
		egressRules = append(egressRules, adaptSGRule(egressBlock, modules))
	}

	rulesBlocks := modules.GetReferencingResources(resource, "aws_security_group_rule", "security_group_id")
	for _, ruleBlock := range rulesBlocks {
		a.sgRuleIDs.Resolve(ruleBlock.ID())
		if ruleBlock.GetAttribute("type").Equals("ingress") {
			ingressRules = append(ingressRules, adaptSGRule(ruleBlock, modules))
		} else if ruleBlock.GetAttribute("type").Equals("egress") {
			egressRules = append(egressRules, adaptSGRule(ruleBlock, modules))
		}
	}

	return ec2.SecurityGroup{
		Metadata:     resource.GetMetadata(),
		Description:  descriptionVal,
		IngressRules: ingressRules,
		EgressRules:  egressRules,
		IsDefault:    defsecTypes.Bool(false, defsecTypes.NewUnmanagedMetadata()),
		VPCID:        resource.GetAttribute("vpc_id").AsStringValueOrDefault("", resource),
	}
}

// adaptSGRule adapts one security group rule (either an inline ingress/egress
// block or an aws_security_group_rule resource). cidr_blocks /
// ipv6_cidr_blocks values that reference a variable are resolved to that
// variable's default.
func adaptSGRule(resource *terraform.Block, modules terraform.Modules) ec2.SecurityGroupRule {
	ruleDescAttr := resource.GetAttribute("description")
	ruleDescVal := ruleDescAttr.AsStringValueOrDefault("", resource)

	var cidrs []defsecTypes.StringValue

	cidrBlocks := resource.GetAttribute("cidr_blocks")
	ipv6cidrBlocks := resource.GetAttribute("ipv6_cidr_blocks")
	varBlocks := modules.GetBlocks().OfType("variable")

	for _, vb := range varBlocks {
		if cidrBlocks.IsNotNil() && cidrBlocks.ReferencesBlock(vb) {
			cidrBlocks = vb.GetAttribute("default")
		}
		if ipv6cidrBlocks.IsNotNil() && ipv6cidrBlocks.ReferencesBlock(vb) {
			ipv6cidrBlocks = vb.GetAttribute("default")
		}
	}

	if cidrBlocks.IsNotNil() {
		cidrs = cidrBlocks.AsStringValues()
	}

	if ipv6cidrBlocks.IsNotNil() {
		cidrs = append(cidrs, ipv6cidrBlocks.AsStringValues()...)
	}

	return ec2.SecurityGroupRule{
		Metadata:    resource.GetMetadata(),
		Description: ruleDescVal,
		CIDRs:       cidrs,
	}
}

// adaptNetworkACL adapts an aws_network_acl resource together with every
// aws_network_acl_rule resource that references it, resolving those rule IDs
// so they are excluded from the orphan pass.
func (a *naclAdapter) adaptNetworkACL(resource *terraform.Block, module *terraform.Module) ec2.NetworkACL {
	var networkRules []ec2.NetworkACLRule
	rulesBlocks := module.GetReferencingResources(resource, "aws_network_acl_rule", "network_acl_id")
	for _, ruleBlock := range rulesBlocks {
		a.naclRuleIDs.Resolve(ruleBlock.ID())
		networkRules = append(networkRules, adaptNetworkACLRule(ruleBlock))
	}
	return ec2.NetworkACL{
		Metadata:      resource.GetMetadata(),
		Rules:         networkRules,
		IsDefaultRule: defsecTypes.BoolDefault(false, resource.GetMetadata()),
	}
}

// adaptNetworkACLRule adapts a single aws_network_acl_rule resource. The rule
// direction defaults to "ingress"; it becomes "egress" only when the egress
// attribute is present and true. Both cidr_block and ipv6_cidr_block feed the
// CIDRs list.
// Fixed: egressAtrr/protocolAtrr typos, and ipv4cidrAttr was misnamed — it
// reads the ipv6_cidr_block attribute, so it is now ipv6cidrAttr.
func adaptNetworkACLRule(resource *terraform.Block) ec2.NetworkACLRule {
	var cidrs []defsecTypes.StringValue

	typeVal := defsecTypes.StringDefault("ingress", resource.GetMetadata())

	egressAttr := resource.GetAttribute("egress")
	if egressAttr.IsTrue() {
		typeVal = defsecTypes.String("egress", egressAttr.GetMetadata())
	} else if egressAttr.IsNotNil() {
		typeVal = defsecTypes.String("ingress", egressAttr.GetMetadata())
	}

	actionAttr := resource.GetAttribute("rule_action")
	actionVal := actionAttr.AsStringValueOrDefault("", resource)

	protocolAttr := resource.GetAttribute("protocol")
	protocolVal := protocolAttr.AsStringValueOrDefault("-1", resource)

	cidrAttr := resource.GetAttribute("cidr_block")
	if cidrAttr.IsNotNil() {
		cidrs = append(cidrs, cidrAttr.AsStringValueOrDefault("", resource))
	}
	ipv6cidrAttr := resource.GetAttribute("ipv6_cidr_block")
	if ipv6cidrAttr.IsNotNil() {
		cidrs = append(cidrs, ipv6cidrAttr.AsStringValueOrDefault("", resource))
	}

	return ec2.NetworkACLRule{
		Metadata: resource.GetMetadata(),
		Type:     typeVal,
		Action:   actionVal,
		Protocol: protocolVal,
		CIDRs:    cidrs,
	}
}
diff --git a/internal/adapters/terraform/aws/ec2/vpc_test.go b/internal/adapters/terraform/aws/ec2/vpc_test.go new file mode 100644 index
000000000000..cdde2f6c6fdc --- /dev/null +++ b/internal/adapters/terraform/aws/ec2/vpc_test.go @@ -0,0 +1,339 @@
package ec2

import (
	"testing"

	defsecTypes "github.com/aquasecurity/defsec/pkg/types"

	"github.com/aquasecurity/defsec/pkg/providers/aws/ec2"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil"
	"github.com/aquasecurity/trivy/test/testutil"
)

// Test_AdaptVPC is a table-driven test checking that VPCs, security groups
// (inline and standalone rules) and network ACL rules are adapted into the
// defsec ec2 model with the expected values and defaults.
func Test_AdaptVPC(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  ec2.EC2
	}{
		{
			name: "defined",
			terraform: `
			resource "aws_flow_log" "this" {
				vpc_id = aws_vpc.main.id
			}
			resource "aws_default_vpc" "default" {
				tags = {
					Name = "Default VPC"
				}
			}

			resource "aws_vpc" "main" {
				cidr_block = "4.5.6.7/32"
			}

			resource "aws_security_group" "example" {
				name        = "http"
				description = "Allow inbound HTTP traffic"

				ingress {
					description = "Rule #1"
					from_port   = 80
					to_port     = 80
					protocol    = "tcp"
					cidr_blocks = [aws_vpc.main.cidr_block]
				}

				egress {
					cidr_blocks = ["1.2.3.4/32"]
				}
			}

			resource "aws_network_acl_rule" "example" {
				egress      = false
				protocol    = "tcp"
				from_port   = 22
				to_port     = 22
				rule_action = "allow"
				cidr_block  = "10.0.0.0/16"
			}

			resource "aws_security_group_rule" "example" {
				type              = "ingress"
				description       = "Rule #2"
				security_group_id = aws_security_group.example.id
				from_port         = 22
				to_port           = 22
				protocol          = "tcp"
				cidr_blocks = [
					"1.2.3.4/32",
					"4.5.6.7/32",
				]
			}
`,
			expected: ec2.EC2{
				VPCs: []ec2.VPC{
					// aws_default_vpc: flagged default, no flow log attached.
					{
						Metadata:        defsecTypes.NewTestMetadata(),
						IsDefault:       defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						ID:              defsecTypes.String("", defsecTypes.NewTestMetadata()),
						FlowLogsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					},
					// aws_vpc.main: referenced by aws_flow_log.this.
					{
						Metadata:        defsecTypes.NewTestMetadata(),
						IsDefault:       defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						ID:              defsecTypes.String("", defsecTypes.NewTestMetadata()),
						FlowLogsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
					},
				},
				SecurityGroups: []ec2.SecurityGroup{
					{
						Metadata:    defsecTypes.NewTestMetadata(),
						Description: defsecTypes.String("Allow inbound HTTP traffic", defsecTypes.NewTestMetadata()),
						IsDefault:   defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						VPCID:       defsecTypes.String("", defsecTypes.NewTestMetadata()),
						IngressRules: []ec2.SecurityGroupRule{
							// Inline ingress block; CIDR resolved from aws_vpc.main.cidr_block.
							{
								Metadata: defsecTypes.NewTestMetadata(),

								Description: defsecTypes.String("Rule #1", defsecTypes.NewTestMetadata()),
								CIDRs: []defsecTypes.StringValue{
									defsecTypes.String("4.5.6.7/32", defsecTypes.NewTestMetadata()),
								},
							},
							// Standalone aws_security_group_rule merged into the group.
							{
								Metadata: defsecTypes.NewTestMetadata(),

								Description: defsecTypes.String("Rule #2", defsecTypes.NewTestMetadata()),
								CIDRs: []defsecTypes.StringValue{
									defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()),
									defsecTypes.String("4.5.6.7/32", defsecTypes.NewTestMetadata()),
								},
							},
						},

						EgressRules: []ec2.SecurityGroupRule{
							{
								Metadata:    defsecTypes.NewTestMetadata(),
								Description: defsecTypes.String("", defsecTypes.NewTestMetadata()),
								CIDRs: []defsecTypes.StringValue{
									defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()),
								},
							},
						},
					},
				},
				NetworkACLs: []ec2.NetworkACL{
					// Orphan aws_network_acl_rule collected into a synthetic ACL.
					{
						Metadata: defsecTypes.NewTestMetadata(),
						Rules: []ec2.NetworkACLRule{
							{
								Metadata: defsecTypes.NewTestMetadata(),
								Type:     defsecTypes.String("ingress", defsecTypes.NewTestMetadata()),
								Action:   defsecTypes.String("allow", defsecTypes.NewTestMetadata()),
								Protocol: defsecTypes.String("tcp", defsecTypes.NewTestMetadata()),
								CIDRs: []defsecTypes.StringValue{
									defsecTypes.String("10.0.0.0/16", defsecTypes.NewTestMetadata()),
								},
							},
						},
						IsDefaultRule: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					},
				},
			},
		},
		{
			// Empty blocks: every field should fall back to its documented default.
			name: "defaults",
			terraform: `
			resource "aws_security_group" "example" {
				ingress {
				}

				egress {
				}
			}

			resource "aws_network_acl_rule" "example" {
			}
`,
			expected: ec2.EC2{
				SecurityGroups: []ec2.SecurityGroup{
					{
						Metadata:    defsecTypes.NewTestMetadata(),
						Description: defsecTypes.String("Managed by Terraform", defsecTypes.NewTestMetadata()),
						IsDefault:   defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						VPCID:       defsecTypes.String("", defsecTypes.NewTestMetadata()),
						IngressRules: []ec2.SecurityGroupRule{
							{
								Metadata:    defsecTypes.NewTestMetadata(),
								Description: defsecTypes.String("", defsecTypes.NewTestMetadata()),
							},
						},

						EgressRules: []ec2.SecurityGroupRule{
							{
								Metadata:    defsecTypes.NewTestMetadata(),
								Description: defsecTypes.String("", defsecTypes.NewTestMetadata()),
							},
						},
					},
				},
				NetworkACLs: []ec2.NetworkACL{
					{
						Metadata: defsecTypes.NewTestMetadata(),
						Rules: []ec2.NetworkACLRule{
							{
								Metadata: defsecTypes.NewTestMetadata(),
								Type:     defsecTypes.String("ingress", defsecTypes.NewTestMetadata()),
								Action:   defsecTypes.String("", defsecTypes.NewTestMetadata()),
								Protocol: defsecTypes.String("-1", defsecTypes.NewTestMetadata()),
							},
						},
						IsDefaultRule: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					},
				},
			},
		},
		{
			// Flow log vpc_id routed through a local value must still mark the VPC.
			name: "aws_flow_log refer to locals",
			terraform: `
locals {
  vpc_id = try(aws_vpc.this.id, "")
}

resource "aws_vpc" "this" {
}

resource "aws_flow_log" "this" {
  vpc_id = local.vpc_id
}
`,
			expected: ec2.EC2{
				VPCs: []ec2.VPC{
					{
						Metadata:        defsecTypes.NewTestMetadata(),
						IsDefault:       defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						ID:              defsecTypes.String("", defsecTypes.NewTestMetadata()),
						FlowLogsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestVPCLines pins the source line ranges recorded in adapted metadata.
// The assertions below depend on the exact line layout of the src fixture.
func TestVPCLines(t *testing.T) {
	src := `
	resource "aws_default_vpc" "default" {
	}

	resource "aws_vpc" "main" {
		cidr_block = "4.5.6.7/32"
	}

	resource "aws_security_group" "example" {
		name        = "http"
		description = "Allow inbound HTTP traffic"

		ingress {
			description = "HTTP from VPC"
			from_port   = 80
			to_port     = 80
			protocol    = "tcp"
			cidr_blocks = [aws_vpc.main.cidr_block]
		}

		egress {
			cidr_blocks = ["1.2.3.4/32"]
		}
	}

	resource "aws_security_group_rule" "example" {
		type              = "ingress"
		security_group_id = aws_security_group.example.id
		from_port         = 22
		to_port           = 22
		protocol          = "tcp"
		cidr_blocks = [
			"1.2.3.4/32",
			"4.5.6.7/32",
		]
	}

	resource "aws_network_acl_rule" "example" {
		egress      = false
		protocol    = "tcp"
		from_port   = 22
		to_port     = 22
		rule_action = "allow"
		cidr_block  = "10.0.0.0/16"
	}`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.VPCs, 2)
	require.Len(t, adapted.SecurityGroups, 1)
	require.Len(t, adapted.NetworkACLs, 1)

	defaultVPC := adapted.VPCs[0]
	securityGroup := adapted.SecurityGroups[0]
	networkACL := adapted.NetworkACLs[0]

	assert.Equal(t, 2, defaultVPC.Metadata.Range().GetStartLine())
	assert.Equal(t, 3, defaultVPC.Metadata.Range().GetEndLine())

	assert.Equal(t, 9, securityGroup.Metadata.Range().GetStartLine())
	assert.Equal(t, 24, securityGroup.Metadata.Range().GetEndLine())

	assert.Equal(t, 11, securityGroup.Description.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 11, securityGroup.Description.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 13, securityGroup.IngressRules[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 19, securityGroup.IngressRules[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 14, securityGroup.IngressRules[0].Description.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 14, securityGroup.IngressRules[0].Description.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 18, securityGroup.IngressRules[0].CIDRs[0].GetMetadata().Range().GetStartLine())
	assert.Equal(t, 18, securityGroup.IngressRules[0].CIDRs[0].GetMetadata().Range().GetEndLine())

	assert.Equal(t, 26, securityGroup.IngressRules[1].Metadata.Range().GetStartLine())
	assert.Equal(t, 36, securityGroup.IngressRules[1].Metadata.Range().GetEndLine())

	assert.Equal(t, 32, securityGroup.IngressRules[1].CIDRs[0].GetMetadata().Range().GetStartLine())
	assert.Equal(t, 35, securityGroup.IngressRules[1].CIDRs[0].GetMetadata().Range().GetEndLine())

	assert.Equal(t, 21, securityGroup.EgressRules[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 23, securityGroup.EgressRules[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 22, securityGroup.EgressRules[0].CIDRs[0].GetMetadata().Range().GetStartLine())
	assert.Equal(t, 22, securityGroup.EgressRules[0].CIDRs[0].GetMetadata().Range().GetEndLine())

	assert.Equal(t, 38, networkACL.Rules[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 45, networkACL.Rules[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 39, networkACL.Rules[0].Type.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 39, networkACL.Rules[0].Type.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 40, networkACL.Rules[0].Protocol.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 40, networkACL.Rules[0].Protocol.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 43, networkACL.Rules[0].Action.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 43, networkACL.Rules[0].Action.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 44, networkACL.Rules[0].CIDRs[0].GetMetadata().Range().GetStartLine())
	assert.Equal(t, 44, networkACL.Rules[0].CIDRs[0].GetMetadata().Range().GetEndLine())
}
diff --git a/internal/adapters/terraform/aws/ecr/adapt.go b/internal/adapters/terraform/aws/ecr/adapt.go new file mode 100644 index
000000000000..83741a7e1e33 --- /dev/null +++ b/internal/adapters/terraform/aws/ecr/adapt.go @@ -0,0 +1,113 @@ +package ecr + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/liamg/iamgo" +) + +func Adapt(modules terraform.Modules) ecr.ECR { + return ecr.ECR{ + Repositories: adaptRepositories(modules), + } +} + +func adaptRepositories(modules terraform.Modules) []ecr.Repository { + var repositories []ecr.Repository + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_ecr_repository") { + repositories = append(repositories, adaptRepository(resource, module, modules)) + } + } + return repositories +} + +func adaptRepository(resource *terraform.Block, module *terraform.Module, modules terraform.Modules) ecr.Repository { + repo := ecr.Repository{ + Metadata: resource.GetMetadata(), + ImageScanning: ecr.ImageScanning{ + Metadata: resource.GetMetadata(), + ScanOnPush: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + ImageTagsImmutable: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Policies: nil, + Encryption: ecr.Encryption{ + Metadata: resource.GetMetadata(), + Type: defsecTypes.StringDefault("AES256", resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + } + + if imageScanningBlock := resource.GetBlock("image_scanning_configuration"); imageScanningBlock.IsNotNil() { + repo.ImageScanning.Metadata = imageScanningBlock.GetMetadata() + scanOnPushAttr := imageScanningBlock.GetAttribute("scan_on_push") + repo.ImageScanning.ScanOnPush = scanOnPushAttr.AsBoolValueOrDefault(false, imageScanningBlock) + } + + mutabilityAttr := resource.GetAttribute("image_tag_mutability") + if 
mutabilityAttr.Equals("IMMUTABLE") { + repo.ImageTagsImmutable = defsecTypes.Bool(true, mutabilityAttr.GetMetadata()) + } else if mutabilityAttr.Equals("MUTABLE") { + repo.ImageTagsImmutable = defsecTypes.Bool(false, mutabilityAttr.GetMetadata()) + } + + policyBlocks := module.GetReferencingResources(resource, "aws_ecr_repository_policy", "repository") + for _, policyRes := range policyBlocks { + if policyAttr := policyRes.GetAttribute("policy"); policyAttr.IsString() { + + dataBlock, err := module.GetBlockByID(policyAttr.Value().AsString()) + if err != nil { + + parsed, err := iamgo.ParseString(policyAttr.Value().AsString()) + if err != nil { + continue + } + + policy := iamp.Policy{ + Metadata: policyRes.GetMetadata(), + Name: defsecTypes.StringDefault("", policyRes.GetMetadata()), + Document: iamp.Document{ + Parsed: *parsed, + Metadata: policyAttr.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, policyRes.GetMetadata()), + } + + repo.Policies = append(repo.Policies, policy) + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(modules, dataBlock); err == nil { + policy := iamp.Policy{ + Metadata: policyRes.GetMetadata(), + Name: defsecTypes.StringDefault("", policyRes.GetMetadata()), + Document: iamp.Document{ + Parsed: doc.Document, + Metadata: doc.Source.GetMetadata(), + IsOffset: true, + }, + Builtin: defsecTypes.Bool(false, policyRes.GetMetadata()), + } + repo.Policies = append(repo.Policies, policy) + } + } + } + } + + if encryptBlock := resource.GetBlock("encryption_configuration"); encryptBlock.IsNotNil() { + repo.Encryption.Metadata = encryptBlock.GetMetadata() + encryptionTypeAttr := encryptBlock.GetAttribute("encryption_type") + repo.Encryption.Type = encryptionTypeAttr.AsStringValueOrDefault("AES256", encryptBlock) + + kmsKeyAttr := encryptBlock.GetAttribute("kms_key") + repo.Encryption.KMSKeyID = kmsKeyAttr.AsStringValueOrDefault("", encryptBlock) + if 
kmsKeyAttr.IsResourceBlockReference("aws_kms_key") { + if keyBlock, err := module.GetReferencedBlock(kmsKeyAttr, encryptBlock); err == nil { + repo.Encryption.KMSKeyID = defsecTypes.String(keyBlock.FullName(), keyBlock.GetMetadata()) + } + } + } + + return repo +} diff --git a/internal/adapters/terraform/aws/ecr/adapt_test.go b/internal/adapters/terraform/aws/ecr/adapt_test.go new file mode 100644 index 000000000000..d6110cb1960a --- /dev/null +++ b/internal/adapters/terraform/aws/ecr/adapt_test.go @@ -0,0 +1,248 @@ +package ecr + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptRepository(t *testing.T) { + tests := []struct { + name string + terraform string + expected ecr.Repository + }{ + { + name: "configured", + terraform: ` + resource "aws_kms_key" "ecr_kms" { + enable_key_rotation = true + } + + resource "aws_ecr_repository" "foo" { + name = "bar" + image_tag_mutability = "MUTABLE" + + image_scanning_configuration { + scan_on_push = true + } + + encryption_configuration { + encryption_type = "KMS" + kms_key = aws_kms_key.ecr_kms.key_id + } + } + + resource "aws_ecr_repository_policy" "foopolicy" { + repository = aws_ecr_repository.foo.name + + policy = < 0 { + var volumes []ecs.Volume + for _, volumeBlock := range volumeBlocks { + volumes = append(volumes, ecs.Volume{ + Metadata: volumeBlock.GetMetadata(), + EFSVolumeConfiguration: adaptEFSVolumeConfiguration(volumeBlock), + }) + } + return volumes + } + + return []ecs.Volume{} +} + +func adaptEFSVolumeConfiguration(volumeBlock *terraform.Block) ecs.EFSVolumeConfiguration { + 
EFSVolumeConfiguration := ecs.EFSVolumeConfiguration{
		Metadata: volumeBlock.GetMetadata(),
		// Transit encryption is assumed enabled until an explicit
		// efs_volume_configuration block says otherwise.
		TransitEncryptionEnabled: types.BoolDefault(true, volumeBlock.GetMetadata()),
	}

	if EFSConfigBlock := volumeBlock.GetBlock("efs_volume_configuration"); EFSConfigBlock.IsNotNil() {
		EFSVolumeConfiguration.Metadata = EFSConfigBlock.GetMetadata()
		transitEncryptionAttr := EFSConfigBlock.GetAttribute("transit_encryption")
		EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(transitEncryptionAttr.Equals("ENABLED"), EFSConfigBlock.GetMetadata())
		// Prefer the attribute's own metadata when the attribute is present.
		if transitEncryptionAttr.IsNotNil() {
			EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(transitEncryptionAttr.Equals("ENABLED"), transitEncryptionAttr.GetMetadata())
		}
	}

	return EFSVolumeConfiguration
}
diff --git a/internal/adapters/terraform/aws/ecs/adapt_test.go b/internal/adapters/terraform/aws/ecs/adapt_test.go new file mode 100644 index 000000000000..293422d0cd88 --- /dev/null +++ b/internal/adapters/terraform/aws/ecs/adapt_test.go @@ -0,0 +1,246 @@
package ecs

import (
	"testing"

	defsecTypes "github.com/aquasecurity/defsec/pkg/types"

	"github.com/aquasecurity/defsec/pkg/providers/aws/ecs"

	"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil"

	"github.com/aquasecurity/trivy/test/testutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Test_adaptClusterSettings checks the containerInsights setting mapping,
// including an unknown setting name and the absent-block default.
func Test_adaptClusterSettings(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  ecs.ClusterSettings
	}{
		{
			name: "container insights enabled",
			terraform: `
			resource "aws_ecs_cluster" "example" {
				name = "services-cluster"

				setting {
					name  = "containerInsights"
					value = "enabled"
				}
			}
`,
			expected: ecs.ClusterSettings{
				Metadata:                 defsecTypes.NewTestMetadata(),
				ContainerInsightsEnabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
			},
		},
		{
			name: "invalid name",
			terraform: `
			resource "aws_ecs_cluster" "example" {
				name = "services-cluster"

				setting {
					name  = "invalidName"
					value = "enabled"
				}
			}
`,
			expected: ecs.ClusterSettings{
				Metadata:                 defsecTypes.NewTestMetadata(),
				ContainerInsightsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
			},
		},
		{
			name: "defaults",
			terraform: `
			resource "aws_ecs_cluster" "example" {
			}
`,
			expected: ecs.ClusterSettings{
				Metadata:                 defsecTypes.NewTestMetadata(),
				ContainerInsightsEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptClusterSettings(modules.GetBlocks()[0])
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

func Test_adaptTaskDefinitionResource(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  ecs.TaskDefinition
	}{
		{
			name: "configured",
			terraform: `
			resource "aws_ecs_task_definition" "example" {
				family                = "service"
				container_definitions = < 0 {
	orphanage := elb.LoadBalancer{
			Metadata:                defsecTypes.NewUnmanagedMetadata(),
			Type:                    defsecTypes.StringDefault(elb.TypeApplication, defsecTypes.NewUnmanagedMetadata()),
			DropInvalidHeaderFields: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()),
			Internal:                defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()),
			Listeners:               nil,
		}
		for _, listenerResource := range orphanResources {
			orphanage.Listeners = append(orphanage.Listeners, adaptListener(listenerResource, "application"))
		}
		loadBalancers = append(loadBalancers, orphanage)
	}

	return loadBalancers
}

// adaptLoadBalancer adapts an aws_lb/aws_alb resource together with any
// aws_lb_listener / aws_alb_listener resources that reference it.
func (a *adapter) adaptLoadBalancer(resource *terraform.Block, module terraform.Modules) elb.LoadBalancer {
	var listeners []elb.Listener

	typeAttr := resource.GetAttribute("load_balancer_type")
	typeVal := typeAttr.AsStringValueOrDefault("application",
resource)

	dropInvalidHeadersAttr := resource.GetAttribute("drop_invalid_header_fields")
	dropInvalidHeadersVal := dropInvalidHeadersAttr.AsBoolValueOrDefault(false, resource)

	internalAttr := resource.GetAttribute("internal")
	internalVal := internalAttr.AsBoolValueOrDefault(false, resource)

	// Both aws_lb_listener and its aws_alb_listener alias attach listeners.
	listenerBlocks := module.GetReferencingResources(resource, "aws_lb_listener", "load_balancer_arn")
	listenerBlocks = append(listenerBlocks, module.GetReferencingResources(resource, "aws_alb_listener", "load_balancer_arn")...)

	for _, listenerBlock := range listenerBlocks {
		// Mark the listener resolved so it is not also reported as an orphan.
		a.listenerIDs.Resolve(listenerBlock.ID())
		listeners = append(listeners, adaptListener(listenerBlock, typeVal.Value()))
	}
	return elb.LoadBalancer{
		Metadata:                resource.GetMetadata(),
		Type:                    typeVal,
		DropInvalidHeaderFields: dropInvalidHeadersVal,
		Internal:                internalVal,
		Listeners:               listeners,
	}
}

// adaptClassicLoadBalancer adapts an aws_elb resource; classic load balancers
// carry no listener or header-field information in this model.
func (a *adapter) adaptClassicLoadBalancer(resource *terraform.Block, module terraform.Modules) elb.LoadBalancer {
	internalAttr := resource.GetAttribute("internal")
	internalVal := internalAttr.AsBoolValueOrDefault(false, resource)

	return elb.LoadBalancer{
		Metadata:                resource.GetMetadata(),
		Type:                    defsecTypes.String("classic", resource.GetMetadata()),
		DropInvalidHeaderFields: defsecTypes.BoolDefault(false, resource.GetMetadata()),
		Internal:                internalVal,
		Listeners:               nil,
	}
}

// adaptListener adapts a single listener resource. The protocol default of
// "HTTP" is only applied for application load balancers.
func adaptListener(listenerBlock *terraform.Block, typeVal string) elb.Listener {
	listener := elb.Listener{
		Metadata:       listenerBlock.GetMetadata(),
		Protocol:       defsecTypes.StringDefault("", listenerBlock.GetMetadata()),
		TLSPolicy:      defsecTypes.StringDefault("", listenerBlock.GetMetadata()),
		DefaultActions: nil,
	}

	protocolAttr := listenerBlock.GetAttribute("protocol")
	if typeVal == "application" {
		listener.Protocol = protocolAttr.AsStringValueOrDefault("HTTP", listenerBlock)
	}

	sslPolicyAttr := listenerBlock.GetAttribute("ssl_policy")
	listener.TLSPolicy = sslPolicyAttr.AsStringValueOrDefault("", listenerBlock)

	for _, defaultActionBlock := range listenerBlock.GetBlocks("default_action") {
		action := elb.Action{
			Metadata: defaultActionBlock.GetMetadata(),
			Type:     defaultActionBlock.GetAttribute("type").AsStringValueOrDefault("", defaultActionBlock),
		}
		listener.DefaultActions = append(listener.DefaultActions, action)
	}

	return listener
}
diff --git a/internal/adapters/terraform/aws/elb/adapt_test.go b/internal/adapters/terraform/aws/elb/adapt_test.go new file mode 100644 index 000000000000..c0f4dae37115 --- /dev/null +++ b/internal/adapters/terraform/aws/elb/adapt_test.go @@ -0,0 +1,161 @@
package elb

import (
	"testing"

	defsecTypes "github.com/aquasecurity/defsec/pkg/types"

	"github.com/aquasecurity/defsec/pkg/providers/aws/elb"

	"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil"

	"github.com/aquasecurity/trivy/test/testutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Test_Adapt checks ALB + listener adaptation and the bare-resource defaults.
func Test_Adapt(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  elb.ELB
	}{
		{
			name: "configured",
			terraform: `
			resource "aws_alb" "example" {
				name               = "good_alb"
				internal           = true
				load_balancer_type = "application"

				access_logs {
					bucket  = aws_s3_bucket.lb_logs.bucket
					prefix  = "test-lb"
					enabled = true
				}

				drop_invalid_header_fields = true
			}

			resource "aws_alb_listener" "example" {
				load_balancer_arn = aws_alb.example.arn
				protocol          = "HTTPS"
				ssl_policy        = "ELBSecurityPolicy-TLS-1-1-2017-01"

				default_action {
					type = "forward"
				}
			}
`,
			expected: elb.ELB{
				LoadBalancers: []elb.LoadBalancer{
					{
						Metadata:                defsecTypes.NewTestMetadata(),
						Type:                    defsecTypes.String("application", defsecTypes.NewTestMetadata()),
						DropInvalidHeaderFields: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						Internal:                defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						Listeners:
[]elb.Listener{
							{
								Metadata:  defsecTypes.NewTestMetadata(),
								Protocol:  defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()),
								TLSPolicy: defsecTypes.String("ELBSecurityPolicy-TLS-1-1-2017-01", defsecTypes.NewTestMetadata()),
								DefaultActions: []elb.Action{
									{
										Metadata: defsecTypes.NewTestMetadata(),
										Type:     defsecTypes.String("forward", defsecTypes.NewTestMetadata()),
									},
								},
							},
						},
					},
				},
			},
		},
		{
			// Bare aws_alb: every field should fall back to its default.
			name: "defaults",
			terraform: `
			resource "aws_alb" "example" {
			}
`,
			expected: elb.ELB{
				LoadBalancers: []elb.LoadBalancer{
					{
						Metadata:                defsecTypes.NewTestMetadata(),
						Type:                    defsecTypes.String("application", defsecTypes.NewTestMetadata()),
						DropInvalidHeaderFields: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						Internal:                defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						Listeners:               nil,
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestLines pins the source line ranges recorded in adapted metadata; the
// assertions depend on the exact line layout of the src fixture.
func TestLines(t *testing.T) {
	src := `
	resource "aws_alb" "example" {
		name                       = "good_alb"
		internal                   = true
		load_balancer_type         = "application"
		drop_invalid_header_fields = true

		access_logs {
			bucket  = aws_s3_bucket.lb_logs.bucket
			prefix  = "test-lb"
			enabled = true
		}
	}

	resource "aws_alb_listener" "example" {
		load_balancer_arn = aws_alb.example.arn
		protocol          = "HTTPS"
		ssl_policy        = "ELBSecurityPolicy-TLS-1-1-2017-01"

		default_action {
			type = "forward"
		}
	}`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.LoadBalancers, 1)
	loadBalancer := adapted.LoadBalancers[0]

	assert.Equal(t, 2, loadBalancer.Metadata.Range().GetStartLine())
	assert.Equal(t, 13, loadBalancer.Metadata.Range().GetEndLine())

	assert.Equal(t, 4, loadBalancer.Internal.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 4, loadBalancer.Internal.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 5, loadBalancer.Type.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 5, loadBalancer.Type.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 6, loadBalancer.DropInvalidHeaderFields.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 6, loadBalancer.DropInvalidHeaderFields.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 15, loadBalancer.Listeners[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 23, loadBalancer.Listeners[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 17, loadBalancer.Listeners[0].Protocol.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 17, loadBalancer.Listeners[0].Protocol.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 18, loadBalancer.Listeners[0].TLSPolicy.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 18, loadBalancer.Listeners[0].TLSPolicy.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 20, loadBalancer.Listeners[0].DefaultActions[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 22, loadBalancer.Listeners[0].DefaultActions[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 21, loadBalancer.Listeners[0].DefaultActions[0].Type.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 21, loadBalancer.Listeners[0].DefaultActions[0].Type.GetMetadata().Range().GetEndLine())

}
diff --git a/internal/adapters/terraform/aws/emr/adapt.go b/internal/adapters/terraform/aws/emr/adapt.go new file mode 100644 index 000000000000..0e85a1023ab5 --- /dev/null +++ b/internal/adapters/terraform/aws/emr/adapt.go @@ -0,0 +1,49 @@
package emr

import (
	"github.com/aquasecurity/defsec/pkg/providers/aws/emr"
	"github.com/aquasecurity/defsec/pkg/terraform"
)

// Adapt converts all EMR-related terraform resources into the defsec EMR model.
func Adapt(modules terraform.Modules) emr.EMR {
	return emr.EMR{
		Clusters:              adaptClusters(modules),
		SecurityConfiguration: adaptSecurityConfigurations(modules),
+ } +} +func adaptClusters(modules terraform.Modules) []emr.Cluster { + var clusters []emr.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_emr_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) emr.Cluster { + + return emr.Cluster{ + Metadata: resource.GetMetadata(), + } +} + +func adaptSecurityConfigurations(modules terraform.Modules) []emr.SecurityConfiguration { + var securityConfiguration []emr.SecurityConfiguration + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_emr_security_configuration") { + securityConfiguration = append(securityConfiguration, adaptSecurityConfiguration(resource)) + } + } + return securityConfiguration +} + +func adaptSecurityConfiguration(resource *terraform.Block) emr.SecurityConfiguration { + + return emr.SecurityConfiguration{ + Metadata: resource.GetMetadata(), + Name: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + Configuration: resource.GetAttribute("configuration").AsStringValueOrDefault("", resource), + } + +} diff --git a/internal/adapters/terraform/aws/emr/adapt_test.go b/internal/adapters/terraform/aws/emr/adapt_test.go new file mode 100644 index 000000000000..b38dcc5811fe --- /dev/null +++ b/internal/adapters/terraform/aws/emr/adapt_test.go @@ -0,0 +1,116 @@ +package emr + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/emr" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptSecurityConfiguration(t *testing.T) { + tests := []struct { + name string + terraform string + expected emr.SecurityConfiguration + }{ + { + name: "test", + terraform: ` + 
resource "aws_emr_security_configuration" "foo" { + name = "emrsc_test" + configuration = < 0 { + return &iam.Document{ + Parsed: documents[0].Document, + Metadata: documents[0].Source.GetMetadata(), + IsOffset: true, + }, nil + } + + if attr.IsString() { + + dataBlock, err := modules.GetBlockById(attr.Value().AsString()) + if err != nil { + parsed, err := iamgo.Parse([]byte(unescapeVars(attr.Value().AsString()))) + if err != nil { + return nil, err + } + return &iam.Document{ + Parsed: *parsed, + Metadata: attr.GetMetadata(), + IsOffset: false, + HasRefs: len(attr.AllReferences()) > 0, + }, nil + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := ConvertTerraformDocument(modules, dataBlock); err == nil { + return &iam.Document{ + Metadata: dataBlock.GetMetadata(), + Parsed: doc.Document, + IsOffset: true, + HasRefs: false, + }, nil + } + } + } + + return &iam.Document{ + Metadata: owner.GetMetadata(), + }, nil +} + +func unescapeVars(input string) string { + return strings.ReplaceAll(input, "&{", "${") +} + +// ConvertTerraformDocument converts a terraform data policy into an iamgo policy https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document +func ConvertTerraformDocument(modules terraform.Modules, block *terraform.Block) (*wrappedDocument, error) { + + builder := iamgo.NewPolicyBuilder() + + if sourceAttr := block.GetAttribute("source_json"); sourceAttr.IsString() { + doc, err := iamgo.ParseString(sourceAttr.Value().AsString()) + if err != nil { + return nil, err + } + builder = iamgo.PolicyBuilderFromDocument(*doc) + } + + if sourceDocumentsAttr := block.GetAttribute("source_policy_documents"); sourceDocumentsAttr.IsIterable() { + docs := findAllPolicies(modules, block, sourceDocumentsAttr) + for _, doc := range docs { + statements, _ := doc.Document.Statements() + for _, statement := range statements { + builder.WithStatement(statement) + } + } + } + + if 
idAttr := block.GetAttribute("policy_id"); idAttr.IsString() { + r := idAttr.GetMetadata().Range() + builder.WithId(idAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + + if versionAttr := block.GetAttribute("version"); versionAttr.IsString() { + r := versionAttr.GetMetadata().Range() + builder.WithVersion(versionAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + + for _, statementBlock := range block.GetBlocks("statement") { + statement := parseStatement(statementBlock) + builder.WithStatement(statement, statement.Range().StartLine, statement.Range().EndLine) + } + + if overrideDocumentsAttr := block.GetAttribute("override_policy_documents"); overrideDocumentsAttr.IsIterable() { + docs := findAllPolicies(modules, block, overrideDocumentsAttr) + for _, doc := range docs { + statements, _ := doc.Document.Statements() + for _, statement := range statements { + builder.WithStatement(statement, statement.Range().StartLine, statement.Range().EndLine) + } + } + } + + return &wrappedDocument{Document: builder.Build(), Source: block}, nil +} + +// nolint +func parseStatement(statementBlock *terraform.Block) iamgo.Statement { + + metadata := statementBlock.GetMetadata() + + builder := iamgo.NewStatementBuilder() + builder.WithRange(metadata.Range().GetStartLine(), metadata.Range().GetEndLine()) + + if sidAttr := statementBlock.GetAttribute("sid"); sidAttr.IsString() { + r := sidAttr.GetMetadata().Range() + builder.WithSid(sidAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } + if actionsAttr := statementBlock.GetAttribute("actions"); actionsAttr.IsIterable() { + r := actionsAttr.GetMetadata().Range() + values := actionsAttr.AsStringValues().AsStrings() + builder.WithActions(values, r.GetStartLine(), r.GetEndLine()) + } + if notActionsAttr := statementBlock.GetAttribute("not_actions"); notActionsAttr.IsIterable() { + r := notActionsAttr.GetMetadata().Range() + values := notActionsAttr.AsStringValues().AsStrings() + 
builder.WithNotActions(values, r.GetStartLine(), r.GetEndLine()) + } + if resourcesAttr := statementBlock.GetAttribute("resources"); resourcesAttr.IsIterable() { + r := resourcesAttr.GetMetadata().Range() + values := resourcesAttr.AsStringValues().AsStrings() + builder.WithResources(values, r.GetStartLine(), r.GetEndLine()) + } + if notResourcesAttr := statementBlock.GetAttribute("not_resources"); notResourcesAttr.IsIterable() { + r := notResourcesAttr.GetMetadata().Range() + values := notResourcesAttr.AsStringValues().AsStrings() + builder.WithNotResources(values, r.GetStartLine(), r.GetEndLine()) + } + if effectAttr := statementBlock.GetAttribute("effect"); effectAttr.IsString() { + r := effectAttr.GetMetadata().Range() + builder.WithEffect(effectAttr.Value().AsString(), r.GetStartLine(), r.GetEndLine()) + } else { + builder.WithEffect(iamgo.EffectAllow) + } + + for _, principalBlock := range statementBlock.GetBlocks("principals") { + typeAttr := principalBlock.GetAttribute("type") + if !typeAttr.IsString() { + continue + } + identifiersAttr := principalBlock.GetAttribute("identifiers") + if !identifiersAttr.IsIterable() { + continue + } + r := principalBlock.GetMetadata().Range() + switch typeAttr.Value().AsString() { + case "*": + builder.WithAllPrincipals(true, r.GetStartLine(), r.GetEndLine()) + case "AWS": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithAWSPrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "Federated": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithFederatedPrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "Service": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithServicePrincipals(values, r.GetStartLine(), r.GetEndLine()) + case "CanonicalUser": + values := identifiersAttr.AsStringValues().AsStrings() + builder.WithCanonicalUsersPrincipals(values, r.GetStartLine(), r.GetEndLine()) + } + } + + for _, conditionBlock := range 
statementBlock.GetBlocks("condition") { + testAttr := conditionBlock.GetAttribute("test") + if !testAttr.IsString() { + continue + } + variableAttr := conditionBlock.GetAttribute("variable") + if !variableAttr.IsString() { + continue + } + valuesAttr := conditionBlock.GetAttribute("values") + values := valuesAttr.AsStringValues().AsStrings() + if valuesAttr.IsNil() || len(values) == 0 { + continue + } + + r := conditionBlock.GetMetadata().Range() + + builder.WithCondition( + testAttr.Value().AsString(), + variableAttr.Value().AsString(), + values, + r.GetStartLine(), + r.GetEndLine(), + ) + + } + return builder.Build() +} + +func findAllPolicies(modules terraform.Modules, parentBlock *terraform.Block, attr *terraform.Attribute) []wrappedDocument { + var documents []wrappedDocument + for _, ref := range attr.AllReferences() { + for _, b := range modules.GetBlocks() { + if b.Type() != "data" || b.TypeLabel() != "aws_iam_policy_document" { + continue + } + if ref.RefersTo(b.Reference()) { + document, err := ConvertTerraformDocument(modules, b) + if err != nil { + continue + } + documents = append(documents, *document) + continue + } + kref := *ref + kref.SetKey(parentBlock.Reference().RawKey()) + if kref.RefersTo(b.Reference()) { + document, err := ConvertTerraformDocument(modules, b) + if err != nil { + continue + } + documents = append(documents, *document) + } + } + } + return documents +} diff --git a/internal/adapters/terraform/aws/iam/groups.go b/internal/adapters/terraform/aws/iam/groups.go new file mode 100644 index 000000000000..d2b2ec8430e3 --- /dev/null +++ b/internal/adapters/terraform/aws/iam/groups.go @@ -0,0 +1,32 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptGroups(modules terraform.Modules) []iam.Group { + var groups []iam.Group + + for _, groupBlock := range modules.GetResourcesByType("aws_iam_group") { + group := iam.Group{ + Metadata: 
groupBlock.GetMetadata(), + Name: groupBlock.GetAttribute("name").AsStringValueOrDefault("", groupBlock), + } + + if policy, ok := applyForDependentResource( + modules, groupBlock.ID(), "name", "aws_iam_group_policy", "group", findPolicy(modules), + ); ok && policy != nil { + group.Policies = append(group.Policies, *policy) + } + + if policy, ok := applyForDependentResource( + modules, groupBlock.ID(), "name", "aws_iam_group_policy_attachment", "group", findAttachmentPolicy(modules), + ); ok && policy != nil { + group.Policies = append(group.Policies, *policy) + } + + groups = append(groups, group) + } + return groups +} diff --git a/internal/adapters/terraform/aws/iam/groups_test.go b/internal/adapters/terraform/aws/iam/groups_test.go new file mode 100644 index 000000000000..5e06c6974312 --- /dev/null +++ b/internal/adapters/terraform/aws/iam/groups_test.go @@ -0,0 +1,115 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []iam.Group + }{ + { + name: "policy", + terraform: ` + resource "aws_iam_group_policy" "my_developer_policy" { + name = "my_developer_policy" + group = aws_iam_group.my_developers.name + + policy = < 0 { + orphanage := lambda.Function{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Mode: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Permissions: nil, + } + for _, permission := range orphanResources { + orphanage.Permissions = append(orphanage.Permissions, a.adaptPermission(permission)) + } + functions = append(functions, orphanage) + } + + return functions +} + +func (a *adapter) 
adaptFunction(function *terraform.Block, modules terraform.Modules, orphans terraform.ResourceIDResolutions) lambda.Function { + var permissions []lambda.Permission + for _, module := range modules { + for _, p := range module.GetResourcesByType("aws_lambda_permission") { + if referencedBlock, err := module.GetReferencedBlock(p.GetAttribute("function_name"), p); err == nil && referencedBlock == function { + permissions = append(permissions, a.adaptPermission(p)) + delete(orphans, p.ID()) + } + } + } + + return lambda.Function{ + Metadata: function.GetMetadata(), + Tracing: a.adaptTracing(function), + Permissions: permissions, + } +} + +func (a *adapter) adaptTracing(function *terraform.Block) lambda.Tracing { + if tracingConfig := function.GetBlock("tracing_config"); tracingConfig.IsNotNil() { + return lambda.Tracing{ + Metadata: tracingConfig.GetMetadata(), + Mode: tracingConfig.GetAttribute("mode").AsStringValueOrDefault("", tracingConfig), + } + } + + return lambda.Tracing{ + Metadata: function.GetMetadata(), + Mode: defsecTypes.StringDefault("", function.GetMetadata()), + } +} + +func (a *adapter) adaptPermission(permission *terraform.Block) lambda.Permission { + sourceARNAttr := permission.GetAttribute("source_arn") + sourceARN := sourceARNAttr.AsStringValueOrDefault("", permission) + + if len(sourceARNAttr.AllReferences()) > 0 { + sourceARN = defsecTypes.String(sourceARNAttr.AllReferences()[0].NameLabel(), sourceARNAttr.GetMetadata()) + } + + return lambda.Permission{ + Metadata: permission.GetMetadata(), + Principal: permission.GetAttribute("principal").AsStringValueOrDefault("", permission), + SourceARN: sourceARN, + } +} diff --git a/internal/adapters/terraform/aws/lambda/adapt_test.go b/internal/adapters/terraform/aws/lambda/adapt_test.go new file mode 100644 index 000000000000..64c884c02e8c --- /dev/null +++ b/internal/adapters/terraform/aws/lambda/adapt_test.go @@ -0,0 +1,155 @@ +package lambda + +import ( + "testing" + + defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/lambda" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected lambda.Lambda + }{ + { + name: "reference arn", + terraform: ` + resource "aws_lambda_function" "example" { + filename = "lambda_function_payload.zip" + function_name = "lambda_function_name" + role = aws_iam_role.iam_for_lambda.arn + runtime = "nodejs12.x" + + tracing_config { + mode = "Passthrough" + } + } + + resource "aws_lambda_permission" "example" { + statement_id = "AllowExecutionFromSNS" + action = "lambda:InvokeFunction" + function_name = aws_lambda_function.example.function_name + principal = "sns.amazonaws.com" + source_arn = aws_sns_topic.default.arn + } +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Metadata: defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("Passthrough", defsecTypes.NewTestMetadata()), + }, + Permissions: []lambda.Permission{ + { + Metadata: defsecTypes.NewTestMetadata(), + Principal: defsecTypes.String("sns.amazonaws.com", defsecTypes.NewTestMetadata()), + SourceARN: defsecTypes.String("default", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + { + name: "defaults (with an orphan)", + terraform: ` + resource "aws_lambda_function" "example" { + tracing_config { + } + } + + resource "aws_lambda_permission" "example" { + } +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Metadata: defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + { + Metadata: 
defsecTypes.NewTestMetadata(), + Tracing: lambda.Tracing{ + Metadata: defsecTypes.NewTestMetadata(), + Mode: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + Permissions: []lambda.Permission{ + { + Metadata: defsecTypes.NewTestMetadata(), + Principal: defsecTypes.String("", defsecTypes.NewTestMetadata()), + SourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_lambda_function" "example" { + filename = "lambda_function_payload.zip" + function_name = "lambda_function_name" + role = aws_iam_role.iam_for_lambda.arn + runtime = "nodejs12.x" + + tracing_config { + mode = "Passthrough" + } + } + + resource "aws_lambda_permission" "example" { + statement_id = "AllowExecutionFromSNS" + action = "lambda:InvokeFunction" + function_name = aws_lambda_function.example.function_name + principal = "sns.amazonaws.com" + source_arn = "string arn" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Functions, 1) + function := adapted.Functions[0] + + assert.Equal(t, 2, function.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, function.Metadata.Range().GetEndLine()) + + assert.Equal(t, 8, function.Tracing.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, function.Tracing.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, function.Tracing.Mode.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, function.Tracing.Mode.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, function.Permissions[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 19, function.Permissions[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 17, 
function.Permissions[0].Principal.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, function.Permissions[0].Principal.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, function.Permissions[0].SourceARN.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, function.Permissions[0].SourceARN.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/mq/adapt.go b/internal/adapters/terraform/aws/mq/adapt.go new file mode 100644 index 000000000000..c5da698dca8d --- /dev/null +++ b/internal/adapters/terraform/aws/mq/adapt.go @@ -0,0 +1,48 @@ +package mq + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) mq.MQ { + return mq.MQ{ + Brokers: adaptBrokers(modules), + } +} + +func adaptBrokers(modules terraform.Modules) []mq.Broker { + var brokers []mq.Broker + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_mq_broker") { + brokers = append(brokers, adaptBroker(resource)) + } + } + return brokers +} + +func adaptBroker(resource *terraform.Block) mq.Broker { + + broker := mq.Broker{ + Metadata: resource.GetMetadata(), + PublicAccess: types.BoolDefault(false, resource.GetMetadata()), + Logging: mq.Logging{ + Metadata: resource.GetMetadata(), + General: types.BoolDefault(false, resource.GetMetadata()), + Audit: types.BoolDefault(false, resource.GetMetadata()), + }, + } + + publicAccessAttr := resource.GetAttribute("publicly_accessible") + broker.PublicAccess = publicAccessAttr.AsBoolValueOrDefault(false, resource) + if logsBlock := resource.GetBlock("logs"); logsBlock.IsNotNil() { + broker.Logging.Metadata = logsBlock.GetMetadata() + auditAttr := logsBlock.GetAttribute("audit") + broker.Logging.Audit = auditAttr.AsBoolValueOrDefault(false, logsBlock) + generalAttr := logsBlock.GetAttribute("general") + broker.Logging.General = 
generalAttr.AsBoolValueOrDefault(false, logsBlock) + } + + return broker +} diff --git a/internal/adapters/terraform/aws/mq/adapt_test.go b/internal/adapters/terraform/aws/mq/adapt_test.go new file mode 100644 index 000000000000..a7e110c3fc82 --- /dev/null +++ b/internal/adapters/terraform/aws/mq/adapt_test.go @@ -0,0 +1,119 @@ +package mq + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/mq" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptBroker(t *testing.T) { + tests := []struct { + name string + terraform string + expected mq.Broker + }{ + { + name: "audit logs", + terraform: ` + resource "aws_mq_broker" "example" { + logs { + audit = true + } + + publicly_accessible = false + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "general logs", + terraform: ` + resource "aws_mq_broker" "example" { + logs { + general = true + } + + publicly_accessible = true + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_mq_broker" "example" { + } +`, + expected: mq.Broker{ + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: 
defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Logging: mq.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + General: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptBroker(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_mq_broker" "example" { + logs { + general = true + } + + publicly_accessible = true + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Brokers, 1) + broker := adapted.Brokers[0] + + assert.Equal(t, 2, broker.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, broker.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, broker.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 5, broker.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 4, broker.Logging.General.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, broker.Logging.General.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, broker.PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, broker.PublicAccess.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/msk/adapt.go b/internal/adapters/terraform/aws/msk/adapt.go new file mode 100644 index 000000000000..faf43df2197b --- /dev/null +++ b/internal/adapters/terraform/aws/msk/adapt.go @@ -0,0 +1,97 @@ +package msk + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) msk.MSK { + return msk.MSK{ + Clusters: adaptClusters(modules), + } +} + +func 
adaptClusters(modules terraform.Modules) []msk.Cluster { + var clusters []msk.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_msk_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) msk.Cluster { + cluster := msk.Cluster{ + Metadata: resource.GetMetadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: resource.GetMetadata(), + ClientBroker: defsecTypes.StringDefault("TLS_PLAINTEXT", resource.GetMetadata()), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + Metadata: resource.GetMetadata(), + KMSKeyARN: defsecTypes.StringDefault("", resource.GetMetadata()), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Logging: msk.Logging{ + Metadata: resource.GetMetadata(), + Broker: msk.BrokerLogging{ + Metadata: resource.GetMetadata(), + S3: msk.S3Logging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + }, + }, + } + + if encryptBlock := resource.GetBlock("encryption_info"); encryptBlock.IsNotNil() { + if encryptionInTransitBlock := encryptBlock.GetBlock("encryption_in_transit"); encryptionInTransitBlock.IsNotNil() { + cluster.EncryptionInTransit.Metadata = encryptionInTransitBlock.GetMetadata() + if clientBrokerAttr := encryptionInTransitBlock.GetAttribute("client_broker"); clientBrokerAttr.IsNotNil() { + cluster.EncryptionInTransit.ClientBroker = clientBrokerAttr.AsStringValueOrDefault("TLS", encryptionInTransitBlock) + } + } + + if encryptionAtRestAttr := encryptBlock.GetAttribute("encryption_at_rest_kms_key_arn"); encryptionAtRestAttr.IsNotNil() { + 
cluster.EncryptionAtRest.Metadata = encryptionAtRestAttr.GetMetadata() + cluster.EncryptionAtRest.KMSKeyARN = encryptionAtRestAttr.AsStringValueOrDefault("", encryptBlock) + cluster.EncryptionAtRest.Enabled = defsecTypes.Bool(true, encryptionAtRestAttr.GetMetadata()) + } + } + + if logBlock := resource.GetBlock("logging_info"); logBlock.IsNotNil() { + cluster.Logging.Metadata = logBlock.GetMetadata() + if brokerLogsBlock := logBlock.GetBlock("broker_logs"); brokerLogsBlock.IsNotNil() { + cluster.Logging.Broker.Metadata = brokerLogsBlock.GetMetadata() + if brokerLogsBlock.HasChild("s3") { + if s3Block := brokerLogsBlock.GetBlock("s3"); s3Block.IsNotNil() { + s3enabledAttr := s3Block.GetAttribute("enabled") + cluster.Logging.Broker.S3.Metadata = s3Block.GetMetadata() + cluster.Logging.Broker.S3.Enabled = s3enabledAttr.AsBoolValueOrDefault(false, s3Block) + } + } + if cloudwatchBlock := brokerLogsBlock.GetBlock("cloudwatch_logs"); cloudwatchBlock.IsNotNil() { + cwEnabledAttr := cloudwatchBlock.GetAttribute("enabled") + cluster.Logging.Broker.Cloudwatch.Metadata = cloudwatchBlock.GetMetadata() + cluster.Logging.Broker.Cloudwatch.Enabled = cwEnabledAttr.AsBoolValueOrDefault(false, cloudwatchBlock) + } + if firehoseBlock := brokerLogsBlock.GetBlock("firehose"); firehoseBlock.IsNotNil() { + firehoseEnabledAttr := firehoseBlock.GetAttribute("enabled") + cluster.Logging.Broker.Firehose.Metadata = firehoseBlock.GetMetadata() + cluster.Logging.Broker.Firehose.Enabled = firehoseEnabledAttr.AsBoolValueOrDefault(false, firehoseBlock) + } + } + } + + return cluster +} diff --git a/internal/adapters/terraform/aws/msk/adapt_test.go b/internal/adapters/terraform/aws/msk/adapt_test.go new file mode 100644 index 000000000000..de3752b73991 --- /dev/null +++ b/internal/adapters/terraform/aws/msk/adapt_test.go @@ -0,0 +1,200 @@ +package msk + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/msk" + 
+ "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected msk.Cluster + }{ + { + name: "configured", + terraform: ` + resource "aws_msk_cluster" "example" { + cluster_name = "example" + + encryption_info { + encryption_in_transit { + client_broker = "TLS" + in_cluster = true + } + encryption_at_rest_kms_key_arn = "foo-bar-key" + } + + logging_info { + broker_logs { + cloudwatch_logs { + enabled = true + log_group = aws_cloudwatch_log_group.test.name + } + firehose { + enabled = true + delivery_stream = aws_kinesis_firehose_delivery_stream.test_stream.name + } + s3 { + enabled = true + bucket = aws_s3_bucket.bucket.id + prefix = "logs/msk-" + } + } + } + } +`, + expected: msk.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: defsecTypes.NewTestMetadata(), + ClientBroker: defsecTypes.String("TLS", defsecTypes.NewTestMetadata()), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyARN: defsecTypes.String("foo-bar-key", defsecTypes.NewTestMetadata()), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Logging: msk.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Broker: msk.BrokerLogging{ + Metadata: defsecTypes.NewTestMetadata(), + S3: msk.S3Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, 
+ { + name: "defaults", + terraform: ` + resource "aws_msk_cluster" "example" { + } +`, + expected: msk.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptionInTransit: msk.EncryptionInTransit{ + Metadata: defsecTypes.NewTestMetadata(), + ClientBroker: defsecTypes.String("TLS_PLAINTEXT", defsecTypes.NewTestMetadata()), + }, + Logging: msk.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Broker: msk.BrokerLogging{ + Metadata: defsecTypes.NewTestMetadata(), + S3: msk.S3Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + Cloudwatch: msk.CloudwatchLogging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + Firehose: msk.FirehoseLogging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_msk_cluster" "example" { + cluster_name = "example" + + encryption_info { + encryption_in_transit { + client_broker = "TLS" + in_cluster = true + } + encryption_at_rest_kms_key_arn = "foo-bar-key" + } + + logging_info { + broker_logs { + cloudwatch_logs { + enabled = true + log_group = aws_cloudwatch_log_group.test.name + } + firehose { + enabled = true + delivery_stream = aws_kinesis_firehose_delivery_stream.test_stream.name + } + s3 { + enabled = true + bucket = aws_s3_bucket.bucket.id + prefix = "logs/msk-" + } + } + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + cluster := adapted.Clusters[0] + + assert.Equal(t, 2, 
cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 30, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, cluster.EncryptionInTransit.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, cluster.EncryptionInTransit.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, cluster.EncryptionAtRest.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, cluster.EncryptionAtRest.Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, cluster.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, cluster.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, cluster.Logging.Broker.Metadata.Range().GetStartLine()) + assert.Equal(t, 28, cluster.Logging.Broker.Metadata.Range().GetEndLine()) + + assert.Equal(t, 15, cluster.Logging.Broker.Cloudwatch.Metadata.Range().GetStartLine()) + assert.Equal(t, 18, cluster.Logging.Broker.Cloudwatch.Metadata.Range().GetEndLine()) + + assert.Equal(t, 16, cluster.Logging.Broker.Cloudwatch.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, cluster.Logging.Broker.Cloudwatch.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, cluster.Logging.Broker.Firehose.Metadata.Range().GetStartLine()) + assert.Equal(t, 22, cluster.Logging.Broker.Firehose.Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, cluster.Logging.Broker.Firehose.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, cluster.Logging.Broker.Firehose.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, cluster.Logging.Broker.S3.Metadata.Range().GetStartLine()) + assert.Equal(t, 27, cluster.Logging.Broker.S3.Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, cluster.Logging.Broker.S3.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, cluster.Logging.Broker.S3.Enabled.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/neptune/adapt.go b/internal/adapters/terraform/aws/neptune/adapt.go new file mode 100644 index 000000000000..7283624c33d2 
--- /dev/null +++ b/internal/adapters/terraform/aws/neptune/adapt.go @@ -0,0 +1,50 @@ +package neptune + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) neptune.Neptune { + return neptune.Neptune{ + Clusters: adaptClusters(modules), + } +} + +func adaptClusters(modules terraform.Modules) []neptune.Cluster { + var clusters []neptune.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_neptune_cluster") { + clusters = append(clusters, adaptCluster(resource)) + } + } + return clusters +} + +func adaptCluster(resource *terraform.Block) neptune.Cluster { + cluster := neptune.Cluster{ + Metadata: resource.GetMetadata(), + Logging: neptune.Logging{ + Metadata: resource.GetMetadata(), + Audit: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + StorageEncrypted: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + } + + if enableLogExportsAttr := resource.GetAttribute("enable_cloudwatch_logs_exports"); enableLogExportsAttr.IsNotNil() { + cluster.Logging.Metadata = enableLogExportsAttr.GetMetadata() + if enableLogExportsAttr.Contains("audit") { + cluster.Logging.Audit = defsecTypes.Bool(true, enableLogExportsAttr.GetMetadata()) + } + } + + storageEncryptedAttr := resource.GetAttribute("storage_encrypted") + cluster.StorageEncrypted = storageEncryptedAttr.AsBoolValueOrDefault(false, resource) + + KMSKeyAttr := resource.GetAttribute("kms_key_arn") + cluster.KMSKeyID = KMSKeyAttr.AsStringValueOrDefault("", resource) + + return cluster +} diff --git a/internal/adapters/terraform/aws/neptune/adapt_test.go b/internal/adapters/terraform/aws/neptune/adapt_test.go new file mode 100644 index 000000000000..148b29f3b3e4 --- /dev/null +++ 
b/internal/adapters/terraform/aws/neptune/adapt_test.go @@ -0,0 +1,97 @@ +package neptune + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/neptune" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected neptune.Cluster + }{ + { + name: "configured", + terraform: ` + resource "aws_neptune_cluster" "example" { + enable_cloudwatch_logs_exports = ["audit"] + storage_encrypted = true + kms_key_arn = "kms-key" + } +`, + expected: neptune.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Logging: neptune.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Audit: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms-key", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_neptune_cluster" "example" { + } +`, + expected: neptune.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + Logging: neptune.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Audit: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + StorageEncrypted: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_neptune_cluster" "example" { + 
enable_cloudwatch_logs_exports = ["audit"] + storage_encrypted = true + kms_key_arn = "kms-key" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + cluster := adapted.Clusters[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 6, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 3, cluster.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.Logging.Audit.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.Logging.Audit.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, cluster.StorageEncrypted.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.StorageEncrypted.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, cluster.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/provider/adapt.go b/internal/adapters/terraform/aws/provider/adapt.go new file mode 100644 index 000000000000..b34fc8e730c5 --- /dev/null +++ b/internal/adapters/terraform/aws/provider/adapt.go @@ -0,0 +1,166 @@ +package provider + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +const ( + defaultMaxRetires = 25 + defaultSharedConfigFile = "~/.aws/config" + //#nosec G101 -- False positive + defaultSharedCredentialsFile = "~/.aws/credentials" +) + +func Adapt(modules terraform.Modules) []aws.TerraformProvider { + return adaptProviders(modules) +} + +func adaptProviders(modules terraform.Modules) []aws.TerraformProvider { + var providers []aws.TerraformProvider + for _, providerBlock := range modules.GetBlocks().OfType("provider") { + if providerBlock.Label() == "aws" { + providers = append(providers, 
adaptProvider(providerBlock)) + } + } + + return providers +} + +func adaptProvider(b *terraform.Block) aws.TerraformProvider { + return aws.TerraformProvider{ + Metadata: b.GetMetadata(), + Alias: getStringAttrValue("alias", b), + Version: getStringAttrValue("version", b), + AccessKey: getStringAttrValue("access_key", b), + AllowedAccountsIDs: b.GetAttribute("allowed_account_ids").AsStringValueSliceOrEmpty(), + AssumeRole: adaptAssumeRole(b), + AssumeRoleWithWebIdentity: adaptAssumeRoleWithWebIdentity(b), + CustomCABundle: getStringAttrValue("custom_ca_bundle", b), + DefaultTags: adaptDefaultTags(b), + EC2MetadataServiceEndpoint: getStringAttrValue("ec2_metadata_service_endpoint", b), + EC2MetadataServiceEndpointMode: getStringAttrValue("ec2_metadata_service_endpoint_mode", b), + Endpoints: adaptEndpoints(b), + ForbiddenAccountIDs: b.GetAttribute("forbidden_account_ids").AsStringValueSliceOrEmpty(), + HttpProxy: getStringAttrValue("http_proxy", b), + IgnoreTags: adaptIgnoreTags(b), + Insecure: b.GetAttribute("insecure").AsBoolValueOrDefault(false, b), + MaxRetries: b.GetAttribute("max_retries").AsIntValueOrDefault(defaultMaxRetires, b), + Profile: getStringAttrValue("profile", b), + Region: getStringAttrValue("region", b), + RetryMode: getStringAttrValue("retry_mode", b), + S3UsePathStyle: b.GetAttribute("s3_use_path_style").AsBoolValueOrDefault(false, b), + S3USEast1RegionalEndpoint: getStringAttrValue("s3_us_east_1_regional_endpoint", b), + SecretKey: getStringAttrValue("secret_key", b), + SharedConfigFiles: b.GetAttribute("shared_config_files").AsStringValuesOrDefault(b, defaultSharedConfigFile), + SharedCredentialsFiles: b.GetAttribute("shared_credentials_files").AsStringValuesOrDefault(b, defaultSharedCredentialsFile), + SkipCredentialsValidation: b.GetAttribute("skip_credentials_validation").AsBoolValueOrDefault(false, b), + SkipMetadataAPICheck: b.GetAttribute("skip_metadata_api_check").AsBoolValueOrDefault(false, b), + SkipRegionValidation: 
b.GetAttribute("skip_region_validation").AsBoolValueOrDefault(false, b), + SkipRequestingAccountID: b.GetAttribute("skip_requesting_account_id").AsBoolValueOrDefault(false, b), + STSRegion: getStringAttrValue("sts_region", b), + Token: getStringAttrValue("token", b), + UseDualstackEndpoint: b.GetAttribute("use_dualstack_endpoint").AsBoolValueOrDefault(false, b), + UseFIPSEndpoint: b.GetAttribute("use_fips_endpoint").AsBoolValueOrDefault(false, b), + } +} + +func adaptAssumeRole(p *terraform.Block) aws.AssumeRole { + assumeRoleBlock := p.GetBlock("assume_role") + + if assumeRoleBlock.IsNil() { + return aws.AssumeRole{ + Metadata: p.GetMetadata(), + Duration: types.StringDefault("", p.GetMetadata()), + ExternalID: types.StringDefault("", p.GetMetadata()), + Policy: types.StringDefault("", p.GetMetadata()), + RoleARN: types.StringDefault("", p.GetMetadata()), + SessionName: types.StringDefault("", p.GetMetadata()), + SourceIdentity: types.StringDefault("", p.GetMetadata()), + } + } + + return aws.AssumeRole{ + Metadata: assumeRoleBlock.GetMetadata(), + Duration: getStringAttrValue("duration", p), + ExternalID: getStringAttrValue("external_id", p), + Policy: getStringAttrValue("policy", p), + PolicyARNs: p.GetAttribute("policy_arns").AsStringValueSliceOrEmpty(), + RoleARN: getStringAttrValue("role_arn", p), + SessionName: getStringAttrValue("session_name", p), + SourceIdentity: getStringAttrValue("source_identity", p), + Tags: p.GetAttribute("tags").AsMapValue(), + TransitiveTagKeys: p.GetAttribute("transitive_tag_keys").AsStringValueSliceOrEmpty(), + } +} + +func adaptAssumeRoleWithWebIdentity(p *terraform.Block) aws.AssumeRoleWithWebIdentity { + block := p.GetBlock("assume_role_with_web_identity") + if block.IsNil() { + return aws.AssumeRoleWithWebIdentity{ + Metadata: p.GetMetadata(), + Duration: types.StringDefault("", p.GetMetadata()), + Policy: types.StringDefault("", p.GetMetadata()), + RoleARN: types.StringDefault("", p.GetMetadata()), + SessionName: 
types.StringDefault("", p.GetMetadata()), + WebIdentityToken: types.StringDefault("", p.GetMetadata()), + WebIdentityTokenFile: types.StringDefault("", p.GetMetadata()), + } + } + + return aws.AssumeRoleWithWebIdentity{ + Metadata: block.GetMetadata(), + Duration: getStringAttrValue("duration", p), + Policy: getStringAttrValue("policy", p), + PolicyARNs: p.GetAttribute("policy_arns").AsStringValueSliceOrEmpty(), + RoleARN: getStringAttrValue("role_arn", p), + SessionName: getStringAttrValue("session_name", p), + WebIdentityToken: getStringAttrValue("web_identity_token", p), + WebIdentityTokenFile: getStringAttrValue("web_identity_token_file", p), + } +} + +func adaptEndpoints(p *terraform.Block) types.MapValue { + block := p.GetBlock("endpoints") + if block.IsNil() { + return types.MapDefault(make(map[string]string), p.GetMetadata()) + } + + values := make(map[string]string) + + for name, attr := range block.Attributes() { + values[name] = attr.AsStringValueOrDefault("", block).Value() + } + + return types.Map(values, block.GetMetadata()) +} + +func adaptDefaultTags(p *terraform.Block) aws.DefaultTags { + attr, _ := p.GetNestedAttribute("default_tags.tags") + if attr.IsNil() { + return aws.DefaultTags{} + } + + return aws.DefaultTags{ + Metadata: attr.GetMetadata(), + Tags: attr.AsMapValue(), + } +} + +func adaptIgnoreTags(p *terraform.Block) aws.IgnoreTags { + block := p.GetBlock("ignore_tags") + if block.IsNil() { + return aws.IgnoreTags{} + } + + return aws.IgnoreTags{ + Metadata: block.GetMetadata(), + Keys: block.GetAttribute("keys").AsStringValueSliceOrEmpty(), + KeyPrefixes: block.GetAttribute("key_prefixes").AsStringValueSliceOrEmpty(), + } +} + +func getStringAttrValue(name string, parent *terraform.Block) types.StringValue { + return parent.GetAttribute(name).AsStringValueOrDefault("", parent) +} diff --git a/internal/adapters/terraform/aws/provider/adapt_test.go b/internal/adapters/terraform/aws/provider/adapt_test.go new file mode 100644 index 
000000000000..acdd08ded4f2 --- /dev/null +++ b/internal/adapters/terraform/aws/provider/adapt_test.go @@ -0,0 +1,129 @@ +package provider + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/aws" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected []aws.TerraformProvider + }{ + { + name: "happy", + source: ` +variable "s3_use_path_style" { + default = true +} + +provider "aws" { + version = "~> 5.0" + region = "us-east-1" + profile = "localstack" + + access_key = "fake" + secret_key = "fake" + skip_credentials_validation = true + skip_metadata_api_check = true + skip_requesting_account_id = true + s3_use_path_style = var.s3_use_path_style + + endpoints { + dynamodb = "http://localhost:4566" + s3 = "http://localhost:4566" + } + + default_tags { + tags = { + Environment = "Local" + Name = "LocalStack" + } + } +}`, + expected: []aws.TerraformProvider{ + { + Version: types.String("~> 5.0", types.NewTestMetadata()), + Region: types.String("us-east-1", types.NewTestMetadata()), + DefaultTags: aws.DefaultTags{ + Metadata: types.NewTestMetadata(), + Tags: types.Map(map[string]string{ + "Environment": "Local", + "Name": "LocalStack", + }, types.NewTestMetadata()), + }, + Endpoints: types.Map(map[string]string{ + "dynamodb": "http://localhost:4566", + "s3": "http://localhost:4566", + }, types.NewTestMetadata()), + Profile: types.String("localstack", types.NewTestMetadata()), + AccessKey: types.String("fake", types.NewTestMetadata()), + SecretKey: types.String("fake", types.NewTestMetadata()), + SkipCredentialsValidation: types.Bool(true, types.NewTestMetadata()), + SkipMetadataAPICheck: types.Bool(true, types.NewTestMetadata()), + SkipRequestingAccountID: types.Bool(true, types.NewTestMetadata()), + S3UsePathStyle: 
types.Bool(true, types.NewTestMetadata()), + MaxRetries: types.IntDefault(defaultMaxRetires, types.NewTestMetadata()), + SharedConfigFiles: types.StringValueList{ + types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()), + }, + SharedCredentialsFiles: types.StringValueList{ + types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()), + }, + }, + }, + }, + { + name: "multiply provider configurations", + source: ` + +provider "aws" { + region = "us-east-1" +} + +provider "aws" { + alias = "west" + region = "us-west-2" +} +`, + expected: []aws.TerraformProvider{ + { + Region: types.String("us-east-1", types.NewTestMetadata()), + Endpoints: types.Map(make(map[string]string), types.NewTestMetadata()), + MaxRetries: types.IntDefault(defaultMaxRetires, types.NewTestMetadata()), + SharedConfigFiles: types.StringValueList{ + types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()), + }, + SharedCredentialsFiles: types.StringValueList{ + types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()), + }, + }, + { + Alias: types.String("west", types.NewTestMetadata()), + Region: types.String("us-west-2", types.NewTestMetadata()), + Endpoints: types.Map(make(map[string]string), types.NewTestMetadata()), + MaxRetries: types.IntDefault(defaultMaxRetires, types.NewTestMetadata()), + SharedConfigFiles: types.StringValueList{ + types.StringDefault(defaultSharedConfigFile, types.NewTestMetadata()), + }, + SharedCredentialsFiles: types.StringValueList{ + types.StringDefault(defaultSharedCredentialsFile, types.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.source, ".tf") + testutil.AssertDefsecEqual(t, test.expected, Adapt(modules)) + }) + } +} diff --git a/internal/adapters/terraform/aws/rds/adapt.go b/internal/adapters/terraform/aws/rds/adapt.go new file mode 100644 index 000000000000..a03b3d124058 --- 
/dev/null +++ b/internal/adapters/terraform/aws/rds/adapt.go @@ -0,0 +1,256 @@ +package rds + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) rds.RDS { + return rds.RDS{ + Instances: getInstances(modules), + Clusters: getClusters(modules), + Classic: getClassic(modules), + Snapshots: getSnapshots(modules), + ParameterGroups: getParameterGroups(modules), + } +} + +func getInstances(modules terraform.Modules) (instances []rds.Instance) { + for _, resource := range modules.GetResourcesByType("aws_db_instance") { + instances = append(instances, adaptInstance(resource, modules)) + } + + return instances +} + +func getParameterGroups(modules terraform.Modules) (parametergroups []rds.ParameterGroups) { + for _, resource := range modules.GetResourcesByType("aws_db_parameter_group") { + parametergroups = append(parametergroups, adaptDBParameterGroups(resource, modules)) + } + + return parametergroups +} + +func getSnapshots(modules terraform.Modules) (snapshots []rds.Snapshots) { + for _, resource := range modules.GetResourcesByType("aws_db_snapshot") { + snapshots = append(snapshots, adaptDBSnapshots(resource, modules)) + } + + return snapshots +} + +func getClusters(modules terraform.Modules) (clusters []rds.Cluster) { + + rdsInstanceMaps := modules.GetChildResourceIDMapByType("aws_rds_cluster_instance") + for _, resource := range modules.GetResourcesByType("aws_rds_cluster") { + cluster, instanceIDs := adaptCluster(resource, modules) + for _, id := range instanceIDs { + rdsInstanceMaps.Resolve(id) + } + clusters = append(clusters, cluster) + } + + orphanResources := modules.GetResourceByIDs(rdsInstanceMaps.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := rds.Cluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + BackupRetentionPeriodDays: defsecTypes.IntDefault(1, defsecTypes.NewUnmanagedMetadata()), + ReplicationSourceARN: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Instances: nil, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EncryptStorage: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + PublicAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Engine: defsecTypes.StringUnresolvable(defsecTypes.NewUnmanagedMetadata()), + LatestRestorableTime: defsecTypes.TimeUnresolvable(defsecTypes.NewUnmanagedMetadata()), + DeletionProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } + for _, orphan := range orphanResources { + orphanage.Instances = append(orphanage.Instances, adaptClusterInstance(orphan, modules)) + } + clusters = append(clusters, orphanage) + } + + return clusters +} + +func getClassic(modules terraform.Modules) rds.Classic { + classic := rds.Classic{ + DBSecurityGroups: nil, + } + for _, resource := range modules.GetResourcesByType("aws_db_security_group", "aws_redshift_security_group", "aws_elasticache_security_group") { + classic.DBSecurityGroups = append(classic.DBSecurityGroups, adaptClassicDBSecurityGroup(resource)) + } + return classic +} + +func adaptClusterInstance(resource *terraform.Block, modules terraform.Modules) rds.ClusterInstance { + clusterIdAttr := resource.GetAttribute("cluster_identifier") + clusterId := clusterIdAttr.AsStringValueOrDefault("", resource) + + if 
clusterIdAttr.IsResourceBlockReference("aws_rds_cluster") { + if referenced, err := modules.GetReferencedBlock(clusterIdAttr, resource); err == nil { + clusterId = defsecTypes.String(referenced.FullName(), referenced.GetMetadata()) + } + } + + return rds.ClusterInstance{ + ClusterIdentifier: clusterId, + Instance: adaptInstance(resource, modules), + } +} + +func adaptClassicDBSecurityGroup(resource *terraform.Block) rds.DBSecurityGroup { + return rds.DBSecurityGroup{ + Metadata: resource.GetMetadata(), + } +} + +func adaptInstance(resource *terraform.Block, modules terraform.Modules) rds.Instance { + + var ReadReplicaDBInstanceIdentifiers []defsecTypes.StringValue + rrdiAttr := resource.GetAttribute("replicate_source_db") + for _, rrdi := range rrdiAttr.AsStringValues() { + ReadReplicaDBInstanceIdentifiers = append(ReadReplicaDBInstanceIdentifiers, rrdi) + } + + var TagList []rds.TagList + tagres := resource.GetBlocks("tags") + for _, tagres := range tagres { + + TagList = append(TagList, rds.TagList{ + Metadata: tagres.GetMetadata(), + }) + } + + var EnabledCloudwatchLogsExports []defsecTypes.StringValue + ecweAttr := resource.GetAttribute("enabled_cloudwatch_logs_exports") + for _, ecwe := range ecweAttr.AsStringValues() { + EnabledCloudwatchLogsExports = append(EnabledCloudwatchLogsExports, ecwe) + } + + replicaSource := resource.GetAttribute("replicate_source_db") + replicaSourceValue := "" + if replicaSource.IsNotNil() { + if referenced, err := modules.GetReferencedBlock(replicaSource, resource); err == nil { + replicaSourceValue = referenced.ID() + } + } + return rds.Instance{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + ReplicationSourceARN: defsecTypes.StringExplicit(replicaSourceValue, resource.GetMetadata()), + PerformanceInsights: adaptPerformanceInsights(resource), + Encryption: adaptEncryption(resource), + PublicAccess: 
resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + IAMAuthEnabled: resource.GetAttribute("iam_database_authentication_enabled").AsBoolValueOrDefault(false, resource), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + DBInstanceArn: resource.GetAttribute("arn").AsStringValueOrDefault("", resource), + StorageEncrypted: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(true, resource), + DBInstanceIdentifier: resource.GetAttribute("identifier").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + AutoMinorVersionUpgrade: resource.GetAttribute("auto_minor_version_upgrade").AsBoolValueOrDefault(false, resource), + MultiAZ: resource.GetAttribute("multi_az").AsBoolValueOrDefault(false, resource), + PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(false, resource), + LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), + ReadReplicaDBInstanceIdentifiers: ReadReplicaDBInstanceIdentifiers, + TagList: TagList, + EnabledCloudwatchLogsExports: EnabledCloudwatchLogsExports, + } +} + +func adaptDBParameterGroups(resource *terraform.Block, modules terraform.Modules) rds.ParameterGroups { + + var Parameters []rds.Parameters + paramres := resource.GetBlocks("parameter") + for _, paramres := range paramres { + + Parameters = append(Parameters, rds.Parameters{ + Metadata: paramres.GetMetadata(), + ParameterName: defsecTypes.StringDefault("", paramres.GetMetadata()), + ParameterValue: defsecTypes.StringDefault("", paramres.GetMetadata()), + }) + } + + return rds.ParameterGroups{ + Metadata: resource.GetMetadata(), + DBParameterGroupName: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + DBParameterGroupFamily: 
resource.GetAttribute("family").AsStringValueOrDefault("", resource), + Parameters: Parameters, + } +} + +func adaptDBSnapshots(resource *terraform.Block, modules terraform.Modules) rds.Snapshots { + + return rds.Snapshots{ + Metadata: resource.GetMetadata(), + DBSnapshotIdentifier: resource.GetAttribute("db_snapshot_identifier").AsStringValueOrDefault("", resource), + DBSnapshotArn: resource.GetAttribute("db_snapshot_arn").AsStringValueOrDefault("", resource), + Encrypted: resource.GetAttribute("encrypted").AsBoolValueOrDefault(true, resource), + KmsKeyId: resource.GetAttribute("kms_key_id").AsStringValueOrDefault("", resource), + SnapshotAttributes: nil, + } +} + +func adaptCluster(resource *terraform.Block, modules terraform.Modules) (rds.Cluster, []string) { + + clusterInstances, ids := getClusterInstances(resource, modules) + + var public bool + for _, instance := range clusterInstances { + if instance.PublicAccess.IsTrue() { + public = true + break + } + } + + return rds.Cluster{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(1, resource), + ReplicationSourceARN: resource.GetAttribute("replication_source_identifier").AsStringValueOrDefault("", resource), + PerformanceInsights: adaptPerformanceInsights(resource), + Instances: clusterInstances, + Encryption: adaptEncryption(resource), + PublicAccess: defsecTypes.Bool(public, resource.GetMetadata()), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault(rds.EngineAurora, resource), + LatestRestorableTime: defsecTypes.TimeUnresolvable(resource.GetMetadata()), + AvailabilityZones: resource.GetAttribute("availability_zones").AsStringValueSliceOrEmpty(), + DeletionProtection: resource.GetAttribute("deletion_protection").AsBoolValueOrDefault(false, resource), + }, ids +} + +func getClusterInstances(resource *terraform.Block, modules terraform.Modules) (clusterInstances []rds.ClusterInstance, instanceIDs []string) { + 
clusterInstanceResources := modules.GetReferencingResources(resource, "aws_rds_cluster_instance", "cluster_identifier") + + for _, ciResource := range clusterInstanceResources { + instanceIDs = append(instanceIDs, ciResource.ID()) + clusterInstances = append(clusterInstances, adaptClusterInstance(ciResource, modules)) + } + return clusterInstances, instanceIDs +} + +func adaptPerformanceInsights(resource *terraform.Block) rds.PerformanceInsights { + return rds.PerformanceInsights{ + Metadata: resource.GetMetadata(), + Enabled: resource.GetAttribute("performance_insights_enabled").AsBoolValueOrDefault(false, resource), + KMSKeyID: resource.GetAttribute("performance_insights_kms_key_id").AsStringValueOrDefault("", resource), + } +} + +func adaptEncryption(resource *terraform.Block) rds.Encryption { + return rds.Encryption{ + Metadata: resource.GetMetadata(), + EncryptStorage: resource.GetAttribute("storage_encrypted").AsBoolValueOrDefault(false, resource), + KMSKeyID: resource.GetAttribute("kms_key_id").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/aws/rds/adapt_test.go b/internal/adapters/terraform/aws/rds/adapt_test.go new file mode 100644 index 000000000000..c6e750b5277d --- /dev/null +++ b/internal/adapters/terraform/aws/rds/adapt_test.go @@ -0,0 +1,332 @@ +package rds + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/rds" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected rds.RDS + }{ + { + name: "defined", + terraform: ` + + resource "aws_rds_cluster" "example" { + engine = "aurora-mysql" + availability_zones = ["us-west-2a", "us-west-2b", "us-west-2c"] + 
backup_retention_period = 7 + kms_key_id = "kms_key_1" + storage_encrypted = true + replication_source_identifier = "arn-of-a-source-db-cluster" + deletion_protection = true + } + + resource "aws_rds_cluster_instance" "example" { + cluster_identifier = aws_rds_cluster.example.id + name = "bar" + performance_insights_enabled = true + performance_insights_kms_key_id = "performance_key_0" + kms_key_id = "kms_key_0" + storage_encrypted = true + } + + resource "aws_db_security_group" "example" { + # ... + } + + resource "aws_db_instance" "example" { + publicly_accessible = false + backup_retention_period = 5 + skip_final_snapshot = true + performance_insights_enabled = true + performance_insights_kms_key_id = "performance_key_1" + storage_encrypted = true + kms_key_id = "kms_key_2" + } +`, + expected: rds.RDS{ + Instances: []rds.Instance{ + { + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(5, defsecTypes.NewTestMetadata()), + ReplicationSourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("performance_key_1", defsecTypes.NewTestMetadata()), + }, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms_key_2", defsecTypes.NewTestMetadata()), + }, + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()), + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + Clusters: []rds.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(7, defsecTypes.NewTestMetadata()), + ReplicationSourceARN: defsecTypes.String("arn-of-a-source-db-cluster", 
defsecTypes.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms_key_1", defsecTypes.NewTestMetadata()), + }, + Instances: []rds.ClusterInstance{ + { + Instance: rds.Instance{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + ReplicationSourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("performance_key_0", defsecTypes.NewTestMetadata()), + }, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptStorage: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("kms_key_0", defsecTypes.NewTestMetadata()), + }, + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()), + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + ClusterIdentifier: defsecTypes.String("aws_rds_cluster.example", defsecTypes.NewTestMetadata()), + }, + }, + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String(rds.EngineAuroraMysql, defsecTypes.NewTestMetadata()), + AvailabilityZones: defsecTypes.StringValueList{ + defsecTypes.String("us-west-2a", defsecTypes.NewTestMetadata()), + defsecTypes.String("us-west-2b", defsecTypes.NewTestMetadata()), + defsecTypes.String("us-west-2c", defsecTypes.NewTestMetadata()), + }, + 
DeletionProtection: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + Classic: rds.Classic{ + DBSecurityGroups: []rds.DBSecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptInstance(t *testing.T) { + tests := []struct { + name string + terraform string + expected rds.Instance + }{ + { + name: "instance defaults", + terraform: ` + resource "aws_db_instance" "example" { + } +`, + expected: rds.Instance{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + ReplicationSourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptStorage: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()), + StorageEncrypted: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + IAMAuthEnabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptInstance(modules.GetBlocks()[0], modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptCluster(t 
*testing.T) { + tests := []struct { + name string + terraform string + expected rds.Cluster + }{ + { + name: "cluster defaults", + terraform: ` + resource "aws_rds_cluster" "example" { + } +`, + expected: rds.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(1, defsecTypes.NewTestMetadata()), + ReplicationSourceARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + PerformanceInsights: rds.PerformanceInsights{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + Encryption: rds.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + EncryptStorage: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String(rds.EngineAurora, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted, _ := adaptCluster(modules.GetBlocks()[0], modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_rds_cluster" "example" { + backup_retention_period = 7 + kms_key_id = "kms_key_1" + storage_encrypted = true + replication_source_identifier = "arn-of-a-source-db-cluster" + } + + resource "aws_rds_cluster_instance" "example" { + cluster_identifier = aws_rds_cluster.example.id + backup_retention_period = 7 + performance_insights_enabled = true + performance_insights_kms_key_id = "performance_key" + storage_encrypted = true + kms_key_id = "kms_key_0" + } + + resource "aws_db_security_group" "example" { + } + + resource "aws_db_instance" "example" { + publicly_accessible = false + backup_retention_period = 7 + 
performance_insights_enabled = true + performance_insights_kms_key_id = "performance_key" + storage_encrypted = true + kms_key_id = "kms_key_0" + } +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + require.Len(t, adapted.Instances, 1) + + cluster := adapted.Clusters[0] + instance := adapted.Instances[0] + classic := adapted.Classic + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 7, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, cluster.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, cluster.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, cluster.ReplicationSourceARN.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, cluster.ReplicationSourceARN.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 9, cluster.Instances[0].Instance.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, cluster.Instances[0].Instance.Metadata.Range().GetEndLine()) + + assert.Equal(t, 2, cluster.Instances[0].ClusterIdentifier.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, cluster.Instances[0].ClusterIdentifier.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, cluster.Instances[0].Instance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, cluster.Instances[0].Instance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, cluster.Instances[0].Instance.PerformanceInsights.Enabled.GetMetadata().Range().GetStartLine()) + 
assert.Equal(t, 12, cluster.Instances[0].Instance.PerformanceInsights.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, cluster.Instances[0].Instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, cluster.Instances[0].Instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, cluster.Instances[0].Instance.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, cluster.Instances[0].Instance.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, cluster.Instances[0].Instance.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, cluster.Instances[0].Instance.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, classic.DBSecurityGroups[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 19, classic.DBSecurityGroups[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 21, instance.Metadata.Range().GetStartLine()) + assert.Equal(t, 28, instance.Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, instance.PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, instance.PublicAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, instance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, instance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 24, instance.PerformanceInsights.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, instance.PerformanceInsights.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 25, instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, instance.PerformanceInsights.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, instance.Encryption.EncryptStorage.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, 
instance.Encryption.EncryptStorage.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 27, instance.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 27, instance.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/redshift/adapt.go b/internal/adapters/terraform/aws/redshift/adapt.go new file mode 100644 index 000000000000..2875b6649c46 --- /dev/null +++ b/internal/adapters/terraform/aws/redshift/adapt.go @@ -0,0 +1,117 @@ +package redshift + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) redshift.Redshift { + return redshift.Redshift{ + Clusters: adaptClusters(modules), + SecurityGroups: adaptSecurityGroups(modules), + ClusterParameters: adaptParameters(modules), + ReservedNodes: nil, + } +} + +func adaptClusters(modules terraform.Modules) []redshift.Cluster { + var clusters []redshift.Cluster + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_cluster") { + clusters = append(clusters, adaptCluster(resource, module)) + } + } + return clusters +} + +func adaptSecurityGroups(modules terraform.Modules) []redshift.SecurityGroup { + var securityGroups []redshift.SecurityGroup + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_security_group") { + securityGroups = append(securityGroups, adaptSecurityGroup(resource)) + } + } + return securityGroups +} + +func adaptParameters(modules terraform.Modules) []redshift.ClusterParameter { + var Parameters []redshift.ClusterParameter + for _, module := range modules { + for _, resource := range module.GetResourcesByType("aws_redshift_parameter_group") { + for _, r := range resource.GetBlocks("parameter") { + Parameters = append(Parameters, adaptParameter(r)) + } + } + } 
+ return Parameters +} + +func adaptCluster(resource *terraform.Block, module *terraform.Module) redshift.Cluster { + cluster := redshift.Cluster{ + Metadata: resource.GetMetadata(), + ClusterIdentifier: resource.GetAttribute("cluster_identifier").AsStringValueOrDefault("", resource), + NodeType: resource.GetAttribute("node_type").AsStringValueOrDefault("", resource), + MasterUsername: resource.GetAttribute("master_username").AsStringValueOrDefault("", resource), + NumberOfNodes: resource.GetAttribute("number_of_nodes").AsIntValueOrDefault(1, resource), + PubliclyAccessible: resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(true, resource), + LoggingEnabled: defsecTypes.Bool(false, resource.GetMetadata()), + AutomatedSnapshotRetentionPeriod: defsecTypes.Int(0, resource.GetMetadata()), + AllowVersionUpgrade: resource.GetAttribute("allow_version_upgrade").AsBoolValueOrDefault(true, resource), + VpcId: defsecTypes.String("", resource.GetMetadata()), + Encryption: redshift.Encryption{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + KMSKeyID: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + EndPoint: redshift.EndPoint{ + Metadata: resource.GetMetadata(), + Port: resource.GetAttribute("port").AsIntValueOrDefault(5439, resource), + }, + SubnetGroupName: defsecTypes.StringDefault("", resource.GetMetadata()), + } + + encryptedAttr := resource.GetAttribute("encrypted") + cluster.Encryption.Enabled = encryptedAttr.AsBoolValueOrDefault(false, resource) + + if logBlock := resource.GetBlock("logging"); logBlock.IsNotNil() { + cluster.LoggingEnabled = logBlock.GetAttribute("enable").AsBoolValueOrDefault(false, logBlock) + } + + if snapBlock := resource.GetBlock("snapshot_copy"); snapBlock.IsNotNil() { + snapAttr := snapBlock.GetAttribute("retention_period") + cluster.AutomatedSnapshotRetentionPeriod = snapAttr.AsIntValueOrDefault(7, snapBlock) + } + + KMSKeyIDAttr := 
resource.GetAttribute("kms_key_id") + cluster.Encryption.KMSKeyID = KMSKeyIDAttr.AsStringValueOrDefault("", resource) + if KMSKeyIDAttr.IsResourceBlockReference("aws_kms_key") { + if kmsKeyBlock, err := module.GetReferencedBlock(KMSKeyIDAttr, resource); err == nil { + cluster.Encryption.KMSKeyID = defsecTypes.String(kmsKeyBlock.FullName(), kmsKeyBlock.GetMetadata()) + } + } + + subnetGroupNameAttr := resource.GetAttribute("cluster_subnet_group_name") + cluster.SubnetGroupName = subnetGroupNameAttr.AsStringValueOrDefault("", resource) + + return cluster +} + +func adaptSecurityGroup(resource *terraform.Block) redshift.SecurityGroup { + descriptionAttr := resource.GetAttribute("description") + descriptionVal := descriptionAttr.AsStringValueOrDefault("Managed by Terraform", resource) + + return redshift.SecurityGroup{ + Metadata: resource.GetMetadata(), + Description: descriptionVal, + } +} + +func adaptParameter(resource *terraform.Block) redshift.ClusterParameter { + + return redshift.ClusterParameter{ + Metadata: resource.GetMetadata(), + ParameterName: resource.GetAttribute("name").AsStringValueOrDefault("", resource), + ParameterValue: resource.GetAttribute("value").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/aws/redshift/adapt_test.go b/internal/adapters/terraform/aws/redshift/adapt_test.go new file mode 100644 index 000000000000..3a852f5cb9a4 --- /dev/null +++ b/internal/adapters/terraform/aws/redshift/adapt_test.go @@ -0,0 +1,230 @@ +package redshift + +import ( + "fmt" + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/redshift" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + 
expected redshift.Redshift + }{ + { + name: "reference key id", + terraform: ` + resource "aws_kms_key" "redshift" { + enable_key_rotation = true + } + + resource "aws_redshift_cluster" "example" { + cluster_identifier = "tf-redshift-cluster" + publicly_accessible = false + number_of_nodes = 1 + allow_version_upgrade = false + port = 5440 + encrypted = true + kms_key_id = aws_kms_key.redshift.key_id + cluster_subnet_group_name = "redshift_subnet" + } + + resource "aws_redshift_security_group" "default" { + name = "redshift-sg" + description = "some description" + } +`, + expected: redshift.Redshift{ + Clusters: []redshift.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + ClusterIdentifier: defsecTypes.String("tf-redshift-cluster", defsecTypes.NewTestMetadata()), + PubliclyAccessible: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + NumberOfNodes: defsecTypes.Int(1, defsecTypes.NewTestMetadata()), + AllowVersionUpgrade: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + EndPoint: redshift.EndPoint{ + Metadata: defsecTypes.NewTestMetadata(), + Port: defsecTypes.Int(5440, defsecTypes.NewTestMetadata()), + }, + Encryption: redshift.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("aws_kms_key.redshift", defsecTypes.NewTestMetadata()), + }, + SubnetGroupName: defsecTypes.String("redshift_subnet", defsecTypes.NewTestMetadata()), + }, + }, + SecurityGroups: []redshift.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("some description", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + fmt.Println(adapted.SecurityGroups[0].Description.Value()) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func 
Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected redshift.Cluster + }{ + { + name: "key as string", + terraform: ` + resource "aws_redshift_cluster" "example" { + cluster_identifier = "tf-redshift-cluster" + publicly_accessible = false + number_of_nodes = 1 + allow_version_upgrade = false + port = 5440 + encrypted = true + kms_key_id = "key-id" + cluster_subnet_group_name = "redshift_subnet" + } +`, + expected: redshift.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + ClusterIdentifier: defsecTypes.String("tf-redshift-cluster", defsecTypes.NewTestMetadata()), + PubliclyAccessible: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + NumberOfNodes: defsecTypes.Int(1, defsecTypes.NewTestMetadata()), + AllowVersionUpgrade: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + EndPoint: redshift.EndPoint{ + Metadata: defsecTypes.NewTestMetadata(), + Port: defsecTypes.Int(5440, defsecTypes.NewTestMetadata()), + }, + Encryption: redshift.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("key-id", defsecTypes.NewTestMetadata()), + }, + SubnetGroupName: defsecTypes.String("redshift_subnet", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "aws_redshift_cluster" "example" { + } +`, + expected: redshift.Cluster{ + Metadata: defsecTypes.NewTestMetadata(), + ClusterIdentifier: defsecTypes.String("", defsecTypes.NewTestMetadata()), + PubliclyAccessible: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + NumberOfNodes: defsecTypes.Int(1, defsecTypes.NewTestMetadata()), + AllowVersionUpgrade: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EndPoint: redshift.EndPoint{ + Metadata: defsecTypes.NewTestMetadata(), + Port: defsecTypes.Int(5439, defsecTypes.NewTestMetadata()), + }, + Encryption: redshift.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + 
Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + SubnetGroupName: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0], modules[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptSecurityGroup(t *testing.T) { + tests := []struct { + name string + terraform string + expected redshift.SecurityGroup + }{ + { + name: "defaults", + terraform: ` +resource "" "example" { +} +`, + expected: redshift.SecurityGroup{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("Managed by Terraform", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptSecurityGroup(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_kms_key" "redshift" { + enable_key_rotation = true + } + + resource "aws_redshift_cluster" "example" { + cluster_identifier = "tf-redshift-cluster" + encrypted = true + kms_key_id = aws_kms_key.redshift.key_id + cluster_subnet_group_name = "subnet name" + } + + resource "aws_redshift_security_group" "default" { + name = "redshift-sg" + description = "some description" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + require.Len(t, adapted.SecurityGroups, 1) + cluster := adapted.Clusters[0] + securityGroup := adapted.SecurityGroups[0] + + assert.Equal(t, 6, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, cluster.Metadata.Range().GetEndLine()) + + 
assert.Equal(t, 8, cluster.Encryption.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 8, cluster.Encryption.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 2, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, cluster.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, cluster.SubnetGroupName.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, cluster.SubnetGroupName.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, securityGroup.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, securityGroup.Metadata.Range().GetEndLine()) + + assert.Equal(t, 15, securityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, securityGroup.Description.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/s3/adapt.go b/internal/adapters/terraform/aws/s3/adapt.go new file mode 100644 index 000000000000..56e61a8763f9 --- /dev/null +++ b/internal/adapters/terraform/aws/s3/adapt.go @@ -0,0 +1,18 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) s3.S3 { + + a := &adapter{ + modules: modules, + bucketMap: make(map[string]*s3.Bucket), + } + + return s3.S3{ + Buckets: a.adaptBuckets(), + } +} diff --git a/internal/adapters/terraform/aws/s3/adapt_test.go b/internal/adapters/terraform/aws/s3/adapt_test.go new file mode 100644 index 000000000000..35d6a4e5aaca --- /dev/null +++ b/internal/adapters/terraform/aws/s3/adapt_test.go @@ -0,0 +1,385 @@ +package s3 + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" + 
"github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_PublicAccessBlock(t *testing.T) { + testCases := []struct { + desc string + source string + expectedBuckets int + hasPublicAccess bool + }{ + { + desc: "public access block is found when using the bucket name as the lookup", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = "bucketname" +} +`, + expectedBuckets: 1, + hasPublicAccess: true, + }, + { + desc: "public access block is found when using the bucket name as the lookup", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = aws_s3_bucket.example.id +} +`, + expectedBuckets: 1, + hasPublicAccess: true, + }, + } + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + + modules := tftestutil.CreateModulesFromSource(t, tC.source, ".tf") + s3Ctx := Adapt(modules) + + assert.Equal(t, tC.expectedBuckets, len(s3Ctx.Buckets)) + + for _, bucket := range s3Ctx.Buckets { + if tC.hasPublicAccess { + assert.NotNil(t, bucket.PublicAccessBlock) + } else { + assert.Nil(t, bucket.PublicAccessBlock) + } + } + + bucket := s3Ctx.Buckets[0] + assert.NotNil(t, bucket.PublicAccessBlock) + + }) + } + +} + +func Test_PublicAccessDoesNotReference(t *testing.T) { + testCases := []struct { + desc string + source string + }{ + { + desc: "just a bucket, no public access block", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + `, + }, + { + desc: "bucket with unrelated public access block", + source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = aws_s3_bucket.other.id +} + `, + }, + { + desc: "bucket with unrelated public access block via name", + 
source: ` +resource "aws_s3_bucket" "example" { + bucket = "bucketname" +} + +resource "aws_s3_bucket_public_access_block" "example_access_block"{ + bucket = "something" +} + `, + }, + } + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, tC.source, ".tf") + s3Ctx := Adapt(modules) + require.Len(t, s3Ctx.Buckets, 1) + assert.Nil(t, s3Ctx.Buckets[0].PublicAccessBlock) + + }) + } +} + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected s3.S3 + }{ + { + name: "basic", + terraform: ` + resource "aws_s3_bucket" "example" { + bucket = "bucket" + } + + resource "aws_s3_bucket_public_access_block" "example" { + bucket = aws_s3_bucket.example.id + + restrict_public_buckets = true + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + + } + + resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "private" + } + + resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.bucket + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "string-key" + sse_algorithm = "aws:kms" + } + } + } + + resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + + target_bucket = aws_s3_bucket.example.id + target_prefix = "log/" + } + + resource "aws_s3_bucket_versioning" "versioning_example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + mfa_delete = "Enabled" + } + } + + resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.example.bucket + policy = data.aws_iam_policy_document.allow_access_from_another_account.json + } + + data "aws_iam_policy_document" "allow_access_from_another_account" { + statement { + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + "arn:aws:s3:::*", + ] + } + } + `, + expected: s3.S3{ + 
Buckets: []s3.Bucket{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("bucket", defsecTypes.NewTestMetadata()), + PublicAccessBlock: &s3.PublicAccessBlock{ + Metadata: defsecTypes.NewTestMetadata(), + BlockPublicACLs: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + BlockPublicPolicy: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + IgnorePublicACLs: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + RestrictPublicBuckets: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + BucketPolicies: []iam.Policy{ + { + Metadata: defsecTypes.NewTestMetadata(), + Name: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Document: func() iam.Document { + + builder := iamgo.NewPolicyBuilder() + + sb := iamgo.NewStatementBuilder() + sb.WithEffect(iamgo.EffectAllow) + sb.WithActions([]string{"s3:GetObject", "s3:ListBucket"}) + sb.WithResources([]string{"arn:aws:s3:::*"}) + + builder.WithStatement(sb.Build()) + + return iam.Document{ + Parsed: builder.Build(), + Metadata: defsecTypes.NewTestMetadata(), + IsOffset: true, + HasRefs: false, + } + }(), + Builtin: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Encryption: s3.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Algorithm: defsecTypes.String("aws:kms", defsecTypes.NewTestMetadata()), + KMSKeyId: defsecTypes.String("string-key", defsecTypes.NewTestMetadata()), + }, + Versioning: s3.Versioning{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MFADelete: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Logging: s3.Logging{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + TargetBucket: defsecTypes.String("aws_s3_bucket.example", defsecTypes.NewTestMetadata()), + }, + ACL: defsecTypes.String("private", defsecTypes.NewTestMetadata()), + }, + 
}, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_s3_bucket" "example" { + bucket = "bucket" + } + + resource "aws_s3_bucket_public_access_block" "example" { + bucket = aws_s3_bucket.example.id + + restrict_public_buckets = true + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + } + + resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "private" + } + + resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.bucket + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "string-key" + sse_algorithm = "aws:kms" + } + } + } + + resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + + target_bucket = aws_s3_bucket.example.id + target_prefix = "log/" + } + + resource "aws_s3_bucket_versioning" "versioning_example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } + } + + resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.example.bucket + policy = data.aws_iam_policy_document.allow_access_from_another_account.json + } + + data "aws_iam_policy_document" "allow_access_from_another_account" { + statement { + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + "arn:aws:s3:::*", + ] + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Buckets, 1) + bucket := adapted.Buckets[0] + + assert.Equal(t, 2, bucket.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, bucket.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, 
bucket.PublicAccessBlock.Metadata.Range().GetStartLine()) + assert.Equal(t, 13, bucket.PublicAccessBlock.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, bucket.PublicAccessBlock.RestrictPublicBuckets.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, bucket.PublicAccessBlock.RestrictPublicBuckets.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, bucket.PublicAccessBlock.BlockPublicACLs.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, bucket.PublicAccessBlock.BlockPublicACLs.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, bucket.PublicAccessBlock.BlockPublicPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, bucket.PublicAccessBlock.BlockPublicPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, bucket.PublicAccessBlock.IgnorePublicACLs.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, bucket.PublicAccessBlock.IgnorePublicACLs.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, bucket.ACL.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, bucket.ACL.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, bucket.Encryption.Metadata.Range().GetStartLine()) + assert.Equal(t, 29, bucket.Encryption.Metadata.Range().GetEndLine()) + + assert.Equal(t, 25, bucket.Encryption.KMSKeyId.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, bucket.Encryption.KMSKeyId.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, bucket.Encryption.Algorithm.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 26, bucket.Encryption.Algorithm.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 31, bucket.Logging.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, bucket.Logging.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, bucket.Logging.TargetBucket.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, bucket.Logging.TargetBucket.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, bucket.Versioning.Metadata.Range().GetStartLine()) + 
assert.Equal(t, 43, bucket.Versioning.Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, bucket.Versioning.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, bucket.Versioning.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 47, bucket.BucketPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 47, bucket.BucketPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 50, bucket.BucketPolicies[0].Document.Metadata.Range().GetStartLine()) + assert.Equal(t, 62, bucket.BucketPolicies[0].Document.Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/s3/bucket.go b/internal/adapters/terraform/aws/s3/bucket.go new file mode 100644 index 000000000000..b254e5d56a5b --- /dev/null +++ b/internal/adapters/terraform/aws/s3/bucket.go @@ -0,0 +1,283 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type adapter struct { + modules terraform.Modules + bucketMap map[string]*s3.Bucket +} + +func (a *adapter) adaptBuckets() []s3.Bucket { + for _, block := range a.modules.GetResourcesByType("aws_s3_bucket") { + bucket := &s3.Bucket{ + Metadata: block.GetMetadata(), + Name: block.GetAttribute("bucket").AsStringValueOrDefault("", block), + PublicAccessBlock: nil, + BucketPolicies: nil, + Encryption: getEncryption(block, a), + Versioning: getVersioning(block, a), + Logging: getLogging(block, a), + ACL: getBucketAcl(block, a), + AccelerateConfigurationStatus: getAccelerateStatus(block, a), + BucketLocation: block.GetAttribute("region").AsStringValueOrDefault("", block), + LifecycleConfiguration: getLifecycle(block, a), + Website: getWebsite(block, a), + Objects: getObject(block, a), + } + a.bucketMap[block.ID()] = bucket + } + + a.adaptBucketPolicies() + a.adaptPublicAccessBlocks() + + var buckets []s3.Bucket + for _, bucket := range a.bucketMap { + buckets = 
append(buckets, *bucket) + } + + return buckets +} + +func getEncryption(block *terraform.Block, a *adapter) s3.Encryption { + if sseConfgihuration := block.GetBlock("server_side_encryption_configuration"); sseConfgihuration != nil { + return newS3Encryption(block, sseConfgihuration) + } + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_server_side_encryption_configuration", func(resource *terraform.Block) s3.Encryption { + return newS3Encryption(resource, resource) + }); ok { + return val + } + return s3.Encryption{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + KMSKeyId: defsecTypes.StringDefault("", block.GetMetadata()), + Algorithm: defsecTypes.StringDefault("", block.GetMetadata()), + } +} + +func newS3Encryption(root *terraform.Block, sseConfgihuration *terraform.Block) s3.Encryption { + return s3.Encryption{ + Metadata: root.GetMetadata(), + Enabled: isEncrypted(sseConfgihuration), + Algorithm: terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.sse_algorithm", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.StringValue { + return attr.AsStringValueOrDefault("", parent) + }, + ), + KMSKeyId: terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.kms_master_key_id", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.StringValue { + return attr.AsStringValueOrDefault("", parent) + }, + ), + } +} + +func getVersioning(block *terraform.Block, a *adapter) s3.Versioning { + versioning := s3.Versioning{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + MFADelete: defsecTypes.BoolDefault(false, block.GetMetadata()), + } + if lockBlock := block.GetBlock("object_lock_configuration"); lockBlock != nil { + if enabled := isObjeckLockEnabled(lockBlock); enabled != nil { + versioning.Enabled = *enabled + } + } + if vBlock 
:= block.GetBlock("versioning"); vBlock != nil { + versioning.Enabled = vBlock.GetAttribute("enabled").AsBoolValueOrDefault(true, vBlock) + versioning.MFADelete = vBlock.GetAttribute("mfa_delete").AsBoolValueOrDefault(false, vBlock) + } + + if enabled, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_object_lock_configuration", func(resource *terraform.Block) *defsecTypes.BoolValue { + if block.GetAttribute("object_lock_enabled").IsTrue() { + return isObjeckLockEnabled(resource) + } + return nil + }); ok && enabled != nil { + versioning.Enabled = *enabled + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_versioning", getVersioningFromResource); ok { + return val + } + return versioning +} + +func isObjeckLockEnabled(resource *terraform.Block) *defsecTypes.BoolValue { + var val defsecTypes.BoolValue + attr := resource.GetAttribute("object_lock_enabled") + switch { + case attr.IsNil(): // enabled by default + val = defsecTypes.BoolDefault(true, resource.GetMetadata()) + case attr.Equals("Enabled"): + val = defsecTypes.Bool(true, attr.GetMetadata()) + } + return &val +} + +// from aws_s3_bucket_versioning +func getVersioningFromResource(block *terraform.Block) s3.Versioning { + versioning := s3.Versioning{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, block.GetMetadata()), + MFADelete: defsecTypes.BoolDefault(false, block.GetMetadata()), + } + if config := block.GetBlock("versioning_configuration"); config != nil { + if status := config.GetAttribute("status"); status.IsNotNil() { + versioning.Enabled = defsecTypes.Bool(status.Equals("Enabled", terraform.IgnoreCase), status.GetMetadata()) + } + if mfa := config.GetAttribute("mfa_delete"); mfa.IsNotNil() { + versioning.MFADelete = defsecTypes.Bool(mfa.Equals("Enabled", terraform.IgnoreCase), mfa.GetMetadata()) + } + } + return versioning +} + +func getLogging(block *terraform.Block, a *adapter) s3.Logging { + if loggingBlock := 
block.GetBlock("logging"); loggingBlock.IsNotNil() { + targetBucket := loggingBlock.GetAttribute("target_bucket").AsStringValueOrDefault("", loggingBlock) + if referencedBlock, err := a.modules.GetReferencedBlock(loggingBlock.GetAttribute("target_bucket"), loggingBlock); err == nil { + targetBucket = defsecTypes.String(referencedBlock.FullName(), loggingBlock.GetAttribute("target_bucket").GetMetadata()) + } + return s3.Logging{ + Metadata: loggingBlock.GetMetadata(), + Enabled: defsecTypes.Bool(true, loggingBlock.GetMetadata()), + TargetBucket: targetBucket, + } + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_logging", func(resource *terraform.Block) s3.Logging { + targetBucket := resource.GetAttribute("target_bucket").AsStringValueOrDefault("", resource) + if referencedBlock, err := a.modules.GetReferencedBlock(resource.GetAttribute("target_bucket"), resource); err == nil { + targetBucket = defsecTypes.String(referencedBlock.FullName(), resource.GetAttribute("target_bucket").GetMetadata()) + } + return s3.Logging{ + Metadata: resource.GetMetadata(), + Enabled: hasLogging(resource), + TargetBucket: targetBucket, + } + }); ok { + return val + } + + return s3.Logging{ + Metadata: block.GetMetadata(), + Enabled: defsecTypes.Bool(false, block.GetMetadata()), + TargetBucket: defsecTypes.StringDefault("", block.GetMetadata()), + } +} + +func getBucketAcl(block *terraform.Block, a *adapter) defsecTypes.StringValue { + aclAttr := block.GetAttribute("acl") + if aclAttr.IsString() { + return aclAttr.AsStringValueOrDefault("private", block) + } + + if val, ok := applyForBucketRelatedResource(a, block, "aws_s3_bucket_acl", func(resource *terraform.Block) defsecTypes.StringValue { + return resource.GetAttribute("acl").AsStringValueOrDefault("private", resource) + }); ok { + return val + } + return defsecTypes.StringDefault("private", block.GetMetadata()) +} + +func isEncrypted(sseConfgihuration *terraform.Block) defsecTypes.BoolValue { + return
terraform.MapNestedAttribute( + sseConfgihuration, + "rule.apply_server_side_encryption_by_default.sse_algorithm", + func(attr *terraform.Attribute, parent *terraform.Block) defsecTypes.BoolValue { + if attr.IsNil() { + return defsecTypes.BoolDefault(false, parent.GetMetadata()) + } + return defsecTypes.Bool( + true, + attr.GetMetadata(), + ) + }, + ) +} + +func hasLogging(b *terraform.Block) defsecTypes.BoolValue { + if loggingBlock := b.GetBlock("logging"); loggingBlock.IsNotNil() { + if targetAttr := loggingBlock.GetAttribute("target_bucket"); targetAttr.IsNotNil() && targetAttr.IsNotEmpty() { + return defsecTypes.Bool(true, targetAttr.GetMetadata()) + } + return defsecTypes.BoolDefault(false, loggingBlock.GetMetadata()) + } + if targetBucket := b.GetAttribute("target_bucket"); targetBucket.IsNotNil() { + return defsecTypes.Bool(true, targetBucket.GetMetadata()) + } + return defsecTypes.BoolDefault(false, b.GetMetadata()) +} + +func getLifecycle(b *terraform.Block, a *adapter) []s3.Rules { + + var rules []s3.Rules + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_lifecycle_configuration", "bucket") { + ruleblock := r.GetBlocks("rule") + for _, rule := range ruleblock { + rules = append(rules, s3.Rules{ + Metadata: rule.GetMetadata(), + Status: rule.GetAttribute("status").AsStringValueOrDefault("Enabled", rule), + }) + } + } + return rules +} + +func getWebsite(b *terraform.Block, a *adapter) (website *s3.Website) { + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_website_configuration", "bucket") { + website = &s3.Website{ + Metadata: r.GetMetadata(), + } + } + return website +} + +func getObject(b *terraform.Block, a *adapter) []s3.Contents { + var object []s3.Contents + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_object", "bucket") { + object = append(object, s3.Contents{ + Metadata: r.GetMetadata(), + }) + } + return object +} + +func getAccelerateStatus(b *terraform.Block, a *adapter) 
defsecTypes.StringValue { + var status defsecTypes.StringValue + for _, r := range a.modules.GetReferencingResources(b, "aws_s3_bucket_accelerate_configuration", "bucket") { + status = r.GetAttribute("status").AsStringValueOrDefault("Enabled", r) + } + return status +} + +func applyForBucketRelatedResource[T any](a *adapter, block *terraform.Block, resType string, fn func(resource *terraform.Block) T) (T, bool) { + for _, resource := range a.modules.GetResourcesByType(resType) { + bucketAttr := resource.GetAttribute("bucket") + if bucketAttr.IsNotNil() { + if bucketAttr.IsString() { + actualBucketName := block.GetAttribute("bucket").AsStringValueOrDefault("", block).Value() + if bucketAttr.Equals(block.ID()) || bucketAttr.Equals(actualBucketName) { + return fn(resource), true + } + } + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, resource); err == nil { + if referencedBlock.ID() == block.ID() { + return fn(resource), true + } + } + } + + } + var res T + return res, false +} diff --git a/internal/adapters/terraform/aws/s3/bucket_test.go b/internal/adapters/terraform/aws/s3/bucket_test.go new file mode 100644 index 000000000000..4fcdc2e50ef9 --- /dev/null +++ b/internal/adapters/terraform/aws/s3/bucket_test.go @@ -0,0 +1,331 @@ +package s3 + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" +) + +func Test_GetBuckets(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "bucket1" { + + +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + +} + +func Test_BucketGetACL(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + acl = "authenticated-read" + + # ... other configuration ... 
+}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.Equal(t, "authenticated-read", s3.Buckets[0].ACL.Value()) + +} + +func Test_V4BucketGetACL(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" +} + +resource "aws_s3_bucket_acl" "example" { + bucket = aws_s3_bucket.example.id + acl = "authenticated-read" +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.Equal(t, "authenticated-read", s3.Buckets[0].ACL.Value()) + +} + +func Test_BucketGetLogging(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... + logging { + target_bucket = aws_s3_bucket.log_bucket.id + target_prefix = "log/" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Logging.Enabled.Value()) + +} + +func Test_V4BucketGetLogging(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "log_bucket" { + bucket = "example-log-bucket" + + # ... other configuration ... +} + +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... 
+} + +resource "aws_s3_bucket_logging" "example" { + bucket = aws_s3_bucket.example.id + target_bucket = aws_s3_bucket.log_bucket.id + target_prefix = "log/" +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 2, len(s3.Buckets)) + for _, bucket := range s3.Buckets { + switch bucket.Name.Value() { + case "yournamehere": + assert.True(t, bucket.Logging.Enabled.Value()) + case "example-log-bucket": + assert.False(t, bucket.Logging.Enabled.Value()) + } + } +} + +func Test_BucketGetVersioning(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... + versioning { + enabled = true + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) +} + +func Test_V4BucketGetVersioning(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... 
+} + +resource "aws_s3_bucket_versioning" "example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) +} + +func Test_BucketGetVersioningWithLockDeprecated(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" + object_lock_configuration { + object_lock_enabled = "Enabled" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetVersioningWithLockForNewBucket(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" + object_lock_enabled = true +} + +resource "aws_s3_bucket_object_lock_configuration" "example" { + bucket = aws_s3_bucket.example.id +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetVersioningWhenLockDisabledButVersioningEnabled(t *testing.T) { + source := ` +resource "aws_s3_bucket" "example" { + bucket = "mybucket" +} + +resource "aws_s3_bucket_object_lock_configuration" "example" { + bucket = aws_s3_bucket.example.id +} + +resource "aws_s3_bucket_versioning" "example" { + bucket = aws_s3_bucket.example.id + versioning_configuration { + status = "Enabled" + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Versioning.Enabled.Value()) + +} + +func Test_BucketGetEncryption(t *testing.T) { + + source := ` + resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... 
other configuration ... + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.mykey.arn + sse_algorithm = "aws:kms" + } + } + } +}` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Encryption.Enabled.Value()) +} + +func Test_V4BucketGetEncryption(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "example" { + bucket = "yournamehere" + + # ... other configuration ... +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "example" { + bucket = aws_s3_bucket.example.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.mykey.arn + sse_algorithm = "aws:kms" + } + } +} +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + assert.Equal(t, 1, len(s3.Buckets)) + assert.True(t, s3.Buckets[0].Encryption.Enabled.Value()) +} + +func Test_BucketWithPolicy(t *testing.T) { + + source := ` +resource "aws_s3_bucket" "bucket1" { + bucket = "lol" +} + +resource "aws_s3_bucket_policy" "allow_access_from_another_account" { + bucket = aws_s3_bucket.bucket1.id + policy = data.aws_iam_policy_document.allow_access_from_another_account.json +} + +data "aws_iam_policy_document" "allow_access_from_another_account" { + statement { + principals { + type = "AWS" + identifiers = ["123456789012"] + } + + actions = [ + "s3:GetObject", + "s3:ListBucket", + ] + + resources = [ + aws_s3_bucket.bucket1.arn, + ] + } +} + +` + modules := tftestutil.CreateModulesFromSource(t, source, ".tf") + + s3 := Adapt(modules) + + require.Equal(t, 1, len(s3.Buckets)) + require.Equal(t, 1, len(s3.Buckets[0].BucketPolicies)) + + policy := s3.Buckets[0].BucketPolicies[0] + + statements, _ := policy.Document.Parsed.Statements() + require.Equal(t, 1, len(statements)) + + principals, _ := statements[0].Principals() + actions, _ := 
statements[0].Actions() + + awsPrincipals, _ := principals.AWS() + require.Equal(t, 1, len(awsPrincipals)) + require.Equal(t, 2, len(actions)) + +} diff --git a/internal/adapters/terraform/aws/s3/policies.go b/internal/adapters/terraform/aws/s3/policies.go new file mode 100644 index 000000000000..dc3f39294b27 --- /dev/null +++ b/internal/adapters/terraform/aws/s3/policies.go @@ -0,0 +1,53 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + iamAdapter "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" +) + +func (a *adapter) adaptBucketPolicies() { + + for _, b := range a.modules.GetResourcesByType("aws_s3_bucket_policy") { + + policyAttr := b.GetAttribute("policy") + if policyAttr.IsNil() { + continue + } + doc, err := iamAdapter.ParsePolicyFromAttr(policyAttr, b, a.modules) + if err != nil { + continue + } + + policy := iam.Policy{ + Metadata: policyAttr.GetMetadata(), + Name: defsecTypes.StringDefault("", b.GetMetadata()), + Document: *doc, + Builtin: defsecTypes.Bool(false, b.GetMetadata()), + } + + var bucketName string + bucketAttr := b.GetAttribute("bucket") + + if bucketAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, b); err == nil { + if bucket, ok := a.bucketMap[referencedBlock.ID()]; ok { + bucket.BucketPolicies = append(bucket.BucketPolicies, policy) + a.bucketMap[referencedBlock.ID()] = bucket + continue + } + } + } + + if bucketAttr.IsString() { + bucketName = bucketAttr.Value().AsString() + for id, bucket := range a.bucketMap { + if bucket.Name.EqualTo(bucketName) { + bucket.BucketPolicies = append(bucket.BucketPolicies, policy) + a.bucketMap[id] = bucket + break + } + } + } + } +} diff --git a/internal/adapters/terraform/aws/s3/public_access_block.go b/internal/adapters/terraform/aws/s3/public_access_block.go new file mode 100644 index 000000000000..0c9455485dda --- /dev/null +++ 
b/internal/adapters/terraform/aws/s3/public_access_block.go @@ -0,0 +1,41 @@ +package s3 + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/s3" +) + +func (a *adapter) adaptPublicAccessBlocks() { + + for _, b := range a.modules.GetResourcesByType("aws_s3_bucket_public_access_block") { + + pba := s3.PublicAccessBlock{ + Metadata: b.GetMetadata(), + BlockPublicACLs: b.GetAttribute("block_public_acls").AsBoolValueOrDefault(false, b), + BlockPublicPolicy: b.GetAttribute("block_public_policy").AsBoolValueOrDefault(false, b), + IgnorePublicACLs: b.GetAttribute("ignore_public_acls").AsBoolValueOrDefault(false, b), + RestrictPublicBuckets: b.GetAttribute("restrict_public_buckets").AsBoolValueOrDefault(false, b), + } + + var bucketName string + bucketAttr := b.GetAttribute("bucket") + if bucketAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(bucketAttr, b); err == nil { + if bucket, ok := a.bucketMap[referencedBlock.ID()]; ok { + bucket.PublicAccessBlock = &pba + a.bucketMap[referencedBlock.ID()] = bucket + continue + } + } + } + if bucketAttr.IsString() { + bucketName = bucketAttr.Value().AsString() + for id, bucket := range a.bucketMap { + if bucketAttr.Equals(id) || bucket.Name.EqualTo(bucketName) { + bucket.PublicAccessBlock = &pba + a.bucketMap[id] = bucket + continue + } + } + } + } +} diff --git a/internal/adapters/terraform/aws/sns/adapt.go b/internal/adapters/terraform/aws/sns/adapt.go new file mode 100644 index 000000000000..c746dc9520f3 --- /dev/null +++ b/internal/adapters/terraform/aws/sns/adapt.go @@ -0,0 +1,38 @@ +package sns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) sns.SNS { + return sns.SNS{ + Topics: adaptTopics(modules), + } +} + +func adaptTopics(modules terraform.Modules) []sns.Topic { + var topics []sns.Topic + for _, module := range 
modules { + for _, resource := range module.GetResourcesByType("aws_sns_topic") { + topics = append(topics, adaptTopic(resource)) + } + } + return topics +} + +func adaptTopic(resourceBlock *terraform.Block) sns.Topic { + return sns.Topic{ + Metadata: resourceBlock.GetMetadata(), + ARN: types.StringDefault("", resourceBlock.GetMetadata()), + Encryption: adaptEncryption(resourceBlock), + } +} + +func adaptEncryption(resourceBlock *terraform.Block) sns.Encryption { + return sns.Encryption{ + Metadata: resourceBlock.GetMetadata(), + KMSKeyID: resourceBlock.GetAttribute("kms_master_key_id").AsStringValueOrDefault("", resourceBlock), + } +} diff --git a/internal/adapters/terraform/aws/sns/adapt_test.go b/internal/adapters/terraform/aws/sns/adapt_test.go new file mode 100644 index 000000000000..1213829a1d11 --- /dev/null +++ b/internal/adapters/terraform/aws/sns/adapt_test.go @@ -0,0 +1,82 @@ +package sns + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/sns" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptTopic(t *testing.T) { + tests := []struct { + name string + terraform string + expected sns.Topic + }{ + { + name: "defined", + terraform: ` + resource "aws_sns_topic" "good_example" { + kms_master_key_id = "/blah" + } +`, + expected: sns.Topic{ + Metadata: defsecTypes.NewTestMetadata(), + ARN: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Encryption: sns.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyID: defsecTypes.String("/blah", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "default", + terraform: ` + resource "aws_sns_topic" "good_example" { + } +`, + expected: sns.Topic{ + Metadata: defsecTypes.NewTestMetadata(), + ARN: defsecTypes.String("", 
defsecTypes.NewTestMetadata()), + Encryption: sns.Encryption{ + Metadata: defsecTypes.NewTestMetadata(), + KMSKeyID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptTopic(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "aws_sns_topic" "good_example" { + kms_master_key_id = "/blah" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Topics, 1) + topic := adapted.Topics[0] + + assert.Equal(t, 2, topic.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, topic.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, topic.Encryption.KMSKeyID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, topic.Encryption.KMSKeyID.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/aws/sqs/adapt.go b/internal/adapters/terraform/aws/sqs/adapt.go new file mode 100644 index 000000000000..04bca31e7101 --- /dev/null +++ b/internal/adapters/terraform/aws/sqs/adapt.go @@ -0,0 +1,167 @@ +package sqs + +import ( + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" + "github.com/liamg/iamgo" + + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) sqs.SQS { + return sqs.SQS{ + Queues: (&adapter{ + modules: modules, + queues: make(map[string]sqs.Queue), + }).adaptQueues(), + } +} + +type adapter struct { + modules terraform.Modules + queues map[string]sqs.Queue +} + +func (a *adapter) adaptQueues() []sqs.Queue { + for _, 
resource := range a.modules.GetResourcesByType("aws_sqs_queue") { + a.adaptQueue(resource) + } + + for _, policyBlock := range a.modules.GetResourcesByType("aws_sqs_queue_policy") { + + policy := iamp.Policy{ + Metadata: policyBlock.GetMetadata(), + Name: defsecTypes.StringDefault("", policyBlock.GetMetadata()), + Document: iamp.Document{ + Metadata: policyBlock.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, policyBlock.GetMetadata()), + } + if attr := policyBlock.GetAttribute("policy"); attr.IsString() { + dataBlock, err := a.modules.GetBlockById(attr.Value().AsString()) + if err != nil { + parsed, err := iamgo.ParseString(attr.Value().AsString()) + if err != nil { + continue + } + policy.Document.Parsed = *parsed + policy.Document.Metadata = attr.GetMetadata() + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { + policy.Document.Parsed = doc.Document + policy.Document.Metadata = doc.Source.GetMetadata() + policy.Document.IsOffset = true + } + } + } else if refBlock, err := a.modules.GetReferencedBlock(attr, policyBlock); err == nil { + if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { + policy.Document.Parsed = doc.Document + policy.Document.Metadata = doc.Source.GetMetadata() + } + } + } + + if urlAttr := policyBlock.GetAttribute("queue_url"); urlAttr.IsNotNil() { + if refBlock, err := a.modules.GetReferencedBlock(urlAttr, policyBlock); err == nil { + if queue, ok := a.queues[refBlock.ID()]; ok { + queue.Policies = append(queue.Policies, policy) + a.queues[refBlock.ID()] = queue + continue + } + } + } + + a.queues[uuid.NewString()] = sqs.Queue{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + QueueURL: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Encryption: sqs.Encryption{ + Metadata: 
defsecTypes.NewUnmanagedMetadata(), + ManagedEncryption: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + KMSKeyID: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + Policies: []iamp.Policy{policy}, + } + } + + var queues []sqs.Queue + for _, queue := range a.queues { + queues = append(queues, queue) + } + return queues +} + +func (a *adapter) adaptQueue(resource *terraform.Block) { + + kmsKeyIdAttr := resource.GetAttribute("kms_master_key_id") + kmsKeyIdVal := kmsKeyIdAttr.AsStringValueOrDefault("", resource) + managedEncryption := resource.GetAttribute("sqs_managed_sse_enabled") + + var policies []iamp.Policy + if attr := resource.GetAttribute("policy"); attr.IsString() { + + dataBlock, err := a.modules.GetBlockById(attr.Value().AsString()) + if err != nil { + policy := iamp.Policy{ + Metadata: attr.GetMetadata(), + Name: defsecTypes.StringDefault("", attr.GetMetadata()), + Document: iamp.Document{ + Metadata: attr.GetMetadata(), + }, + Builtin: defsecTypes.Bool(false, attr.GetMetadata()), + } + parsed, err := iamgo.ParseString(attr.Value().AsString()) + if err == nil { + policy.Document.Parsed = *parsed + policy.Document.Metadata = attr.GetMetadata() + policy.Metadata = attr.GetMetadata() + policies = append(policies, policy) + } + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { + policy := iamp.Policy{ + Metadata: attr.GetMetadata(), + Name: defsecTypes.StringDefault("", attr.GetMetadata()), + Document: iamp.Document{ + Metadata: doc.Source.GetMetadata(), + Parsed: doc.Document, + IsOffset: true, + HasRefs: false, + }, + Builtin: defsecTypes.Bool(false, attr.GetMetadata()), + } + policies = append(policies, policy) + } + } + + } else if refBlock, err := a.modules.GetReferencedBlock(attr, resource); err == nil { + if refBlock.Type() == "data" && refBlock.TypeLabel() == 
"aws_iam_policy_document" { + if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { + policy := iamp.Policy{ + Metadata: doc.Source.GetMetadata(), + Name: defsecTypes.StringDefault("", doc.Source.GetMetadata()), + Document: iamp.Document{ + Metadata: doc.Source.GetMetadata(), + Parsed: doc.Document, + }, + Builtin: defsecTypes.Bool(false, refBlock.GetMetadata()), + } + policies = append(policies, policy) + } + } + } + + a.queues[resource.ID()] = sqs.Queue{ + Metadata: resource.GetMetadata(), + QueueURL: defsecTypes.StringDefault("", resource.GetMetadata()), + Encryption: sqs.Encryption{ + Metadata: resource.GetMetadata(), + ManagedEncryption: managedEncryption.AsBoolValueOrDefault(false, resource), + KMSKeyID: kmsKeyIdVal, + }, + Policies: policies, + } +} diff --git a/internal/adapters/terraform/aws/sqs/adapt_test.go b/internal/adapters/terraform/aws/sqs/adapt_test.go new file mode 100644 index 000000000000..f6a191ec23eb --- /dev/null +++ b/internal/adapters/terraform/aws/sqs/adapt_test.go @@ -0,0 +1,140 @@ +package sqs + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/liamg/iamgo" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected sqs.SQS + }{ + { + name: "np kms key", + terraform: ` + resource "aws_sqs_queue" "good_example" { + + policy = <= azurerm 2.97.0 + if omsAgentBlock := resource.GetBlock("oms_agent"); omsAgentBlock.IsNotNil() { + cluster.AddonProfile.OMSAgent.Metadata = omsAgentBlock.GetMetadata() + cluster.AddonProfile.OMSAgent.Enabled = defsecTypes.Bool(true, omsAgentBlock.GetMetadata()) + } 
+ + // azurerm < 2.99.0 + if resource.HasChild("role_based_access_control") { + roleBasedAccessControlBlock := resource.GetBlock("role_based_access_control") + rbEnabledAttr := roleBasedAccessControlBlock.GetAttribute("enabled") + cluster.RoleBasedAccessControl.Metadata = roleBasedAccessControlBlock.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = rbEnabledAttr.AsBoolValueOrDefault(false, roleBasedAccessControlBlock) + } + if resource.HasChild("role_based_access_control_enabled") { + // azurerm >= 2.99.0 + roleBasedAccessControlEnabledAttr := resource.GetAttribute("role_based_access_control_enabled") + cluster.RoleBasedAccessControl.Metadata = roleBasedAccessControlEnabledAttr.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = roleBasedAccessControlEnabledAttr.AsBoolValueOrDefault(false, resource) + } + + if resource.HasChild("azure_active_directory_role_based_access_control") { + azureRoleBasedAccessControl := resource.GetBlock("azure_active_directory_role_based_access_control") + if azureRoleBasedAccessControl.IsNotNil() { + enabledAttr := azureRoleBasedAccessControl.GetAttribute("azure_rbac_enabled") + if !cluster.RoleBasedAccessControl.Enabled.IsTrue() { + cluster.RoleBasedAccessControl.Metadata = azureRoleBasedAccessControl.GetMetadata() + cluster.RoleBasedAccessControl.Enabled = enabledAttr.AsBoolValueOrDefault(false, azureRoleBasedAccessControl) + } + } + } + return cluster +} diff --git a/internal/adapters/terraform/azure/container/adapt_test.go b/internal/adapters/terraform/azure/container/adapt_test.go new file mode 100644 index 000000000000..a43fcdf35f97 --- /dev/null +++ b/internal/adapters/terraform/azure/container/adapt_test.go @@ -0,0 +1,262 @@ +package container + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/container" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + 
"github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptCluster(t *testing.T) { + tests := []struct { + name string + terraform string + expected container.KubernetesCluster + }{ + { + name: "defined", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + private_cluster_enabled = true + + network_profile { + network_policy = "calico" + } + + api_server_access_profile { + + authorized_ip_ranges = [ + "1.2.3.4/32" + ] + + } + + addon_profile { + oms_agent { + enabled = true + } + } + + role_based_access_control { + enabled = true + } + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("calico", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + APIServerAuthorizedIPRanges: []defsecTypes.StringValue{ + defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + }, + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "rbac with a new syntax", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + role_based_access_control_enabled = true + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, 
defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_kubernetes_cluster" "example" { + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "rbac off with k8s rbac on", + terraform: ` +resource "azurerm_kubernetes_cluster" "misreporting_example" { + role_based_access_control_enabled = true # Enable k8s RBAC + azure_active_directory_role_based_access_control { + managed = true # Enable AKS-managed Azure AAD integration + azure_rbac_enabled = false # Explicitly disable Azure RBAC for Kubernetes Authorization + } + } +`, + expected: container.KubernetesCluster{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkProfile: container.NetworkProfile{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkPolicy: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnablePrivateCluster: defsecTypes.Bool(false, 
defsecTypes.NewTestMetadata()), + AddonProfile: container.AddonProfile{ + Metadata: defsecTypes.NewTestMetadata(), + OMSAgent: container.OMSAgent{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + RoleBasedAccessControl: container.RoleBasedAccessControl{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptCluster(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_kubernetes_cluster" "example" { + private_cluster_enabled = true + + network_profile { + network_policy = "calico" + } + + api_server_access_profile { + + authorized_ip_ranges = [ + "1.2.3.4/32" + ] + + } + + addon_profile { + oms_agent { + enabled = true + } + } + + role_based_access_control { + enabled = true + } + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.KubernetesClusters, 1) + cluster := adapted.KubernetesClusters[0] + + assert.Equal(t, 3, cluster.EnablePrivateCluster.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, cluster.EnablePrivateCluster.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, cluster.NetworkProfile.Metadata.Range().GetStartLine()) + assert.Equal(t, 7, cluster.NetworkProfile.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, cluster.NetworkProfile.NetworkPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, cluster.NetworkProfile.NetworkPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, cluster.APIServerAuthorizedIPRanges[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, 
cluster.APIServerAuthorizedIPRanges[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, cluster.AddonProfile.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, cluster.AddonProfile.Metadata.Range().GetEndLine()) + + assert.Equal(t, 18, cluster.AddonProfile.OMSAgent.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, cluster.AddonProfile.OMSAgent.Metadata.Range().GetEndLine()) + + assert.Equal(t, 19, cluster.AddonProfile.OMSAgent.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, cluster.AddonProfile.OMSAgent.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, cluster.RoleBasedAccessControl.Metadata.Range().GetStartLine()) + assert.Equal(t, 25, cluster.RoleBasedAccessControl.Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, cluster.RoleBasedAccessControl.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, cluster.RoleBasedAccessControl.Enabled.GetMetadata().Range().GetEndLine()) +} + +func TestWithLocals(t *testing.T) { + src := ` + variable "ip_whitelist" { + description = "IP Ranges with allowed access." 
+ type = list(string) + default = ["1.2.3.4"] +} + +locals { + ip_whitelist = concat(var.ip_whitelist, split(",", data.azurerm_public_ip.build_agents.ip_address)) +} + +resource "azurerm_kubernetes_cluster" "aks" { + # not working + api_server_access_profile { + authorized_ip_ranges = local.ip_whitelist + } + # working + api_server_access_profile { + authorized_ip_ranges = concat(var.ip_whitelist, split(",", data.azurerm_public_ip.example.ip_address)) + } +}` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.KubernetesClusters, 1) + cluster := adapted.KubernetesClusters[0] + require.Len(t, cluster.APIServerAuthorizedIPRanges, 1) + assert.False(t, cluster.APIServerAuthorizedIPRanges[0].GetMetadata().IsResolvable()) +} diff --git a/internal/adapters/terraform/azure/database/adapt.go b/internal/adapters/terraform/azure/database/adapt.go new file mode 100644 index 000000000000..4ec4027ab718 --- /dev/null +++ b/internal/adapters/terraform/azure/database/adapt.go @@ -0,0 +1,439 @@ +package database + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) database.Database { + + mssqlAdapter := mssqlAdapter{ + alertPolicyIDs: modules.GetChildResourceIDMapByType("azurerm_mssql_server_security_alert_policy"), + auditingPolicyIDs: modules.GetChildResourceIDMapByType("azurerm_mssql_server_extended_auditing_policy", "azurerm_mssql_database_extended_auditing_policy"), + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_sql_firewall_rule", "azurerm_mssql_firewall_rule"), + } + + mysqlAdapter := mysqlAdapter{ + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_mysql_firewall_rule"), + } + + mariaDBAdapter := mariaDBAdapter{ + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_mariadb_firewall_rule"), + } + + 
postgresqlAdapter := postgresqlAdapter{ + firewallIDs: modules.GetChildResourceIDMapByType("azurerm_postgresql_firewall_rule"), + } + + return database.Database{ + MSSQLServers: mssqlAdapter.adaptMSSQLServers(modules), + MariaDBServers: mariaDBAdapter.adaptMariaDBServers(modules), + MySQLServers: mysqlAdapter.adaptMySQLServers(modules), + PostgreSQLServers: postgresqlAdapter.adaptPostgreSQLServers(modules), + } +} + +type mssqlAdapter struct { + alertPolicyIDs terraform.ResourceIDResolutions + auditingPolicyIDs terraform.ResourceIDResolutions + firewallIDs terraform.ResourceIDResolutions +} + +type mysqlAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +type mariaDBAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +type postgresqlAdapter struct { + firewallIDs terraform.ResourceIDResolutions +} + +func (a *mssqlAdapter) adaptMSSQLServers(modules terraform.Modules) []database.MSSQLServer { + var mssqlServers []database.MSSQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_sql_server") { + mssqlServers = append(mssqlServers, a.adaptMSSQLServer(resource, module)) + } + for _, resource := range module.GetResourcesByType("azurerm_mssql_server") { + mssqlServers = append(mssqlServers, a.adaptMSSQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.alertPolicyIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + ExtendedAuditingPolicies: nil, + SecurityAlertPolicies: nil, + } + for _, policy := range orphanResources { + orphanage.SecurityAlertPolicies = append(orphanage.SecurityAlertPolicies, adaptMSSQLSecurityAlertPolicy(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + orphanResources = modules.GetResourceByIDs(a.auditingPolicyIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.ExtendedAuditingPolicies = append(orphanage.ExtendedAuditingPolicies, adaptMSSQLExtendedAuditingPolicy(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + orphanResources = modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MSSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mssqlServers = append(mssqlServers, orphanage) + + } + + return mssqlServers +} +func (a *mysqlAdapter) adaptMySQLServers(modules terraform.Modules) []database.MySQLServer { + var mySQLServers []database.MySQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_mysql_server") { + mySQLServers = append(mySQLServers, a.adaptMySQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := database.MySQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mySQLServers = append(mySQLServers, orphanage) + + } + + return mySQLServers +} + +func (a *mariaDBAdapter) adaptMariaDBServers(modules terraform.Modules) []database.MariaDBServer { + var mariaDBServers []database.MariaDBServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_mariadb_server") { + mariaDBServers = append(mariaDBServers, a.adaptMariaDBServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.MariaDBServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + mariaDBServers = append(mariaDBServers, orphanage) + + } + + return mariaDBServers +} + +func (a *postgresqlAdapter) adaptPostgreSQLServers(modules terraform.Modules) []database.PostgreSQLServer { + var postgreSQLServers []database.PostgreSQLServer + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_postgresql_server") { + postgreSQLServers = append(postgreSQLServers, a.adaptPostgreSQLServer(resource, module)) + } + } + + orphanResources := modules.GetResourceByIDs(a.firewallIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := database.PostgreSQLServer{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableSSLEnforcement: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnablePublicNetworkAccess: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + FirewallRules: nil, + }, + Config: database.PostgresSQLConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + LogCheckpoints: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + ConnectionThrottling: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + LogConnections: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, policy := range orphanResources { + orphanage.FirewallRules = append(orphanage.FirewallRules, adaptFirewallRule(policy)) + } + postgreSQLServers = append(postgreSQLServers, orphanage) + + } + + return postgreSQLServers +} + +func (a *mssqlAdapter) adaptMSSQLServer(resource *terraform.Block, module *terraform.Module) database.MSSQLServer { + minTLSVersionVal := defsecTypes.StringDefault("", resource.GetMetadata()) + publicAccessVal := defsecTypes.BoolDefault(true, resource.GetMetadata()) + enableSSLEnforcementVal := defsecTypes.BoolDefault(false, resource.GetMetadata()) + + var auditingPolicies []database.ExtendedAuditingPolicy + var alertPolicies []database.SecurityAlertPolicy + var firewallRules []database.FirewallRule + + if resource.TypeLabel() == "azurerm_mssql_server" { + minTLSVersionAttr := resource.GetAttribute("minimum_tls_version") + minTLSVersionVal = minTLSVersionAttr.AsStringValueOrDefault("", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal = publicAccessAttr.AsBoolValueOrDefault(true, resource) + + } + + alertPolicyBlocks := 
module.GetReferencingResources(resource, "azurerm_mssql_server_security_alert_policy", "server_name") + for _, alertBlock := range alertPolicyBlocks { + a.alertPolicyIDs.Resolve(alertBlock.ID()) + alertPolicies = append(alertPolicies, adaptMSSQLSecurityAlertPolicy(alertBlock)) + } + + auditingPoliciesBlocks := module.GetReferencingResources(resource, "azurerm_mssql_server_extended_auditing_policy", "server_id") + if resource.HasChild("extended_auditing_policy") { + auditingPoliciesBlocks = append(auditingPoliciesBlocks, resource.GetBlocks("extended_auditing_policy")...) + } + + databasesRes := module.GetReferencingResources(resource, "azurerm_mssql_database", "server_id") + for _, databaseRes := range databasesRes { + dbAuditingBlocks := module.GetReferencingResources(databaseRes, "azurerm_mssql_database_extended_auditing_policy", "database_id") + auditingPoliciesBlocks = append(auditingPoliciesBlocks, dbAuditingBlocks...) + } + + for _, auditBlock := range auditingPoliciesBlocks { + a.auditingPolicyIDs.Resolve(auditBlock.ID()) + auditingPolicies = append(auditingPolicies, adaptMSSQLExtendedAuditingPolicy(auditBlock)) + } + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_sql_firewall_rule", "server_name") + firewallRuleBlocks = append(firewallRuleBlocks, module.GetReferencingResources(resource, "azurerm_mssql_firewall_rule", "server_id")...) 
+ for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MSSQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + ExtendedAuditingPolicies: auditingPolicies, + SecurityAlertPolicies: alertPolicies, + } +} + +func (a *mysqlAdapter) adaptMySQLServer(resource *terraform.Block, module *terraform.Module) database.MySQLServer { + var firewallRules []database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + minTLSVersionAttr := resource.GetAttribute("ssl_minimal_tls_version_enforced") + minTLSVersionVal := minTLSVersionAttr.AsStringValueOrDefault("TLSEnforcementDisabled", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_mysql_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MySQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + } +} + +func (a *mariaDBAdapter) adaptMariaDBServer(resource *terraform.Block, module *terraform.Module) database.MariaDBServer { + var firewallRules 
[]database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_mariadb_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + return database.MariaDBServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: defsecTypes.StringDefault("", resource.GetMetadata()), + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + } +} + +func (a *postgresqlAdapter) adaptPostgreSQLServer(resource *terraform.Block, module *terraform.Module) database.PostgreSQLServer { + var firewallRules []database.FirewallRule + + enableSSLEnforcementAttr := resource.GetAttribute("ssl_enforcement_enabled") + enableSSLEnforcementVal := enableSSLEnforcementAttr.AsBoolValueOrDefault(false, resource) + + minTLSVersionAttr := resource.GetAttribute("ssl_minimal_tls_version_enforced") + minTLSVersionVal := minTLSVersionAttr.AsStringValueOrDefault("TLSEnforcementDisabled", resource) + + publicAccessAttr := resource.GetAttribute("public_network_access_enabled") + publicAccessVal := publicAccessAttr.AsBoolValueOrDefault(true, resource) + + firewallRuleBlocks := module.GetReferencingResources(resource, "azurerm_postgresql_firewall_rule", "server_name") + for _, firewallBlock := range firewallRuleBlocks { + a.firewallIDs.Resolve(firewallBlock.ID()) + firewallRules = append(firewallRules, adaptFirewallRule(firewallBlock)) + } + + configBlocks := 
module.GetReferencingResources(resource, "azurerm_postgresql_configuration", "server_name") + config := adaptPostgreSQLConfig(resource, configBlocks) + + return database.PostgreSQLServer{ + Metadata: resource.GetMetadata(), + Server: database.Server{ + Metadata: resource.GetMetadata(), + EnableSSLEnforcement: enableSSLEnforcementVal, + MinimumTLSVersion: minTLSVersionVal, + EnablePublicNetworkAccess: publicAccessVal, + FirewallRules: firewallRules, + }, + Config: config, + } +} + +func adaptPostgreSQLConfig(resource *terraform.Block, configBlocks []*terraform.Block) database.PostgresSQLConfig { + config := database.PostgresSQLConfig{ + Metadata: resource.GetMetadata(), + LogCheckpoints: defsecTypes.BoolDefault(false, resource.GetMetadata()), + ConnectionThrottling: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogConnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + for _, configBlock := range configBlocks { + + nameAttr := configBlock.GetAttribute("name") + valAttr := configBlock.GetAttribute("value") + + if nameAttr.Equals("log_checkpoints") { + config.LogCheckpoints = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + if nameAttr.Equals("connection_throttling") { + config.ConnectionThrottling = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + if nameAttr.Equals("log_connections") { + config.LogConnections = defsecTypes.Bool(valAttr.Equals("on"), valAttr.GetMetadata()) + } + } + + return config +} + +func adaptMSSQLSecurityAlertPolicy(resource *terraform.Block) database.SecurityAlertPolicy { + + emailAddressesAttr := resource.GetAttribute("email_addresses") + disabledAlertsAttr := resource.GetAttribute("disabled_alerts") + + emailAccountAdminsAttr := resource.GetAttribute("email_account_admins") + emailAccountAdminsVal := emailAccountAdminsAttr.AsBoolValueOrDefault(false, resource) + + return database.SecurityAlertPolicy{ + Metadata: resource.GetMetadata(), + EmailAddresses: 
emailAddressesAttr.AsStringValues(), + DisabledAlerts: disabledAlertsAttr.AsStringValues(), + EmailAccountAdmins: emailAccountAdminsVal, + } +} + +func adaptFirewallRule(resource *terraform.Block) database.FirewallRule { + startIPAttr := resource.GetAttribute("start_ip_address") + startIPVal := startIPAttr.AsStringValueOrDefault("", resource) + + endIPAttr := resource.GetAttribute("end_ip_address") + endIPVal := endIPAttr.AsStringValueOrDefault("", resource) + + return database.FirewallRule{ + Metadata: resource.GetMetadata(), + StartIP: startIPVal, + EndIP: endIPVal, + } +} + +func adaptMSSQLExtendedAuditingPolicy(resource *terraform.Block) database.ExtendedAuditingPolicy { + retentionInDaysAttr := resource.GetAttribute("retention_in_days") + retentionInDaysVal := retentionInDaysAttr.AsIntValueOrDefault(0, resource) + + return database.ExtendedAuditingPolicy{ + Metadata: resource.GetMetadata(), + RetentionInDays: retentionInDaysVal, + } +} diff --git a/internal/adapters/terraform/azure/database/adapt_test.go b/internal/adapters/terraform/azure/database/adapt_test.go new file mode 100644 index 000000000000..401b8603fa7d --- /dev/null +++ b/internal/adapters/terraform/azure/database/adapt_test.go @@ -0,0 +1,454 @@ +package database + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/database" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected database.Database + }{ + { + name: "postgresql", + terraform: ` + resource "azurerm_postgresql_server" "example" { + name = "example" + + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource 
"azurerm_postgresql_configuration" "example" { + name = "log_connections" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_checkpoints" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "connection_throttling" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_firewall_rule" "example" { + name = "office" + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_postgresql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } +`, + expected: database.Database{ + PostgreSQLServers: []database.PostgreSQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + }, + }, + }, + Config: database.PostgresSQLConfig{ + Metadata: defsecTypes.NewTestMetadata(), + LogConnections: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + LogCheckpoints: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ConnectionThrottling: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "mariadb", + terraform: ` + 
resource "azurerm_mariadb_server" "example" { + name = "example-mariadb-server" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + + public_network_access_enabled = false + ssl_enforcement_enabled = true + } + + resource "azurerm_mariadb_firewall_rule" "example" { + name = "test-rule" + server_name = azurerm_mariadb_server.example.name + start_ip_address = "40.112.0.0" + end_ip_address = "40.112.255.255" + } +`, + expected: database.Database{ + MariaDBServers: []database.MariaDBServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.0.0", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.255.255", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + { + name: "mysql", + terraform: ` + resource "azurerm_mysql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_mysql_firewall_rule" "example" { + server_name = azurerm_mysql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + `, + expected: database.Database{ + MySQLServers: []database.MySQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + EnableSSLEnforcement: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(true, 
defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("40.112.8.12", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + }, + { + name: "ms sql", + terraform: ` + resource "azurerm_mssql_server" "example" { + name = "mssqlserver" + minimum_tls_version = "1.2" + public_network_access_enabled = false + } + + resource "azurerm_mssql_firewall_rule" "example" { + name = "FirewallRule1" + server_id = azurerm_mssql_server.example.id + start_ip_address = "10.0.17.62" + end_ip_address = "10.0.17.62" + } + + resource "azurerm_mssql_server_security_alert_policy" "example" { + resource_group_name = azurerm_resource_group.example.name + server_name = azurerm_mssql_server.example.name + disabled_alerts = [ + "Sql_Injection", + "Data_Exfiltration" + ] + email_account_admins = true + email_addresses = [ + "example@example.com" + ] + } + + resource "azurerm_mssql_server_extended_auditing_policy" "example" { + server_id = azurerm_mssql_server.example.id + retention_in_days = 6 + } + `, + expected: database.Database{ + MSSQLServers: []database.MSSQLServer{ + { + Metadata: defsecTypes.NewTestMetadata(), + Server: database.Server{ + Metadata: defsecTypes.NewTestMetadata(), + MinimumTLSVersion: defsecTypes.String("1.2", defsecTypes.NewTestMetadata()), + EnablePublicNetworkAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + EnableSSLEnforcement: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + FirewallRules: []database.FirewallRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + StartIP: defsecTypes.String("10.0.17.62", defsecTypes.NewTestMetadata()), + EndIP: defsecTypes.String("10.0.17.62", defsecTypes.NewTestMetadata()), + }, + }, + }, + ExtendedAuditingPolicies: []database.ExtendedAuditingPolicy{ + { + Metadata: defsecTypes.NewTestMetadata(), + RetentionInDays: 
defsecTypes.Int(6, defsecTypes.NewTestMetadata()), + }, + }, + SecurityAlertPolicies: []database.SecurityAlertPolicy{ + { + Metadata: defsecTypes.NewTestMetadata(), + EmailAddresses: []defsecTypes.StringValue{ + defsecTypes.String("example@example.com", defsecTypes.NewTestMetadata()), + }, + DisabledAlerts: []defsecTypes.StringValue{ + defsecTypes.String("Sql_Injection", defsecTypes.NewTestMetadata()), + defsecTypes.String("Data_Exfiltration", defsecTypes.NewTestMetadata()), + }, + EmailAccountAdmins: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_postgresql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_connections" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "log_checkpoints" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_configuration" "example" { + name = "connection_throttling" + server_name = azurerm_postgresql_server.example.name + value = "on" + } + + resource "azurerm_postgresql_firewall_rule" "example" { + name = "office" + server_name = azurerm_postgresql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + + resource "azurerm_mariadb_server" "example" { + public_network_access_enabled = false + ssl_enforcement_enabled = true + } + + resource "azurerm_mariadb_firewall_rule" "example" { + name = "test-rule" + server_name = 
azurerm_mariadb_server.example.name + start_ip_address = "40.112.0.0" + end_ip_address = "40.112.255.255" + } + + resource "azurerm_mysql_server" "example" { + public_network_access_enabled = true + ssl_enforcement_enabled = true + ssl_minimal_tls_version_enforced = "TLS1_2" + } + + resource "azurerm_mysql_firewall_rule" "example" { + server_name = azurerm_mysql_server.example.name + start_ip_address = "40.112.8.12" + end_ip_address = "40.112.8.12" + } + + resource "azurerm_mssql_server" "example" { + name = "mssqlserver" + public_network_access_enabled = false + minimum_tls_version = "1.2" + } + + resource "azurerm_mssql_firewall_rule" "example" { + name = "FirewallRule1" + server_id = azurerm_mssql_server.example.id + start_ip_address = "10.0.17.62" + end_ip_address = "10.0.17.62" + } + + resource "azurerm_mssql_server_security_alert_policy" "example" { + server_name = azurerm_mssql_server.example.name + disabled_alerts = [ + "Sql_Injection", + "Data_Exfiltration" + ] + email_account_admins = true + email_addresses = [ + "example@example.com" + ] + } + + resource "azurerm_mssql_server_extended_auditing_policy" "example" { + server_id = azurerm_mssql_server.example.id + retention_in_days = 6 + } + ` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.PostgreSQLServers, 1) + require.Len(t, adapted.MariaDBServers, 1) + require.Len(t, adapted.MySQLServers, 1) + require.Len(t, adapted.MSSQLServers, 1) + + postgres := adapted.PostgreSQLServers[0] + mariadb := adapted.MariaDBServers[0] + mysql := adapted.MySQLServers[0] + mssql := adapted.MSSQLServers[0] + + assert.Equal(t, 2, postgres.Metadata.Range().GetStartLine()) + assert.Equal(t, 6, postgres.Metadata.Range().GetEndLine()) + + assert.Equal(t, 3, postgres.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, postgres.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, 
postgres.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, postgres.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, postgres.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, postgres.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, postgres.Config.LogConnections.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, postgres.Config.LogConnections.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, postgres.Config.LogCheckpoints.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, postgres.Config.LogCheckpoints.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, postgres.Config.ConnectionThrottling.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, postgres.Config.ConnectionThrottling.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, postgres.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 31, postgres.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 29, postgres.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 29, postgres.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 30, postgres.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 30, postgres.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 33, mariadb.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, mariadb.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, mariadb.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, mariadb.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 35, mariadb.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 35, mariadb.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, mariadb.FirewallRules[0].Metadata.Range().GetStartLine()) + 
assert.Equal(t, 43, mariadb.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, mariadb.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, mariadb.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 42, mariadb.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 42, mariadb.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 45, mysql.Metadata.Range().GetStartLine()) + assert.Equal(t, 49, mysql.Metadata.Range().GetEndLine()) + + assert.Equal(t, 46, mysql.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 46, mysql.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 47, mysql.EnableSSLEnforcement.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 47, mysql.EnableSSLEnforcement.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 48, mysql.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 48, mysql.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 51, mysql.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 55, mysql.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 53, mysql.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 53, mysql.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 54, mysql.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 54, mysql.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 57, mssql.Metadata.Range().GetStartLine()) + assert.Equal(t, 61, mssql.Metadata.Range().GetEndLine()) + + assert.Equal(t, 59, mssql.EnablePublicNetworkAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 59, mssql.EnablePublicNetworkAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 60, mssql.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) 
+ assert.Equal(t, 60, mssql.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 63, mssql.FirewallRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 68, mssql.FirewallRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 66, mssql.FirewallRules[0].StartIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 66, mssql.FirewallRules[0].StartIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 67, mssql.FirewallRules[0].EndIP.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 67, mssql.FirewallRules[0].EndIP.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 70, mssql.SecurityAlertPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 80, mssql.SecurityAlertPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 72, mssql.SecurityAlertPolicies[0].DisabledAlerts[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 75, mssql.SecurityAlertPolicies[0].DisabledAlerts[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 76, mssql.SecurityAlertPolicies[0].EmailAccountAdmins.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 76, mssql.SecurityAlertPolicies[0].EmailAccountAdmins.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 77, mssql.SecurityAlertPolicies[0].EmailAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 79, mssql.SecurityAlertPolicies[0].EmailAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 82, mssql.ExtendedAuditingPolicies[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 85, mssql.ExtendedAuditingPolicies[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 84, mssql.ExtendedAuditingPolicies[0].RetentionInDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 84, mssql.ExtendedAuditingPolicies[0].RetentionInDays.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/datafactory/adapt.go b/internal/adapters/terraform/azure/datafactory/adapt.go new file mode 100644 index 
000000000000..7fd12d1e4218 --- /dev/null +++ b/internal/adapters/terraform/azure/datafactory/adapt.go @@ -0,0 +1,33 @@ +package datafactory + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) datafactory.DataFactory { + return datafactory.DataFactory{ + DataFactories: adaptFactories(modules), + } +} + +func adaptFactories(modules terraform.Modules) []datafactory.Factory { + var factories []datafactory.Factory + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_data_factory") { + factories = append(factories, adaptFactory(resource)) + } + } + return factories +} + +func adaptFactory(resource *terraform.Block) datafactory.Factory { + enablePublicNetworkAttr := resource.GetAttribute("public_network_enabled") + enablePublicNetworkVal := enablePublicNetworkAttr.AsBoolValueOrDefault(true, resource) + + return datafactory.Factory{ + Metadata: resource.GetMetadata(), + EnablePublicNetwork: enablePublicNetworkVal, + } +} diff --git a/internal/adapters/terraform/azure/datafactory/adapt_test.go b/internal/adapters/terraform/azure/datafactory/adapt_test.go new file mode 100644 index 000000000000..acd13315d904 --- /dev/null +++ b/internal/adapters/terraform/azure/datafactory/adapt_test.go @@ -0,0 +1,79 @@ +package datafactory + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/datafactory" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptFactory(t *testing.T) { + tests := []struct { + name string + terraform string + expected datafactory.Factory + }{ + { + name: "defined", + terraform: ` + resource "azurerm_data_factory" "example" { 
+ name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + public_network_enabled = false + } +`, + expected: datafactory.Factory{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePublicNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_data_factory" "example" { + name = "example" + } +`, + expected: datafactory.Factory{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePublicNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptFactory(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_data_factory" "example" { + name = "example" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + public_network_enabled = false + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DataFactories, 1) + dataFactory := adapted.DataFactories[0] + + assert.Equal(t, 6, dataFactory.EnablePublicNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, dataFactory.EnablePublicNetwork.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/azure/datalake/adapt.go b/internal/adapters/terraform/azure/datalake/adapt.go new file mode 100644 index 000000000000..b55bf7a2e581 --- /dev/null +++ b/internal/adapters/terraform/azure/datalake/adapt.go @@ -0,0 +1,38 @@ +package datalake + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules 
terraform.Modules) datalake.DataLake { + return datalake.DataLake{ + Stores: adaptStores(modules), + } +} + +func adaptStores(modules terraform.Modules) []datalake.Store { + var stores []datalake.Store + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_data_lake_store") { + stores = append(stores, adaptStore(resource)) + } + } + return stores +} + +func adaptStore(resource *terraform.Block) datalake.Store { + store := datalake.Store{ + Metadata: resource.GetMetadata(), + EnableEncryption: types.BoolDefault(true, resource.GetMetadata()), + } + encryptionStateAttr := resource.GetAttribute("encryption_state") + if encryptionStateAttr.Equals("Disabled") { + store.EnableEncryption = types.Bool(false, encryptionStateAttr.GetMetadata()) + } else if encryptionStateAttr.Equals("Enabled") { + store.EnableEncryption = types.Bool(true, encryptionStateAttr.GetMetadata()) + } + return store +} diff --git a/internal/adapters/terraform/azure/datalake/adapt_test.go b/internal/adapters/terraform/azure/datalake/adapt_test.go new file mode 100644 index 000000000000..41fd476522cf --- /dev/null +++ b/internal/adapters/terraform/azure/datalake/adapt_test.go @@ -0,0 +1,83 @@ +package datalake + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/datalake" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptStore(t *testing.T) { + tests := []struct { + name string + terraform string + expected datalake.Store + }{ + { + name: "enabled", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Enabled" + } +`, + expected: datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(true, 
defsecTypes.NewTestMetadata()), + }, + }, + { + name: "disabled", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Disabled" + } +`, + expected: datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "enabled by default", + terraform: ` + resource "azurerm_data_lake_store" "good_example" { + } +`, + expected: datalake.Store{ + Metadata: defsecTypes.NewTestMetadata(), + EnableEncryption: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptStore(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_data_lake_store" "good_example" { + encryption_state = "Disabled" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Stores, 1) + store := adapted.Stores[0] + + assert.Equal(t, 3, store.EnableEncryption.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, store.EnableEncryption.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/keyvault/adapt.go b/internal/adapters/terraform/azure/keyvault/adapt.go new file mode 100644 index 000000000000..c78d39115bff --- /dev/null +++ b/internal/adapters/terraform/azure/keyvault/adapt.go @@ -0,0 +1,159 @@ +package keyvault + +import ( + "time" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" +) + +func Adapt(modules terraform.Modules) keyvault.KeyVault { + adapter := adapter{ + vaultSecretIDs: modules.GetChildResourceIDMapByType("azurerm_key_vault_secret"), + vaultKeyIDs: 
modules.GetChildResourceIDMapByType("azurerm_key_vault_key"), + } + + return keyvault.KeyVault{ + Vaults: adapter.adaptVaults(modules), + } +} + +type adapter struct { + vaultSecretIDs terraform.ResourceIDResolutions + vaultKeyIDs terraform.ResourceIDResolutions +} + +func (a *adapter) adaptVaults(modules terraform.Modules) []keyvault.Vault { + + var vaults []keyvault.Vault + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_key_vault") { + vaults = append(vaults, a.adaptVault(resource, module)) + + } + } + + orphanResources := modules.GetResourceByIDs(a.vaultSecretIDs.Orphans()...) + + if len(orphanResources) > 0 { + orphanage := keyvault.Vault{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Secrets: nil, + Keys: nil, + EnablePurgeProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + SoftDeleteRetentionDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + DefaultAction: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, secretResource := range orphanResources { + orphanage.Secrets = append(orphanage.Secrets, adaptSecret(secretResource)) + } + vaults = append(vaults, orphanage) + } + + orphanResources = modules.GetResourceByIDs(a.vaultKeyIDs.Orphans()...) 
+ + if len(orphanResources) > 0 { + orphanage := keyvault.Vault{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Secrets: nil, + Keys: nil, + EnablePurgeProtection: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + SoftDeleteRetentionDays: defsecTypes.IntDefault(0, defsecTypes.NewUnmanagedMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + DefaultAction: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + } + for _, secretResource := range orphanResources { + orphanage.Keys = append(orphanage.Keys, adaptKey(secretResource)) + } + vaults = append(vaults, orphanage) + } + + return vaults +} + +func (a *adapter) adaptVault(resource *terraform.Block, module *terraform.Module) keyvault.Vault { + var keys []keyvault.Key + var secrets []keyvault.Secret + + defaultActionVal := defsecTypes.StringDefault("", resource.GetMetadata()) + + secretBlocks := module.GetReferencingResources(resource, "azurerm_key_vault_secret", "key_vault_id") + for _, secretBlock := range secretBlocks { + a.vaultSecretIDs.Resolve(secretBlock.ID()) + secrets = append(secrets, adaptSecret(secretBlock)) + } + + keyBlocks := module.GetReferencingResources(resource, "azurerm_key_vault_key", "key_vault_id") + for _, keyBlock := range keyBlocks { + a.vaultKeyIDs.Resolve(keyBlock.ID()) + keys = append(keys, adaptKey(keyBlock)) + } + + purgeProtectionAttr := resource.GetAttribute("purge_protection_enabled") + purgeProtectionVal := purgeProtectionAttr.AsBoolValueOrDefault(false, resource) + + softDeleteRetentionDaysAttr := resource.GetAttribute("soft_delete_retention_days") + softDeleteRetentionDaysVal := softDeleteRetentionDaysAttr.AsIntValueOrDefault(0, resource) + + aclMetadata := defsecTypes.NewUnmanagedMetadata() + if aclBlock := resource.GetBlock("network_acls"); aclBlock.IsNotNil() { + aclMetadata = aclBlock.GetMetadata() + defaultActionAttr := aclBlock.GetAttribute("default_action") + defaultActionVal = 
defaultActionAttr.AsStringValueOrDefault("", resource.GetBlock("network_acls")) + } + + return keyvault.Vault{ + Metadata: resource.GetMetadata(), + Secrets: secrets, + Keys: keys, + EnablePurgeProtection: purgeProtectionVal, + SoftDeleteRetentionDays: softDeleteRetentionDaysVal, + NetworkACLs: keyvault.NetworkACLs{ + Metadata: aclMetadata, + DefaultAction: defaultActionVal, + }, + } +} + +func adaptSecret(resource *terraform.Block) keyvault.Secret { + contentTypeAttr := resource.GetAttribute("content_type") + contentTypeVal := contentTypeAttr.AsStringValueOrDefault("", resource) + + return keyvault.Secret{ + Metadata: resource.GetMetadata(), + ContentType: contentTypeVal, + ExpiryDate: resolveExpiryDate(resource), + } +} + +func adaptKey(resource *terraform.Block) keyvault.Key { + + return keyvault.Key{ + Metadata: resource.GetMetadata(), + ExpiryDate: resolveExpiryDate(resource), + } +} + +func resolveExpiryDate(resource *terraform.Block) defsecTypes.TimeValue { + expiryDateAttr := resource.GetAttribute("expiration_date") + expiryDateVal := defsecTypes.TimeDefault(time.Time{}, resource.GetMetadata()) + + if expiryDateAttr.IsString() { + expiryDateString := expiryDateAttr.Value().AsString() + if expiryDate, err := time.Parse(time.RFC3339, expiryDateString); err == nil { + expiryDateVal = defsecTypes.Time(expiryDate, expiryDateAttr.GetMetadata()) + } + } else if expiryDateAttr.IsNotNil() { + expiryDateVal = defsecTypes.TimeUnresolvable(expiryDateAttr.GetMetadata()) + } + + return expiryDateVal +} diff --git a/internal/adapters/terraform/azure/keyvault/adapt_test.go b/internal/adapters/terraform/azure/keyvault/adapt_test.go new file mode 100644 index 000000000000..b7e668712697 --- /dev/null +++ b/internal/adapters/terraform/azure/keyvault/adapt_test.go @@ -0,0 +1,271 @@ +package keyvault + +import ( + "testing" + "time" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + + 
"github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected keyvault.KeyVault + }{ + { + name: "defined", + terraform: ` + resource "azurerm_key_vault" "example" { + name = "examplekeyvault" + enabled_for_disk_encryption = true + soft_delete_retention_days = 7 + purge_protection_enabled = true + + network_acls { + bypass = "AzureServices" + default_action = "Deny" + } + } +`, + expected: keyvault.KeyVault{ + Vaults: []keyvault.Vault{ + { + Metadata: defsecTypes.NewTestMetadata(), + EnablePurgeProtection: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SoftDeleteRetentionDays: defsecTypes.Int(7, defsecTypes.NewTestMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultAction: defsecTypes.String("Deny", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_key_vault" "example" { + } +`, + expected: keyvault.KeyVault{ + Vaults: []keyvault.Vault{ + { + Metadata: defsecTypes.NewTestMetadata(), + EnablePurgeProtection: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + SoftDeleteRetentionDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + NetworkACLs: keyvault.NetworkACLs{ + Metadata: defsecTypes.NewTestMetadata(), + DefaultAction: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptSecret(t *testing.T) { + tests := []struct { + name string + terraform string + expected keyvault.Secret + }{ + { + name: 
"defaults", + terraform: ` + resource "azurerm_key_vault_secret" "example" { + } +`, + expected: keyvault.Secret{ + Metadata: defsecTypes.NewTestMetadata(), + ContentType: defsecTypes.String("", defsecTypes.NewTestMetadata()), + ExpiryDate: defsecTypes.Time(time.Time{}, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defined", + terraform: ` + resource "azurerm_key_vault_secret" "example" { + content_type = "password" + expiration_date = "1982-12-31T00:00:00Z" + } +`, + expected: keyvault.Secret{ + Metadata: defsecTypes.NewTestMetadata(), + ContentType: defsecTypes.String("password", defsecTypes.NewTestMetadata()), + ExpiryDate: defsecTypes.Time(func(timeVal string) time.Time { + parsed, _ := time.Parse(time.RFC3339, timeVal) + return parsed + }("1982-12-31T00:00:00Z"), defsecTypes.NewTestMetadata())}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptSecret(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptKey(t *testing.T) { + tests := []struct { + name string + terraform string + expected keyvault.Key + }{ + { + name: "defined", + terraform: ` + resource "azurerm_key_vault_key" "example" { + name = "generated-certificate" + expiration_date = "1982-12-31T00:00:00Z" + } +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.Time(func(timeVal string) time.Time { + parsed, _ := time.Parse(time.RFC3339, timeVal) + return parsed + }("1982-12-31T00:00:00Z"), defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_key_vault_key" "example" { + } +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.Time(time.Time{}, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "expiration date refers to the resource", + terraform: ` +terraform { + 
required_version = ">=1.3.0" + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = ">=3.0.0" + } + time = { + source = "hashicorp/time" + version = ">=0.9.0" + } + } +} + +resource "azurerm_key_vault" "this" { + name = "keyvault" + location = "us-west" + resource_group_name = "resource-group" + tenant_id = "tenant-id" + sku_name = "Standard" +} + +resource "time_offset" "expiry" { + offset_years = 1 + base_rfc3339 = "YYYY-MM-DDTHH:MM:SSZ" +} + +resource "azurerm_key_vault_key" "this" { + name = "key" + key_vault_id = azurerm_key_vault.this.id + key_type = "RSA" + key_size = 2048 + key_opts = ["decrypt", "encrypt", "sign", "unwrapKey", "verify", "wrapKey"] + expiration_date = time_offset.expiry.rfc3339 +} +`, + expected: keyvault.Key{ + Metadata: defsecTypes.NewTestMetadata(), + ExpiryDate: defsecTypes.TimeUnresolvable(defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptKey(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_key_vault" "example" { + name = "examplekeyvault" + enabled_for_disk_encryption = true + soft_delete_retention_days = 7 + purge_protection_enabled = true + + network_acls { + bypass = "AzureServices" + default_action = "Deny" + } + } + + resource "azurerm_key_vault_key" "example" { + key_vault_id = azurerm_key_vault.example.id + name = "generated-certificate" + expiration_date = "1982-12-31T00:00:00Z" + } + + resource "azurerm_key_vault_secret" "example" { + key_vault_id = azurerm_key_vault.example.id + content_type = "password" + expiration_date = "1982-12-31T00:00:00Z" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Vaults, 1) + require.Len(t, adapted.Vaults[0].Keys, 1) + 
require.Len(t, adapted.Vaults[0].Secrets, 1) + + vault := adapted.Vaults[0] + key := vault.Keys[0] + secret := vault.Secrets[0] + + assert.Equal(t, 5, vault.SoftDeleteRetentionDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, vault.SoftDeleteRetentionDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, vault.EnablePurgeProtection.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, vault.EnablePurgeProtection.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, vault.NetworkACLs.DefaultAction.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, vault.NetworkACLs.DefaultAction.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, key.ExpiryDate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, key.ExpiryDate.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 22, secret.ContentType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, secret.ContentType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, secret.ExpiryDate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, secret.ExpiryDate.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/monitor/adapt.go b/internal/adapters/terraform/azure/monitor/adapt.go new file mode 100644 index 000000000000..f70648d92038 --- /dev/null +++ b/internal/adapters/terraform/azure/monitor/adapt.go @@ -0,0 +1,56 @@ +package monitor + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func Adapt(modules terraform.Modules) monitor.Monitor { + return monitor.Monitor{ + LogProfiles: adaptLogProfiles(modules), + } +} + +func adaptLogProfiles(modules terraform.Modules) []monitor.LogProfile { + var logProfiles []monitor.LogProfile + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_monitor_log_profile") { + logProfiles = 
append(logProfiles, adaptLogProfile(resource)) + } + } + return logProfiles +} + +func adaptLogProfile(resource *terraform.Block) monitor.LogProfile { + + logProfile := monitor.LogProfile{ + Metadata: resource.GetMetadata(), + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Days: defsecTypes.IntDefault(0, resource.GetMetadata()), + }, + Categories: nil, + Locations: nil, + } + + if retentionPolicyBlock := resource.GetBlock("retention_policy"); retentionPolicyBlock.IsNotNil() { + logProfile.RetentionPolicy.Metadata = retentionPolicyBlock.GetMetadata() + enabledAttr := retentionPolicyBlock.GetAttribute("enabled") + logProfile.RetentionPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, resource) + daysAttr := retentionPolicyBlock.GetAttribute("days") + logProfile.RetentionPolicy.Days = daysAttr.AsIntValueOrDefault(0, resource) + } + + if categoriesAttr := resource.GetAttribute("categories"); categoriesAttr.IsNotNil() { + logProfile.Categories = categoriesAttr.AsStringValues() + } + + if locationsAttr := resource.GetAttribute("locations"); locationsAttr.IsNotNil() { + logProfile.Locations = locationsAttr.AsStringValues() + } + + return logProfile +} diff --git a/internal/adapters/terraform/azure/monitor/adapt_test.go b/internal/adapters/terraform/azure/monitor/adapt_test.go new file mode 100644 index 000000000000..f7894c53da59 --- /dev/null +++ b/internal/adapters/terraform/azure/monitor/adapt_test.go @@ -0,0 +1,128 @@ +package monitor + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/monitor" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptLogProfile(t *testing.T) { + tests := []struct 
{ + name string + terraform string + expected monitor.LogProfile + }{ + { + name: "defined", + terraform: ` + resource "azurerm_monitor_log_profile" "example" { + categories = [ + "Action", + "Delete", + "Write", + ] + + retention_policy { + enabled = true + days = 365 + } + + locations = [ + "eastus", + "eastus2", + "southcentralus" + ] + } +`, + expected: monitor.LogProfile{ + Metadata: defsecTypes.NewTestMetadata(), + Categories: []defsecTypes.StringValue{ + defsecTypes.String("Action", defsecTypes.NewTestMetadata()), + defsecTypes.String("Delete", defsecTypes.NewTestMetadata()), + defsecTypes.String("Write", defsecTypes.NewTestMetadata()), + }, + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(365, defsecTypes.NewTestMetadata()), + }, + Locations: []defsecTypes.StringValue{ + defsecTypes.String("eastus", defsecTypes.NewTestMetadata()), + defsecTypes.String("eastus2", defsecTypes.NewTestMetadata()), + defsecTypes.String("southcentralus", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_monitor_log_profile" "example" { + } +`, + expected: monitor.LogProfile{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: monitor.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptLogProfile(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_monitor_log_profile" "example" { + categories = [ + "Action", + "Delete", + "Write", + ] + + retention_policy { + enabled = true + 
days = 365 + } + + locations = [ + "eastus", + "eastus2", + "southcentralus" + ] + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.LogProfiles, 1) + logProfile := adapted.LogProfiles[0] + + assert.Equal(t, 3, logProfile.Categories[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, logProfile.Categories[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, logProfile.RetentionPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, logProfile.RetentionPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, logProfile.RetentionPolicy.Days.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, logProfile.RetentionPolicy.Days.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, logProfile.Locations[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, logProfile.Locations[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/network/adapt.go b/internal/adapters/terraform/azure/network/adapt.go new file mode 100644 index 000000000000..899c0fe767d5 --- /dev/null +++ b/internal/adapters/terraform/azure/network/adapt.go @@ -0,0 +1,220 @@ +package network + +import ( + "strconv" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) network.Network { + return network.Network{ + SecurityGroups: (&adapter{ + modules: modules, + groups: make(map[string]network.SecurityGroup), + }).adaptSecurityGroups(), + NetworkWatcherFlowLogs: adaptWatcherLogs(modules), + } +} + +type adapter struct { + modules terraform.Modules + groups map[string]network.SecurityGroup +} + +func (a *adapter) adaptSecurityGroups() []network.SecurityGroup { + + for _, module := range a.modules { + for _, 
resource := range module.GetResourcesByType("azurerm_network_security_group") { + a.adaptSecurityGroup(resource) + } + } + + for _, ruleBlock := range a.modules.GetResourcesByType("azurerm_network_security_rule") { + rule := a.adaptSGRule(ruleBlock) + + groupAttr := ruleBlock.GetAttribute("network_security_group_name") + if groupAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(groupAttr, ruleBlock); err == nil { + if group, ok := a.groups[referencedBlock.ID()]; ok { + group.Rules = append(group.Rules, rule) + a.groups[referencedBlock.ID()] = group + continue + } + } + + } + + a.groups[uuid.NewString()] = network.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Rules: []network.SecurityGroupRule{rule}, + } + } + + var securityGroups []network.SecurityGroup + for _, group := range a.groups { + securityGroups = append(securityGroups, group) + } + + return securityGroups +} + +func adaptWatcherLogs(modules terraform.Modules) []network.NetworkWatcherFlowLog { + var watcherLogs []network.NetworkWatcherFlowLog + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_network_watcher_flow_log") { + watcherLogs = append(watcherLogs, adaptWatcherLog(resource)) + } + } + return watcherLogs +} + +func (a *adapter) adaptSecurityGroup(resource *terraform.Block) { + var rules []network.SecurityGroupRule + for _, ruleBlock := range resource.GetBlocks("security_rule") { + rules = append(rules, a.adaptSGRule(ruleBlock)) + } + a.groups[resource.ID()] = network.SecurityGroup{ + Metadata: resource.GetMetadata(), + Rules: rules, + } +} + +func (a *adapter) adaptSGRule(ruleBlock *terraform.Block) network.SecurityGroupRule { + + rule := network.SecurityGroupRule{ + Metadata: ruleBlock.GetMetadata(), + Outbound: defsecTypes.BoolDefault(false, ruleBlock.GetMetadata()), + Allow: defsecTypes.BoolDefault(true, ruleBlock.GetMetadata()), + SourceAddresses: nil, + SourcePorts: nil, + DestinationAddresses: nil, + 
DestinationPorts: nil, + Protocol: ruleBlock.GetAttribute("protocol").AsStringValueOrDefault("", ruleBlock), + } + + accessAttr := ruleBlock.GetAttribute("access") + if accessAttr.Equals("Allow") { + rule.Allow = defsecTypes.Bool(true, accessAttr.GetMetadata()) + } else if accessAttr.Equals("Deny") { + rule.Allow = defsecTypes.Bool(false, accessAttr.GetMetadata()) + } + + directionAttr := ruleBlock.GetAttribute("direction") + if directionAttr.Equals("Inbound") { + rule.Outbound = defsecTypes.Bool(false, directionAttr.GetMetadata()) + } else if directionAttr.Equals("Outbound") { + rule.Outbound = defsecTypes.Bool(true, directionAttr.GetMetadata()) + } + + a.adaptSource(ruleBlock, &rule) + a.adaptDestination(ruleBlock, &rule) + + return rule +} + +func (a *adapter) adaptSource(ruleBlock *terraform.Block, rule *network.SecurityGroupRule) { + if sourceAddressAttr := ruleBlock.GetAttribute("source_address_prefix"); sourceAddressAttr.IsString() { + rule.SourceAddresses = append(rule.SourceAddresses, sourceAddressAttr.AsStringValueOrDefault("", ruleBlock)) + } else if sourceAddressPrefixesAttr := ruleBlock.GetAttribute("source_address_prefixes"); sourceAddressPrefixesAttr.IsNotNil() { + rule.SourceAddresses = append(rule.SourceAddresses, sourceAddressPrefixesAttr.AsStringValues()...) 
+ } + + if sourcePortRangesAttr := ruleBlock.GetAttribute("source_port_ranges"); sourcePortRangesAttr.IsNotNil() { + ports := sourcePortRangesAttr.AsStringValues() + for _, value := range ports { + rule.SourcePorts = append(rule.SourcePorts, expandRange(value.Value(), value.GetMetadata())) + } + } else if sourcePortRangeAttr := ruleBlock.GetAttribute("source_port_range"); sourcePortRangeAttr.IsString() { + rule.SourcePorts = append(rule.SourcePorts, expandRange(sourcePortRangeAttr.Value().AsString(), sourcePortRangeAttr.GetMetadata())) + } else if sourcePortRangeAttr := ruleBlock.GetAttribute("source_port_range"); sourcePortRangeAttr.IsNumber() { + f := sourcePortRangeAttr.AsNumber() + rule.SourcePorts = append(rule.SourcePorts, network.PortRange{ + Metadata: sourcePortRangeAttr.GetMetadata(), + Start: int(f), + End: int(f), + }) + } +} + +func (a *adapter) adaptDestination(ruleBlock *terraform.Block, rule *network.SecurityGroupRule) { + if destAddressAttr := ruleBlock.GetAttribute("destination_address_prefix"); destAddressAttr.IsString() { + rule.DestinationAddresses = append(rule.DestinationAddresses, destAddressAttr.AsStringValueOrDefault("", ruleBlock)) + } else if destAddressPrefixesAttr := ruleBlock.GetAttribute("destination_address_prefixes"); destAddressPrefixesAttr.IsNotNil() { + rule.DestinationAddresses = append(rule.DestinationAddresses, destAddressPrefixesAttr.AsStringValues()...) 
+ } + + if destPortRangesAttr := ruleBlock.GetAttribute("destination_port_ranges"); destPortRangesAttr.IsNotNil() { + ports := destPortRangesAttr.AsStringValues() + for _, value := range ports { + rule.DestinationPorts = append(rule.DestinationPorts, expandRange(value.Value(), value.GetMetadata())) + } + } else if destPortRangeAttr := ruleBlock.GetAttribute("destination_port_range"); destPortRangeAttr.IsString() { + rule.DestinationPorts = append(rule.DestinationPorts, expandRange(destPortRangeAttr.Value().AsString(), destPortRangeAttr.GetMetadata())) + } else if destPortRangeAttr := ruleBlock.GetAttribute("destination_port_range"); destPortRangeAttr.IsNumber() { + f := destPortRangeAttr.AsNumber() + rule.DestinationPorts = append(rule.DestinationPorts, network.PortRange{ + Metadata: destPortRangeAttr.GetMetadata(), + Start: int(f), + End: int(f), + }) + } +} + +func expandRange(r string, m defsecTypes.Metadata) network.PortRange { + start := 0 + end := 65535 + switch { + case r == "*": + case strings.Contains(r, "-"): + if parts := strings.Split(r, "-"); len(parts) == 2 { + if p1, err := strconv.ParseInt(parts[0], 10, 32); err == nil { + start = int(p1) + } + if p2, err := strconv.ParseInt(parts[1], 10, 32); err == nil { + end = int(p2) + } + } + default: + if val, err := strconv.ParseInt(r, 10, 32); err == nil { + start = int(val) + end = int(val) + } + } + + return network.PortRange{ + Metadata: m, + Start: start, + End: end, + } +} + +func adaptWatcherLog(resource *terraform.Block) network.NetworkWatcherFlowLog { + flowLog := network.NetworkWatcherFlowLog{ + Metadata: resource.GetMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + Days: defsecTypes.IntDefault(0, resource.GetMetadata()), + }, + } + + if retentionPolicyBlock := resource.GetBlock("retention_policy"); retentionPolicyBlock.IsNotNil() { + flowLog.RetentionPolicy.Metadata = 
retentionPolicyBlock.GetMetadata() + + enabledAttr := retentionPolicyBlock.GetAttribute("enabled") + flowLog.RetentionPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, retentionPolicyBlock) + + daysAttr := retentionPolicyBlock.GetAttribute("days") + flowLog.RetentionPolicy.Days = daysAttr.AsIntValueOrDefault(0, retentionPolicyBlock) + } + + return flowLog +} diff --git a/internal/adapters/terraform/azure/network/adapt_test.go b/internal/adapters/terraform/azure/network/adapt_test.go new file mode 100644 index 000000000000..74e1f3f26aca --- /dev/null +++ b/internal/adapters/terraform/azure/network/adapt_test.go @@ -0,0 +1,262 @@ +package network + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/network" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected network.Network + }{ + { + name: "defined", + terraform: ` + resource "azurerm_network_security_rule" "example" { + name = "example_security_rule" + network_security_group_name = azurerm_network_security_group.example.name + direction = "Inbound" + access = "Allow" + protocol = "TCP" + source_port_range = "*" + destination_port_ranges = ["3389"] + source_address_prefix = "4.53.160.75" + destination_address_prefix = "*" + } + + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + } + + resource "azurerm_network_watcher_flow_log" "example" { + resource_group_name = azurerm_resource_group.example.name + name = "example-log" + + retention_policy { + enabled = true + days = 7 + } + } +`, + expected: network.Network{ + SecurityGroups: []network.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Rules: 
[]network.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Outbound: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Allow: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + SourceAddresses: []defsecTypes.StringValue{ + defsecTypes.String("4.53.160.75", defsecTypes.NewTestMetadata()), + }, + DestinationAddresses: []defsecTypes.StringValue{ + defsecTypes.String("*", defsecTypes.NewTestMetadata()), + }, + SourcePorts: []network.PortRange{ + { + Metadata: defsecTypes.NewTestMetadata(), + Start: 0, + End: 65535, + }, + }, + DestinationPorts: []network.PortRange{ + { + Metadata: defsecTypes.NewTestMetadata(), + Start: 3389, + End: 3389, + }, + }, + Protocol: defsecTypes.String("TCP", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + NetworkWatcherFlowLogs: []network.NetworkWatcherFlowLog{ + { + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(7, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + security_rule { + } + } +`, + expected: network.Network{ + SecurityGroups: []network.SecurityGroup{ + { + Metadata: defsecTypes.NewTestMetadata(), + Rules: []network.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Outbound: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Allow: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptWatcherLog(t *testing.T) { 
+ tests := []struct { + name string + terraform string + expected network.NetworkWatcherFlowLog + }{ + { + name: "defined", + terraform: ` + resource "azurerm_network_watcher_flow_log" "watcher" { + retention_policy { + enabled = true + days = 90 + } + } +`, + expected: network.NetworkWatcherFlowLog{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(90, defsecTypes.NewTestMetadata()), + }, + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_network_watcher_flow_log" "watcher" { + retention_policy { + } + } +`, + expected: network.NetworkWatcherFlowLog{ + Metadata: defsecTypes.NewTestMetadata(), + RetentionPolicy: network.RetentionPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + Days: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWatcherLog(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_network_security_group" "example" { + name = "tf-appsecuritygroup" + } + + resource "azurerm_network_security_rule" "example" { + name = "example_security_rule" + network_security_group_name = azurerm_network_security_group.example.name + direction = "Inbound" + access = "Allow" + protocol = "TCP" + source_port_range = "*" + destination_port_ranges = ["3389"] + source_address_prefix = "4.53.160.75" + destination_address_prefix = "*" + } + + resource "azurerm_network_watcher_flow_log" "example" { + resource_group_name = azurerm_resource_group.example.name + name = "example-log" + + retention_policy { + enabled = true + days = 7 + } + }` + 
+ modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.SecurityGroups, 1) + require.Len(t, adapted.NetworkWatcherFlowLogs, 1) + + securityGroup := adapted.SecurityGroups[0] + rule := securityGroup.Rules[0] + watcher := adapted.NetworkWatcherFlowLogs[0] + + assert.Equal(t, 2, securityGroup.Metadata.Range().GetStartLine()) + assert.Equal(t, 4, securityGroup.Metadata.Range().GetEndLine()) + + assert.Equal(t, 6, rule.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, rule.Metadata.Range().GetEndLine()) + + assert.Equal(t, 9, rule.Outbound.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 9, rule.Outbound.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, rule.Allow.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, rule.Allow.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, rule.Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, rule.Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, rule.SourcePorts[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 12, rule.SourcePorts[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 13, rule.DestinationPorts[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 13, rule.DestinationPorts[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, rule.SourceAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, rule.SourceAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 15, rule.DestinationAddresses[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 15, rule.DestinationAddresses[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, watcher.Metadata.Range().GetStartLine()) + assert.Equal(t, 26, watcher.Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, watcher.RetentionPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 25, watcher.RetentionPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 23, 
watcher.RetentionPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, watcher.RetentionPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 24, watcher.RetentionPolicy.Days.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, watcher.RetentionPolicy.Days.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/securitycenter/adapt.go b/internal/adapters/terraform/azure/securitycenter/adapt.go new file mode 100644 index 000000000000..90e02d933a3a --- /dev/null +++ b/internal/adapters/terraform/azure/securitycenter/adapt.go @@ -0,0 +1,59 @@ +package securitycenter + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) securitycenter.SecurityCenter { + return securitycenter.SecurityCenter{ + Contacts: adaptContacts(modules), + Subscriptions: adaptSubscriptions(modules), + } +} + +func adaptContacts(modules terraform.Modules) []securitycenter.Contact { + var contacts []securitycenter.Contact + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_security_center_contact") { + contacts = append(contacts, adaptContact(resource)) + } + } + return contacts +} + +func adaptSubscriptions(modules terraform.Modules) []securitycenter.SubscriptionPricing { + var subscriptions []securitycenter.SubscriptionPricing + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_security_center_subscription_pricing") { + subscriptions = append(subscriptions, adaptSubscription(resource)) + } + } + return subscriptions +} + +func adaptContact(resource *terraform.Block) securitycenter.Contact { + enableAlertNotifAttr := resource.GetAttribute("alert_notifications") + enableAlertNotifVal := enableAlertNotifAttr.AsBoolValueOrDefault(false, resource) + + phoneAttr := resource.GetAttribute("phone") + phoneVal := 
phoneAttr.AsStringValueOrDefault("", resource) + + return securitycenter.Contact{ + Metadata: resource.GetMetadata(), + EnableAlertNotifications: enableAlertNotifVal, + Phone: phoneVal, + } +} + +func adaptSubscription(resource *terraform.Block) securitycenter.SubscriptionPricing { + tierAttr := resource.GetAttribute("tier") + tierVal := tierAttr.AsStringValueOrDefault("Free", resource) + + return securitycenter.SubscriptionPricing{ + Metadata: resource.GetMetadata(), + Tier: tierVal, + } +} diff --git a/internal/adapters/terraform/azure/securitycenter/adapt_test.go b/internal/adapters/terraform/azure/securitycenter/adapt_test.go new file mode 100644 index 000000000000..1454259aa3d5 --- /dev/null +++ b/internal/adapters/terraform/azure/securitycenter/adapt_test.go @@ -0,0 +1,137 @@ +package securitycenter + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/securitycenter" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptContact(t *testing.T) { + tests := []struct { + name string + terraform string + expected securitycenter.Contact + }{ + { + name: "defined", + terraform: ` + resource "azurerm_security_center_contact" "example" { + phone = "+1-555-555-5555" + alert_notifications = true + } +`, + expected: securitycenter.Contact{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAlertNotifications: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + Phone: defsecTypes.String("+1-555-555-5555", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "azurerm_security_center_contact" "example" { + } +`, + expected: securitycenter.Contact{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAlertNotifications: defsecTypes.Bool(false, 
defsecTypes.NewTestMetadata()), + Phone: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptContact(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func Test_adaptSubscription(t *testing.T) { + tests := []struct { + name string + terraform string + expected securitycenter.SubscriptionPricing + }{ + { + name: "free tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Free" + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Free", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default - free tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Free", defsecTypes.NewTestMetadata()), + }, + }, + { + name: "standard tier", + terraform: ` + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Standard" + }`, + expected: securitycenter.SubscriptionPricing{ + Metadata: defsecTypes.NewTestMetadata(), + Tier: defsecTypes.String("Standard", defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptSubscription(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_security_center_contact" "example" { + phone = "+1-555-555-5555" + alert_notifications = true + } + + resource "azurerm_security_center_subscription_pricing" "example" { + tier = "Standard" + }` + + modules := 
tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Contacts, 1) + require.Len(t, adapted.Subscriptions, 1) + + contact := adapted.Contacts[0] + sub := adapted.Subscriptions[0] + + assert.Equal(t, 3, contact.Phone.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, contact.Phone.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, contact.EnableAlertNotifications.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, contact.EnableAlertNotifications.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 8, sub.Tier.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 8, sub.Tier.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/azure/storage/adapt.go b/internal/adapters/terraform/azure/storage/adapt.go new file mode 100644 index 000000000000..4519460b5eb2 --- /dev/null +++ b/internal/adapters/terraform/azure/storage/adapt.go @@ -0,0 +1,173 @@ +package storage + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) storage.Storage { + accounts, containers, networkRules := adaptAccounts(modules) + + orphanAccount := storage.Account{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NetworkRules: adaptOrphanNetworkRules(modules, networkRules), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Containers: adaptOrphanContainers(modules, containers), + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + } + + accounts = append(accounts, orphanAccount) + + return storage.Storage{ + Accounts: accounts, + } +} + +func 
adaptOrphanContainers(modules terraform.Modules, containers []string) (orphans []storage.Container) { + accountedFor := make(map[string]bool) + for _, container := range containers { + accountedFor[container] = true + } + for _, module := range modules { + for _, containerResource := range module.GetResourcesByType("azurerm_storage_container") { + if _, ok := accountedFor[containerResource.ID()]; ok { + continue + } + orphans = append(orphans, adaptContainer(containerResource)) + } + } + + return orphans +} + +func adaptOrphanNetworkRules(modules terraform.Modules, networkRules []string) (orphans []storage.NetworkRule) { + accountedFor := make(map[string]bool) + for _, networkRule := range networkRules { + accountedFor[networkRule] = true + } + + for _, module := range modules { + for _, networkRuleResource := range module.GetResourcesByType("azurerm_storage_account_network_rules") { + if _, ok := accountedFor[networkRuleResource.ID()]; ok { + continue + } + + orphans = append(orphans, adaptNetworkRule(networkRuleResource)) + } + } + + return orphans +} + +func adaptAccounts(modules terraform.Modules) ([]storage.Account, []string, []string) { + var accounts []storage.Account + var accountedForContainers []string + var accountedForNetworkRules []string + + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_storage_account") { + account := adaptAccount(resource) + containerResource := module.GetReferencingResources(resource, "azurerm_storage_container", "storage_account_name") + for _, containerBlock := range containerResource { + accountedForContainers = append(accountedForContainers, containerBlock.ID()) + account.Containers = append(account.Containers, adaptContainer(containerBlock)) + } + networkRulesResource := module.GetReferencingResources(resource, "azurerm_storage_account_network_rules", "storage_account_name") + for _, networkRuleBlock := range networkRulesResource { + accountedForNetworkRules = 
append(accountedForNetworkRules, networkRuleBlock.ID()) + account.NetworkRules = append(account.NetworkRules, adaptNetworkRule(networkRuleBlock)) + } + for _, queueBlock := range module.GetReferencingResources(resource, "azurerm_storage_queue", "storage_account_name") { + queue := storage.Queue{ + Metadata: queueBlock.GetMetadata(), + Name: queueBlock.GetAttribute("name").AsStringValueOrDefault("", queueBlock), + } + account.Queues = append(account.Queues, queue) + } + accounts = append(accounts, account) + } + } + + return accounts, accountedForContainers, accountedForNetworkRules +} + +func adaptAccount(resource *terraform.Block) storage.Account { + account := storage.Account{ + Metadata: resource.GetMetadata(), + NetworkRules: nil, + EnforceHTTPS: defsecTypes.BoolDefault(true, resource.GetMetadata()), + Containers: nil, + QueueProperties: storage.QueueProperties{ + Metadata: resource.GetMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("TLS1_2", resource.GetMetadata()), + } + + networkRulesBlocks := resource.GetBlocks("network_rules") + for _, networkBlock := range networkRulesBlocks { + account.NetworkRules = append(account.NetworkRules, adaptNetworkRule(networkBlock)) + } + + httpsOnlyAttr := resource.GetAttribute("enable_https_traffic_only") + account.EnforceHTTPS = httpsOnlyAttr.AsBoolValueOrDefault(true, resource) + + queuePropertiesBlock := resource.GetBlock("queue_properties") + if queuePropertiesBlock.IsNotNil() { + account.QueueProperties.Metadata = queuePropertiesBlock.GetMetadata() + loggingBlock := queuePropertiesBlock.GetBlock("logging") + if loggingBlock.IsNotNil() { + account.QueueProperties.EnableLogging = defsecTypes.Bool(true, loggingBlock.GetMetadata()) + } + } + + minTLSVersionAttr := resource.GetAttribute("min_tls_version") + account.MinimumTLSVersion = minTLSVersionAttr.AsStringValueOrDefault("TLS1_0", resource) + return account +} + +func 
adaptContainer(resource *terraform.Block) storage.Container { + accessTypeAttr := resource.GetAttribute("container_access_type") + publicAccess := defsecTypes.StringDefault(storage.PublicAccessOff, resource.GetMetadata()) + + if accessTypeAttr.Equals("blob") { + publicAccess = defsecTypes.String(storage.PublicAccessBlob, accessTypeAttr.GetMetadata()) + } else if accessTypeAttr.Equals("container") { + publicAccess = defsecTypes.String(storage.PublicAccessContainer, accessTypeAttr.GetMetadata()) + } + + return storage.Container{ + Metadata: resource.GetMetadata(), + PublicAccess: publicAccess, + } +} + +func adaptNetworkRule(resource *terraform.Block) storage.NetworkRule { + var allowByDefault defsecTypes.BoolValue + var bypass []defsecTypes.StringValue + + defaultActionAttr := resource.GetAttribute("default_action") + + if defaultActionAttr.IsNotNil() { + allowByDefault = defsecTypes.Bool(defaultActionAttr.Equals("Allow", terraform.IgnoreCase), defaultActionAttr.GetMetadata()) + } else { + allowByDefault = defsecTypes.BoolDefault(false, resource.GetMetadata()) + } + + if resource.HasChild("bypass") { + bypassAttr := resource.GetAttribute("bypass") + bypass = bypassAttr.AsStringValues() + } + + return storage.NetworkRule{ + Metadata: resource.GetMetadata(), + Bypass: bypass, + AllowByDefault: allowByDefault, + } +} diff --git a/internal/adapters/terraform/azure/storage/adapt_test.go b/internal/adapters/terraform/azure/storage/adapt_test.go new file mode 100644 index 000000000000..c0e3b85f3c99 --- /dev/null +++ b/internal/adapters/terraform/azure/storage/adapt_test.go @@ -0,0 +1,252 @@ +package storage + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/storage" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + 
+func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected storage.Storage + }{ + { + name: "defined", + terraform: ` + resource "azurerm_resource_group" "example" { + name = "example" + } + + resource "azurerm_storage_account" "example" { + name = "storageaccountname" + resource_group_name = azurerm_resource_group.example.name + + network_rules { + default_action = "Deny" + bypass = ["Metrics", "AzureServices"] + } + + enable_https_traffic_only = true + queue_properties { + logging { + delete = true + read = true + write = true + version = "1.0" + retention_policy_days = 10 + } + } + min_tls_version = "TLS1_2" + } + + resource "azurerm_storage_account_network_rules" "test" { + resource_group_name = azurerm_resource_group.example.name + storage_account_name = azurerm_storage_account.example.name + + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + storage_account_name = azurerm_storage_account.example.name + resource_group_name = azurerm_resource_group.example.name + container_access_type = "blob" + } +`, + expected: storage.Storage{ + Accounts: []storage.Account{ + + { + Metadata: defsecTypes.NewTestMetadata(), + EnforceHTTPS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + MinimumTLSVersion: defsecTypes.String("TLS1_2", defsecTypes.NewTestMetadata()), + NetworkRules: []storage.NetworkRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ + defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + defsecTypes.String("AzureServices", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ + defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + QueueProperties: 
storage.QueueProperties{ + Metadata: defsecTypes.NewTestMetadata(), + EnableLogging: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + Containers: []storage.Container{ + { + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.String("blob", defsecTypes.NewTestMetadata()), + }, + }, + }, + { + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + }, + }, + }, + { + name: "orphans", + terraform: ` + resource "azurerm_storage_account_network_rules" "test" { + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + container_access_type = "blob" + } +`, + expected: storage.Storage{ + Accounts: []storage.Account{ + { + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnforceHTTPS: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + NetworkRules: []storage.NetworkRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + Bypass: []defsecTypes.StringValue{ + defsecTypes.String("Metrics", defsecTypes.NewTestMetadata()), + }, + AllowByDefault: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + QueueProperties: storage.QueueProperties{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnableLogging: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MinimumTLSVersion: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Containers: []storage.Container{ + { + Metadata: defsecTypes.NewTestMetadata(), + PublicAccess: defsecTypes.String("blob", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + 
modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_resource_group" "example" { + name = "example" + location = "West Europe" + } + + resource "azurerm_storage_account" "example" { + resource_group_name = azurerm_resource_group.example.name + + enable_https_traffic_only = true + min_tls_version = "TLS1_2" + + queue_properties { + logging { + delete = true + read = true + write = true + version = "1.0" + retention_policy_days = 10 + } + } + + network_rules { + default_action = "Deny" + bypass = ["Metrics", "AzureServices"] + } + } + + resource "azurerm_storage_account_network_rules" "test" { + resource_group_name = azurerm_resource_group.example.name + storage_account_name = azurerm_storage_account.example.name + + default_action = "Allow" + bypass = ["Metrics"] + } + + resource "azurerm_storage_container" "example" { + storage_account_name = azurerm_storage_account.example.name + resource_group_name = azurerm_resource_group.example.name + container_access_type = "blob" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Accounts, 2) //+orphans holder + account := adapted.Accounts[0] + + assert.Equal(t, 7, account.Metadata.Range().GetStartLine()) + assert.Equal(t, 27, account.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, account.EnforceHTTPS.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, account.EnforceHTTPS.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, account.MinimumTLSVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, account.MinimumTLSVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, account.QueueProperties.Metadata.Range().GetStartLine()) + assert.Equal(t, 21, account.QueueProperties.Metadata.Range().GetEndLine()) + + assert.Equal(t, 14, 
account.QueueProperties.EnableLogging.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, account.QueueProperties.EnableLogging.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 23, account.NetworkRules[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 26, account.NetworkRules[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 24, account.NetworkRules[0].AllowByDefault.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 24, account.NetworkRules[0].AllowByDefault.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 25, account.NetworkRules[0].Bypass[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 25, account.NetworkRules[0].Bypass[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 29, account.NetworkRules[1].Metadata.Range().GetStartLine()) + assert.Equal(t, 35, account.NetworkRules[1].Metadata.Range().GetEndLine()) + + assert.Equal(t, 33, account.NetworkRules[1].AllowByDefault.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 33, account.NetworkRules[1].AllowByDefault.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 34, account.NetworkRules[1].Bypass[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, account.NetworkRules[1].Bypass[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 37, account.Containers[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 41, account.Containers[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 40, account.Containers[0].PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 40, account.Containers[0].PublicAccess.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/azure/synapse/adapt.go b/internal/adapters/terraform/azure/synapse/adapt.go new file mode 100644 index 000000000000..6e5743dccc80 --- /dev/null +++ b/internal/adapters/terraform/azure/synapse/adapt.go @@ -0,0 +1,32 @@ +package synapse + +import ( + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + 
"github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) synapse.Synapse { + return synapse.Synapse{ + Workspaces: adaptWorkspaces(modules), + } +} + +func adaptWorkspaces(modules terraform.Modules) []synapse.Workspace { + var workspaces []synapse.Workspace + for _, module := range modules { + for _, resource := range module.GetResourcesByType("azurerm_synapse_workspace") { + workspaces = append(workspaces, adaptWorkspace(resource)) + } + } + return workspaces +} + +func adaptWorkspace(resource *terraform.Block) synapse.Workspace { + enableManagedVNAttr := resource.GetAttribute("managed_virtual_network_enabled") + enableManagedVNVal := enableManagedVNAttr.AsBoolValueOrDefault(false, resource) + + return synapse.Workspace{ + Metadata: resource.GetMetadata(), + EnableManagedVirtualNetwork: enableManagedVNVal, + } +} diff --git a/internal/adapters/terraform/azure/synapse/adapt_test.go b/internal/adapters/terraform/azure/synapse/adapt_test.go new file mode 100644 index 000000000000..8c1146733c4d --- /dev/null +++ b/internal/adapters/terraform/azure/synapse/adapt_test.go @@ -0,0 +1,83 @@ +package synapse + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/azure/synapse" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptWorkspace(t *testing.T) { + tests := []struct { + name string + terraform string + expected synapse.Workspace + }{ + { + name: "enabled", + terraform: ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = true + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "disabled", + 
terraform: ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = false + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "default", + terraform: ` + resource "azurerm_synapse_workspace" "example" { + } +`, + expected: synapse.Workspace{ + Metadata: defsecTypes.NewTestMetadata(), + EnableManagedVirtualNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptWorkspace(modules.GetBlocks()[0]) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + resource "azurerm_synapse_workspace" "example" { + managed_virtual_network_enabled = true + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Workspaces, 1) + workspace := adapted.Workspaces[0] + + assert.Equal(t, 3, workspace.EnableManagedVirtualNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, workspace.EnableManagedVirtualNetwork.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/cloudstack/adapt.go b/internal/adapters/terraform/cloudstack/adapt.go new file mode 100644 index 000000000000..6be5887cf6b5 --- /dev/null +++ b/internal/adapters/terraform/cloudstack/adapt.go @@ -0,0 +1,13 @@ +package cloudstack + +import ( + "github.com/aquasecurity/defsec/pkg/providers/cloudstack" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/cloudstack/compute" +) + +func Adapt(modules terraform.Modules) cloudstack.CloudStack { + return cloudstack.CloudStack{ + Compute: compute.Adapt(modules), + } +} diff --git 
a/internal/adapters/terraform/cloudstack/compute/adapt.go b/internal/adapters/terraform/cloudstack/compute/adapt.go new file mode 100644 index 000000000000..7104f74e2846 --- /dev/null +++ b/internal/adapters/terraform/cloudstack/compute/adapt.go @@ -0,0 +1,49 @@ +package compute + +import ( + "encoding/base64" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" +) + +func Adapt(modules terraform.Modules) compute.Compute { + return compute.Compute{ + Instances: adaptInstances(modules), + } +} + +func adaptInstances(modules terraform.Modules) []compute.Instance { + var instances []compute.Instance + for _, module := range modules { + for _, resource := range module.GetResourcesByType("cloudstack_instance") { + instances = append(instances, adaptInstance(resource)) + } + } + return instances +} + +func adaptInstance(resource *terraform.Block) compute.Instance { + userDataAttr := resource.GetAttribute("user_data") + var encoded []byte + var err error + + if userDataAttr.IsNotNil() && userDataAttr.IsString() { + encoded, err = base64.StdEncoding.DecodeString(userDataAttr.Value().AsString()) + if err != nil { + encoded = []byte(userDataAttr.Value().AsString()) + } + return compute.Instance{ + Metadata: resource.GetMetadata(), + UserData: types.String(string(encoded), userDataAttr.GetMetadata()), + } + } + + return compute.Instance{ + Metadata: resource.GetMetadata(), + UserData: types.StringDefault("", resource.GetMetadata()), + } +} diff --git a/internal/adapters/terraform/cloudstack/compute/adapt_test.go b/internal/adapters/terraform/cloudstack/compute/adapt_test.go new file mode 100644 index 000000000000..b94cd90983a9 --- /dev/null +++ b/internal/adapters/terraform/cloudstack/compute/adapt_test.go @@ -0,0 +1,91 @@ +package compute + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + 
"github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_adaptInstance(t *testing.T) { + tests := []struct { + name string + terraform string + expected compute.Instance + }{ + { + name: "sensitive user data", + terraform: ` + resource "cloudstack_instance" "web" { + name = "server-1" + user_data = < 0 { + cluster.NodeConfig = cluster.NodePools[0].NodeConfig + a.clusterMap[id] = cluster + } + } + + var clusters []gke.Cluster + for _, cluster := range a.clusterMap { + clusters = append(clusters, cluster) + } + return clusters +} + +func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Module) { + + cluster := gke.Cluster{ + Metadata: resource.GetMetadata(), + NodePools: nil, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + CIDRs: []defsecTypes.StringValue{}, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + PrivateCluster: gke.PrivateCluster{ + Metadata: resource.GetMetadata(), + EnablePrivateNodes: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + LoggingService: defsecTypes.StringDefault("logging.googleapis.com/kubernetes", resource.GetMetadata()), + MonitoringService: defsecTypes.StringDefault("monitoring.googleapis.com/kubernetes", resource.GetMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: resource.GetMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: resource.GetMetadata(), + IssueCertificate: 
defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + Username: defsecTypes.StringDefault("", resource.GetMetadata()), + Password: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: defsecTypes.StringDefault("", resource.GetMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", resource.GetMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + }, + EnableShieldedNodes: defsecTypes.BoolDefault(true, resource.GetMetadata()), + EnableLegacyABAC: defsecTypes.BoolDefault(false, resource.GetMetadata()), + ResourceLabels: defsecTypes.MapDefault(make(map[string]string), resource.GetMetadata()), + RemoveDefaultNodePool: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableAutpilot: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if allocBlock := resource.GetBlock("ip_allocation_policy"); allocBlock.IsNotNil() { + cluster.IPAllocationPolicy.Metadata = allocBlock.GetMetadata() + cluster.IPAllocationPolicy.Enabled = defsecTypes.Bool(true, allocBlock.GetMetadata()) + } + + if blocks := resource.GetBlocks("master_authorized_networks_config"); len(blocks) > 0 { + cluster.MasterAuthorizedNetworks = adaptMasterAuthNetworksAsBlocks(resource, blocks) + } + + if policyBlock := resource.GetBlock("network_policy"); policyBlock.IsNotNil() { + enabledAttr := policyBlock.GetAttribute("enabled") + cluster.NetworkPolicy.Metadata = policyBlock.GetMetadata() + cluster.NetworkPolicy.Enabled = enabledAttr.AsBoolValueOrDefault(false, policyBlock) + } + + if privBlock := resource.GetBlock("private_cluster_config"); privBlock.IsNotNil() { + privateNodesEnabledAttr := privBlock.GetAttribute("enable_private_nodes") + cluster.PrivateCluster.Metadata = 
privBlock.GetMetadata() + cluster.PrivateCluster.EnablePrivateNodes = privateNodesEnabledAttr.AsBoolValueOrDefault(false, privBlock) + } + + loggingAttr := resource.GetAttribute("logging_service") + cluster.LoggingService = loggingAttr.AsStringValueOrDefault("logging.googleapis.com/kubernetes", resource) + monitoringServiceAttr := resource.GetAttribute("monitoring_service") + cluster.MonitoringService = monitoringServiceAttr.AsStringValueOrDefault("monitoring.googleapis.com/kubernetes", resource) + + if masterBlock := resource.GetBlock("master_auth"); masterBlock.IsNotNil() { + cluster.MasterAuth = adaptMasterAuth(masterBlock) + } + + if configBlock := resource.GetBlock("node_config"); configBlock.IsNotNil() { + if configBlock.GetBlock("metadata").IsNotNil() { + cluster.NodeConfig.Metadata = configBlock.GetBlock("metadata").GetMetadata() + } + cluster.NodeConfig = adaptNodeConfig(configBlock) + } + + cluster.EnableShieldedNodes = resource.GetAttribute("enable_shielded_nodes").AsBoolValueOrDefault(true, resource) + + enableLegacyABACAttr := resource.GetAttribute("enable_legacy_abac") + cluster.EnableLegacyABAC = enableLegacyABACAttr.AsBoolValueOrDefault(false, resource) + + cluster.EnableAutpilot = resource.GetAttribute("enable_autopilot").AsBoolValueOrDefault(false, resource) + + resourceLabelsAttr := resource.GetAttribute("resource_labels") + if resourceLabelsAttr.IsNotNil() { + cluster.ResourceLabels = resourceLabelsAttr.AsMapValue() + } + + cluster.RemoveDefaultNodePool = resource.GetAttribute("remove_default_node_pool").AsBoolValueOrDefault(false, resource) + + a.clusterMap[resource.ID()] = cluster +} + +func (a *adapter) adaptNodePools() { + for _, nodePoolBlock := range a.modules.GetResourcesByType("google_container_node_pool") { + a.adaptNodePool(nodePoolBlock) + } +} + +func (a *adapter) adaptNodePool(resource *terraform.Block) { + nodeConfig := gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: defsecTypes.StringDefault("", 
resource.GetMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("", resource.GetMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", resource.GetMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + } + + management := gke.Management{ + Metadata: resource.GetMetadata(), + EnableAutoRepair: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableAutoUpgrade: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if resource.HasChild("management") { + management.Metadata = resource.GetBlock("management").GetMetadata() + + autoRepairAttr := resource.GetBlock("management").GetAttribute("auto_repair") + management.EnableAutoRepair = autoRepairAttr.AsBoolValueOrDefault(false, resource.GetBlock("management")) + + autoUpgradeAttr := resource.GetBlock("management").GetAttribute("auto_upgrade") + management.EnableAutoUpgrade = autoUpgradeAttr.AsBoolValueOrDefault(false, resource.GetBlock("management")) + } + + if resource.HasChild("node_config") { + nodeConfig = adaptNodeConfig(resource.GetBlock("node_config")) + } + + nodePool := gke.NodePool{ + Metadata: resource.GetMetadata(), + Management: management, + NodeConfig: nodeConfig, + } + + clusterAttr := resource.GetAttribute("cluster") + if referencedCluster, err := a.modules.GetReferencedBlock(clusterAttr, resource); err == nil { + if referencedCluster.TypeLabel() == "google_container_cluster" { + if cluster, ok := a.clusterMap[referencedCluster.ID()]; ok { + cluster.NodePools = append(cluster.NodePools, nodePool) + a.clusterMap[referencedCluster.ID()] = cluster + return + } + } + } + + // we didn't find a cluster to put the nodepool in, so create a placeholder + a.clusterMap[uuid.NewString()] = gke.Cluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NodePools: []gke.NodePool{nodePool}, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: 
defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + CIDRs: nil, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Enabled: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + EnablePrivateNodes: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + LoggingService: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + MonitoringService: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + IssueCertificate: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + Username: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + Password: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + ImageType: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + NodeMetadata: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + }, + ServiceAccount: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + EnableLegacyEndpoints: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + }, + EnableShieldedNodes: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + EnableLegacyABAC: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + ResourceLabels: 
defsecTypes.MapDefault(nil, defsecTypes.NewUnmanagedMetadata()), + RemoveDefaultNodePool: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + EnableAutpilot: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + } +} + +func adaptNodeConfig(resource *terraform.Block) gke.NodeConfig { + + config := gke.NodeConfig{ + Metadata: resource.GetMetadata(), + ImageType: resource.GetAttribute("image_type").AsStringValueOrDefault("", resource), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: resource.GetMetadata(), + NodeMetadata: defsecTypes.StringDefault("UNSPECIFIED", resource.GetMetadata()), + }, + ServiceAccount: resource.GetAttribute("service_account").AsStringValueOrDefault("", resource), + EnableLegacyEndpoints: defsecTypes.BoolDefault(true, resource.GetMetadata()), + } + + if metadata := resource.GetAttribute("metadata"); metadata.IsNotNil() { + legacyMetadata := metadata.MapValue("disable-legacy-endpoints") + if legacyMetadata.IsWhollyKnown() && legacyMetadata.Type() == cty.Bool { + config.EnableLegacyEndpoints = defsecTypes.Bool(legacyMetadata.False(), metadata.GetMetadata()) + } + } + + workloadBlock := resource.GetBlock("workload_metadata_config") + if workloadBlock.IsNotNil() { + config.WorkloadMetadataConfig.Metadata = workloadBlock.GetMetadata() + modeAttr := workloadBlock.GetAttribute("node_metadata") + if modeAttr.IsNil() { + modeAttr = workloadBlock.GetAttribute("mode") // try newest version + } + config.WorkloadMetadataConfig.NodeMetadata = modeAttr.AsStringValueOrDefault("UNSPECIFIED", workloadBlock) + } + + return config +} + +func adaptMasterAuth(resource *terraform.Block) gke.MasterAuth { + clientCert := gke.ClientCertificate{ + Metadata: resource.GetMetadata(), + IssueCertificate: defsecTypes.BoolDefault(false, resource.GetMetadata()), + } + + if resource.HasChild("client_certificate_config") { + clientCertAttr := resource.GetBlock("client_certificate_config").GetAttribute("issue_client_certificate") 
+ clientCert.IssueCertificate = clientCertAttr.AsBoolValueOrDefault(false, resource.GetBlock("client_certificate_config")) + clientCert.Metadata = resource.GetBlock("client_certificate_config").GetMetadata() + } + + username := resource.GetAttribute("username").AsStringValueOrDefault("", resource) + password := resource.GetAttribute("password").AsStringValueOrDefault("", resource) + + return gke.MasterAuth{ + Metadata: resource.GetMetadata(), + ClientCertificate: clientCert, + Username: username, + Password: password, + } +} + +func adaptMasterAuthNetworksAsBlocks(parent *terraform.Block, blocks terraform.Blocks) gke.MasterAuthorizedNetworks { + var cidrs []defsecTypes.StringValue + for _, block := range blocks { + for _, cidrBlock := range block.GetBlocks("cidr_blocks") { + if cidrAttr := cidrBlock.GetAttribute("cidr_block"); cidrAttr.IsNotNil() { + cidrs = append(cidrs, cidrAttr.AsStringValues()...) + } + } + } + enabled := defsecTypes.Bool(true, blocks[0].GetMetadata()) + return gke.MasterAuthorizedNetworks{ + Metadata: blocks[0].GetMetadata(), + Enabled: enabled, + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/google/gke/adapt_test.go b/internal/adapters/terraform/google/gke/adapt_test.go new file mode 100644 index 000000000000..9f25fa3f3abe --- /dev/null +++ b/internal/adapters/terraform/google/gke/adapt_test.go @@ -0,0 +1,412 @@ +package gke + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/gke" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected gke.GKE + }{ + { + name: "separately defined pool", + terraform: ` +resource "google_service_account" "default" { + account_id = 
"service-account-id" + display_name = "Service Account" +} + +resource "google_container_cluster" "example" { + name = "my-gke-cluster" + + node_config { + metadata = { + disable-legacy-endpoints = true + } + } + + pod_security_policy_config { + enabled = "true" + } + + enable_legacy_abac = "true" + enable_shielded_nodes = "true" + + remove_default_node_pool = true + initial_node_count = 1 + monitoring_service = "monitoring.googleapis.com/kubernetes" + logging_service = "logging.googleapis.com/kubernetes" + + master_auth { + client_certificate_config { + issue_client_certificate = true + } + } + + master_authorized_networks_config { + cidr_blocks { + cidr_block = "10.10.128.0/24" + display_name = "internal" + } + } + + resource_labels = { + "env" = "staging" + } + + private_cluster_config { + enable_private_nodes = true + } + + network_policy { + enabled = true + } + + ip_allocation_policy {} + + enable_autopilot = true +} + +resource "google_container_node_pool" "primary_preemptible_nodes" { + cluster = google_container_cluster.example.name + node_count = 1 + + node_config { + service_account = google_service_account.default.email + metadata = { + disable-legacy-endpoints = true + } + image_type = "COS_CONTAINERD" + workload_metadata_config { + mode = "GCE_METADATA" + } + } + management { + auto_repair = true + auto_upgrade = true + } +} +`, + expected: gke.GKE{ + Clusters: []gke.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS_CONTAINERD", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + NodePools: []gke.NodePool{ + { + 
Metadata: defsecTypes.NewTestMetadata(), + Management: gke.Management{ + Metadata: defsecTypes.NewTestMetadata(), + EnableAutoRepair: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableAutoUpgrade: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS_CONTAINERD", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("10.10.128.0/24", defsecTypes.NewTestMetadata()), + }, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePrivateNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + LoggingService: defsecTypes.String("logging.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MonitoringService: defsecTypes.String("monitoring.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewTestMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + IssueCertificate: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + 
Username: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Password: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnableShieldedNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableLegacyABAC: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + ResourceLabels: defsecTypes.Map(map[string]string{ + "env": "staging", + }, defsecTypes.NewTestMetadata()), + RemoveDefaultNodePool: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableAutpilot: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + { + name: "default node pool", + terraform: ` +resource "google_container_cluster" "example" { + node_config { + service_account = "service-account" + metadata = { + disable-legacy-endpoints = true + } + image_type = "COS" + workload_metadata_config { + mode = "GCE_METADATA" + } + } +} +`, + expected: gke.GKE{ + Clusters: []gke.Cluster{ + { + Metadata: defsecTypes.NewTestMetadata(), + NodeConfig: gke.NodeConfig{ + Metadata: defsecTypes.NewTestMetadata(), + ImageType: defsecTypes.String("COS", defsecTypes.NewTestMetadata()), + WorkloadMetadataConfig: gke.WorkloadMetadataConfig{ + Metadata: defsecTypes.NewTestMetadata(), + NodeMetadata: defsecTypes.String("GCE_METADATA", defsecTypes.NewTestMetadata()), + }, + ServiceAccount: defsecTypes.String("service-account", defsecTypes.NewTestMetadata()), + EnableLegacyEndpoints: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + + IPAllocationPolicy: gke.IPAllocationPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{}, + }, + NetworkPolicy: gke.NetworkPolicy{ + Metadata: defsecTypes.NewTestMetadata(), + Enabled: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + 
PrivateCluster: gke.PrivateCluster{ + Metadata: defsecTypes.NewTestMetadata(), + EnablePrivateNodes: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + LoggingService: defsecTypes.String("logging.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MonitoringService: defsecTypes.String("monitoring.googleapis.com/kubernetes", defsecTypes.NewTestMetadata()), + MasterAuth: gke.MasterAuth{ + Metadata: defsecTypes.NewTestMetadata(), + ClientCertificate: gke.ClientCertificate{ + Metadata: defsecTypes.NewTestMetadata(), + IssueCertificate: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + Username: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Password: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + EnableShieldedNodes: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + EnableLegacyABAC: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + ResourceLabels: defsecTypes.Map(map[string]string{}, defsecTypes.NewTestMetadata()), + RemoveDefaultNodePool: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` +resource "google_container_cluster" "example" { + + node_config { + metadata = { + disable-legacy-endpoints = true + } + } + pod_security_policy_config { + enabled = "true" + } + + enable_legacy_abac = "true" + enable_shielded_nodes = "true" + + remove_default_node_pool = true + monitoring_service = "monitoring.googleapis.com/kubernetes" + logging_service = "logging.googleapis.com/kubernetes" + + master_auth { + client_certificate_config { + issue_client_certificate = true + } + } + + master_authorized_networks_config { + cidr_blocks { + cidr_block = "10.10.128.0/24" + } + } + + resource_labels = 
{ + "env" = "staging" + } + + private_cluster_config { + enable_private_nodes = true + } + + network_policy { + enabled = true + } + ip_allocation_policy {} +} + +resource "google_container_node_pool" "primary_preemptible_nodes" { + cluster = google_container_cluster.example.name + + node_config { + metadata = { + disable-legacy-endpoints = true + } + service_account = google_service_account.default.email + image_type = "COS_CONTAINERD" + + workload_metadata_config { + mode = "GCE_METADATA" + } + } + management { + auto_repair = true + auto_upgrade = true + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Clusters, 1) + cluster := adapted.Clusters[0] + nodePool := cluster.NodePools[0] + + assert.Equal(t, 2, cluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 44, cluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 49, cluster.NodeConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 59, cluster.NodeConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 50, cluster.NodeConfig.EnableLegacyEndpoints.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 52, cluster.NodeConfig.EnableLegacyEndpoints.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, cluster.EnableLegacyABAC.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, cluster.EnableLegacyABAC.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, cluster.EnableShieldedNodes.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, cluster.EnableShieldedNodes.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, cluster.RemoveDefaultNodePool.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 16, cluster.RemoveDefaultNodePool.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, cluster.MonitoringService.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, cluster.MonitoringService.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 18, 
cluster.LoggingService.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 18, cluster.LoggingService.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, cluster.MasterAuth.Metadata.Range().GetStartLine()) + assert.Equal(t, 24, cluster.MasterAuth.Metadata.Range().GetEndLine()) + + assert.Equal(t, 21, cluster.MasterAuth.ClientCertificate.Metadata.Range().GetStartLine()) + assert.Equal(t, 23, cluster.MasterAuth.ClientCertificate.Metadata.Range().GetEndLine()) + + assert.Equal(t, 22, cluster.MasterAuth.ClientCertificate.IssueCertificate.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, cluster.MasterAuth.ClientCertificate.IssueCertificate.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, cluster.MasterAuthorizedNetworks.Metadata.Range().GetStartLine()) + assert.Equal(t, 30, cluster.MasterAuthorizedNetworks.Metadata.Range().GetEndLine()) + + assert.Equal(t, 28, cluster.MasterAuthorizedNetworks.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, cluster.MasterAuthorizedNetworks.CIDRs[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, cluster.ResourceLabels.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, cluster.ResourceLabels.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 36, cluster.PrivateCluster.Metadata.Range().GetStartLine()) + assert.Equal(t, 38, cluster.PrivateCluster.Metadata.Range().GetEndLine()) + + assert.Equal(t, 37, cluster.PrivateCluster.EnablePrivateNodes.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 37, cluster.PrivateCluster.EnablePrivateNodes.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 40, cluster.NetworkPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 42, cluster.NetworkPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 41, cluster.NetworkPolicy.Enabled.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 41, cluster.NetworkPolicy.Enabled.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 43, 
cluster.IPAllocationPolicy.Metadata.Range().GetStartLine()) + assert.Equal(t, 43, cluster.IPAllocationPolicy.Metadata.Range().GetEndLine()) + + assert.Equal(t, 46, nodePool.Metadata.Range().GetStartLine()) + assert.Equal(t, 64, nodePool.Metadata.Range().GetEndLine()) + + assert.Equal(t, 49, nodePool.NodeConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 59, nodePool.NodeConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 53, nodePool.NodeConfig.ServiceAccount.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 53, nodePool.NodeConfig.ServiceAccount.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 54, nodePool.NodeConfig.ImageType.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 54, nodePool.NodeConfig.ImageType.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 56, nodePool.NodeConfig.WorkloadMetadataConfig.Metadata.Range().GetStartLine()) + assert.Equal(t, 58, nodePool.NodeConfig.WorkloadMetadataConfig.Metadata.Range().GetEndLine()) + + assert.Equal(t, 57, nodePool.NodeConfig.WorkloadMetadataConfig.NodeMetadata.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 57, nodePool.NodeConfig.WorkloadMetadataConfig.NodeMetadata.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 60, nodePool.Management.Metadata.Range().GetStartLine()) + assert.Equal(t, 63, nodePool.Management.Metadata.Range().GetEndLine()) + + assert.Equal(t, 61, nodePool.Management.EnableAutoRepair.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 61, nodePool.Management.EnableAutoRepair.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 62, nodePool.Management.EnableAutoUpgrade.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 62, nodePool.Management.EnableAutoUpgrade.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/google/iam/adapt.go b/internal/adapters/terraform/google/iam/adapt.go new file mode 100644 index 000000000000..45d082af945b --- /dev/null +++ b/internal/adapters/terraform/google/iam/adapt.go @@ 
-0,0 +1,108 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/google/uuid" +) + +func Adapt(modules terraform.Modules) iam.IAM { + return (&adapter{ + orgs: make(map[string]iam.Organization), + modules: modules, + }).Adapt() +} + +type adapter struct { + modules terraform.Modules + orgs map[string]iam.Organization + folders []parentedFolder + projects []parentedProject + workloadIdentityPoolProviders []iam.WorkloadIdentityPoolProvider +} + +func (a *adapter) Adapt() iam.IAM { + a.adaptOrganizationIAM() + a.adaptFolders() + a.adaptFolderIAM() + a.adaptProjects() + a.adaptProjectIAM() + a.adaptWorkloadIdentityPoolProviders() + return a.merge() +} + +func (a *adapter) addOrg(blockID string) { + if _, ok := a.orgs[blockID]; !ok { + a.orgs[blockID] = iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + } + } +} + +func (a *adapter) merge() iam.IAM { + + // add projects to folders, orgs +PROJECT: + for _, project := range a.projects { + for i, folder := range a.folders { + if project.folderBlockID != "" && project.folderBlockID == folder.blockID { + folder.folder.Projects = append(folder.folder.Projects, project.project) + a.folders[i] = folder + continue PROJECT + } + } + if project.orgBlockID != "" { + if org, ok := a.orgs[project.orgBlockID]; ok { + org.Projects = append(org.Projects, project.project) + a.orgs[project.orgBlockID] = org + continue PROJECT + } + } + + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Projects: []iam.Project{project.project}, + } + a.orgs[uuid.NewString()] = org + } + + // add folders to folders, orgs +FOLDER_NESTED: + for _, folder := range a.folders { + for i, existing := range a.folders { + if folder.parentBlockID != "" && folder.parentBlockID == existing.blockID { + existing.folder.Folders = append(existing.folder.Folders, folder.folder) + 
a.folders[i] = existing + continue FOLDER_NESTED + } + + } + } +FOLDER_ORG: + for _, folder := range a.folders { + if folder.parentBlockID != "" { + if org, ok := a.orgs[folder.parentBlockID]; ok { + org.Folders = append(org.Folders, folder.folder) + a.orgs[folder.parentBlockID] = org + continue FOLDER_ORG + } + } else { + // add to placeholder? + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Folders: []iam.Folder{folder.folder}, + } + a.orgs[uuid.NewString()] = org + } + } + + output := iam.IAM{ + Organizations: nil, + WorkloadIdentityPoolProviders: a.workloadIdentityPoolProviders, + } + for _, org := range a.orgs { + output.Organizations = append(output.Organizations, org) + } + return output +} diff --git a/internal/adapters/terraform/google/iam/adapt_test.go b/internal/adapters/terraform/google/iam/adapt_test.go new file mode 100644 index 000000000000..e49ba44a466c --- /dev/null +++ b/internal/adapters/terraform/google/iam/adapt_test.go @@ -0,0 +1,266 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected iam.IAM + }{ + { + name: "basic", + terraform: ` + data "google_organization" "org" { + domain = "example.com" + } + + resource "google_project" "my_project" { + name = "My Project" + project_id = "your-project-id" + org_id = data.google_organization.org.id + auto_create_network = true + } + + resource "google_folder" "department1" { + display_name = "Department 1" + parent = data.google_organization.org.id + } + + resource "google_folder_iam_member" "admin" { + folder = google_folder.department1.name + role = 
"roles/editor" + member = "user:alice@gmail.com" + } + + resource "google_folder_iam_binding" "folder-123" { + folder = google_folder.department1.name + role = "roles/nothing" + members = [ + "user:not-alice@gmail.com", + ] + } + + resource "google_organization_iam_member" "org-123" { + org_id = data.google_organization.org.id + role = "roles/whatever" + member = "user:member@gmail.com" + } + + resource "google_organization_iam_binding" "binding" { + org_id = data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:member_2@gmail.com", + ] + } + + resource "google_iam_workload_identity_pool_provider" "example" { + workload_identity_pool_id = "example-pool" + workload_identity_pool_provider_id = "example-provider" + attribute_condition = "assertion.repository_owner=='your-github-organization'" + } +`, + expected: iam.IAM{ + Organizations: []iam.Organization{ + { + Metadata: defsecTypes.NewTestMetadata(), + + Projects: []iam.Project{ + { + Metadata: defsecTypes.NewTestMetadata(), + AutoCreateNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + + Folders: []iam.Folder{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: defsecTypes.String("user:alice@gmail.com", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("roles/editor", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("user:not-alice@gmail.com", defsecTypes.NewTestMetadata()), + }, + Role: defsecTypes.String("roles/nothing", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + Members: []iam.Member{ + { + Metadata: defsecTypes.NewTestMetadata(), + Member: 
defsecTypes.String("user:member@gmail.com", defsecTypes.NewTestMetadata()), + Role: defsecTypes.String("roles/whatever", defsecTypes.NewTestMetadata()), + DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Bindings: []iam.Binding{ + { + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("user:member_2@gmail.com", defsecTypes.NewTestMetadata())}, + Role: defsecTypes.String("roles/browser", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + WorkloadIdentityPoolProviders: []iam.WorkloadIdentityPoolProvider{ + { + Metadata: defsecTypes.NewTestMetadata(), + + WorkloadIdentityPoolId: defsecTypes.String("example-pool", defsecTypes.NewTestMetadata()), + WorkloadIdentityPoolProviderId: defsecTypes.String("example-provider", defsecTypes.NewTestMetadata()), + AttributeCondition: defsecTypes.String("assertion.repository_owner=='your-github-organization'", defsecTypes.NewTestMetadata()), + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := Adapt(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} + +func TestLines(t *testing.T) { + src := ` + data "google_organization" "org" { + domain = "example.com" + } + + resource "google_project" "my_project" { + name = "My Project" + project_id = "your-project-id" + org_id = data.google_organization.org.id + auto_create_network = true + } + + resource "google_folder" "department1" { + display_name = "Department 1" + parent = data.google_organization.org.id + } + + resource "google_folder_iam_binding" "folder-123" { + folder = google_folder.department1.name + role = "roles/nothing" + members = [ + "user:not-alice@gmail.com", + ] + } + + resource "google_folder_iam_member" "admin" { + folder = 
google_folder.department1.name + role = "roles/editor" + member = "user:alice@gmail.com" + } + + resource "google_organization_iam_member" "org-123" { + org_id = data.google_organization.org.id + role = "roles/whatever" + member = "user:member@gmail.com" + } + + resource "google_organization_iam_binding" "binding" { + org_id = data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:member_2@gmail.com", + ] + } + + resource "google_iam_workload_identity_pool_provider" "example" { + workload_identity_pool_id = "example-pool" + workload_identity_pool_provider_id = "example-provider" + attribute_condition = "assertion.repository_owner=='your-github-organization'" + }` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Organizations, 1) + require.Len(t, adapted.Organizations[0].Projects, 1) + require.Len(t, adapted.Organizations[0].Folders, 1) + require.Len(t, adapted.Organizations[0].Bindings, 1) + require.Len(t, adapted.Organizations[0].Members, 1) + require.Len(t, adapted.WorkloadIdentityPoolProviders, 1) + + project := adapted.Organizations[0].Projects[0] + folder := adapted.Organizations[0].Folders[0] + binding := adapted.Organizations[0].Bindings[0] + member := adapted.Organizations[0].Members[0] + pool := adapted.WorkloadIdentityPoolProviders[0] + + assert.Equal(t, 6, project.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, project.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, project.AutoCreateNetwork.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, project.AutoCreateNetwork.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, folder.Metadata.Range().GetStartLine()) + assert.Equal(t, 16, folder.Metadata.Range().GetEndLine()) + + assert.Equal(t, 18, folder.Bindings[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 24, folder.Bindings[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 20, 
folder.Bindings[0].Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, folder.Bindings[0].Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, folder.Bindings[0].Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 23, folder.Bindings[0].Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 26, folder.Members[0].Metadata.Range().GetStartLine()) + assert.Equal(t, 30, folder.Members[0].Metadata.Range().GetEndLine()) + + assert.Equal(t, 29, folder.Members[0].Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 29, folder.Members[0].Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 28, folder.Members[0].Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 28, folder.Members[0].Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 32, member.Metadata.Range().GetStartLine()) + assert.Equal(t, 36, member.Metadata.Range().GetEndLine()) + + assert.Equal(t, 34, member.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 34, member.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 35, member.Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 35, member.Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 38, binding.Metadata.Range().GetStartLine()) + assert.Equal(t, 45, binding.Metadata.Range().GetEndLine()) + + assert.Equal(t, 40, binding.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 40, binding.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 42, binding.Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 44, binding.Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 51, pool.Metadata.Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/google/iam/convert.go b/internal/adapters/terraform/google/iam/convert.go new file mode 100644 index 000000000000..f364f02a8f9c --- /dev/null +++ b/internal/adapters/terraform/google/iam/convert.go @@ -0,0 +1,26 @@ +package iam + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func ParsePolicyBlock(block *terraform.Block) []iam.Binding { + var bindings []iam.Binding + for _, bindingBlock := range block.GetBlocks("binding") { + binding := iam.Binding{ + Metadata: bindingBlock.GetMetadata(), + Members: nil, + Role: bindingBlock.GetAttribute("role").AsStringValueOrDefault("", bindingBlock), + IncludesDefaultServiceAccount: defsecTypes.BoolDefault(false, bindingBlock.GetMetadata()), + } + membersAttr := bindingBlock.GetAttribute("members") + members := membersAttr.AsStringValues().AsStrings() + for _, member := range members { + binding.Members = append(binding.Members, defsecTypes.String(member, membersAttr.GetMetadata())) + } + bindings = append(bindings, binding) + } + return bindings +} diff --git a/internal/adapters/terraform/google/iam/folder_iam.go b/internal/adapters/terraform/google/iam/folder_iam.go new file mode 100644 index 000000000000..51b09f185ba8 --- /dev/null +++ b/internal/adapters/terraform/google/iam/folder_iam.go @@ -0,0 +1,117 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/types" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_folder_iam + +func (a *adapter) adaptFolderIAM() { + a.adaptFolderMembers() + a.adaptFolderBindings() +} + +func (a *adapter) adaptFolderMembers() { + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_member") { + member := a.adaptMember(iamBlock) + folderAttr := iamBlock.GetAttribute("folder") + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Members = 
append(folder.folder.Members, member) + a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + } + } + + // we didn't find the folder - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Members: []iam.Member{member}, + }, + }) + } +} + +func (a *adapter) adaptFolderBindings() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + folderAttr := iamBlock.GetAttribute("folder") + + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Bindings = append(folder.folder.Bindings, bindings...) 
+ a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: bindings, + }, + }) + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_binding") { + binding := a.adaptBinding(iamBlock) + folderAttr := iamBlock.GetAttribute("folder") + if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_folder" { + var foundFolder bool + for i, folder := range a.folders { + if folder.blockID == refBlock.ID() { + folder.folder.Bindings = append(folder.folder.Bindings, binding) + a.folders[i] = folder + foundFolder = true + break + } + } + if foundFolder { + continue + } + + } + } + + // we didn't find the folder - add an unmanaged one + a.folders = append(a.folders, parentedFolder{ + folder: iam.Folder{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: []iam.Binding{binding}, + }, + }) + } +} diff --git a/internal/adapters/terraform/google/iam/folders.go b/internal/adapters/terraform/google/iam/folders.go new file mode 100644 index 000000000000..6e8de9641c6f --- /dev/null +++ b/internal/adapters/terraform/google/iam/folders.go @@ -0,0 +1,40 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +type parentedFolder struct { + blockID string + parentBlockID string + parentRef string + folder iam.Folder +} + +func (a *adapter) adaptFolders() { + for _, folderBlock := range a.modules.GetResourcesByType("google_folder") { + var folder parentedFolder + parentAttr := folderBlock.GetAttribute("parent") + if parentAttr.IsNil() { + continue + } + + folder.folder.Metadata = folderBlock.GetMetadata() + folder.blockID = folderBlock.ID() + if parentAttr.IsString() { + folder.parentRef = parentAttr.Value().AsString() + } + + if 
referencedBlock, err := a.modules.GetReferencedBlock(parentAttr, folderBlock); err == nil { + if referencedBlock.TypeLabel() == "google_folder" { + folder.parentBlockID = referencedBlock.ID() + } + if referencedBlock.TypeLabel() == "google_organization" { + folder.parentBlockID = referencedBlock.ID() + a.addOrg(folder.parentBlockID) + } + } + + a.folders = append(a.folders, folder) + } +} diff --git a/internal/adapters/terraform/google/iam/org_iam.go b/internal/adapters/terraform/google/iam/org_iam.go new file mode 100644 index 000000000000..bf56dabd3866 --- /dev/null +++ b/internal/adapters/terraform/google/iam/org_iam.go @@ -0,0 +1,113 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/google/uuid" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_organization_iam + +func (a *adapter) adaptOrganizationIAM() { + a.adaptOrganizationMembers() + a.adaptOrganizationBindings() +} + +func (a *adapter) adaptOrganizationMembers() { + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_member") { + member := a.adaptMember(iamBlock) + organizationAttr := iamBlock.GetAttribute("organization") + if organizationAttr.IsNil() { + organizationAttr = iamBlock.GetAttribute("org_id") + } + + if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + a.addOrg(refBlock.ID()) + org, ok := a.orgs[refBlock.ID()] + if !ok { + org = iam.Organization{ + Metadata: refBlock.GetMetadata(), + Folders: nil, + Projects: nil, + Members: []iam.Member{member}, + Bindings: nil, + } + } + org.Members = append(org.Members, member) + a.orgs[refBlock.ID()] = org + continue + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: 
types.NewUnmanagedMetadata(), + Members: []iam.Member{member}, + } + a.orgs[placeholderID] = org + + } +} + +func (a *adapter) adaptOrganizationBindings() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + orgAttr := iamBlock.GetAttribute("organization") + + if refBlock, err := a.modules.GetReferencedBlock(orgAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + if org, ok := a.orgs[refBlock.ID()]; ok { + org.Bindings = append(org.Bindings, bindings...) + a.orgs[refBlock.ID()] = org + continue + } + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: bindings, + } + a.orgs[placeholderID] = org + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_organization_iam_binding") { + binding := a.adaptBinding(iamBlock) + organizationAttr := iamBlock.GetAttribute("organization") + if organizationAttr.IsNil() { + organizationAttr = iamBlock.GetAttribute("org_id") + } + + if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_organization" { + a.addOrg(refBlock.ID()) + org := a.orgs[refBlock.ID()] + org.Bindings = append(org.Bindings, binding) + a.orgs[refBlock.ID()] = org + continue + } + } + + // we didn't find the organization - add an unmanaged one + placeholderID := uuid.NewString() + org := iam.Organization{ + Metadata: types.NewUnmanagedMetadata(), + Bindings: []iam.Binding{binding}, + } + a.orgs[placeholderID] = org + } +} diff --git a/internal/adapters/terraform/google/iam/project_iam.go 
b/internal/adapters/terraform/google/iam/project_iam.go new file mode 100644 index 000000000000..bac596af7569 --- /dev/null +++ b/internal/adapters/terraform/google/iam/project_iam.go @@ -0,0 +1,287 @@ +package iam + +import ( + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +// see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_project_iam + +func (a *adapter) adaptProjectIAM() { + a.adaptProjectMembers() + a.adaptProjectBindings() +} + +func (a *adapter) adaptMember(iamBlock *terraform.Block) iam.Member { + return AdaptMember(iamBlock, a.modules) +} + +func AdaptMember(iamBlock *terraform.Block, modules terraform.Modules) iam.Member { + member := iam.Member{ + Metadata: iamBlock.GetMetadata(), + Member: iamBlock.GetAttribute("member").AsStringValueOrDefault("", iamBlock), + Role: iamBlock.GetAttribute("role").AsStringValueOrDefault("", iamBlock), + DefaultServiceAccount: defsecTypes.BoolDefault(false, iamBlock.GetMetadata()), + } + + memberAttr := iamBlock.GetAttribute("member") + if referencedBlock, err := modules.GetReferencedBlock(memberAttr, iamBlock); err == nil { + if strings.HasSuffix(referencedBlock.TypeLabel(), "_default_service_account") { + member.DefaultServiceAccount = defsecTypes.Bool(true, memberAttr.GetMetadata()) + } + } + + return member +} + +var projectMemberResources = []string{ + "google_project_iam_member", + "google_cloud_run_service_iam_member", + "google_compute_instance_iam_member", + "google_compute_subnetwork_iam_member", + "google_data_catalog_entry_group_iam_member", + "google_folder_iam_member", + "google_pubsub_subscription_iam_member", + "google_pubsub_topic_iam_member", + "google_sourcerepo_repository_iam_member", + "google_spanner_database_iam_member", + "google_spanner_instance_iam_member", + "google_storage_bucket_iam_member", +} + +func (a 
*adapter) adaptProjectMembers() { + + for _, memberType := range projectMemberResources { + for _, iamBlock := range a.modules.GetResourcesByType(memberType) { + member := a.adaptMember(iamBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Members = append(project.project.Members, member) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if project.blockID == refBlock.ID() { + project.project.Members = append(project.project.Members, member) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + // unless it already belongs to an existing folder + var foundFolder bool + if refBlock, err := a.modules.GetReferencedBlock(iamBlock.GetAttribute("folder"), iamBlock); err == nil { + for _, folder := range a.folders { + if folder.blockID == refBlock.ID() { + foundFolder = true + } + } + } + if foundFolder { + continue + } + + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: []iam.Member{member}, + Bindings: nil, + }, + }) + } + } +} + +func (a *adapter) adaptBinding(iamBlock *terraform.Block) iam.Binding { + return AdaptBinding(iamBlock, a.modules) +} + +func AdaptBinding(iamBlock *terraform.Block, modules terraform.Modules) iam.Binding { + binding := iam.Binding{ + Metadata: iamBlock.GetMetadata(), + Members: nil, + Role: 
iamBlock.GetAttribute("role").AsStringValueOrDefault("", iamBlock), + IncludesDefaultServiceAccount: defsecTypes.BoolDefault(false, iamBlock.GetMetadata()), + } + membersAttr := iamBlock.GetAttribute("members") + members := membersAttr.AsStringValues().AsStrings() + for _, member := range members { + binding.Members = append(binding.Members, defsecTypes.String(member, membersAttr.GetMetadata())) + } + if referencedBlock, err := modules.GetReferencedBlock(membersAttr, iamBlock); err == nil { + if strings.HasSuffix(referencedBlock.TypeLabel(), "_default_service_account") { + binding.IncludesDefaultServiceAccount = defsecTypes.Bool(true, membersAttr.GetMetadata()) + } + } + return binding +} + +var projectBindingResources = []string{ + "google_project_iam_binding", + "google_cloud_run_service_iam_binding", + "google_compute_instance_iam_binding", + "google_compute_subnetwork_iam_binding", + "google_data_catalog_entry_group_iam_binding", + "google_folder_iam_binding", + "google_pubsub_subscription_iam_binding", + "google_pubsub_topic_iam_binding", + "google_sourcerepo_repository_iam_binding", + "google_spanner_database_iam_binding", + "google_spanner_instance_iam_binding", + "google_storage_bucket_iam_binding", +} + +func (a *adapter) adaptProjectDataBindings() { + for _, iamBlock := range a.modules.GetResourcesByType("google_project_iam_policy") { + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + bindings := ParsePolicyBlock(policyBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Bindings = append(project.project.Bindings, bindings...) 
+ a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if project.blockID == refBlock.ID() { + project.project.Bindings = append(project.project.Bindings, bindings...) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: nil, + Bindings: bindings, + }, + }) + } + +} + +func (a *adapter) adaptProjectBindings() { + + a.adaptProjectDataBindings() + + for _, bindingType := range projectBindingResources { + for _, iamBlock := range a.modules.GetResourcesByType(bindingType) { + binding := a.adaptBinding(iamBlock) + projectAttr := iamBlock.GetAttribute("project") + if projectAttr.IsString() { + var foundProject bool + projectID := projectAttr.Value().AsString() + for i, project := range a.projects { + if project.id == projectID { + project.project.Bindings = append(project.project.Bindings, binding) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + } + + if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_project" { + var foundProject bool + for i, project := range a.projects { + if project.blockID == refBlock.ID() { + project.project.Bindings = append(project.project.Bindings, binding) + a.projects[i] = project + foundProject = true + break + } + } + if foundProject { + continue + } + + } + } + + // we didn't find the project - add an unmanaged one + // unless it already 
belongs to an existing folder + var foundFolder bool + if refBlock, err := a.modules.GetReferencedBlock(iamBlock.GetAttribute("folder"), iamBlock); err == nil { + for _, folder := range a.folders { + if folder.blockID == refBlock.ID() { + foundFolder = true + } + } + } + if foundFolder { + continue + } + a.projects = append(a.projects, parentedProject{ + project: iam.Project{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + AutoCreateNetwork: defsecTypes.BoolDefault(false, defsecTypes.NewUnmanagedMetadata()), + Members: nil, + Bindings: []iam.Binding{binding}, + }, + }) + } + } +} diff --git a/internal/adapters/terraform/google/iam/project_iam_test.go b/internal/adapters/terraform/google/iam/project_iam_test.go new file mode 100644 index 000000000000..3d3270ca4637 --- /dev/null +++ b/internal/adapters/terraform/google/iam/project_iam_test.go @@ -0,0 +1,59 @@ +package iam + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_AdaptBinding(t *testing.T) { + tests := []struct { + name string + terraform string + expected iam.Binding + }{ + { + name: "defined", + terraform: ` + resource "google_organization_iam_binding" "binding" { + org_id = data.google_organization.org.id + role = "roles/browser" + + members = [ + "user:alice@gmail.com", + ] + }`, + expected: iam.Binding{ + Metadata: defsecTypes.NewTestMetadata(), + Members: []defsecTypes.StringValue{ + defsecTypes.String("user:alice@gmail.com", defsecTypes.NewTestMetadata())}, + Role: defsecTypes.String("roles/browser", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + { + name: "defaults", + terraform: ` + resource "google_organization_iam_binding" "binding" { + }`, + expected: iam.Binding{ + 
Metadata: defsecTypes.NewTestMetadata(), + Role: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := AdaptBinding(modules.GetBlocks()[0], modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/google/iam/projects.go b/internal/adapters/terraform/google/iam/projects.go new file mode 100644 index 000000000000..e064dc8d0bd8 --- /dev/null +++ b/internal/adapters/terraform/google/iam/projects.go @@ -0,0 +1,58 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +type parentedProject struct { + blockID string + orgBlockID string + folderBlockID string + id string + orgID string + folderID string + project iam.Project +} + +func (a *adapter) adaptProjects() { + for _, projectBlock := range a.modules.GetResourcesByType("google_project") { + var project parentedProject + project.project.Metadata = projectBlock.GetMetadata() + idAttr := projectBlock.GetAttribute("project_id") + if !idAttr.IsString() { + continue + } + project.id = idAttr.Value().AsString() + + project.blockID = projectBlock.ID() + + orgAttr := projectBlock.GetAttribute("org_id") + if orgAttr.IsString() { + project.orgID = orgAttr.Value().AsString() + } + folderAttr := projectBlock.GetAttribute("folder_id") + if folderAttr.IsString() { + project.folderID = folderAttr.Value().AsString() + } + + autoCreateNetworkAttr := projectBlock.GetAttribute("auto_create_network") + project.project.AutoCreateNetwork = autoCreateNetworkAttr.AsBoolValueOrDefault(true, projectBlock) + + if orgAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(orgAttr, projectBlock); err == nil { + if referencedBlock.TypeLabel() == "google_organization" { 
+ project.orgBlockID = referencedBlock.ID() + a.addOrg(project.orgBlockID) + } + } + } + if folderAttr.IsNotNil() { + if referencedBlock, err := a.modules.GetReferencedBlock(folderAttr, projectBlock); err == nil { + if referencedBlock.TypeLabel() == "google_folder" { + project.folderBlockID = referencedBlock.ID() + } + } + } + a.projects = append(a.projects, project) + } +} diff --git a/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go b/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go new file mode 100644 index 000000000000..70d68511ad70 --- /dev/null +++ b/internal/adapters/terraform/google/iam/workload_identity_pool_providers.go @@ -0,0 +1,18 @@ +package iam + +import ( + "github.com/aquasecurity/defsec/pkg/providers/google/iam" +) + +// See https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/iam_workload_identity_pool_provider + +func (a *adapter) adaptWorkloadIdentityPoolProviders() { + for _, resource := range a.modules.GetResourcesByType("google_iam_workload_identity_pool_provider") { + a.workloadIdentityPoolProviders = append(a.workloadIdentityPoolProviders, iam.WorkloadIdentityPoolProvider{ + Metadata: resource.GetMetadata(), + WorkloadIdentityPoolId: resource.GetAttribute("workload_identity_pool_id").AsStringValueOrDefault("", resource), + WorkloadIdentityPoolProviderId: resource.GetAttribute("workload_identity_pool_provider_id").AsStringValueOrDefault("", resource), + AttributeCondition: resource.GetAttribute("attribute_condition").AsStringValueOrDefault("", resource), + }) + } +} diff --git a/internal/adapters/terraform/google/kms/adapt.go b/internal/adapters/terraform/google/kms/adapt.go new file mode 100644 index 000000000000..1b76b7d8b501 --- /dev/null +++ b/internal/adapters/terraform/google/kms/adapt.go @@ -0,0 +1,60 @@ +package kms + +import ( + "strconv" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/terraform" + + 
"github.com/aquasecurity/defsec/pkg/providers/google/kms" +) + +func Adapt(modules terraform.Modules) kms.KMS { + return kms.KMS{ + KeyRings: adaptKeyRings(modules), + } +} + +func adaptKeyRings(modules terraform.Modules) []kms.KeyRing { + var keyRings []kms.KeyRing + for _, module := range modules { + for _, resource := range module.GetResourcesByType("google_kms_key_ring") { + var keys []kms.Key + + keyBlocks := module.GetReferencingResources(resource, "google_kms_crypto_key", "key_ring") + for _, keyBlock := range keyBlocks { + keys = append(keys, adaptKey(keyBlock)) + } + keyRings = append(keyRings, kms.KeyRing{ + Metadata: resource.GetMetadata(), + Keys: keys, + }) + } + } + return keyRings +} + +func adaptKey(resource *terraform.Block) kms.Key { + + key := kms.Key{ + Metadata: resource.GetMetadata(), + RotationPeriodSeconds: types.IntDefault(-1, resource.GetMetadata()), + } + + rotationPeriodAttr := resource.GetAttribute("rotation_period") + if !rotationPeriodAttr.IsString() { + return key + } + rotationStr := rotationPeriodAttr.Value().AsString() + if rotationStr[len(rotationStr)-1:] != "s" { + return key + } + seconds, err := strconv.Atoi(rotationStr[:len(rotationStr)-1]) + if err != nil { + return key + } + + key.RotationPeriodSeconds = types.Int(seconds, rotationPeriodAttr.GetMetadata()) + return key +} diff --git a/internal/adapters/terraform/google/kms/adapt_test.go b/internal/adapters/terraform/google/kms/adapt_test.go new file mode 100644 index 000000000000..09043e00c049 --- /dev/null +++ b/internal/adapters/terraform/google/kms/adapt_test.go @@ -0,0 +1,126 @@ +package kms + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/kms" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func 
// Test_adaptKeyRings exercises key-ring adaptation: a ring with a rotated
// key, a ring without keys, and a key that omits rotation_period (which must
// keep the -1 sentinel default).
func Test_adaptKeyRings(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  []kms.KeyRing
	}{
		{
			name: "configured",
			terraform: `
			resource "google_kms_key_ring" "keyring" {
				name = "keyring-example"
			}

			resource "google_kms_crypto_key" "example-key" {
				name            = "crypto-key-example"
				key_ring        = google_kms_key_ring.keyring.id
				rotation_period = "7776000s"
			}
`,
			expected: []kms.KeyRing{
				{
					Metadata: defsecTypes.NewTestMetadata(),
					Keys: []kms.Key{
						{
							Metadata:              defsecTypes.NewTestMetadata(),
							RotationPeriodSeconds: defsecTypes.Int(7776000, defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
		{
			name: "no keys",
			terraform: `
			resource "google_kms_key_ring" "keyring" {
				name = "keyring-example"
			}

`,
			expected: []kms.KeyRing{
				{
					Metadata: defsecTypes.NewTestMetadata(),
				},
			},
		},
		{
			name: "default rotation period",
			terraform: `
			resource "google_kms_key_ring" "keyring" {
				name = "keyring-example"
			}

			resource "google_kms_crypto_key" "example-key" {
				name     = "crypto-key-example"
				key_ring = google_kms_key_ring.keyring.id
			}
`,
			expected: []kms.KeyRing{
				{
					Metadata: defsecTypes.NewTestMetadata(),
					Keys: []kms.Key{
						{
							Metadata:              defsecTypes.NewTestMetadata(),
							RotationPeriodSeconds: defsecTypes.Int(-1, defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptKeyRings(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestLines pins the source ranges recorded in the adapted metadata; the
// asserted numbers are line numbers within the src literal below.
func TestLines(t *testing.T) {
	src := `
	resource "google_kms_key_ring" "keyring" {
		name = "keyring-example"
	}

	resource "google_kms_crypto_key" "example-key" {
		name            = "crypto-key-example"
		key_ring        = google_kms_key_ring.keyring.id
		rotation_period = "7776000s"
	}`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.KeyRings, 1)
	require.Len(t, adapted.KeyRings[0].Keys, 1)

	key := adapted.KeyRings[0].Keys[0]

	assert.Equal(t, 2, adapted.KeyRings[0].Metadata.Range().GetStartLine())
	assert.Equal(t, 4, adapted.KeyRings[0].Metadata.Range().GetEndLine())

	assert.Equal(t, 6, key.Metadata.Range().GetStartLine())
	assert.Equal(t, 10, key.Metadata.Range().GetEndLine())

	assert.Equal(t, 9, key.RotationPeriodSeconds.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 9, key.RotationPeriodSeconds.GetMetadata().Range().GetEndLine())

}
defsecTypes.BoolDefault(false, resource.GetMetadata()), + ContainedDatabaseAuthentication: defsecTypes.BoolDefault(true, resource.GetMetadata()), + CrossDBOwnershipChaining: defsecTypes.BoolDefault(true, resource.GetMetadata()), + LogCheckpoints: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogConnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogDisconnections: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogLockWaits: defsecTypes.BoolDefault(false, resource.GetMetadata()), + LogMinMessages: defsecTypes.StringDefault("", resource.GetMetadata()), + LogMinDurationStatement: defsecTypes.IntDefault(-1, resource.GetMetadata()), + }, + Backups: sql.Backups{ + Metadata: resource.GetMetadata(), + Enabled: defsecTypes.BoolDefault(false, resource.GetMetadata()), + }, + IPConfiguration: sql.IPConfiguration{ + Metadata: resource.GetMetadata(), + RequireTLS: defsecTypes.BoolDefault(false, resource.GetMetadata()), + EnableIPv4: defsecTypes.BoolDefault(true, resource.GetMetadata()), + AuthorizedNetworks: nil, + }, + }, + } + + if attr := resource.GetAttribute("master_instance_name"); attr.IsNotNil() { + instance.IsReplica = defsecTypes.Bool(true, attr.GetMetadata()) + } + + if settingsBlock := resource.GetBlock("settings"); settingsBlock.IsNotNil() { + instance.Settings.Metadata = settingsBlock.GetMetadata() + if blocks := settingsBlock.GetBlocks("database_flags"); len(blocks) > 0 { + adaptFlags(blocks, &instance.Settings.Flags) + } + if backupBlock := settingsBlock.GetBlock("backup_configuration"); backupBlock.IsNotNil() { + instance.Settings.Backups.Metadata = backupBlock.GetMetadata() + backupConfigEnabledAttr := backupBlock.GetAttribute("enabled") + instance.Settings.Backups.Enabled = backupConfigEnabledAttr.AsBoolValueOrDefault(false, backupBlock) + } + if settingsBlock.HasChild("ip_configuration") { + instance.Settings.IPConfiguration = adaptIPConfig(settingsBlock.GetBlock("ip_configuration")) + } + } + return instance +} + +// 
nolint +func adaptFlags(resources terraform.Blocks, flags *sql.Flags) { + for _, resource := range resources { + + nameAttr := resource.GetAttribute("name") + valueAttr := resource.GetAttribute("value") + + if !nameAttr.IsString() || valueAttr.IsNil() { + continue + } + + switch nameAttr.Value().AsString() { + case "log_temp_files": + if logTempInt, err := strconv.Atoi(valueAttr.Value().AsString()); err == nil { + flags.LogTempFileSize = defsecTypes.Int(logTempInt, nameAttr.GetMetadata()) + } + case "log_min_messages": + flags.LogMinMessages = valueAttr.AsStringValueOrDefault("", resource) + case "log_min_duration_statement": + if logMinDS, err := strconv.Atoi(valueAttr.Value().AsString()); err == nil { + flags.LogMinDurationStatement = defsecTypes.Int(logMinDS, nameAttr.GetMetadata()) + } + case "local_infile": + flags.LocalInFile = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_checkpoints": + flags.LogCheckpoints = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_connections": + flags.LogConnections = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_disconnections": + flags.LogDisconnections = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "log_lock_waits": + flags.LogLockWaits = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "contained database authentication": + flags.ContainedDatabaseAuthentication = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + case "cross db ownership chaining": + flags.CrossDBOwnershipChaining = defsecTypes.Bool(valueAttr.Equals("on"), valueAttr.GetMetadata()) + } + } +} + +func adaptIPConfig(resource *terraform.Block) sql.IPConfiguration { + var authorizedNetworks []struct { + Name defsecTypes.StringValue + CIDR defsecTypes.StringValue + } + + tlsRequiredAttr := resource.GetAttribute("require_ssl") + tlsRequiredVal := tlsRequiredAttr.AsBoolValueOrDefault(false, resource) + + 
ipv4enabledAttr := resource.GetAttribute("ipv4_enabled") + ipv4enabledVal := ipv4enabledAttr.AsBoolValueOrDefault(true, resource) + + authNetworksBlocks := resource.GetBlocks("authorized_networks") + for _, authBlock := range authNetworksBlocks { + nameVal := authBlock.GetAttribute("name").AsStringValueOrDefault("", authBlock) + cidrVal := authBlock.GetAttribute("value").AsStringValueOrDefault("", authBlock) + + authorizedNetworks = append(authorizedNetworks, struct { + Name defsecTypes.StringValue + CIDR defsecTypes.StringValue + }{ + Name: nameVal, + CIDR: cidrVal, + }) + } + + return sql.IPConfiguration{ + Metadata: resource.GetMetadata(), + RequireTLS: tlsRequiredVal, + EnableIPv4: ipv4enabledVal, + AuthorizedNetworks: authorizedNetworks, + } +} diff --git a/internal/adapters/terraform/google/sql/adapt_test.go b/internal/adapters/terraform/google/sql/adapt_test.go new file mode 100644 index 000000000000..b6a7060f2830 --- /dev/null +++ b/internal/adapters/terraform/google/sql/adapt_test.go @@ -0,0 +1,278 @@ +package sql + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/sql" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected sql.SQL + }{ + { + name: "default flags", + terraform: ` + resource "google_sql_database_instance" "db" { + database_version = "POSTGRES_12" + settings { + backup_configuration { + enabled = true + } + ip_configuration { + ipv4_enabled = false + authorized_networks { + value = "108.12.12.0/24" + name = "internal" + } + require_ssl = true + } + } + } +`, + expected: sql.SQL{ + Instances: []sql.DatabaseInstance{ + { + Metadata: defsecTypes.NewTestMetadata(), + IsReplica: 
// Test_Adapt verifies full adaptation of a google_sql_database_instance,
// asserting that every unset flag keeps its documented default value.
func Test_Adapt(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  sql.SQL
	}{
		{
			name: "default flags",
			terraform: `
			resource "google_sql_database_instance" "db" {
				database_version = "POSTGRES_12"
				settings {
					backup_configuration {
						enabled = true
					}
					ip_configuration {
						ipv4_enabled = false
						authorized_networks {
							value           = "108.12.12.0/24"
							name            = "internal"
						}
						require_ssl = true
					}
				}
			}
`,
			expected: sql.SQL{
				Instances: []sql.DatabaseInstance{
					{
						Metadata:        defsecTypes.NewTestMetadata(),
						IsReplica:       defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						DatabaseVersion: defsecTypes.String("POSTGRES_12", defsecTypes.NewTestMetadata()),
						Settings: sql.Settings{
							Metadata: defsecTypes.NewTestMetadata(),
							Backups: sql.Backups{
								Metadata: defsecTypes.NewTestMetadata(),
								Enabled:  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							},
							Flags: sql.Flags{
								Metadata:                        defsecTypes.NewTestMetadata(),
								LogMinDurationStatement:         defsecTypes.Int(-1, defsecTypes.NewTestMetadata()),
								ContainedDatabaseAuthentication: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
								CrossDBOwnershipChaining:        defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
								LocalInFile:                     defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								LogCheckpoints:                  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								LogConnections:                  defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								LogDisconnections:               defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								LogLockWaits:                    defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								LogMinMessages:                  defsecTypes.String("", defsecTypes.NewTestMetadata()),
								LogTempFileSize:                 defsecTypes.Int(-1, defsecTypes.NewTestMetadata()),
							},
							IPConfiguration: sql.IPConfiguration{
								Metadata:   defsecTypes.NewTestMetadata(),
								RequireTLS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
								EnableIPv4: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
								AuthorizedNetworks: []struct {
									Name defsecTypes.StringValue
									CIDR defsecTypes.StringValue
								}{
									{
										Name: defsecTypes.String("internal", defsecTypes.NewTestMetadata()),
										CIDR: defsecTypes.String("108.12.12.0/24", defsecTypes.NewTestMetadata()),
									},
								},
							},
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// Test_adaptInstances verifies that each recognised database_flags entry is
// copied onto the adapted Flags struct.
func Test_adaptInstances(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  []sql.DatabaseInstance
	}{
		{
			name: "all flags",
			terraform: `
resource "google_sql_database_instance" "backup_source_instance" {
  name             = "test-instance"
  database_version = "POSTGRES_11"

  project             = "test-project"
  region              = "europe-west6"
  deletion_protection = false
  settings {
    tier = "db-f1-micro"
    backup_configuration {
      enabled = true
    }
    ip_configuration {
      ipv4_enabled    = false
      private_network = "test-network"
      require_ssl     = true
    }
    database_flags {
      name  = "log_connections"
      value = "on"
    }
    database_flags {
      name  = "log_temp_files"
      value = "0"
    }
    database_flags {
      name  = "log_checkpoints"
      value = "on"
    }
    database_flags {
      name  = "log_disconnections"
      value = "on"
    }
    database_flags {
      name  = "log_lock_waits"
      value = "on"
    }
  }
}
	`,
			expected: []sql.DatabaseInstance{
				{
					Metadata:        defsecTypes.NewTestMetadata(),
					DatabaseVersion: defsecTypes.String("POSTGRES_11", defsecTypes.NewTestMetadata()),
					IsReplica:       defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
					Settings: sql.Settings{
						Backups: sql.Backups{
							Enabled: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						},
						Flags: sql.Flags{
							LogConnections:                  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							LogTempFileSize:                 defsecTypes.Int(0, defsecTypes.NewTestMetadata()),
							LogCheckpoints:                  defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							LogDisconnections:               defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							LogLockWaits:                    defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							ContainedDatabaseAuthentication: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							CrossDBOwnershipChaining:        defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
							LocalInFile:                     defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							LogMinDurationStatement:         defsecTypes.Int(-1, defsecTypes.NewTestMetadata()),
							LogMinMessages:                  defsecTypes.String("", defsecTypes.NewTestMetadata()),
						},
						IPConfiguration: sql.IPConfiguration{
							EnableIPv4: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							RequireTLS: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := adaptInstances(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}

// TestLines pins the source ranges recorded in the adapted metadata; the
// asserted numbers are line numbers within the src literal below. Note that
// numeric flags (log_temp_files) carry the metadata of the flag *name*
// attribute, while boolean flags carry the metadata of the *value* attribute.
func TestLines(t *testing.T) {
	src := `
	resource "google_sql_database_instance" "backup_source_instance" {
		name             = "test-instance"
		database_version = "POSTGRES_11"

		settings {
			backup_configuration {
				enabled = true
			}

			ip_configuration {
				ipv4_enabled = false
				require_ssl = true
				authorized_networks {
					name  = "internal"
					value = "108.12.12.0/24"
				}
			}

			database_flags {
				name  = "log_connections"
				value = "on"
			}
			database_flags {
				name  = "log_temp_files"
				value = "0"
			}
			database_flags {
				name  = "log_checkpoints"
				value = "on"
			}
			database_flags {
				name  = "log_disconnections"
				value = "on"
			}
			database_flags {
				name  = "log_lock_waits"
				value = "on"
			}
		}
	}`

	modules := tftestutil.CreateModulesFromSource(t, src, ".tf")
	adapted := Adapt(modules)

	require.Len(t, adapted.Instances, 1)
	instance := adapted.Instances[0]

	assert.Equal(t, 2, instance.Metadata.Range().GetStartLine())
	assert.Equal(t, 41, instance.Metadata.Range().GetEndLine())

	assert.Equal(t, 4, instance.DatabaseVersion.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 4, instance.DatabaseVersion.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 6, instance.Settings.Metadata.Range().GetStartLine())
	assert.Equal(t, 40, instance.Settings.Metadata.Range().GetEndLine())

	assert.Equal(t, 7, instance.Settings.Backups.Metadata.Range().GetStartLine())
	assert.Equal(t, 9, instance.Settings.Backups.Metadata.Range().GetEndLine())

	assert.Equal(t, 8, instance.Settings.Backups.Enabled.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 8, instance.Settings.Backups.Enabled.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 11, instance.Settings.IPConfiguration.Metadata.Range().GetStartLine())
	assert.Equal(t, 18, instance.Settings.IPConfiguration.Metadata.Range().GetEndLine())

	assert.Equal(t, 12, instance.Settings.IPConfiguration.EnableIPv4.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 12, instance.Settings.IPConfiguration.EnableIPv4.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 13, instance.Settings.IPConfiguration.RequireTLS.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 13, instance.Settings.IPConfiguration.RequireTLS.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 15, instance.Settings.IPConfiguration.AuthorizedNetworks[0].Name.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 15, instance.Settings.IPConfiguration.AuthorizedNetworks[0].Name.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 16, instance.Settings.IPConfiguration.AuthorizedNetworks[0].CIDR.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 16, instance.Settings.IPConfiguration.AuthorizedNetworks[0].CIDR.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 22, instance.Settings.Flags.LogConnections.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 22, instance.Settings.Flags.LogConnections.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 25, instance.Settings.Flags.LogTempFileSize.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 25, instance.Settings.Flags.LogTempFileSize.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 34, instance.Settings.Flags.LogDisconnections.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 34, instance.Settings.Flags.LogDisconnections.GetMetadata().Range().GetEndLine())

	assert.Equal(t, 38, instance.Settings.Flags.LogLockWaits.GetMetadata().Range().GetStartLine())
	assert.Equal(t, 38, instance.Settings.Flags.LogLockWaits.GetMetadata().Range().GetEndLine())

}
append(orphanage.Bindings, binding.bindings...) + break + } + } + } + for _, orphanedMemberID := range a.memberMap.Orphans() { + for _, member := range a.members { + if member.blockID == orphanedMemberID { + orphanage.Members = append(orphanage.Members, member.member) + break + } + } + } + if len(orphanage.Bindings) > 0 || len(orphanage.Members) > 0 { + buckets = append(buckets, orphanage) + } + + return buckets +} + +func (a *adapter) adaptBucketResource(resourceBlock *terraform.Block) storage.Bucket { + + nameAttr := resourceBlock.GetAttribute("name") + nameValue := nameAttr.AsStringValueOrDefault("", resourceBlock) + + locationAttr := resourceBlock.GetAttribute("location") + locationValue := locationAttr.AsStringValueOrDefault("", resourceBlock) + + // See https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/storage_bucket#uniform_bucket_level_access + ublaAttr := resourceBlock.GetAttribute("uniform_bucket_level_access") + ublaValue := ublaAttr.AsBoolValueOrDefault(false, resourceBlock) + + bucket := storage.Bucket{ + Metadata: resourceBlock.GetMetadata(), + Name: nameValue, + Location: locationValue, + EnableUniformBucketLevelAccess: ublaValue, + Members: nil, + Bindings: nil, + Encryption: storage.BucketEncryption{ + Metadata: resourceBlock.GetMetadata(), + DefaultKMSKeyName: defsecTypes.StringDefault("", resourceBlock.GetMetadata()), + }, + } + + if encBlock := resourceBlock.GetBlock("encryption"); encBlock.IsNotNil() { + bucket.Encryption.Metadata = encBlock.GetMetadata() + kmsKeyNameAttr := encBlock.GetAttribute("default_kms_key_name") + bucket.Encryption.DefaultKMSKeyName = kmsKeyNameAttr.AsStringValueOrDefault("", encBlock) + } + + var name string + if nameAttr.IsString() { + name = nameAttr.Value().AsString() + } + + for _, member := range a.members { + if member.bucketBlockID == resourceBlock.ID() { + bucket.Members = append(bucket.Members, member.member) + a.memberMap.Resolve(member.blockID) + continue + } + if name != "" && 
name == member.bucketID { + bucket.Members = append(bucket.Members, member.member) + a.memberMap.Resolve(member.blockID) + } + } + for _, binding := range a.bindings { + if binding.bucketBlockID == resourceBlock.ID() { + bucket.Bindings = append(bucket.Bindings, binding.bindings...) + a.bindingMap.Resolve(binding.blockID) + continue + } + if name != "" && name == binding.bucketID { + bucket.Bindings = append(bucket.Bindings, binding.bindings...) + a.bindingMap.Resolve(binding.blockID) + } + } + + return bucket +} diff --git a/internal/adapters/terraform/google/storage/adapt_test.go b/internal/adapters/terraform/google/storage/adapt_test.go new file mode 100644 index 000000000000..2dcef331d1c6 --- /dev/null +++ b/internal/adapters/terraform/google/storage/adapt_test.go @@ -0,0 +1,198 @@ +package storage + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/providers/google/storage" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_Adapt(t *testing.T) { + tests := []struct { + name string + terraform string + expected storage.Storage + }{ + { + name: "defined", + terraform: ` + resource "google_storage_bucket" "static-site" { + name = "image-store.com" + location = "EU" + uniform_bucket_level_access = true + + encryption { + default_kms_key_name = "default-kms-key-name" + } + } + + resource "google_storage_bucket_iam_binding" "binding" { + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #1" + members = [ + "group:test@example.com", + ] + } + + resource "google_storage_bucket_iam_member" "example" { + member = "serviceAccount:test@example.com" + bucket = google_storage_bucket.static-site.name + role = "roles/storage.admin #2" 
// Test_Adapt verifies bucket adaptation with attached IAM bindings/members,
// both fully configured and with every attribute left at its default.
func Test_Adapt(t *testing.T) {
	tests := []struct {
		name      string
		terraform string
		expected  storage.Storage
	}{
		{
			name: "defined",
			terraform: `
			resource "google_storage_bucket" "static-site" {
				name          = "image-store.com"
				location      = "EU"
				uniform_bucket_level_access = true

				encryption {
					default_kms_key_name = "default-kms-key-name"
				}
			}

			resource "google_storage_bucket_iam_binding" "binding" {
				bucket = google_storage_bucket.static-site.name
				role = "roles/storage.admin #1"
				members = [
					"group:test@example.com",
				]
			}

			resource "google_storage_bucket_iam_member" "example" {
				member = "serviceAccount:test@example.com"
				bucket = google_storage_bucket.static-site.name
				role = "roles/storage.admin #2"
			}`,
			expected: storage.Storage{
				Buckets: []storage.Bucket{
					{
						Metadata:                       defsecTypes.NewTestMetadata(),
						Name:                           defsecTypes.String("image-store.com", defsecTypes.NewTestMetadata()),
						Location:                       defsecTypes.String("EU", defsecTypes.NewTestMetadata()),
						EnableUniformBucketLevelAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()),
						Bindings: []iam.Binding{
							{
								Metadata: defsecTypes.NewTestMetadata(),
								Members: []defsecTypes.StringValue{
									defsecTypes.String("group:test@example.com", defsecTypes.NewTestMetadata()),
								},
								Role:                          defsecTypes.String("roles/storage.admin #1", defsecTypes.NewTestMetadata()),
								IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							},
						},
						Members: []iam.Member{
							{
								Metadata:              defsecTypes.NewTestMetadata(),
								Member:                defsecTypes.String("serviceAccount:test@example.com", defsecTypes.NewTestMetadata()),
								Role:                  defsecTypes.String("roles/storage.admin #2", defsecTypes.NewTestMetadata()),
								DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							},
						},
						Encryption: storage.BucketEncryption{
							Metadata:          defsecTypes.NewTestMetadata(),
							DefaultKMSKeyName: defsecTypes.String("default-kms-key-name", defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
		{
			name: "defaults",
			terraform: `
			resource "google_storage_bucket" "static-site" {
			}

			resource "google_storage_bucket_iam_binding" "binding" {
				bucket = google_storage_bucket.static-site.name
			}

			resource "google_storage_bucket_iam_member" "example" {
				bucket = google_storage_bucket.static-site.name
			}`,
			expected: storage.Storage{
				Buckets: []storage.Bucket{
					{
						Metadata:                       defsecTypes.NewTestMetadata(),
						Name:                           defsecTypes.String("", defsecTypes.NewTestMetadata()),
						Location:                       defsecTypes.String("", defsecTypes.NewTestMetadata()),
						EnableUniformBucketLevelAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
						Bindings: []iam.Binding{
							{
								Metadata:                      defsecTypes.NewTestMetadata(),
								Role:                          defsecTypes.String("", defsecTypes.NewTestMetadata()),
								IncludesDefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							},
						},
						Members: []iam.Member{
							{
								Metadata:              defsecTypes.NewTestMetadata(),
								Member:                defsecTypes.String("", defsecTypes.NewTestMetadata()),
								Role:                  defsecTypes.String("", defsecTypes.NewTestMetadata()),
								DefaultServiceAccount: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()),
							},
						},
						Encryption: storage.BucketEncryption{
							Metadata:          defsecTypes.NewTestMetadata(),
							DefaultKMSKeyName: defsecTypes.String("", defsecTypes.NewTestMetadata()),
						},
					},
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf")
			adapted := Adapt(modules)
			testutil.AssertDefsecEqual(t, test.expected, adapted)
		})
	}
}
assert.Equal(t, 3, bucket.Name.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, bucket.Location.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, bucket.Location.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, bucket.EnableUniformBucketLevelAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, bucket.EnableUniformBucketLevelAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 8, binding.Metadata.Range().GetStartLine()) + assert.Equal(t, 14, binding.Metadata.Range().GetEndLine()) + + assert.Equal(t, 10, binding.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, binding.Role.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, binding.Members[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, binding.Members[0].GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 16, member.Metadata.Range().GetStartLine()) + assert.Equal(t, 20, member.Metadata.Range().GetEndLine()) + + assert.Equal(t, 17, member.Member.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, member.Member.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 19, member.Role.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 19, member.Role.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/google/storage/iam.go b/internal/adapters/terraform/google/storage/iam.go new file mode 100644 index 000000000000..24b88a657f94 --- /dev/null +++ b/internal/adapters/terraform/google/storage/iam.go @@ -0,0 +1,96 @@ +package storage + +import ( + iamTypes "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/trivy/internal/adapters/terraform/google/iam" +) + +type parentedBinding struct { + blockID string + bucketID string + bucketBlockID string + bindings []iamTypes.Binding +} + +type parentedMember struct { + blockID string + bucketID string + bucketBlockID string + member iamTypes.Member +} + +func (a *adapter) adaptBindings() { + + for _, iamBlock := 
range a.modules.GetResourcesByType("google_storage_bucket_iam_policy") { + var parented parentedBinding + parented.blockID = iamBlock.ID() + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + } + + policyAttr := iamBlock.GetAttribute("policy_data") + if policyAttr.IsNil() { + continue + } + + policyBlock, err := a.modules.GetReferencedBlock(policyAttr, iamBlock) + if err != nil { + continue + } + + parented.bindings = iam.ParsePolicyBlock(policyBlock) + a.bindings = append(a.bindings, parented) + } + + for _, iamBlock := range a.modules.GetResourcesByType("google_storage_bucket_iam_binding") { + + var parented parentedBinding + parented.blockID = iamBlock.ID() + parented.bindings = []iamTypes.Binding{iam.AdaptBinding(iamBlock, a.modules)} + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + } + + a.bindings = append(a.bindings, parented) + } +} + +func (a *adapter) adaptMembers() { + + for _, iamBlock := range a.modules.GetResourcesByType("google_storage_bucket_iam_member") { + + var parented parentedMember + parented.blockID = iamBlock.ID() + parented.member = iam.AdaptMember(iamBlock, a.modules) + + bucketAttr := iamBlock.GetAttribute("bucket") + if bucketAttr.IsString() { + parented.bucketID = bucketAttr.Value().AsString() + } + + if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { + if refBlock.TypeLabel() == "google_storage_bucket" { + parented.bucketBlockID = refBlock.ID() + } + } + + 
a.members = append(a.members, parented) + } + +} diff --git a/internal/adapters/terraform/kubernetes/adapt.go b/internal/adapters/terraform/kubernetes/adapt.go new file mode 100644 index 000000000000..1af3cc29a973 --- /dev/null +++ b/internal/adapters/terraform/kubernetes/adapt.go @@ -0,0 +1,94 @@ +package kubernetes + +import ( + "github.com/aquasecurity/defsec/pkg/providers/kubernetes" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) kubernetes.Kubernetes { + return kubernetes.Kubernetes{ + NetworkPolicies: adaptNetworkPolicies(modules), + } +} + +func adaptNetworkPolicies(modules terraform.Modules) []kubernetes.NetworkPolicy { + var networkPolicies []kubernetes.NetworkPolicy + for _, module := range modules { + for _, resource := range module.GetResourcesByType("kubernetes_network_policy") { + networkPolicies = append(networkPolicies, adaptNetworkPolicy(resource)) + } + } + return networkPolicies +} + +func adaptNetworkPolicy(resourceBlock *terraform.Block) kubernetes.NetworkPolicy { + + policy := kubernetes.NetworkPolicy{ + Metadata: resourceBlock.GetMetadata(), + Spec: kubernetes.NetworkPolicySpec{ + Metadata: resourceBlock.GetMetadata(), + Egress: kubernetes.Egress{ + Metadata: resourceBlock.GetMetadata(), + Ports: nil, + DestinationCIDRs: nil, + }, + Ingress: kubernetes.Ingress{ + Metadata: resourceBlock.GetMetadata(), + Ports: nil, + SourceCIDRs: nil, + }, + }, + } + + if specBlock := resourceBlock.GetBlock("spec"); specBlock.IsNotNil() { + if egressBlock := specBlock.GetBlock("egress"); egressBlock.IsNotNil() { + policy.Spec.Egress.Metadata = egressBlock.GetMetadata() + for _, port := range egressBlock.GetBlocks("ports") { + numberAttr := port.GetAttribute("number") + numberVal := numberAttr.AsStringValueOrDefault("", port) + + protocolAttr := port.GetAttribute("protocol") + protocolVal := protocolAttr.AsStringValueOrDefault("", port) + + policy.Spec.Egress.Ports = append(policy.Spec.Egress.Ports, 
kubernetes.Port{ + Metadata: port.GetMetadata(), + Number: numberVal, + Protocol: protocolVal, + }) + } + + for _, to := range egressBlock.GetBlocks("to") { + cidrAtrr := to.GetBlock("ip_block").GetAttribute("cidr") + cidrVal := cidrAtrr.AsStringValueOrDefault("", to) + + policy.Spec.Egress.DestinationCIDRs = append(policy.Spec.Egress.DestinationCIDRs, cidrVal) + } + } + + if ingressBlock := specBlock.GetBlock("ingress"); ingressBlock.IsNotNil() { + policy.Spec.Ingress.Metadata = ingressBlock.GetMetadata() + for _, port := range ingressBlock.GetBlocks("ports") { + numberAttr := port.GetAttribute("number") + numberVal := numberAttr.AsStringValueOrDefault("", port) + + protocolAttr := port.GetAttribute("protocol") + protocolVal := protocolAttr.AsStringValueOrDefault("", port) + + policy.Spec.Ingress.Ports = append(policy.Spec.Ingress.Ports, kubernetes.Port{ + Metadata: port.GetMetadata(), + Number: numberVal, + Protocol: protocolVal, + }) + } + + for _, from := range ingressBlock.GetBlocks("from") { + cidrAtrr := from.GetBlock("ip_block").GetAttribute("cidr") + cidrVal := cidrAtrr.AsStringValueOrDefault("", from) + + policy.Spec.Ingress.SourceCIDRs = append(policy.Spec.Ingress.SourceCIDRs, cidrVal) + } + } + } + + return policy +} diff --git a/internal/adapters/terraform/nifcloud/computing/adapt.go b/internal/adapters/terraform/nifcloud/computing/adapt.go new file mode 100644 index 000000000000..e58f4e2bb59e --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/adapt.go @@ -0,0 +1,16 @@ +package computing + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) computing.Computing { + + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("nifcloud_security_group_rule")} + + return computing.Computing{ + SecurityGroups: sgAdapter.adaptSecurityGroups(modules), + Instances: adaptInstances(modules), + } +} diff --git 
a/internal/adapters/terraform/nifcloud/computing/adapt_test.go b/internal/adapters/terraform/nifcloud/computing/adapt_test.go new file mode 100644 index 000000000000..d92848402304 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/adapt_test.go @@ -0,0 +1,61 @@ +package computing + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_instance" "example" { + security_group = nifcloud_security_group.example.group_name + + network_interface { + network_id = "net-COMMON_PRIVATE" + } +} + +resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" +} + +resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + from_port = 22 + to_port = 22 + protocol = "TCP" + description = "memo" + cidr_ip = "1.2.3.4/32" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Instances, 1) + require.Len(t, adapted.SecurityGroups, 1) + + instance := adapted.Instances[0] + sg := adapted.SecurityGroups[0] + + assert.Equal(t, 3, instance.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, instance.SecurityGroup.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, instance.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, instance.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, sg.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, sg.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 21, sg.IngressRules[0].Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 21, sg.IngressRules[0].Description.GetMetadata().Range().GetEndLine()) + + 
assert.Equal(t, 22, sg.IngressRules[0].CIDR.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 22, sg.IngressRules[0].CIDR.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/computing/instance.go b/internal/adapters/terraform/nifcloud/computing/instance.go new file mode 100644 index 000000000000..afb91e58f1fb --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/instance.go @@ -0,0 +1,35 @@ +package computing + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptInstances(modules terraform.Modules) []computing.Instance { + var instances []computing.Instance + + for _, resource := range modules.GetResourcesByType("nifcloud_instance") { + instances = append(instances, adaptInstance(resource)) + } + return instances +} + +func adaptInstance(resource *terraform.Block) computing.Instance { + var networkInterfaces []computing.NetworkInterface + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + computing.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", resource), + }, + ) + } + + return computing.Instance{ + Metadata: resource.GetMetadata(), + SecurityGroup: resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + NetworkInterfaces: networkInterfaces, + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/instance_test.go b/internal/adapters/terraform/nifcloud/computing/instance_test.go new file mode 100644 index 000000000000..45832714bee0 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/instance_test.go @@ -0,0 +1,71 @@ +package computing + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + 
"github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []computing.Instance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_instance" "my_example" { + security_group = "example-security-group" + network_interface { + network_id = "net-COMMON_PRIVATE" + } + } +`, + expected: []computing.Instance{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []computing.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_instance" "my_example" { + network_interface { + } + } +`, + + expected: []computing.Instance{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []computing.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/security_group.go b/internal/adapters/terraform/nifcloud/computing/security_group.go new file mode 100644 index 000000000000..575a02dd5723 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/security_group.go @@ -0,0 +1,76 @@ +package computing + +import ( + 
"github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type sgAdapter struct { + sgRuleIDs terraform.ResourceIDResolutions +} + +func (a *sgAdapter) adaptSecurityGroups(modules terraform.Modules) []computing.SecurityGroup { + var securityGroups []computing.SecurityGroup + for _, resource := range modules.GetResourcesByType("nifcloud_security_group") { + securityGroups = append(securityGroups, a.adaptSecurityGroup(resource, modules)) + } + orphanResources := modules.GetResourceByIDs(a.sgRuleIDs.Orphans()...) + if len(orphanResources) > 0 { + orphanage := computing.SecurityGroup{ + Metadata: defsecTypes.NewUnmanagedMetadata(), + Description: defsecTypes.StringDefault("", defsecTypes.NewUnmanagedMetadata()), + IngressRules: nil, + } + for _, sgRule := range orphanResources { + if sgRule.GetAttribute("type").Equals("IN") { + orphanage.IngressRules = append(orphanage.IngressRules, adaptSGRule(sgRule, modules)) + } + if sgRule.GetAttribute("type").Equals("OUT") { + orphanage.EgressRules = append(orphanage.EgressRules, adaptSGRule(sgRule, modules)) + } + } + securityGroups = append(securityGroups, orphanage) + } + + return securityGroups +} + +func (a *sgAdapter) adaptSecurityGroup(resource *terraform.Block, module terraform.Modules) computing.SecurityGroup { + var ingressRules, egressRules []computing.SecurityGroupRule + + descriptionAttr := resource.GetAttribute("description") + descriptionVal := descriptionAttr.AsStringValueOrDefault("", resource) + + rulesBlocks := module.GetReferencingResources(resource, "nifcloud_security_group_rule", "security_group_names") + for _, ruleBlock := range rulesBlocks { + a.sgRuleIDs.Resolve(ruleBlock.ID()) + if ruleBlock.GetAttribute("type").Equals("IN") { + ingressRules = append(ingressRules, adaptSGRule(ruleBlock, module)) + } + if ruleBlock.GetAttribute("type").Equals("OUT") { + egressRules = 
append(egressRules, adaptSGRule(ruleBlock, module)) + } + } + + return computing.SecurityGroup{ + Metadata: resource.GetMetadata(), + Description: descriptionVal, + IngressRules: ingressRules, + EgressRules: egressRules, + } +} + +func adaptSGRule(resource *terraform.Block, modules terraform.Modules) computing.SecurityGroupRule { + ruleDescAttr := resource.GetAttribute("description") + ruleDescVal := ruleDescAttr.AsStringValueOrDefault("", resource) + + cidrAttr := resource.GetAttribute("cidr_ip") + cidrVal := cidrAttr.AsStringValueOrDefault("", resource) + + return computing.SecurityGroupRule{ + Metadata: resource.GetMetadata(), + Description: ruleDescVal, + CIDR: cidrVal, + } +} diff --git a/internal/adapters/terraform/nifcloud/computing/security_group_test.go b/internal/adapters/terraform/nifcloud/computing/security_group_test.go new file mode 100644 index 000000000000..70800804b76d --- /dev/null +++ b/internal/adapters/terraform/nifcloud/computing/security_group_test.go @@ -0,0 +1,86 @@ +package computing + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/computing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []computing.SecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" + } + + resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + from_port = 22 + to_port = 22 + protocol = "TCP" + description = "memo" + cidr_ip = "1.2.3.4/32" + } +`, + expected: []computing.SecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", 
defsecTypes.NewTestMetadata()), + IngressRules: []computing.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + CIDR: defsecTypes.String("1.2.3.4/32", defsecTypes.NewTestMetadata()), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_security_group" "example" { + } + + resource "nifcloud_security_group_rule" "example" { + type = "IN" + security_group_names = [nifcloud_security_group.example.group_name] + } + +`, + + expected: []computing.SecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IngressRules: []computing.SecurityGroupRule{ + { + Metadata: defsecTypes.NewTestMetadata(), + CIDR: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + sgAdapter := sgAdapter{sgRuleIDs: modules.GetChildResourceIDMapByType("nifcloud_security_group_rule")} + adapted := sgAdapter.adaptSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/adapt.go b/internal/adapters/terraform/nifcloud/dns/adapt.go new file mode 100644 index 000000000000..5abe9697a8a7 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/adapt.go @@ -0,0 +1,12 @@ +package dns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) dns.DNS { + return dns.DNS{ + Records: adaptRecords(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/adapt_test.go b/internal/adapters/terraform/nifcloud/dns/adapt_test.go new file mode 100644 index 
000000000000..e5e60e9d9853 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/adapt_test.go @@ -0,0 +1,32 @@ +package dns + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_dns_record" "example" { + type = "A" + record = "example-record" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.Records, 1) + + record := adapted.Records[0] + + assert.Equal(t, 3, record.Type.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, record.Type.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, record.Record.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, record.Record.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/dns/record.go b/internal/adapters/terraform/nifcloud/dns/record.go new file mode 100644 index 000000000000..c0fdcc010a78 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/record.go @@ -0,0 +1,23 @@ +package dns + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptRecords(modules terraform.Modules) []dns.Record { + var records []dns.Record + + for _, resource := range modules.GetResourcesByType("nifcloud_dns_record") { + records = append(records, adaptRecord(resource)) + } + return records +} + +func adaptRecord(resource *terraform.Block) dns.Record { + return dns.Record{ + Metadata: resource.GetMetadata(), + Record: resource.GetAttribute("record").AsStringValueOrDefault("", resource), + Type: resource.GetAttribute("type").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/dns/record_test.go b/internal/adapters/terraform/nifcloud/dns/record_test.go new file mode 100644 index 
000000000000..ccf7e4cc7bd1 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/dns/record_test.go @@ -0,0 +1,56 @@ +package dns + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/dns" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptRecords(t *testing.T) { + tests := []struct { + name string + terraform string + expected []dns.Record + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_dns_record" "example" { + type = "A" + record = "example-record" + } +`, + expected: []dns.Record{{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("A", defsecTypes.NewTestMetadata()), + Record: defsecTypes.String("example-record", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_dns_record" "example" { + } +`, + + expected: []dns.Record{{ + Metadata: defsecTypes.NewTestMetadata(), + Type: defsecTypes.String("", defsecTypes.NewTestMetadata()), + Record: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptRecords(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/adapt.go b/internal/adapters/terraform/nifcloud/nas/adapt.go new file mode 100644 index 000000000000..615eac7a8df6 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/adapt.go @@ -0,0 +1,13 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) nas.NAS { + return nas.NAS{ + NASSecurityGroups: adaptNASSecurityGroups(modules), + NASInstances: 
adaptNASInstances(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/adapt_test.go b/internal/adapters/terraform/nifcloud/nas/adapt_test.go new file mode 100644 index 000000000000..0998303dbed6 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/adapt_test.go @@ -0,0 +1,44 @@ +package nas + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_nas_instance" "example" { + network_id = "example-network" +} + +resource "nifcloud_nas_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.NASInstances, 1) + require.Len(t, adapted.NASSecurityGroups, 1) + + nasInstance := adapted.NASInstances[0] + nasSecurityGroup := adapted.NASSecurityGroups[0] + + assert.Equal(t, 3, nasInstance.NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, nasInstance.NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, nasSecurityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, nasSecurityGroup.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 10, nasSecurityGroup.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 10, nasSecurityGroup.CIDRs[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance.go b/internal/adapters/terraform/nifcloud/nas/nas_instance.go new file mode 100644 index 000000000000..e8558c5f42b5 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_instance.go @@ -0,0 +1,22 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func 
adaptNASInstances(modules terraform.Modules) []nas.NASInstance { + var nasInstances []nas.NASInstance + + for _, resource := range modules.GetResourcesByType("nifcloud_nas_instance") { + nasInstances = append(nasInstances, adaptNASInstance(resource)) + } + return nasInstances +} + +func adaptNASInstance(resource *terraform.Block) nas.NASInstance { + return nas.NASInstance{ + Metadata: resource.GetMetadata(), + NetworkID: resource.GetAttribute("network_id").AsStringValueOrDefault("net-COMMON_PRIVATE", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go new file mode 100644 index 000000000000..26b9e1408a2f --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_instance_test.go @@ -0,0 +1,54 @@ +package nas + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptNASInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []nas.NASInstance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_nas_instance" "example" { + network_id = "example-network" + } +`, + expected: []nas.NASInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("example-network", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_nas_instance" "example" { + } +`, + + expected: []nas.NASInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := 
adaptNASInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group.go b/internal/adapters/terraform/nifcloud/nas/nas_security_group.go new file mode 100644 index 000000000000..e7dfd593d8dd --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_security_group.go @@ -0,0 +1,30 @@ +package nas + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptNASSecurityGroups(modules terraform.Modules) []nas.NASSecurityGroup { + var nasSecurityGroups []nas.NASSecurityGroup + + for _, resource := range modules.GetResourcesByType("nifcloud_nas_security_group") { + nasSecurityGroups = append(nasSecurityGroups, adaptNASSecurityGroup(resource)) + } + return nasSecurityGroups +} + +func adaptNASSecurityGroup(resource *terraform.Block) nas.NASSecurityGroup { + var cidrs []defsecTypes.StringValue + + for _, rule := range resource.GetBlocks("rule") { + cidrs = append(cidrs, rule.GetAttribute("cidr_ip").AsStringValueOrDefault("", resource)) + } + + return nas.NASSecurityGroup{ + Metadata: resource.GetMetadata(), + Description: resource.GetAttribute("description").AsStringValueOrDefault("", resource), + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go new file mode 100644 index 000000000000..01e93aade40b --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nas/nas_security_group_test.go @@ -0,0 +1,66 @@ +package nas + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/nas" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func 
Test_adaptNASSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []nas.NASSecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_nas_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } + } +`, + expected: []nas.NASSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("0.0.0.0/0", defsecTypes.NewTestMetadata()), + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_nas_security_group" "example" { + rule { + } + } +`, + + expected: []nas.NASSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptNASSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/adapt.go b/internal/adapters/terraform/nifcloud/network/adapt.go new file mode 100644 index 000000000000..4c1c10acd1b9 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/adapt.go @@ -0,0 +1,16 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) network.Network { + + return network.Network{ + ElasticLoadBalancers: adaptElasticLoadBalancers(modules), + LoadBalancers: adaptLoadBalancers(modules), + Routers: adaptRouters(modules), + VpnGateways: adaptVpnGateways(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/adapt_test.go 
b/internal/adapters/terraform/nifcloud/network/adapt_test.go new file mode 100644 index 000000000000..9255e7e16d3b --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/adapt_test.go @@ -0,0 +1,83 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_elb" "example" { + protocol = "HTTP" + + network_interface { + network_id = "net-COMMON_PRIVATE" + is_vip_network = false + } +} + +resource "nifcloud_load_balancer" "example" { + ssl_policy_id = "example-ssl-policy-id" + load_balancer_port = 8080 +} + +resource "nifcloud_router" "example" { + security_group = nifcloud_security_group.example.group_name + + network_interface { + network_id = "net-COMMON_PRIVATE" + } +} + +resource "nifcloud_security_group" "example" { + group_name = "example" + description = "memo" +} + +resource "nifcloud_vpn_gateway" "example" { + security_group = nifcloud_security_group.example.group_name +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.ElasticLoadBalancers, 1) + require.Len(t, adapted.LoadBalancers, 1) + require.Len(t, adapted.Routers, 1) + require.Len(t, adapted.VpnGateways, 1) + + elb := adapted.ElasticLoadBalancers[0] + lb := adapted.LoadBalancers[0] + router := adapted.Routers[0] + vpngw := adapted.VpnGateways[0] + + assert.Equal(t, 3, elb.Listeners[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, elb.Listeners[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, elb.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, elb.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, elb.NetworkInterfaces[0].IsVipNetwork.GetMetadata().Range().GetStartLine()) + 
assert.Equal(t, 7, elb.NetworkInterfaces[0].IsVipNetwork.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 12, lb.Listeners[0].TLSPolicy.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 12, lb.Listeners[0].TLSPolicy.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 13, lb.Listeners[0].Protocol.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 13, lb.Listeners[0].Protocol.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 17, router.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 17, router.SecurityGroup.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 20, router.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 20, router.NetworkInterfaces[0].NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 30, vpngw.SecurityGroup.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 30, vpngw.SecurityGroup.GetMetadata().Range().GetEndLine()) + +} diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go new file mode 100644 index 000000000000..efe3c510fbc3 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer.go @@ -0,0 +1,50 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptElasticLoadBalancers(modules terraform.Modules) []network.ElasticLoadBalancer { + var elasticLoadBalancers []network.ElasticLoadBalancer + + for _, resource := range modules.GetResourcesByType("nifcloud_elb") { + elasticLoadBalancers = append(elasticLoadBalancers, adaptElasticLoadBalancer(resource, modules)) + } + return elasticLoadBalancers +} + +func adaptElasticLoadBalancer(resource *terraform.Block, modules terraform.Modules) network.ElasticLoadBalancer { + var listeners []network.ElasticLoadBalancerListener + var networkInterfaces 
[]network.NetworkInterface + + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + network.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", resource), + IsVipNetwork: networkInterfaceBlock.GetAttribute("is_vip_network").AsBoolValueOrDefault(true, resource), + }, + ) + } + + listeners = append(listeners, adaptElasticLoadBalancerListener(resource)) + for _, listenerBlock := range modules.GetReferencingResources(resource, "nifcloud_elb_listener", "elb_id") { + listeners = append(listeners, adaptElasticLoadBalancerListener(listenerBlock)) + } + + return network.ElasticLoadBalancer{ + Metadata: resource.GetMetadata(), + NetworkInterfaces: networkInterfaces, + Listeners: listeners, + } +} + +func adaptElasticLoadBalancerListener(resource *terraform.Block) network.ElasticLoadBalancerListener { + return network.ElasticLoadBalancerListener{ + Metadata: resource.GetMetadata(), + Protocol: resource.GetAttribute("protocol").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go new file mode 100644 index 000000000000..06bb3a96e78a --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/elastic_load_balancer_test.go @@ -0,0 +1,90 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptElasticLoadBalancers(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.ElasticLoadBalancer + 
}{ + { + name: "configured", + terraform: ` + resource "nifcloud_elb" "example" { + protocol = "HTTP" + + network_interface { + network_id = "net-COMMON_PRIVATE" + is_vip_network = false + } + } + + resource "nifcloud_elb_listener" "example" { + elb_id = nifcloud_elb.example.id + protocol = "HTTPS" + } +`, + expected: []network.ElasticLoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + IsVipNetwork: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }, + }, + Listeners: []network.ElasticLoadBalancerListener{ + { + Metadata: defsecTypes.NewTestMetadata(), + Protocol: defsecTypes.String("HTTP", defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + Protocol: defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_elb" "example" { + network_interface { + } + } +`, + + expected: []network.ElasticLoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + IsVipNetwork: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }, + }, + Listeners: []network.ElasticLoadBalancerListener{{ + Metadata: defsecTypes.NewTestMetadata(), + }}, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptElasticLoadBalancers(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer.go b/internal/adapters/terraform/nifcloud/network/load_balancer.go new file mode 100644 index 000000000000..0e27c58d377b --- 
/dev/null +++ b/internal/adapters/terraform/nifcloud/network/load_balancer.go @@ -0,0 +1,67 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptLoadBalancers(modules terraform.Modules) []network.LoadBalancer { + var loadBalancers []network.LoadBalancer + + for _, resource := range modules.GetResourcesByType("nifcloud_load_balancer") { + loadBalancers = append(loadBalancers, adaptLoadBalancer(resource, modules)) + } + + return loadBalancers +} + +func adaptLoadBalancer(resource *terraform.Block, modules terraform.Modules) network.LoadBalancer { + var listeners []network.LoadBalancerListener + + listeners = append(listeners, adaptListener(resource)) + for _, listenerBlock := range modules.GetReferencingResources(resource, "nifcloud_load_balancer_listener", "load_balancer_name") { + listeners = append(listeners, adaptListener(listenerBlock)) + } + + return network.LoadBalancer{ + Metadata: resource.GetMetadata(), + Listeners: listeners, + } +} + +func adaptListener(resource *terraform.Block) network.LoadBalancerListener { + protocolVal := defsecTypes.String("", resource.GetMetadata()) + policyVal := defsecTypes.String("", resource.GetMetadata()) + + portAttr := resource.GetAttribute("load_balancer_port") + if portAttr.IsNotNil() && portAttr.IsNumber() { + port := portAttr.AsNumber() + switch port { + case 21: + protocolVal = defsecTypes.String("FTP", portAttr.GetMetadata()) + case 80: + protocolVal = defsecTypes.String("HTTP", portAttr.GetMetadata()) + case 443: + protocolVal = defsecTypes.String("HTTPS", portAttr.GetMetadata()) + default: + protocolVal = defsecTypes.String("custom", portAttr.GetMetadata()) + } + } + + policyIDAttr := resource.GetAttribute("ssl_policy_id") + if policyIDAttr.IsNotNil() && policyIDAttr.IsString() { + policyVal = policyIDAttr.AsStringValueOrDefault("", resource) + 
} + + policyNameAttr := resource.GetAttribute("ssl_policy_name") + if policyNameAttr.IsNotNil() && policyNameAttr.IsString() { + policyVal = policyNameAttr.AsStringValueOrDefault("", resource) + } + + return network.LoadBalancerListener{ + Metadata: resource.GetMetadata(), + Protocol: protocolVal, + TLSPolicy: policyVal, + } +} diff --git a/internal/adapters/terraform/nifcloud/network/load_balancer_test.go b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go new file mode 100644 index 000000000000..cbcebb11d774 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/load_balancer_test.go @@ -0,0 +1,75 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptLoadBalancers(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.LoadBalancer + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_load_balancer" "example" { + load_balancer_name = "example" + load_balancer_port = 80 + ssl_policy_id = "example-ssl-policy-id" + } + + resource "nifcloud_load_balancer_listener" "example" { + load_balancer_name = nifcloud_load_balancer.example.load_balancer_name + load_balancer_port = 443 + ssl_policy_name = "example-ssl-policy-name" + } + +`, + expected: []network.LoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + Listeners: []network.LoadBalancerListener{ + { + Metadata: defsecTypes.NewTestMetadata(), + TLSPolicy: defsecTypes.String("example-ssl-policy-id", defsecTypes.NewTestMetadata()), + Protocol: defsecTypes.String("HTTP", defsecTypes.NewTestMetadata()), + }, + { + Metadata: defsecTypes.NewTestMetadata(), + TLSPolicy: defsecTypes.String("example-ssl-policy-name", defsecTypes.NewTestMetadata()), + Protocol: 
defsecTypes.String("HTTPS", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_load_balancer" "example" { + } +`, + + expected: []network.LoadBalancer{{ + Metadata: defsecTypes.NewTestMetadata(), + Listeners: []network.LoadBalancerListener{{ + Metadata: defsecTypes.NewTestMetadata(), + }}, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptLoadBalancers(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/router.go b/internal/adapters/terraform/nifcloud/network/router.go new file mode 100644 index 000000000000..6804820381d0 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/router.go @@ -0,0 +1,37 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptRouters(modules terraform.Modules) []network.Router { + var routers []network.Router + + for _, resource := range modules.GetResourcesByType("nifcloud_router") { + routers = append(routers, adaptRouter(resource)) + } + return routers +} + +func adaptRouter(resource *terraform.Block) network.Router { + var networkInterfaces []network.NetworkInterface + networkInterfaceBlocks := resource.GetBlocks("network_interface") + for _, networkInterfaceBlock := range networkInterfaceBlocks { + networkInterfaces = append( + networkInterfaces, + network.NetworkInterface{ + Metadata: networkInterfaceBlock.GetMetadata(), + NetworkID: networkInterfaceBlock.GetAttribute("network_id").AsStringValueOrDefault("", resource), + IsVipNetwork: types.Bool(false, networkInterfaceBlock.GetMetadata()), + }, + ) + } + + return network.Router{ + Metadata: resource.GetMetadata(), + SecurityGroup: 
resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + NetworkInterfaces: networkInterfaces, + } +} diff --git a/internal/adapters/terraform/nifcloud/network/router_test.go b/internal/adapters/terraform/nifcloud/network/router_test.go new file mode 100644 index 000000000000..3c2fe55ab92b --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/router_test.go @@ -0,0 +1,70 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptRouters(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.Router + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_router" "example" { + security_group = "example-security-group" + network_interface { + network_id = "net-COMMON_PRIVATE" + } + } +`, + expected: []network.Router{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_router" "example" { + network_interface { + } + } +`, + + expected: []network.Router{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkInterfaces: []network.NetworkInterface{ + { + Metadata: defsecTypes.NewTestMetadata(), + NetworkID: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, 
test.terraform, ".tf") + adapted := adaptRouters(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway.go b/internal/adapters/terraform/nifcloud/network/vpn_gateway.go new file mode 100644 index 000000000000..ca607a646ea0 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/vpn_gateway.go @@ -0,0 +1,22 @@ +package network + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptVpnGateways(modules terraform.Modules) []network.VpnGateway { + var vpnGateways []network.VpnGateway + + for _, resource := range modules.GetResourcesByType("nifcloud_vpn_gateway") { + vpnGateways = append(vpnGateways, adaptVpnGateway(resource)) + } + return vpnGateways +} + +func adaptVpnGateway(resource *terraform.Block) network.VpnGateway { + return network.VpnGateway{ + Metadata: resource.GetMetadata(), + SecurityGroup: resource.GetAttribute("security_group").AsStringValueOrDefault("", resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go new file mode 100644 index 000000000000..d589bd2fd7c6 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/network/vpn_gateway_test.go @@ -0,0 +1,53 @@ +package network + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/network" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptVpnGateways(t *testing.T) { + tests := []struct { + name string + terraform string + expected []network.VpnGateway + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_vpn_gateway" "example" { + security_group = "example-security-group" + } +`, + expected: 
[]network.VpnGateway{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("example-security-group", defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_vpn_gateway" "example" { + } +`, + + expected: []network.VpnGateway{{ + Metadata: defsecTypes.NewTestMetadata(), + SecurityGroup: defsecTypes.String("", defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptVpnGateways(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/nifcloud.go b/internal/adapters/terraform/nifcloud/nifcloud.go new file mode 100644 index 000000000000..8c9ae16a4dc4 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/nifcloud.go @@ -0,0 +1,23 @@ +package nifcloud + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/computing" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/dns" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/nas" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/network" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/rdb" + "github.com/aquasecurity/trivy/internal/adapters/terraform/nifcloud/sslcertificate" +) + +func Adapt(modules terraform.Modules) nifcloud.Nifcloud { + return nifcloud.Nifcloud{ + Computing: computing.Adapt(modules), + DNS: dns.Adapt(modules), + NAS: nas.Adapt(modules), + Network: network.Adapt(modules), + RDB: rdb.Adapt(modules), + SSLCertificate: sslcertificate.Adapt(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt.go b/internal/adapters/terraform/nifcloud/rdb/adapt.go new file 
mode 100644 index 000000000000..8c249b09b212 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/adapt.go @@ -0,0 +1,13 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) rdb.RDB { + return rdb.RDB{ + DBSecurityGroups: adaptDBSecurityGroups(modules), + DBInstances: adaptDBInstances(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/adapt_test.go b/internal/adapters/terraform/nifcloud/rdb/adapt_test.go new file mode 100644 index 000000000000..ab15a2f10747 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/adapt_test.go @@ -0,0 +1,60 @@ +package rdb + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_db_instance" "example" { + publicly_accessible = false + engine = "MySQL" + engine_version = "5.7.15" + backup_retention_period = 2 + network_id = "example-network" +} + +resource "nifcloud_db_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.DBInstances, 1) + require.Len(t, adapted.DBSecurityGroups, 1) + + dbInstance := adapted.DBInstances[0] + dbSecurityGroup := adapted.DBSecurityGroups[0] + + assert.Equal(t, 3, dbInstance.PublicAccess.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, dbInstance.PublicAccess.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 4, dbInstance.Engine.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 4, dbInstance.Engine.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 5, dbInstance.EngineVersion.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 5, 
dbInstance.EngineVersion.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 6, dbInstance.BackupRetentionPeriodDays.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 6, dbInstance.BackupRetentionPeriodDays.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 7, dbInstance.NetworkID.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 7, dbInstance.NetworkID.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 11, dbSecurityGroup.Description.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 11, dbSecurityGroup.Description.GetMetadata().Range().GetEndLine()) + + assert.Equal(t, 14, dbSecurityGroup.CIDRs[0].GetMetadata().Range().GetStartLine()) + assert.Equal(t, 14, dbSecurityGroup.CIDRs[0].GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance.go b/internal/adapters/terraform/nifcloud/rdb/db_instance.go new file mode 100644 index 000000000000..90662cb8a4ed --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_instance.go @@ -0,0 +1,26 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func adaptDBInstances(modules terraform.Modules) []rdb.DBInstance { + var dbInstances []rdb.DBInstance + + for _, resource := range modules.GetResourcesByType("nifcloud_db_instance") { + dbInstances = append(dbInstances, adaptDBInstance(resource)) + } + return dbInstances +} + +func adaptDBInstance(resource *terraform.Block) rdb.DBInstance { + return rdb.DBInstance{ + Metadata: resource.GetMetadata(), + BackupRetentionPeriodDays: resource.GetAttribute("backup_retention_period").AsIntValueOrDefault(0, resource), + Engine: resource.GetAttribute("engine").AsStringValueOrDefault("", resource), + EngineVersion: resource.GetAttribute("engine_version").AsStringValueOrDefault("", resource), + NetworkID: resource.GetAttribute("network_id").AsStringValueOrDefault("net-COMMON_PRIVATE", resource), + PublicAccess: 
resource.GetAttribute("publicly_accessible").AsBoolValueOrDefault(true, resource), + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go new file mode 100644 index 000000000000..5878dc3fdc14 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_instance_test.go @@ -0,0 +1,66 @@ +package rdb + +import ( + "testing" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptDBInstances(t *testing.T) { + tests := []struct { + name string + terraform string + expected []rdb.DBInstance + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_db_instance" "example" { + backup_retention_period = 2 + engine = "MySQL" + engine_version = "5.7.15" + publicly_accessible = false + network_id = "example-network" + } +`, + expected: []rdb.DBInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(2, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String("MySQL", defsecTypes.NewTestMetadata()), + EngineVersion: defsecTypes.String("5.7.15", defsecTypes.NewTestMetadata()), + NetworkID: defsecTypes.String("example-network", defsecTypes.NewTestMetadata()), + PublicAccess: defsecTypes.Bool(false, defsecTypes.NewTestMetadata()), + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_db_instance" "example" { + } +`, + + expected: []rdb.DBInstance{{ + Metadata: defsecTypes.NewTestMetadata(), + BackupRetentionPeriodDays: defsecTypes.Int(0, defsecTypes.NewTestMetadata()), + Engine: defsecTypes.String("", defsecTypes.NewTestMetadata()), + EngineVersion: defsecTypes.String("", defsecTypes.NewTestMetadata()), + NetworkID: defsecTypes.String("net-COMMON_PRIVATE", defsecTypes.NewTestMetadata()), + 
PublicAccess: defsecTypes.Bool(true, defsecTypes.NewTestMetadata()), + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDBInstances(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group.go b/internal/adapters/terraform/nifcloud/rdb/db_security_group.go new file mode 100644 index 000000000000..4e476c31176d --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_security_group.go @@ -0,0 +1,30 @@ +package rdb + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptDBSecurityGroups(modules terraform.Modules) []rdb.DBSecurityGroup { + var dbSecurityGroups []rdb.DBSecurityGroup + + for _, resource := range modules.GetResourcesByType("nifcloud_db_security_group") { + dbSecurityGroups = append(dbSecurityGroups, adaptDBSecurityGroup(resource)) + } + return dbSecurityGroups +} + +func adaptDBSecurityGroup(resource *terraform.Block) rdb.DBSecurityGroup { + var cidrs []defsecTypes.StringValue + + for _, rule := range resource.GetBlocks("rule") { + cidrs = append(cidrs, rule.GetAttribute("cidr_ip").AsStringValueOrDefault("", resource)) + } + + return rdb.DBSecurityGroup{ + Metadata: resource.GetMetadata(), + Description: resource.GetAttribute("description").AsStringValueOrDefault("", resource), + CIDRs: cidrs, + } +} diff --git a/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go new file mode 100644 index 000000000000..148fe2cc8ddb --- /dev/null +++ b/internal/adapters/terraform/nifcloud/rdb/db_security_group_test.go @@ -0,0 +1,66 @@ +package rdb + +import ( + "testing" + + defsecTypes 
"github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/rdb" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_adaptDBSecurityGroups(t *testing.T) { + tests := []struct { + name string + terraform string + expected []rdb.DBSecurityGroup + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_db_security_group" "example" { + description = "memo" + + rule { + cidr_ip = "0.0.0.0/0" + } + } +`, + expected: []rdb.DBSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("memo", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("0.0.0.0/0", defsecTypes.NewTestMetadata()), + }, + }}, + }, + { + name: "defaults", + terraform: ` + resource "nifcloud_db_security_group" "example" { + rule { + } + } +`, + + expected: []rdb.DBSecurityGroup{{ + Metadata: defsecTypes.NewTestMetadata(), + Description: defsecTypes.String("", defsecTypes.NewTestMetadata()), + CIDRs: []defsecTypes.StringValue{ + defsecTypes.String("", defsecTypes.NewTestMetadata()), + }, + }}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + modules := tftestutil.CreateModulesFromSource(t, test.terraform, ".tf") + adapted := adaptDBSecurityGroups(modules) + testutil.AssertDefsecEqual(t, test.expected, adapted) + }) + } +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go b/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go new file mode 100644 index 000000000000..31673c121493 --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/adapt.go @@ -0,0 +1,12 @@ +package sslcertificate + +import ( + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + "github.com/aquasecurity/defsec/pkg/terraform" +) + +func Adapt(modules terraform.Modules) sslcertificate.SSLCertificate { + return 
sslcertificate.SSLCertificate{ + ServerCertificates: adaptServerCertificates(modules), + } +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go b/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go new file mode 100644 index 000000000000..9483467e47cc --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/adapt_test.go @@ -0,0 +1,28 @@ +package sslcertificate + +import ( + "testing" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLines(t *testing.T) { + src := ` +resource "nifcloud_ssl_certificate" "example" { + certificate = "generated-certificate" +} +` + + modules := tftestutil.CreateModulesFromSource(t, src, ".tf") + adapted := Adapt(modules) + + require.Len(t, adapted.ServerCertificates, 1) + + serverCertificate := adapted.ServerCertificates[0] + + assert.Equal(t, 3, serverCertificate.Expiration.GetMetadata().Range().GetStartLine()) + assert.Equal(t, 3, serverCertificate.Expiration.GetMetadata().Range().GetEndLine()) +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go new file mode 100644 index 000000000000..c90570cee9ea --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate.go @@ -0,0 +1,41 @@ +package sslcertificate + +import ( + "crypto/x509" + "encoding/pem" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +func adaptServerCertificates(modules terraform.Modules) []sslcertificate.ServerCertificate { + var serverCertificates []sslcertificate.ServerCertificate + + for _, resource := range modules.GetResourcesByType("nifcloud_ssl_certificate") { + serverCertificates = 
append(serverCertificates, adaptServerCertificate(resource)) + } + return serverCertificates +} + +func adaptServerCertificate(resource *terraform.Block) sslcertificate.ServerCertificate { + certificateAttr := resource.GetAttribute("certificate") + expiryDateVal := defsecTypes.TimeUnresolvable(resource.GetMetadata()) + + if certificateAttr.IsNotNil() { + expiryDateVal = defsecTypes.TimeUnresolvable(certificateAttr.GetMetadata()) + if certificateAttr.IsString() { + certificateString := certificateAttr.Value().AsString() + if block, _ := pem.Decode([]byte(certificateString)); block != nil { + if cert, err := x509.ParseCertificate(block.Bytes); err == nil { + expiryDateVal = defsecTypes.Time(cert.NotAfter, certificateAttr.GetMetadata()) + } + } + } + } + + return sslcertificate.ServerCertificate{ + Metadata: resource.GetMetadata(), + Expiration: expiryDateVal, + } +} diff --git a/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go new file mode 100644 index 000000000000..661e452a019d --- /dev/null +++ b/internal/adapters/terraform/nifcloud/sslcertificate/server_certificate_test.go @@ -0,0 +1,72 @@ +package sslcertificate + +import ( + "testing" + "time" + + "github.com/aquasecurity/defsec/pkg/providers/nifcloud/sslcertificate" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/internal/adapters/terraform/tftestutil" + + "github.com/aquasecurity/trivy/test/testutil" +) + +const certificate = ` +-----BEGIN CERTIFICATE----- +MIIB0zCCAX2gAwIBAgIJAI/M7BYjwB+uMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTIwOTEyMjE1MjAyWhcNMTUwOTEyMjE1MjAyWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANLJ +hPHhITqQbPklG3ibCVxwGMRfp/v4XqhfdQHdcVfHap6NQ5Wok/4xIA+ui35/MmNa 
+rtNuC+BdZ1tMuVCPFZcCAwEAAaNQME4wHQYDVR0OBBYEFJvKs8RfJaXTH08W+SGv +zQyKn0H8MB8GA1UdIwQYMBaAFJvKs8RfJaXTH08W+SGvzQyKn0H8MAwGA1UdEwQF +MAMBAf8wDQYJKoZIhvcNAQEFBQADQQBJlffJHybjDGxRMqaRmDhX0+6v02TUKZsW +r5QuVbpQhH6u+0UgcW0jp9QwpxoPTLTWGXEWBBBurxFwiCBhkQ+V +-----END CERTIFICATE----- +` + +func Test_adaptServerCertificates(t *testing.T) { + tests := []struct { + name string + terraform string + expected []sslcertificate.ServerCertificate + }{ + { + name: "configured", + terraform: ` + resource "nifcloud_ssl_certificate" "example" { + certificate = < 0 { + for _, c := range csRule.Checks { + if rule.GetRule().AVDID == c.ID { + specRules = append(specRules, rule) + } + } + } + } + } + + return specRules +} + +func (r *registry) Reset() { + r.Lock() + defer r.Unlock() + r.frameworks = make(map[framework.Framework][]types.RegisteredRule) +} + +func GetFrameworkRules(fw ...framework.Framework) []types.RegisteredRule { + return coreRegistry.getFrameworkRules(fw...) +} + +func GetSpecRules(spec string) []types.RegisteredRule { + if len(spec) > 0 { + return coreRegistry.getSpecRules(spec) + } + + return GetFrameworkRules() +} diff --git a/internal/rules/register_test.go b/internal/rules/register_test.go new file mode 100644 index 000000000000..ff9c5033b4a2 --- /dev/null +++ b/internal/rules/register_test.go @@ -0,0 +1,139 @@ +package rules + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/stretchr/testify/assert" +) + +func Test_Reset(t *testing.T) { + rule := scan.Rule{} + _ = Register(rule) + assert.Equal(t, 1, len(GetFrameworkRules())) + Reset() + assert.Equal(t, 0, len(GetFrameworkRules())) +} + +func Test_Registration(t *testing.T) { + var tests = []struct { + name string + registeredFrameworks map[framework.Framework][]string + inputFrameworks []framework.Framework + expected bool + }{ + { + name: "rule without framework specified 
should be returned when no frameworks are requested", + expected: true, + }, + { + name: "rule without framework specified should not be returned when a specific framework is requested", + inputFrameworks: []framework.Framework{framework.CIS_AWS_1_2}, + expected: false, + }, + { + name: "rule without framework specified should be returned when the default framework is requested", + inputFrameworks: []framework.Framework{framework.Default}, + expected: true, + }, + { + name: "rule with default framework specified should be returned when the default framework is requested", + registeredFrameworks: map[framework.Framework][]string{framework.Default: {"1.1"}}, + inputFrameworks: []framework.Framework{framework.Default}, + expected: true, + }, + { + name: "rule with default framework specified should not be returned when a specific framework is requested", + registeredFrameworks: map[framework.Framework][]string{framework.Default: {"1.1"}}, + inputFrameworks: []framework.Framework{framework.CIS_AWS_1_2}, + expected: false, + }, + { + name: "rule with specific framework specified should not be returned when a default framework is requested", + registeredFrameworks: map[framework.Framework][]string{framework.CIS_AWS_1_2: {"1.1"}}, + inputFrameworks: []framework.Framework{framework.Default}, + expected: false, + }, + { + name: "rule with specific framework specified should be returned when the specific framework is requested", + registeredFrameworks: map[framework.Framework][]string{framework.CIS_AWS_1_2: {"1.1"}}, + inputFrameworks: []framework.Framework{framework.CIS_AWS_1_2}, + expected: true, + }, + { + name: "rule with multiple frameworks specified should be returned when the specific framework is requested", + registeredFrameworks: map[framework.Framework][]string{framework.CIS_AWS_1_2: {"1.1"}, "blah": {"1.2"}}, + inputFrameworks: []framework.Framework{framework.CIS_AWS_1_2}, + expected: true, + }, + { + name: "rule with multiple frameworks specified should be 
returned only once when multiple matching frameworks are requested", + registeredFrameworks: map[framework.Framework][]string{framework.CIS_AWS_1_2: {"1.1"}, "blah": {"1.2"}, "something": {"1.3"}}, + inputFrameworks: []framework.Framework{framework.CIS_AWS_1_2, "blah", "other"}, + expected: true, + }, + } + + for i, test := range tests { + t.Run(test.name, func(t *testing.T) { + Reset() + rule := scan.Rule{ + AVDID: fmt.Sprintf("%d-%s", i, test.name), + Frameworks: test.registeredFrameworks, + } + _ = Register(rule) + var found bool + for _, matchedRule := range GetFrameworkRules(test.inputFrameworks...) { + if matchedRule.GetRule().AVDID == rule.AVDID { + assert.False(t, found, "rule should not be returned more than once") + found = true + } + } + assert.Equal(t, test.expected, found, "rule should be returned if it matches any of the input frameworks") + }) + } +} + +func Test_Deregistration(t *testing.T) { + Reset() + registrationA := Register(scan.Rule{ + AVDID: "A", + }) + registrationB := Register(scan.Rule{ + AVDID: "B", + }) + assert.Equal(t, 2, len(GetFrameworkRules())) + Deregister(registrationA) + actual := GetFrameworkRules() + require.Equal(t, 1, len(actual)) + assert.Equal(t, "B", actual[0].GetRule().AVDID) + Deregister(registrationB) + assert.Equal(t, 0, len(GetFrameworkRules())) +} + +func Test_DeregistrationMultipleFrameworks(t *testing.T) { + Reset() + registrationA := Register(scan.Rule{ + AVDID: "A", + }) + registrationB := Register(scan.Rule{ + AVDID: "B", + Frameworks: map[framework.Framework][]string{ + "a": nil, + "b": nil, + "c": nil, + framework.Default: nil, + }, + }) + assert.Equal(t, 2, len(GetFrameworkRules())) + Deregister(registrationA) + actual := GetFrameworkRules() + require.Equal(t, 1, len(actual)) + assert.Equal(t, "B", actual[0].GetRule().AVDID) + Deregister(registrationB) + assert.Equal(t, 0, len(GetFrameworkRules())) +} diff --git a/pkg/fanal/analyzer/config/terraform/terraform.go 
b/pkg/fanal/analyzer/config/terraform/terraform.go index e684ca1017c0..363d35de87fe 100644 --- a/pkg/fanal/analyzer/config/terraform/terraform.go +++ b/pkg/fanal/analyzer/config/terraform/terraform.go @@ -3,9 +3,9 @@ package terraform import ( "os" - "github.com/aquasecurity/trivy-iac/pkg/detection" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config" + "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/trivy/pkg/misconf" ) diff --git a/pkg/fanal/analyzer/const.go b/pkg/fanal/analyzer/const.go index 88774ac15cbc..b3a777bc3387 100644 --- a/pkg/fanal/analyzer/const.go +++ b/pkg/fanal/analyzer/const.go @@ -1,6 +1,6 @@ package analyzer -import "github.com/aquasecurity/trivy-iac/pkg/detection" +import "github.com/aquasecurity/trivy/pkg/iac/detection" type Type string diff --git a/pkg/fanal/secret/builtin-rules.go b/pkg/fanal/secret/builtin-rules.go index b868967b6696..0d354cfecd6c 100644 --- a/pkg/fanal/secret/builtin-rules.go +++ b/pkg/fanal/secret/builtin-rules.go @@ -5,8 +5,8 @@ import ( "github.com/samber/lo" - defsecRules "github.com/aquasecurity/trivy-iac/pkg/rules" "github.com/aquasecurity/trivy/pkg/fanal/types" + defsecRules "github.com/aquasecurity/trivy/pkg/iac/rules" ) var ( diff --git a/pkg/iac/detection/detect.go b/pkg/iac/detection/detect.go new file mode 100644 index 000000000000..fcf615a6c54d --- /dev/null +++ b/pkg/iac/detection/detect.go @@ -0,0 +1,296 @@ +package detection + +import ( + "bytes" + "encoding/json" + "io" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" +) + +type FileType string + +const ( + FileTypeCloudFormation FileType = "cloudformation" + FileTypeTerraform FileType = "terraform" + FileTypeTerraformPlan FileType = "terraformplan" + FileTypeDockerfile FileType = "dockerfile" + 
FileTypeKubernetes FileType = "kubernetes" + FileTypeRbac FileType = "rbac" + FileTypeYAML FileType = "yaml" + FileTypeTOML FileType = "toml" + FileTypeJSON FileType = "json" + FileTypeHelm FileType = "helm" + FileTypeAzureARM FileType = "azure-arm" +) + +var matchers = map[FileType]func(name string, r io.ReadSeeker) bool{} + +// nolint +func init() { + + matchers[FileTypeJSON] = func(name string, r io.ReadSeeker) bool { + ext := filepath.Ext(filepath.Base(name)) + if !strings.EqualFold(ext, ".json") { + return false + } + if resetReader(r) == nil { + return true + } + + var content interface{} + return json.NewDecoder(r).Decode(&content) == nil + } + + matchers[FileTypeYAML] = func(name string, r io.ReadSeeker) bool { + ext := filepath.Ext(filepath.Base(name)) + if !strings.EqualFold(ext, ".yaml") && !strings.EqualFold(ext, ".yml") { + return false + } + if resetReader(r) == nil { + return true + } + + var content interface{} + return yaml.NewDecoder(r).Decode(&content) == nil + } + + matchers[FileTypeHelm] = func(name string, r io.ReadSeeker) bool { + if IsHelmChartArchive(name, r) { + return true + } + + return strings.HasSuffix(name, "hart.yaml") + } + + matchers[FileTypeTOML] = func(name string, r io.ReadSeeker) bool { + ext := filepath.Ext(filepath.Base(name)) + return strings.EqualFold(ext, ".toml") + } + + matchers[FileTypeTerraform] = func(name string, _ io.ReadSeeker) bool { + return IsTerraformFile(name) + } + + matchers[FileTypeTerraformPlan] = func(name string, r io.ReadSeeker) bool { + if IsType(name, r, FileTypeJSON) { + if resetReader(r) == nil { + return false + } + + contents := make(map[string]interface{}) + err := json.NewDecoder(r).Decode(&contents) + if err == nil { + if _, ok := contents["terraform_version"]; ok { + _, stillOk := contents["format_version"] + return stillOk + } + } + } + return false + } + + matchers[FileTypeCloudFormation] = func(name string, r io.ReadSeeker) bool { + sniff := struct { + Resources 
map[string]map[string]interface{} `json:"Resources" yaml:"Resources"` + }{} + + switch { + case IsType(name, r, FileTypeYAML): + if resetReader(r) == nil { + return false + } + if err := yaml.NewDecoder(r).Decode(&sniff); err != nil { + return false + } + case IsType(name, r, FileTypeJSON): + if resetReader(r) == nil { + return false + } + if err := json.NewDecoder(r).Decode(&sniff); err != nil { + return false + } + default: + return false + } + + return sniff.Resources != nil + } + + matchers[FileTypeAzureARM] = func(name string, r io.ReadSeeker) bool { + + if resetReader(r) == nil { + return false + } + + sniff := struct { + ContentType string `json:"contentType"` + Parameters map[string]interface{} `json:"parameters"` + Resources []interface{} `json:"resources"` + }{} + metadata := types.NewUnmanagedMetadata() + if err := armjson.UnmarshalFromReader(r, &sniff, &metadata); err != nil { + return false + } + + return (sniff.Parameters != nil && len(sniff.Parameters) > 0) || + (sniff.Resources != nil && len(sniff.Resources) > 0) + } + + matchers[FileTypeDockerfile] = func(name string, _ io.ReadSeeker) bool { + requiredFiles := []string{"Dockerfile", "Containerfile"} + for _, requiredFile := range requiredFiles { + base := filepath.Base(name) + ext := filepath.Ext(base) + if strings.TrimSuffix(base, ext) == requiredFile { + return true + } + if strings.EqualFold(ext, "."+requiredFile) { + return true + } + } + return false + } + + matchers[FileTypeHelm] = func(name string, r io.ReadSeeker) bool { + helmFiles := []string{"Chart.yaml", ".helmignore", "values.schema.json", "NOTES.txt"} + for _, expected := range helmFiles { + if strings.HasSuffix(name, expected) { + return true + } + } + helmFileExtensions := []string{".yaml", ".tpl"} + ext := filepath.Ext(filepath.Base(name)) + for _, expected := range helmFileExtensions { + if strings.EqualFold(ext, expected) { + return true + } + } + return IsHelmChartArchive(name, r) + } + + matchers[FileTypeKubernetes] = func(name 
string, r io.ReadSeeker) bool { + + if !IsType(name, r, FileTypeYAML) && !IsType(name, r, FileTypeJSON) { + return false + } + if resetReader(r) == nil { + return false + } + + expectedProperties := []string{"apiVersion", "kind", "metadata"} + + if IsType(name, r, FileTypeJSON) { + if resetReader(r) == nil { + return false + } + + var result map[string]interface{} + if err := json.NewDecoder(r).Decode(&result); err != nil { + return false + } + + for _, expected := range expectedProperties { + if _, ok := result[expected]; !ok { + return false + } + } + return true + } + + // at this point, we need to inspect bytes + var buf bytes.Buffer + if _, err := io.Copy(&buf, r); err != nil { + return false + } + data := buf.Bytes() + + marker := "\n---\n" + altMarker := "\r\n---\r\n" + if bytes.Contains(data, []byte(altMarker)) { + marker = altMarker + } + + for _, partial := range strings.Split(string(data), marker) { + var result map[string]interface{} + if err := yaml.Unmarshal([]byte(partial), &result); err != nil { + continue + } + match := true + for _, expected := range expectedProperties { + if _, ok := result[expected]; !ok { + match = false + break + } + } + if match { + return true + } + } + + return false + } +} + +func IsTerraformFile(path string) bool { + for _, ext := range []string{".tf", ".tf.json", ".tfvars"} { + if strings.HasSuffix(path, ext) { + return true + } + } + + return false +} + +func IsType(name string, r io.ReadSeeker, t FileType) bool { + r = ensureSeeker(r) + f, ok := matchers[t] + if !ok { + return false + } + return f(name, r) +} + +func GetTypes(name string, r io.ReadSeeker) []FileType { + var matched []FileType + r = ensureSeeker(r) + for check, f := range matchers { + if f(name, r) { + matched = append(matched, check) + } + resetReader(r) + } + return matched +} + +func ensureSeeker(r io.Reader) io.ReadSeeker { + if r == nil { + return nil + } + if seeker, ok := r.(io.ReadSeeker); ok { + return seeker + } + + var buf bytes.Buffer + if 
_, err := io.Copy(&buf, r); err == nil { + return bytes.NewReader(buf.Bytes()) + } + + return nil +} + +func resetReader(r io.Reader) io.ReadSeeker { + if r == nil { + return nil + } + if seeker, ok := r.(io.ReadSeeker); ok { + _, _ = seeker.Seek(0, 0) + return seeker + } + return ensureSeeker(r) +} diff --git a/pkg/iac/detection/detect_test.go b/pkg/iac/detection/detect_test.go new file mode 100644 index 000000000000..6a38dc014731 --- /dev/null +++ b/pkg/iac/detection/detect_test.go @@ -0,0 +1,410 @@ +package detection + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Detection(t *testing.T) { + tests := []struct { + name string + path string + r io.ReadSeeker + expected []FileType + }{ + { + name: "text file, no reader", + path: "something.txt", + expected: nil, + }, + { + name: "text file, with reader", + path: "something.txt", + r: strings.NewReader("some file content"), + expected: nil, + }, + { + name: "terraform, no reader", + path: "main.tf", + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform, with reader", + path: "main.tf", + r: strings.NewReader("some file content"), + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform json, no reader", + path: "main.tf.json", + expected: []FileType{ + FileTypeTerraform, + FileTypeJSON, + }, + }, + { + name: "terraform json, with reader", + path: "main.tf.json", + r: strings.NewReader(` +{ + "variable": { + "example": { + "default": "hello" + } + } +} +`), + expected: []FileType{ + FileTypeTerraform, + FileTypeJSON, + }, + }, + { + name: "terraform vars, no reader", + path: "main.tfvars", + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "terraform vars, with reader", + path: "main.tfvars", + r: strings.NewReader("some_var = \"some value\""), + expected: []FileType{ + FileTypeTerraform, + }, + }, + { + name: "cloudformation, no reader", + path: "main.yaml", + expected: 
[]FileType{ + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "terraform plan, with reader", + path: "plan.json", + r: strings.NewReader(`{ + "format_version": "0.2", + "terraform_version": "1.0.3", + "variables": { + "bucket_name": { + "value": "tfsec-plan-testing" + } + }, + "planned_values": {}, + "resource_changes": [], + "prior_state": {}, + "configuration": {} + }`), + expected: []FileType{ + FileTypeTerraformPlan, + FileTypeJSON, + }, + }, + { + name: "cloudformation, with reader", + path: "main.yaml", + r: strings.NewReader(`--- +AWSTemplateFormatVersion: 2010-09-09 + +Description: CodePipeline for continuous integration and continuous deployment + +Parameters: + RepositoryName: + Type: String + Description: Name of the CodeCommit repository + BuildDockerImage: + Type: String + Default: aws/codebuild/ubuntu-base:14.04 + Description: Docker image to use for the build phase + DeployDockerImage: + Type: String + Default: aws/codebuild/ubuntu-base:14.04 + Description: Docker image to use for the deployment phase + +Resources: + PipelineS3Bucket: + Type: AWS::S3::Bucket +`), + expected: []FileType{ + FileTypeCloudFormation, + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "JSON with Resources, not cloudformation", + path: "whatever.json", + r: strings.NewReader(`{ + "Resources": ["something"] +}`), + expected: []FileType{ + FileTypeJSON, + }, + }, + { + name: "Dockerfile, no reader", + path: "Dockerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Containerfile, no reader", + path: "Containerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Dockerfile, reader", + path: "Dockerfile", + r: strings.NewReader("FROM ubuntu\n"), + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "Dockerfile extension", + path: "lol.Dockerfile", + r: nil, + expected: []FileType{ + FileTypeDockerfile, + }, + }, + { + name: "kubernetes, no reader", + path: "k8s.yml", + r: nil, + expected: 
[]FileType{ + FileTypeYAML, + }, + }, + { + name: "kubernetes, reader", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment + labels: + app: nginx +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeYAML, + }, + }, + { + name: "kubernetes, reader, JSON", + path: "k8s.json", + r: strings.NewReader(`{ + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "name": "nginx-deployment", + "labels": { + "app": "nginx" + } + }, + "spec": { + "replicas": 3, + "selector": { + "matchLabels": { + "app": "nginx" + } + }, + "template": { + "metadata": { + "labels": { + "app": "nginx" + } + }, + "spec": { + "containers": [ + { + "name": "nginx", + "image": "nginx:1.14.2", + "ports": [ + { + "containerPort": 80 + } + ] + } + ] + } + } + } +}`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeJSON, + }, + }, + { + name: "YAML, no reader", + path: "file.yaml", + r: nil, + expected: []FileType{ + FileTypeYAML, + FileTypeHelm, + }, + }, + { + name: "YML, no reader", + path: "file.yml", + r: nil, + expected: []FileType{ + FileTypeYAML, + }, + }, + { + name: "YML uppercase", + path: "file.YML", + r: nil, + expected: []FileType{ + FileTypeYAML, + }, + }, + { + name: "TOML, no reader", + path: "file.toml", + r: nil, + expected: []FileType{ + FileTypeTOML, + }, + }, + { + name: "JSON, no reader", + path: "file.json", + r: nil, + expected: []FileType{ + FileTypeJSON, + }, + }, + { + name: "kubernetes, configmap", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: v1 +kind: ConfigMap +metadata: + name: test + namespace: default +data: + AWS_ACCESS_KEY_ID: "XXX" + AWS_SECRET_ACCESS_KEY: "XXX"`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeYAML, + }, + }, + { + name: "kubernetes, 
clusterRole", + path: "k8s.yml", + r: strings.NewReader(`apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + annotations: + rbac.authorization.kubernetes.io/autoupdate: "true" + labels: + kubernetes.io/bootstrapping: rbac-defaults + rbac.authorization.k8s.io/aggregate-to-edit: "true" + name: view +rules: +- apiGroups: + - networking.k8s.io + resources: + - ingresses + - ingresses/status + - networkpolicies + verbs: + - get + - list + - watch`), + expected: []FileType{ + FileTypeKubernetes, + FileTypeYAML, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + t.Run("GetTypes", func(t *testing.T) { + actualDetections := GetTypes(test.path, test.r) + assert.Equal(t, len(test.expected), len(actualDetections)) + for _, expected := range test.expected { + resetReader(test.r) + var found bool + for _, actual := range actualDetections { + if actual == expected { + found = true + break + } + } + assert.True(t, found, "%s should be detected", expected) + } + }) + for _, expected := range test.expected { + resetReader(test.r) + t.Run(fmt.Sprintf("IsType_%s", expected), func(t *testing.T) { + assert.True(t, IsType(test.path, test.r, expected)) + }) + } + t.Run("IsType_invalid", func(t *testing.T) { + resetReader(test.r) + assert.False(t, IsType(test.path, test.r, "invalid")) + }) + }) + } +} + +func BenchmarkIsType_SmallFile(b *testing.B) { + data, err := os.ReadFile(fmt.Sprintf("./testdata/%s", "small.file")) + assert.Nil(b, err) + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = IsType(fmt.Sprintf("./testdata/%s", "small.file"), bytes.NewReader(data), FileTypeAzureARM) + } +} + +func BenchmarkIsType_BigFile(b *testing.B) { + data, err := os.ReadFile(fmt.Sprintf("./testdata/%s", "big.file")) + assert.Nil(b, err) + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = IsType(fmt.Sprintf("./testdata/%s", "big.file"), bytes.NewReader(data), FileTypeAzureARM) + } +} diff --git 
a/pkg/iac/detection/peek.go b/pkg/iac/detection/peek.go new file mode 100644 index 000000000000..0e76115d9bd8 --- /dev/null +++ b/pkg/iac/detection/peek.go @@ -0,0 +1,53 @@ +package detection + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" + "strings" +) + +func IsHelmChartArchive(path string, file io.Reader) bool { + + if !IsArchive(path) { + return false + } + + var err error + var fr = file + + if IsZip(path) { + if fr, err = gzip.NewReader(file); err != nil { + return false + } + } + tr := tar.NewReader(fr) + + if tr == nil { + return false + } + + for { + header, err := tr.Next() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return false + } + + if header.Typeflag == tar.TypeReg && strings.HasSuffix(header.Name, "Chart.yaml") { + return true + } + } + return false +} + +func IsArchive(path string) bool { + return strings.HasSuffix(path, ".tar") || IsZip(path) +} + +func IsZip(path string) bool { + return strings.HasSuffix(path, ".tgz") || strings.HasSuffix(path, ".tar.gz") +} diff --git a/pkg/iac/detection/testdata/big.file b/pkg/iac/detection/testdata/big.file new file mode 100644 index 0000000000000000000000000000000000000000..e7f3c2d40ecc31921643a456cda2de3a907b680a GIT binary patch literal 5120 scmeIu0Sy2E0K%a6Pi+qe5hx58Fkrxd0RsjM7%*VKfB^#r3>bJF7!&{i0RR91 literal 0 HcmV?d00001 diff --git a/pkg/iac/detection/testdata/small.file b/pkg/iac/detection/testdata/small.file new file mode 100644 index 000000000000..d8ae428a4800 --- /dev/null +++ b/pkg/iac/detection/testdata/small.file @@ -0,0 +1,3 @@ +{ + "content": "foo bar baz" +} \ No newline at end of file diff --git a/pkg/iac/extrafs/extrafs.go b/pkg/iac/extrafs/extrafs.go new file mode 100644 index 000000000000..e3956c193bbe --- /dev/null +++ b/pkg/iac/extrafs/extrafs.go @@ -0,0 +1,54 @@ +package extrafs + +import ( + "io/fs" + "os" + "path/filepath" +) + +/* + Go does not currently support symlinks in io/fs. 
+ We work around this by wrapping the fs.FS returned by os.DirFS with our own type which bolts on the ReadLinkFS +*/ + +type OSFS interface { + fs.FS + fs.StatFS +} + +type ReadLinkFS interface { + ResolveSymlink(name, dir string) (string, error) +} + +type FS interface { + OSFS + ReadLinkFS +} + +type filesystem struct { + root string + underlying OSFS +} + +func OSDir(path string) FS { + return &filesystem{ + root: path, + underlying: os.DirFS(path).(OSFS), + } +} + +func (f *filesystem) Open(name string) (fs.File, error) { + return f.underlying.Open(name) +} + +func (f *filesystem) Stat(name string) (fs.FileInfo, error) { + return f.underlying.Stat(name) +} + +func (f *filesystem) ResolveSymlink(name, dir string) (string, error) { + link, err := os.Readlink(filepath.Join(f.root, dir, name)) + if err == nil { + return filepath.Join(dir, link), nil + } + return name, nil +} diff --git a/pkg/iac/rego/build.go b/pkg/iac/rego/build.go new file mode 100644 index 000000000000..4d51b9a2d164 --- /dev/null +++ b/pkg/iac/rego/build.go @@ -0,0 +1,84 @@ +package rego + +import ( + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/util" +) + +func BuildSchemaSetFromPolicies(policies map[string]*ast.Module, paths []string, fsys fs.FS) (*ast.SchemaSet, bool, error) { + schemaSet := ast.NewSchemaSet() + schemaSet.Put(ast.MustParseRef("schema.input"), map[string]interface{}{}) // for backwards compat only + var customFound bool + for _, policy := range policies { + for _, annotation := range policy.Annotations { + for _, ss := range annotation.Schemas { + schemaName, err := ss.Schema.Ptr() + if err != nil { + continue + } + if schemaName != "input" { + if schema, ok := schemas.SchemaMap[types.Source(schemaName)]; ok { + customFound = true + schemaSet.Put(ast.MustParseRef(ss.Schema.String()), 
util.MustUnmarshalJSON([]byte(schema))) + } else { + b, err := findSchemaInFS(paths, fsys, schemaName) + if err != nil { + return schemaSet, true, err + } + if b != nil { + customFound = true + schemaSet.Put(ast.MustParseRef(ss.Schema.String()), util.MustUnmarshalJSON(b)) + } + } + } + } + } + } + + return schemaSet, customFound, nil +} + +// findSchemaInFS tries to find the schema anywhere in the specified FS +func findSchemaInFS(paths []string, srcFS fs.FS, schemaName string) ([]byte, error) { + var schema []byte + for _, path := range paths { + if err := fs.WalkDir(srcFS, sanitisePath(path), func(path string, info fs.DirEntry, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + if !IsJSONFile(info.Name()) { + return nil + } + if info.Name() == schemaName+".json" { + schema, err = fs.ReadFile(srcFS, filepath.ToSlash(path)) + if err != nil { + return err + } + return nil + } + return nil + }); err != nil { + return nil, err + } + } + return schema, nil +} + +func IsJSONFile(name string) bool { + return strings.HasSuffix(name, ".json") +} + +func sanitisePath(path string) string { + vol := filepath.VolumeName(path) + path = strings.TrimPrefix(path, vol) + return strings.TrimPrefix(strings.TrimPrefix(filepath.ToSlash(path), "./"), "/") +} diff --git a/pkg/iac/rego/custom.go b/pkg/iac/rego/custom.go new file mode 100644 index 000000000000..c15b05a4577f --- /dev/null +++ b/pkg/iac/rego/custom.go @@ -0,0 +1,109 @@ +package rego + +import ( + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/rego" + "github.com/open-policy-agent/opa/types" +) + +func init() { + rego.RegisterBuiltin2(®o.Function{ + Name: "result.new", + Decl: types.NewFunction(types.Args(types.S, types.A), types.A), + }, + createResult, + ) + + rego.RegisterBuiltin1(®o.Function{ + Name: "isManaged", + Decl: types.NewFunction(types.Args(types.A), types.B), + }, + func(c rego.BuiltinContext, resource *ast.Term) (*ast.Term, error) { 
+ metadata, err := createResult(c, ast.StringTerm(""), resource) + if err != nil { + return nil, err + } + return metadata.Get(ast.StringTerm("managed")), nil + }, + ) +} + +func createResult(ctx rego.BuiltinContext, msg, cause *ast.Term) (*ast.Term, error) { + + metadata := map[string]*ast.Term{ + "startline": ast.IntNumberTerm(0), + "endline": ast.IntNumberTerm(0), + "sourceprefix": ast.StringTerm(""), + "filepath": ast.StringTerm(""), + "explicit": ast.BooleanTerm(false), + "managed": ast.BooleanTerm(true), + "fskey": ast.StringTerm(""), + "resource": ast.StringTerm(""), + "parent": ast.NullTerm(), + } + if msg != nil { + metadata["msg"] = msg + } + + // universal + input := cause.Get(ast.StringTerm("__defsec_metadata")) + if input == nil { + // docker + input = cause + } + metadata = updateMetadata(metadata, input) + + if term := input.Get(ast.StringTerm("parent")); term != nil { + var err error + metadata["parent"], err = createResult(ctx, nil, term) + if err != nil { + return nil, err + } + } + + var values [][2]*ast.Term + for key, val := range metadata { + values = append(values, [2]*ast.Term{ + ast.StringTerm(key), + val, + }) + } + return ast.ObjectTerm(values...), nil +} + +func updateMetadata(metadata map[string]*ast.Term, input *ast.Term) map[string]*ast.Term { + if term := input.Get(ast.StringTerm("startline")); term != nil { + metadata["startline"] = term + } + if term := input.Get(ast.StringTerm("StartLine")); term != nil { + metadata["startline"] = term + } + if term := input.Get(ast.StringTerm("endline")); term != nil { + metadata["endline"] = term + } + if term := input.Get(ast.StringTerm("EndLine")); term != nil { + metadata["endline"] = term + } + if term := input.Get(ast.StringTerm("filepath")); term != nil { + metadata["filepath"] = term + } + if term := input.Get(ast.StringTerm("sourceprefix")); term != nil { + metadata["sourceprefix"] = term + } + if term := input.Get(ast.StringTerm("Path")); term != nil { + metadata["filepath"] = term + } 
+ if term := input.Get(ast.StringTerm("explicit")); term != nil { + metadata["explicit"] = term + } + if term := input.Get(ast.StringTerm("managed")); term != nil { + metadata["managed"] = term + } + if term := input.Get(ast.StringTerm("fskey")); term != nil { + metadata["fskey"] = term + } + if term := input.Get(ast.StringTerm("resource")); term != nil { + metadata["resource"] = term + } + return metadata +} diff --git a/pkg/iac/rego/embed.go b/pkg/iac/rego/embed.go new file mode 100644 index 000000000000..6ab9c4f2ac09 --- /dev/null +++ b/pkg/iac/rego/embed.go @@ -0,0 +1,107 @@ +package rego + +import ( + "context" + "io/fs" + "path/filepath" + "strings" + + rules2 "github.com/aquasecurity/trivy-policies/rules" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/open-policy-agent/opa/ast" +) + +func init() { + + modules, err := LoadEmbeddedPolicies() + if err != nil { + // we should panic as the policies were not embedded properly + panic(err) + } + loadedLibs, err := LoadEmbeddedLibraries() + if err != nil { + panic(err) + } + for name, policy := range loadedLibs { + modules[name] = policy + } + + RegisterRegoRules(modules) +} + +func RegisterRegoRules(modules map[string]*ast.Module) { + ctx := context.TODO() + + schemaSet, _, _ := BuildSchemaSetFromPolicies(modules, nil, nil) + + compiler := ast.NewCompiler(). + WithSchemas(schemaSet). + WithCapabilities(nil). + WithUseTypeCheckAnnotations(true) + + compiler.Compile(modules) + if compiler.Failed() { + // we should panic as the embedded rego policies are syntactically incorrect... 
+ panic(compiler.Errors) + } + + retriever := NewMetadataRetriever(compiler) + for _, module := range modules { + metadata, err := retriever.RetrieveMetadata(ctx, module) + if err != nil { + continue + } + if metadata.AVDID == "" { + continue + } + rules.Register( + metadata.ToRule(), + ) + } +} + +func LoadEmbeddedPolicies() (map[string]*ast.Module, error) { + return LoadPoliciesFromDirs(rules2.EmbeddedPolicyFileSystem, ".") +} + +func LoadEmbeddedLibraries() (map[string]*ast.Module, error) { + return LoadPoliciesFromDirs(rules2.EmbeddedLibraryFileSystem, ".") +} + +func LoadPoliciesFromDirs(target fs.FS, paths ...string) (map[string]*ast.Module, error) { + modules := make(map[string]*ast.Module) + for _, path := range paths { + if err := fs.WalkDir(target, sanitisePath(path), func(path string, info fs.DirEntry, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + + if strings.HasSuffix(filepath.Dir(filepath.ToSlash(path)), "policies/advanced/optional") { + return fs.SkipDir + } + + if !IsRegoFile(info.Name()) || IsDotFile(info.Name()) { + return nil + } + data, err := fs.ReadFile(target, filepath.ToSlash(path)) + if err != nil { + return err + } + module, err := ast.ParseModuleWithOpts(path, string(data), ast.ParserOptions{ + ProcessAnnotation: true, + }) + if err != nil { + // s.debug.Log("Failed to load module: %s, err: %s", filepath.ToSlash(path), err.Error()) + return err + } + modules[path] = module + return nil + }); err != nil { + return nil, err + } + } + return modules, nil +} diff --git a/pkg/iac/rego/embed_test.go b/pkg/iac/rego/embed_test.go new file mode 100644 index 000000000000..1f3eef11b054 --- /dev/null +++ b/pkg/iac/rego/embed_test.go @@ -0,0 +1,123 @@ +package rego + +import ( + "testing" + + rules2 "github.com/aquasecurity/trivy-policies/rules" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/open-policy-agent/opa/ast" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func Test_EmbeddedLoading(t *testing.T) { + + frameworkRules := rules.GetRegistered() + var found bool + for _, rule := range frameworkRules { + if rule.GetRule().RegoPackage != "" { + found = true + } + } + assert.True(t, found, "no embedded rego policies were registered as rules") +} + +func Test_RegisterRegoRules(t *testing.T) { + var testCases = []struct { + name string + inputPolicy string + expectedError bool + }{ + { + name: "happy path old single schema", + inputPolicy: `# METADATA +# title: "dummy title" +# description: "some description" +# scope: package +# schemas: +# - input: schema["input"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS1234 +deny[res]{ + res := true +}`, + }, + { + name: "happy path new builtin single schema", + inputPolicy: `# METADATA +# title: "dummy title" +# description: "some description" +# scope: package +# schemas: +# - input: schema["dockerfile"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS1234 +deny[res]{ + res := true +}`, + }, + { + name: "happy path new multiple schemas", + inputPolicy: `# METADATA +# title: "dummy title" +# description: "some description" +# scope: package +# schemas: +# - input: schema["dockerfile"] +# - input: schema["kubernetes"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS1234 +deny[res]{ + res := true +}`, + }, + { + name: "sad path schema does not exist", + inputPolicy: `# METADATA +# title: "dummy title" +# description: "some description" +# scope: package +# schemas: +# - input: schema["invalid schema"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS1234 +deny[res]{ + res := true +}`, + expectedError: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + policies, err := LoadPoliciesFromDirs(rules2.EmbeddedLibraryFileSystem, ".") + require.NoError(t, 
err) + newRule, err := ast.ParseModuleWithOpts("/rules/newrule.rego", tc.inputPolicy, ast.ParserOptions{ + ProcessAnnotation: true, + }) + require.NoError(t, err) + + policies["/rules/newrule.rego"] = newRule + switch { + case tc.expectedError: + assert.Panics(t, func() { + RegisterRegoRules(policies) + }, tc.name) + default: + RegisterRegoRules(policies) + } + }) + } +} diff --git a/pkg/iac/rego/exceptions.go b/pkg/iac/rego/exceptions.go new file mode 100644 index 000000000000..ab202ec00d19 --- /dev/null +++ b/pkg/iac/rego/exceptions.go @@ -0,0 +1,33 @@ +package rego + +import ( + "context" + "fmt" +) + +func (s *Scanner) isIgnored(ctx context.Context, namespace string, ruleName string, input interface{}) (bool, error) { + if ignored, err := s.isNamespaceIgnored(ctx, namespace, input); err != nil { + return false, err + } else if ignored { + return true, nil + } + return s.isRuleIgnored(ctx, namespace, ruleName, input) +} + +func (s *Scanner) isNamespaceIgnored(ctx context.Context, namespace string, input interface{}) (bool, error) { + exceptionQuery := fmt.Sprintf("data.namespace.exceptions.exception[_] == %q", namespace) + result, _, err := s.runQuery(ctx, exceptionQuery, input, true) + if err != nil { + return false, fmt.Errorf("query namespace exceptions: %w", err) + } + return result.Allowed(), nil +} + +func (s *Scanner) isRuleIgnored(ctx context.Context, namespace string, ruleName string, input interface{}) (bool, error) { + exceptionQuery := fmt.Sprintf("endswith(%q, data.%s.exception[_][_])", ruleName, namespace) + result, _, err := s.runQuery(ctx, exceptionQuery, input, true) + if err != nil { + return false, err + } + return result.Allowed(), nil +} diff --git a/pkg/iac/rego/load.go b/pkg/iac/rego/load.go new file mode 100644 index 000000000000..909510e8f505 --- /dev/null +++ b/pkg/iac/rego/load.go @@ -0,0 +1,210 @@ +package rego + +import ( + "context" + "fmt" + "io" + "io/fs" + "strings" + + "github.com/open-policy-agent/opa/ast" + 
"github.com/open-policy-agent/opa/bundle" +) + +func IsRegoFile(name string) bool { + return strings.HasSuffix(name, bundle.RegoExt) && !strings.HasSuffix(name, "_test"+bundle.RegoExt) +} + +func IsDotFile(name string) bool { + return strings.HasPrefix(name, ".") +} + +func (s *Scanner) loadPoliciesFromReaders(readers []io.Reader) (map[string]*ast.Module, error) { + modules := make(map[string]*ast.Module) + for i, r := range readers { + moduleName := fmt.Sprintf("reader_%d", i) + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + module, err := ast.ParseModuleWithOpts(moduleName, string(data), ast.ParserOptions{ + ProcessAnnotation: true, + }) + if err != nil { + return nil, err + } + modules[moduleName] = module + } + return modules, nil +} + +func (s *Scanner) loadEmbedded(enableEmbeddedLibraries, enableEmbeddedPolicies bool) error { + if enableEmbeddedLibraries { + loadedLibs, errLoad := LoadEmbeddedLibraries() + if errLoad != nil { + return fmt.Errorf("failed to load embedded rego libraries: %w", errLoad) + } + for name, policy := range loadedLibs { + s.policies[name] = policy + } + s.debug.Log("Loaded %d embedded libraries.", len(loadedLibs)) + } + + if enableEmbeddedPolicies { + loaded, err := LoadEmbeddedPolicies() + if err != nil { + return fmt.Errorf("failed to load embedded rego policies: %w", err) + } + for name, policy := range loaded { + s.policies[name] = policy + } + s.debug.Log("Loaded %d embedded policies.", len(loaded)) + } + + return nil +} + +func (s *Scanner) LoadPolicies(enableEmbeddedLibraries, enableEmbeddedPolicies bool, srcFS fs.FS, paths []string, readers []io.Reader) error { + + if s.policies == nil { + s.policies = make(map[string]*ast.Module) + } + + if s.policyFS != nil { + s.debug.Log("Overriding filesystem for policies!") + srcFS = s.policyFS + } + + if err := s.loadEmbedded(enableEmbeddedLibraries, enableEmbeddedPolicies); err != nil { + return err + } + + var err error + if len(paths) > 0 { + loaded, err := 
LoadPoliciesFromDirs(srcFS, paths...) + if err != nil { + return fmt.Errorf("failed to load rego policies from %s: %w", paths, err) + } + for name, policy := range loaded { + s.policies[name] = policy + } + s.debug.Log("Loaded %d policies from disk.", len(loaded)) + } + + if len(readers) > 0 { + loaded, err := s.loadPoliciesFromReaders(readers) + if err != nil { + return fmt.Errorf("failed to load rego policies from reader(s): %w", err) + } + for name, policy := range loaded { + s.policies[name] = policy + } + s.debug.Log("Loaded %d policies from reader(s).", len(loaded)) + } + + // gather namespaces + uniq := make(map[string]struct{}) + for _, module := range s.policies { + namespace := getModuleNamespace(module) + uniq[namespace] = struct{}{} + } + var namespaces []string + for namespace := range uniq { + namespaces = append(namespaces, namespace) + } + + dataFS := srcFS + if s.dataFS != nil { + s.debug.Log("Overriding filesystem for data!") + dataFS = s.dataFS + } + store, err := initStore(dataFS, s.dataDirs, namespaces) + if err != nil { + return fmt.Errorf("unable to load data: %w", err) + } + s.store = store + + return s.compilePolicies(srcFS, paths) +} + +func (s *Scanner) prunePoliciesWithError(compiler *ast.Compiler) error { + if len(compiler.Errors) > s.regoErrorLimit { + s.debug.Log("Error(s) occurred while loading policies") + return compiler.Errors + } + + for _, e := range compiler.Errors { + s.debug.Log("Error occurred while parsing: %s, %s", e.Location.File, e.Error()) + delete(s.policies, e.Location.File) + } + return nil +} + +func (s *Scanner) compilePolicies(srcFS fs.FS, paths []string) error { + + schemaSet, custom, err := BuildSchemaSetFromPolicies(s.policies, paths, srcFS) + if err != nil { + return err + } + if custom { + s.inputSchema = nil // discard auto detected input schema in favour of policy defined schema + } + + compiler := ast.NewCompiler(). + WithUseTypeCheckAnnotations(true). + WithCapabilities(ast.CapabilitiesForThisVersion()). 
+ WithSchemas(schemaSet) + + compiler.Compile(s.policies) + if compiler.Failed() { + if err := s.prunePoliciesWithError(compiler); err != nil { + return err + } + return s.compilePolicies(srcFS, paths) + } + retriever := NewMetadataRetriever(compiler) + + if err := s.filterModules(retriever); err != nil { + return err + } + if s.inputSchema != nil { + schemaSet := ast.NewSchemaSet() + schemaSet.Put(ast.MustParseRef("schema.input"), s.inputSchema) + compiler.WithSchemas(schemaSet) + compiler.Compile(s.policies) + if compiler.Failed() { + if err := s.prunePoliciesWithError(compiler); err != nil { + return err + } + return s.compilePolicies(srcFS, paths) + } + } + s.compiler = compiler + s.retriever = retriever + return nil +} + +func (s *Scanner) filterModules(retriever *MetadataRetriever) error { + + filtered := make(map[string]*ast.Module) + for name, module := range s.policies { + meta, err := retriever.RetrieveMetadata(context.TODO(), module) + if err != nil { + return err + } + if len(meta.InputOptions.Selectors) == 0 { + s.debug.Log("WARNING: Module %s has no input selectors - it will be loaded for all inputs!", name) + filtered[name] = module + continue + } + for _, selector := range meta.InputOptions.Selectors { + if selector.Type == string(s.sourceType) { + filtered[name] = module + break + } + } + } + + s.policies = filtered + return nil +} diff --git a/pkg/iac/rego/load_test.go b/pkg/iac/rego/load_test.go new file mode 100644 index 000000000000..3240bc208181 --- /dev/null +++ b/pkg/iac/rego/load_test.go @@ -0,0 +1,46 @@ +package rego + +import ( + "bytes" + "embed" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +//go:embed all:testdata/policies +var testEmbedFS embed.FS + +func Test_RegoScanning_WithSomeInvalidPolicies(t *testing.T) { + t.Run("allow no errors", func(t *testing.T) { + var debugBuf bytes.Buffer + scanner := 
NewScanner(types.SourceDockerfile) + scanner.SetRegoErrorLimit(0) + scanner.SetDebugWriter(&debugBuf) + p, _ := LoadPoliciesFromDirs(testEmbedFS, ".") + require.NotNil(t, p) + + scanner.policies = p + err := scanner.compilePolicies(testEmbedFS, []string{"policies"}) + require.ErrorContains(t, err, `want (one of): ["Cmd" "EndLine" "Flags" "JSON" "Original" "Path" "Stage" "StartLine" "SubCmd" "Value"]`) + assert.Contains(t, debugBuf.String(), "Error(s) occurred while loading policies") + }) + + t.Run("allow up to max 1 error", func(t *testing.T) { + var debugBuf bytes.Buffer + scanner := NewScanner(types.SourceDockerfile) + scanner.SetRegoErrorLimit(1) + scanner.SetDebugWriter(&debugBuf) + + p, _ := LoadPoliciesFromDirs(testEmbedFS, ".") + scanner.policies = p + + err := scanner.compilePolicies(testEmbedFS, []string{"policies"}) + require.NoError(t, err) + + assert.Contains(t, debugBuf.String(), "Error occurred while parsing: testdata/policies/invalid.rego, testdata/policies/invalid.rego:7") + }) + +} diff --git a/pkg/iac/rego/metadata.go b/pkg/iac/rego/metadata.go new file mode 100644 index 000000000000..ee6b5d1d2dce --- /dev/null +++ b/pkg/iac/rego/metadata.go @@ -0,0 +1,380 @@ +package rego + +import ( + "context" + "fmt" + "strings" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/mitchellh/mapstructure" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/rego" +) + +type StaticMetadata struct { + ID string + AVDID string + Title string + ShortCode string + Description string + Severity string + RecommendedActions string + PrimaryURL string + References []string + InputOptions InputOptions + Package string + Frameworks map[framework.Framework][]string + Provider string + Service string + Library bool + 
CloudFormation *scan.EngineMetadata + Terraform *scan.EngineMetadata +} + +func NewStaticMetadata(pkgPath string, inputOpt InputOptions) *StaticMetadata { + return &StaticMetadata{ + ID: "N/A", + Title: "N/A", + Severity: "UNKNOWN", + Description: fmt.Sprintf("Rego module: %s", pkgPath), + Package: pkgPath, + InputOptions: inputOpt, + Frameworks: make(map[framework.Framework][]string), + } +} + +func (sm *StaticMetadata) Update(meta map[string]any) error { + + upd := func(field *string, key string) { + if raw, ok := meta[key]; ok { + *field = fmt.Sprintf("%s", raw) + } + } + + upd(&sm.ID, "id") + upd(&sm.AVDID, "avd_id") + upd(&sm.Title, "title") + upd(&sm.ShortCode, "short_code") + upd(&sm.Description, "description") + upd(&sm.Service, "service") + upd(&sm.Provider, "provider") + upd(&sm.RecommendedActions, "recommended_actions") + upd(&sm.RecommendedActions, "recommended_action") + + if raw, ok := meta["severity"]; ok { + sm.Severity = strings.ToUpper(fmt.Sprintf("%s", raw)) + } + + if raw, ok := meta["library"]; ok { + if lib, ok := raw.(bool); ok { + sm.Library = lib + } + } + + if raw, ok := meta["url"]; ok { + sm.References = append(sm.References, fmt.Sprintf("%s", raw)) + } + if raw, ok := meta["frameworks"]; ok { + frameworks, ok := raw.(map[string][]string) + if !ok { + return fmt.Errorf("failed to parse framework metadata: not an object") + } + for fw, sections := range frameworks { + sm.Frameworks[framework.Framework(fw)] = sections + } + } + if raw, ok := meta["related_resources"]; ok { + if relatedResources, ok := raw.([]map[string]any); ok { + for _, relatedResource := range relatedResources { + if raw, ok := relatedResource["ref"]; ok { + sm.References = append(sm.References, fmt.Sprintf("%s", raw)) + } + } + } else if relatedResources, ok := raw.([]string); ok { + sm.References = append(sm.References, relatedResources...) 
+ } + } + + var err error + if sm.CloudFormation, err = NewEngineMetadata("cloud_formation", meta); err != nil { + return err + } + + if sm.Terraform, err = NewEngineMetadata("terraform", meta); err != nil { + return err + } + + return nil +} + +func (sm *StaticMetadata) FromAnnotations(annotations *ast.Annotations) error { + sm.Title = annotations.Title + sm.Description = annotations.Description + for _, resource := range annotations.RelatedResources { + if !resource.Ref.IsAbs() { + continue + } + sm.References = append(sm.References, resource.Ref.String()) + } + if custom := annotations.Custom; custom != nil { + if err := sm.Update(custom); err != nil { + return err + } + } + if len(annotations.RelatedResources) > 0 { + sm.PrimaryURL = annotations.RelatedResources[0].Ref.String() + } + return nil +} + +func NewEngineMetadata(schema string, meta map[string]interface{}) (*scan.EngineMetadata, error) { + var sMap map[string]interface{} + if raw, ok := meta[schema]; ok { + sMap, ok = raw.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("failed to parse %s metadata: not an object", schema) + } + } + + var em scan.EngineMetadata + if val, ok := sMap["good_examples"].(string); ok { + em.GoodExamples = []string{val} + } + if val, ok := sMap["bad_examples"].(string); ok { + em.BadExamples = []string{val} + } + if val, ok := sMap["links"].(string); ok { + em.Links = []string{val} + } + if val, ok := sMap["remediation_markdown"].(string); ok { + em.RemediationMarkdown = val + } + + return &em, nil +} + +type InputOptions struct { + Combined bool + Selectors []Selector +} + +type Selector struct { + Type string + Subtypes []SubType +} + +type SubType struct { + Group string + Version string + Kind string + Namespace string + Service string // only for cloud + Provider string // only for cloud +} + +func (m StaticMetadata) ToRule() scan.Rule { + + provider := "generic" + if m.Provider != "" { + provider = m.Provider + } else if len(m.InputOptions.Selectors) > 0 { 
+ provider = m.InputOptions.Selectors[0].Type + } + service := "general" + if m.Service != "" { + service = m.Service + } + + return scan.Rule{ + AVDID: m.AVDID, + Aliases: []string{m.ID}, + ShortCode: m.ShortCode, + Summary: m.Title, + Explanation: m.Description, + Impact: "", + Resolution: m.RecommendedActions, + Provider: providers.Provider(provider), + Service: service, + Links: m.References, + Severity: severity.Severity(m.Severity), + RegoPackage: m.Package, + Frameworks: m.Frameworks, + CloudFormation: m.CloudFormation, + Terraform: m.Terraform, + } +} + +type MetadataRetriever struct { + compiler *ast.Compiler +} + +func NewMetadataRetriever(compiler *ast.Compiler) *MetadataRetriever { + return &MetadataRetriever{ + compiler: compiler, + } +} + +func (m *MetadataRetriever) findPackageAnnotations(module *ast.Module) *ast.Annotations { + annotationSet := m.compiler.GetAnnotationSet() + if annotationSet == nil { + return nil + } + for _, annotation := range annotationSet.Flatten() { + if annotation.GetPackage().Path.String() != module.Package.Path.String() || annotation.Annotations.Scope != "package" { + continue + } + return annotation.Annotations + } + return nil +} + +func (m *MetadataRetriever) RetrieveMetadata(ctx context.Context, module *ast.Module, contents ...any) (*StaticMetadata, error) { + + metadata := NewStaticMetadata( + module.Package.Path.String(), + m.queryInputOptions(ctx, module), + ) + + // read metadata from official rego annotations if possible + if annotations := m.findPackageAnnotations(module); annotations != nil { + if err := metadata.FromAnnotations(annotations); err != nil { + return nil, err + } + return metadata, nil + } + + // otherwise, try to read metadata from the rego module itself - we used to do this before annotations were a thing + namespace := getModuleNamespace(module) + metadataQuery := fmt.Sprintf("data.%s.__rego_metadata__", namespace) + + options := []func(*rego.Rego){ + rego.Query(metadataQuery), + 
rego.Compiler(m.compiler), + rego.Capabilities(nil), + } + // support dynamic metadata fields + for _, in := range contents { + options = append(options, rego.Input(in)) + } + + instance := rego.New(options...) + set, err := instance.Eval(ctx) + if err != nil { + return nil, err + } + + // no metadata supplied + if set == nil { + return metadata, nil + } + + if len(set) != 1 { + return nil, fmt.Errorf("failed to parse metadata: unexpected set length") + } + if len(set[0].Expressions) != 1 { + return nil, fmt.Errorf("failed to parse metadata: unexpected expression length") + } + expression := set[0].Expressions[0] + meta, ok := expression.Value.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("failed to parse metadata: not an object") + } + + if err := metadata.Update(meta); err != nil { + return nil, err + } + + return metadata, nil +} + +// nolint: cyclop +func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.Module) InputOptions { + + options := InputOptions{ + Combined: false, + Selectors: nil, + } + + var metadata map[string]interface{} + + // read metadata from official rego annotations if possible + if annotation := m.findPackageAnnotations(module); annotation != nil && annotation.Custom != nil { + if input, ok := annotation.Custom["input"]; ok { + if mapped, ok := input.(map[string]interface{}); ok { + metadata = mapped + } + } + } + + if metadata == nil { + + namespace := getModuleNamespace(module) + inputOptionQuery := fmt.Sprintf("data.%s.__rego_input__", namespace) + instance := rego.New( + rego.Query(inputOptionQuery), + rego.Compiler(m.compiler), + rego.Capabilities(nil), + ) + set, err := instance.Eval(ctx) + if err != nil { + return options + } + + if len(set) != 1 { + return options + } + if len(set[0].Expressions) != 1 { + return options + } + expression := set[0].Expressions[0] + meta, ok := expression.Value.(map[string]interface{}) + if !ok { + return options + } + metadata = meta + } + + if raw, ok := 
metadata["combine"]; ok { + if combine, ok := raw.(bool); ok { + options.Combined = combine + } + } + + if raw, ok := metadata["selector"]; ok { + if each, ok := raw.([]interface{}); ok { + for _, rawSelector := range each { + var selector Selector + if selectorMap, ok := rawSelector.(map[string]interface{}); ok { + if rawType, ok := selectorMap["type"]; ok { + selector.Type = fmt.Sprintf("%s", rawType) + // handle backward compatibility for "defsec" source type which is now "cloud" + if selector.Type == string(defsecTypes.SourceDefsec) { + selector.Type = string(defsecTypes.SourceCloud) + } + } + if subType, ok := selectorMap["subtypes"].([]interface{}); ok { + for _, subT := range subType { + if st, ok := subT.(map[string]interface{}); ok { + s := SubType{} + _ = mapstructure.Decode(st, &s) + selector.Subtypes = append(selector.Subtypes, s) + } + } + } + } + options.Selectors = append(options.Selectors, selector) + } + } + } + + return options + +} + +func getModuleNamespace(module *ast.Module) string { + return strings.TrimPrefix(module.Package.Path.String(), "data.") +} diff --git a/pkg/iac/rego/metadata_test.go b/pkg/iac/rego/metadata_test.go new file mode 100644 index 000000000000..935c027d0c58 --- /dev/null +++ b/pkg/iac/rego/metadata_test.go @@ -0,0 +1,188 @@ +package rego + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_UpdateStaticMetadata(t *testing.T) { + t.Run("happy", func(t *testing.T) { + sm := StaticMetadata{ + ID: "i", + AVDID: "a", + Title: "t", + ShortCode: "sc", + Description: "d", + Severity: "s", + RecommendedActions: "ra", + PrimaryURL: "pu", + References: []string{"r"}, + Package: "pkg", + Provider: "pr", + Service: "srvc", + Library: false, + Frameworks: map[framework.Framework][]string{ + framework.Default: {"dd"}, + }, + } + + require.NoError(t, sm.Update( + 
map[string]any{ + "id": "i_n", + "avd_id": "a_n", + "title": "t_n", + "short_code": "sc_n", + "description": "d_n", + "service": "srvc_n", + "provider": "pr_n", + "recommended_actions": "ra_n", + "severity": "s_n", + "library": true, + "url": "r_n", + "frameworks": map[string][]string{ + "all": {"aa"}, + }, + }, + )) + + expected := StaticMetadata{ + ID: "i_n", + AVDID: "a_n", + Title: "t_n", + ShortCode: "sc_n", + Description: "d_n", + Severity: "S_N", + RecommendedActions: "ra_n", + PrimaryURL: "pu", + References: []string{"r", "r_n"}, + Package: "pkg", + Provider: "pr_n", + Service: "srvc_n", + Library: true, + Frameworks: map[framework.Framework][]string{ + framework.Default: {"dd"}, + framework.ALL: {"aa"}, + }, + CloudFormation: &scan.EngineMetadata{}, + Terraform: &scan.EngineMetadata{}, + } + + assert.Equal(t, expected, sm) + }) + + t.Run("related resources are a map", func(t *testing.T) { + sm := StaticMetadata{ + References: []string{"r"}, + } + require.NoError(t, sm.Update(map[string]any{ + "related_resources": []map[string]any{ + { + "ref": "r1_n", + }, + { + "ref": "r2_n", + }, + }, + })) + + expected := StaticMetadata{ + References: []string{"r", "r1_n", "r2_n"}, + CloudFormation: &scan.EngineMetadata{}, + Terraform: &scan.EngineMetadata{}, + } + + assert.Equal(t, expected, sm) + }) + + t.Run("related resources are a string", func(t *testing.T) { + sm := StaticMetadata{ + References: []string{"r"}, + } + require.NoError(t, sm.Update(map[string]any{ + "related_resources": []string{"r1_n", "r2_n"}, + })) + + expected := StaticMetadata{ + References: []string{"r", "r1_n", "r2_n"}, + CloudFormation: &scan.EngineMetadata{}, + Terraform: &scan.EngineMetadata{}, + } + + assert.Equal(t, expected, sm) + }) +} + +func Test_getEngineMetadata(t *testing.T) { + inputSchema := map[string]interface{}{ + "terraform": map[string]interface{}{ + "good_examples": `resource "aws_cloudtrail" "good_example" { + is_multi_region_trail = true + + event_selector { + 
read_write_type = "All" + include_management_events = true + + data_resource { + type = "AWS::S3::Object" + values = ["${data.aws_s3_bucket.important-bucket.arn}/"] + } + } + }`, + }, + "cloud_formation": map[string]interface{}{"good_examples": `--- +Resources: + GoodExample: + Type: AWS::CloudTrail::Trail + Properties: + IsLogging: true + IsMultiRegionTrail: true + S3BucketName: "CloudtrailBucket" + S3KeyPrefix: "/trailing" + TrailName: "Cloudtrail"`, + }} + + var testCases = []struct { + schema string + want string + }{ + { + schema: "terraform", + want: `resource "aws_cloudtrail" "good_example" { + is_multi_region_trail = true + + event_selector { + read_write_type = "All" + include_management_events = true + + data_resource { + type = "AWS::S3::Object" + values = ["${data.aws_s3_bucket.important-bucket.arn}/"] + } + } + }`, + }, + {schema: "cloud_formation", + want: `--- +Resources: + GoodExample: + Type: AWS::CloudTrail::Trail + Properties: + IsLogging: true + IsMultiRegionTrail: true + S3BucketName: "CloudtrailBucket" + S3KeyPrefix: "/trailing" + TrailName: "Cloudtrail"`}, + } + + for _, tc := range testCases { + t.Run(tc.schema, func(t *testing.T) { + em, err := NewEngineMetadata(tc.schema, inputSchema) + assert.NoError(t, err) + assert.Equal(t, tc.want, em.GoodExamples[0]) + }) + } +} diff --git a/pkg/iac/rego/result.go b/pkg/iac/rego/result.go new file mode 100644 index 000000000000..94319eee4887 --- /dev/null +++ b/pkg/iac/rego/result.go @@ -0,0 +1,166 @@ +package rego + +import ( + "fmt" + "io/fs" + "strconv" + + "github.com/aquasecurity/defsec/pkg/scan" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/open-policy-agent/opa/rego" +) + +type regoResult struct { + Filepath string + Resource string + StartLine int + EndLine int + SourcePrefix string + Message string + Explicit bool + Managed bool + FSKey string + FS fs.FS + Parent *regoResult +} + +func (r regoResult) GetMetadata() defsecTypes.Metadata { + var m 
defsecTypes.Metadata + if !r.Managed { + m = defsecTypes.NewUnmanagedMetadata() + } else { + rng := defsecTypes.NewRangeWithFSKey(r.Filepath, r.StartLine, r.EndLine, r.SourcePrefix, r.FSKey, r.FS) + if r.Explicit { + m = defsecTypes.NewExplicitMetadata(rng, r.Resource) + } else { + m = defsecTypes.NewMetadata(rng, r.Resource) + } + } + if r.Parent != nil { + return m.WithParent(r.Parent.GetMetadata()) + } + return m +} + +func (r regoResult) GetRawValue() interface{} { + return nil +} + +func parseResult(raw interface{}) *regoResult { + var result regoResult + result.Managed = true + switch val := raw.(type) { + case []interface{}: + var msg string + for _, item := range val { + switch raw := item.(type) { + case map[string]interface{}: + result = parseCause(raw) + case string: + msg = raw + } + } + result.Message = msg + case string: + result.Message = val + case map[string]interface{}: + result = parseCause(val) + default: + result.Message = "Rego policy resulted in DENY" + } + return &result +} + +func parseCause(cause map[string]interface{}) regoResult { + var result regoResult + result.Managed = true + if msg, ok := cause["msg"]; ok { + result.Message = fmt.Sprintf("%s", msg) + } + if filepath, ok := cause["filepath"]; ok { + result.Filepath = fmt.Sprintf("%s", filepath) + } + if msg, ok := cause["fskey"]; ok { + result.FSKey = fmt.Sprintf("%s", msg) + } + if msg, ok := cause["resource"]; ok { + result.Resource = fmt.Sprintf("%s", msg) + } + if start, ok := cause["startline"]; ok { + result.StartLine = parseLineNumber(start) + } + if end, ok := cause["endline"]; ok { + result.EndLine = parseLineNumber(end) + } + if prefix, ok := cause["sourceprefix"]; ok { + result.SourcePrefix = fmt.Sprintf("%s", prefix) + } + if explicit, ok := cause["explicit"]; ok { + if set, ok := explicit.(bool); ok { + result.Explicit = set + } + } + if managed, ok := cause["managed"]; ok { + if set, ok := managed.(bool); ok { + result.Managed = set + } + } + if parent, ok := 
cause["parent"]; ok { + if m, ok := parent.(map[string]interface{}); ok { + parentResult := parseCause(m) + result.Parent = &parentResult + } + } + return result +} + +func parseLineNumber(raw interface{}) int { + str := fmt.Sprintf("%s", raw) + n, _ := strconv.Atoi(str) + return n +} + +func (s *Scanner) convertResults(set rego.ResultSet, input Input, namespace string, rule string, traces []string) scan.Results { + var results scan.Results + + offset := 0 + if input.Contents != nil { + if xx, ok := input.Contents.(map[string]interface{}); ok { + if md, ok := xx["__defsec_metadata"]; ok { + if md2, ok := md.(map[string]interface{}); ok { + if sl, ok := md2["offset"]; ok { + offset, _ = sl.(int) + } + } + } + } + } + for _, result := range set { + for _, expression := range result.Expressions { + values, ok := expression.Value.([]interface{}) + if !ok { + values = []interface{}{expression.Value} + } + + for _, value := range values { + regoResult := parseResult(value) + regoResult.FS = input.FS + if regoResult.Filepath == "" && input.Path != "" { + regoResult.Filepath = input.Path + } + if regoResult.Message == "" { + regoResult.Message = fmt.Sprintf("Rego policy rule: %s.%s", namespace, rule) + } + regoResult.StartLine += offset + regoResult.EndLine += offset + results.AddRego(regoResult.Message, namespace, rule, traces, regoResult) + } + } + } + return results +} + +func (s *Scanner) embellishResultsWithRuleMetadata(results scan.Results, metadata StaticMetadata) scan.Results { + results.SetRule(metadata.ToRule()) + return results +} diff --git a/pkg/iac/rego/result_test.go b/pkg/iac/rego/result_test.go new file mode 100644 index 000000000000..d958f7962b10 --- /dev/null +++ b/pkg/iac/rego/result_test.go @@ -0,0 +1,104 @@ +package rego + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_parseResult(t *testing.T) { + var testCases = []struct { + name string + input interface{} + want regoResult + }{ + { + name: "unknown", + input: nil, 
+ want: regoResult{ + Managed: true, + Message: "Rego policy resulted in DENY", + }, + }, + { + name: "string", + input: "message", + want: regoResult{ + Managed: true, + Message: "message", + }, + }, + { + name: "strings", + input: []interface{}{"message"}, + want: regoResult{ + Managed: true, + Message: "message", + }, + }, + { + name: "maps", + input: []interface{}{ + "message", + map[string]interface{}{ + "filepath": "a.out", + }, + }, + want: regoResult{ + Managed: true, + Message: "message", + Filepath: "a.out", + }, + }, + { + name: "map", + input: map[string]interface{}{ + "msg": "message", + "filepath": "a.out", + "fskey": "abcd", + "resource": "resource", + "startline": "123", + "endline": "456", + "sourceprefix": "git", + "explicit": true, + "managed": true, + }, + want: regoResult{ + Message: "message", + Filepath: "a.out", + Resource: "resource", + StartLine: 123, + EndLine: 456, + SourcePrefix: "git", + FSKey: "abcd", + Explicit: true, + Managed: true, + }, + }, + { + name: "parent", + input: map[string]interface{}{ + "msg": "child", + "parent": map[string]interface{}{ + "msg": "parent", + }, + }, + want: regoResult{ + Message: "child", + Managed: true, + Parent: ®oResult{ + Message: "parent", + Managed: true, + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + have := parseResult(tc.input) + assert.NotNil(t, have) + assert.Equal(t, tc.want, *have) + }) + } +} diff --git a/pkg/iac/rego/runtime.go b/pkg/iac/rego/runtime.go new file mode 100644 index 000000000000..6e28268d9971 --- /dev/null +++ b/pkg/iac/rego/runtime.go @@ -0,0 +1,28 @@ +package rego + +import ( + "os" + "strings" + + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/version" +) + +func addRuntimeValues() *ast.Term { + env := ast.NewObject() + for _, pair := range os.Environ() { + parts := strings.SplitN(pair, "=", 2) + if len(parts) == 1 { + env.Insert(ast.StringTerm(parts[0]), ast.NullTerm()) + } else if 
len(parts) > 1 { + env.Insert(ast.StringTerm(parts[0]), ast.StringTerm(parts[1])) + } + } + + obj := ast.NewObject() + obj.Insert(ast.StringTerm("env"), ast.NewTerm(env)) + obj.Insert(ast.StringTerm("version"), ast.StringTerm(version.Version)) + obj.Insert(ast.StringTerm("commit"), ast.StringTerm(version.Vcs)) + + return ast.NewTerm(obj) +} diff --git a/pkg/iac/rego/scanner.go b/pkg/iac/rego/scanner.go new file mode 100644 index 000000000000..c88c48df3d95 --- /dev/null +++ b/pkg/iac/rego/scanner.go @@ -0,0 +1,413 @@ +package rego + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "io/fs" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/rego" + "github.com/open-policy-agent/opa/storage" + + "github.com/aquasecurity/defsec/pkg/scanners/options" +) + +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + ruleNamespaces map[string]struct{} + policies map[string]*ast.Module + store storage.Store + dataDirs []string + runtimeValues *ast.Term + compiler *ast.Compiler + regoErrorLimit int + debug debug.Logger + traceWriter io.Writer + tracePerResult bool + retriever *MetadataRetriever + policyFS fs.FS + dataFS fs.FS + frameworks []framework.Framework + spec string + inputSchema interface{} // unmarshalled into this from a json schema document + sourceType types.Source +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + // handled externally +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) {} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + // handled 
externally +} + +func (s *Scanner) trace(heading string, input interface{}) { + if s.traceWriter == nil { + return + } + data, err := json.MarshalIndent(input, "", " ") + if err != nil { + return + } + _, _ = fmt.Fprintf(s.traceWriter, "REGO %[1]s:\n%s\nEND REGO %[1]s\n\n", heading, string(data)) +} + +func (s *Scanner) SetPolicyFilesystem(fs fs.FS) { + s.policyFS = fs +} + +func (s *Scanner) SetDataFilesystem(fs fs.FS) { + s.dataFS = fs +} + +func (s *Scanner) SetPolicyReaders(_ []io.Reader) { + // NOTE: Policy readers option not applicable for rego, policies are loaded on-demand by other scanners. +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "rego", "scanner") +} + +func (s *Scanner) SetTraceWriter(writer io.Writer) { + s.traceWriter = writer +} + +func (s *Scanner) SetPerResultTracingEnabled(b bool) { + s.tracePerResult = b +} + +func (s *Scanner) SetPolicyDirs(_ ...string) { + // NOTE: Policy dirs option not applicable for rego, policies are loaded on-demand by other scanners. +} + +func (s *Scanner) SetDataDirs(dirs ...string) { + s.dataDirs = dirs +} + +func (s *Scanner) SetPolicyNamespaces(namespaces ...string) { + for _, namespace := range namespaces { + s.ruleNamespaces[namespace] = struct{}{} + } +} + +func (s *Scanner) SetSkipRequiredCheck(_ bool) { + // NOTE: Skip required option not applicable for rego. 
+} + +func (s *Scanner) SetRegoErrorLimit(limit int) { + s.regoErrorLimit = limit +} + +type DynamicMetadata struct { + Warning bool + Filepath string + Message string + StartLine int + EndLine int +} + +func NewScanner(source types.Source, options ...options.ScannerOption) *Scanner { + schema, ok := schemas.SchemaMap[source] + if !ok { + schema = schemas.Anything + } + + s := &Scanner{ + regoErrorLimit: ast.CompileErrorLimitDefault, + sourceType: source, + ruleNamespaces: map[string]struct{}{ + "builtin": {}, + "appshield": {}, + "defsec": {}, + }, + runtimeValues: addRuntimeValues(), + } + for _, opt := range options { + opt(s) + } + if schema != schemas.None { + err := json.Unmarshal([]byte(schema), &s.inputSchema) + if err != nil { + panic(err) + } + } + return s +} + +func (s *Scanner) SetParentDebugLogger(l debug.Logger) { + s.debug = l.Extend("rego") +} + +func (s *Scanner) runQuery(ctx context.Context, query string, input interface{}, disableTracing bool) (rego.ResultSet, []string, error) { + + trace := (s.traceWriter != nil || s.tracePerResult) && !disableTracing + + regoOptions := []func(*rego.Rego){ + rego.Query(query), + rego.Compiler(s.compiler), + rego.Store(s.store), + rego.Runtime(s.runtimeValues), + rego.Trace(trace), + } + + if s.inputSchema != nil { + schemaSet := ast.NewSchemaSet() + schemaSet.Put(ast.MustParseRef("schema.input"), s.inputSchema) + regoOptions = append(regoOptions, rego.Schemas(schemaSet)) + } + + if input != nil { + regoOptions = append(regoOptions, rego.Input(input)) + } + + instance := rego.New(regoOptions...) 
+ set, err := instance.Eval(ctx) + if err != nil { + return nil, nil, err + } + + // we also build a slice of trace lines for per-result tracing - primarily for fanal/trivy + var traces []string + + if trace { + if s.traceWriter != nil { + rego.PrintTrace(s.traceWriter, instance) + } + if s.tracePerResult { + traceBuffer := bytes.NewBuffer([]byte{}) + rego.PrintTrace(traceBuffer, instance) + traces = strings.Split(traceBuffer.String(), "\n") + } + } + return set, traces, nil +} + +type Input struct { + Path string `json:"path"` + FS fs.FS `json:"-"` + Contents interface{} `json:"contents"` +} + +func GetInputsContents(inputs []Input) []any { + results := make([]any, len(inputs)) + for i, c := range inputs { + results[i] = c.Contents + } + return results +} + +func (s *Scanner) ScanInput(ctx context.Context, inputs ...Input) (scan.Results, error) { + + s.debug.Log("Scanning %d inputs...", len(inputs)) + + var results scan.Results + + for _, module := range s.policies { + + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + namespace := getModuleNamespace(module) + topLevel := strings.Split(namespace, ".")[0] + if _, ok := s.ruleNamespaces[topLevel]; !ok { + continue + } + + staticMeta, err := s.retriever.RetrieveMetadata(ctx, module, GetInputsContents(inputs)...) + if err != nil { + return nil, err + } + + if isPolicyWithSubtype(s.sourceType) { + // skip if policy isn't relevant to what is being scanned + if !isPolicyApplicable(staticMeta, inputs...) 
{ + continue + } + } + + if len(inputs) == 0 { + continue + } + + usedRules := make(map[string]struct{}) + + // all rules + for _, rule := range module.Rules { + ruleName := rule.Head.Name.String() + if _, ok := usedRules[ruleName]; ok { + continue + } + usedRules[ruleName] = struct{}{} + if isEnforcedRule(ruleName) { + ruleResults, err := s.applyRule(ctx, namespace, ruleName, inputs, staticMeta.InputOptions.Combined) + if err != nil { + return nil, err + } + results = append(results, s.embellishResultsWithRuleMetadata(ruleResults, *staticMeta)...) + } + } + + } + + return results, nil +} + +func isPolicyWithSubtype(sourceType types.Source) bool { + for _, s := range []types.Source{types.SourceCloud, types.SourceDefsec} { // TODO(simar): Add types.Kubernetes once all k8s policy have subtype + if sourceType == s { + return true + } + } + return false +} + +func checkSubtype(ii map[string]interface{}, provider string, subTypes []SubType) bool { + if len(subTypes) == 0 { + return true + } + + for _, st := range subTypes { + switch services := ii[provider].(type) { + case map[string]interface{}: // cloud + for service := range services { + if (service == st.Service) && (st.Provider == provider) { + return true + } + } + case string: // k8s + // TODO(simar): This logic probably needs to be revisited + if services == st.Group || + services == st.Version || + services == st.Kind { + return true + } + } + } + return false +} + +func isPolicyApplicable(staticMetadata *StaticMetadata, inputs ...Input) bool { + for _, input := range inputs { + if ii, ok := input.Contents.(map[string]interface{}); ok { + for provider := range ii { + // TODO(simar): Add other providers + if !strings.Contains(strings.Join([]string{"kind", "aws", "azure"}, ","), provider) { + continue + } + + if len(staticMetadata.InputOptions.Selectors) == 0 { // policy always applies if no selectors + return true + } + + // check metadata for subtype + for _, s := range staticMetadata.InputOptions.Selectors { + 
if checkSubtype(ii, provider, s.Subtypes) { + return true + } + } + } + } + } + return false +} + +func (s *Scanner) applyRule(ctx context.Context, namespace string, rule string, inputs []Input, combined bool) (scan.Results, error) { + + // handle combined evaluations if possible + if combined { + s.trace("INPUT", inputs) + return s.applyRuleCombined(ctx, namespace, rule, inputs) + } + + var results scan.Results + qualified := fmt.Sprintf("data.%s.%s", namespace, rule) + for _, input := range inputs { + s.trace("INPUT", input) + if ignored, err := s.isIgnored(ctx, namespace, rule, input.Contents); err != nil { + return nil, err + } else if ignored { + var result regoResult + result.FS = input.FS + result.Filepath = input.Path + result.Managed = true + results.AddIgnored(result) + continue + } + set, traces, err := s.runQuery(ctx, qualified, input.Contents, false) + if err != nil { + return nil, err + } + s.trace("RESULTSET", set) + ruleResults := s.convertResults(set, input, namespace, rule, traces) + if len(ruleResults) == 0 { // It passed because we didn't find anything wrong (NOT because it didn't exist) + var result regoResult + result.FS = input.FS + result.Filepath = input.Path + result.Managed = true + results.AddPassedRego(namespace, rule, traces, result) + continue + } + results = append(results, ruleResults...) 
+ } + + return results, nil +} + +func (s *Scanner) applyRuleCombined(ctx context.Context, namespace string, rule string, inputs []Input) (scan.Results, error) { + if len(inputs) == 0 { + return nil, nil + } + var results scan.Results + qualified := fmt.Sprintf("data.%s.%s", namespace, rule) + if ignored, err := s.isIgnored(ctx, namespace, rule, inputs); err != nil { + return nil, err + } else if ignored { + for _, input := range inputs { + var result regoResult + result.FS = input.FS + result.Filepath = input.Path + result.Managed = true + results.AddIgnored(result) + } + return results, nil + } + set, traces, err := s.runQuery(ctx, qualified, inputs, false) + if err != nil { + return nil, err + } + return s.convertResults(set, inputs[0], namespace, rule, traces), nil +} + +// severity is now set with metadata, so deny/warn/violation now behave the same way +func isEnforcedRule(name string) bool { + switch { + case name == "deny", strings.HasPrefix(name, "deny_"), + name == "warn", strings.HasPrefix(name, "warn_"), + name == "violation", strings.HasPrefix(name, "violation_"): + return true + } + return false +} diff --git a/pkg/iac/rego/scanner_test.go b/pkg/iac/rego/scanner_test.go new file mode 100644 index 000000000000..d1aca5098f65 --- /dev/null +++ b/pkg/iac/rego/scanner_test.go @@ -0,0 +1,978 @@ +package rego + +import ( + "bytes" + "context" + "io/fs" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/liamg/memoryfs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" +) + +func CreateFS(t *testing.T, files map[string]string) fs.FS { + memfs := memoryfs.New() + for name, content := range files { + name := strings.TrimPrefix(name, "/") + err := memfs.MkdirAll(filepath.Dir(name), 0o700) + require.NoError(t, err) + err = memfs.WriteFile(name, 
[]byte(content), 0o644) + require.NoError(t, err) + } + return memfs +} + +func Test_RegoScanning_Deny(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + FS: srcFS, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "/evil.lol", results.GetFailed()[0].Metadata().Range().GetFilename()) + assert.False(t, results.GetFailed()[0].IsWarning()) +} + +func Test_RegoScanning_AbsolutePolicyPath_Deny(t *testing.T) { + + tmp := t.TempDir() + require.NoError(t, os.Mkdir(filepath.Join(tmp, "policies"), 0755)) + require.NoError(t, os.WriteFile(filepath.Join(tmp, "policies", "test.rego"), []byte(`package defsec.test + +deny { + input.evil +}`), 0600)) + + srcFS := os.DirFS(tmp) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"/policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + FS: srcFS, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "/evil.lol", results.GetFailed()[0].Metadata().Range().GetFilename()) + assert.False(t, results.GetFailed()[0].IsWarning()) +} + +func Test_RegoScanning_Warn(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +warn { + input.evil +} +`, + }) + + scanner := 
NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + require.Equal(t, 0, len(results.GetPassed())) + require.Equal(t, 0, len(results.GetIgnored())) + + assert.True(t, results.GetFailed()[0].IsWarning()) +} + +func Test_RegoScanning_Allow(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": false, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 0, len(results.GetFailed())) + require.Equal(t, 1, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "/evil.lol", results.GetPassed()[0].Metadata().Range().GetFilename()) +} + +func Test_RegoScanning_Namespace_Exception(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + "policies/exceptions.rego": ` +package namespace.exceptions + +import data.namespaces + +exception[ns] { + ns := data.namespaces[_] + startswith(ns, "defsec") +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 0, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + 
assert.Equal(t, 1, len(results.GetIgnored())) + +} + +func Test_RegoScanning_Namespace_Exception_WithoutMatch(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, "policies/something.rego": ` +package builtin.test + +deny_something { + input.something +} +`, + "policies/exceptions.rego": ` +package namespace.exceptions + +import data.namespaces + +exception[ns] { + ns := data.namespaces[_] + startswith(ns, "builtin") +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 1, len(results.GetIgnored())) + +} + +func Test_RegoScanning_Rule_Exception(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test +deny_evil { + input.evil +} +`, + "policies/exceptions.rego": ` +package defsec.test + +exception[rules] { + rules := ["evil"] +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 0, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 1, len(results.GetIgnored())) +} + +func Test_RegoScanning_Rule_Exception_WithoutMatch(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test +deny_evil { + input.evil +} +`, + "policies/exceptions.rego": ` +package defsec.test + +exception[rules] { + 
rules := ["good"] +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} + +func Test_RegoScanning_WithRuntimeValues(t *testing.T) { + + _ = os.Setenv("DEFSEC_RUNTIME_VAL", "AOK") + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny_evil { + output := opa.runtime() + output.env.DEFSEC_RUNTIME_VAL == "AOK" +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} + +func Test_RegoScanning_WithDenyMessage(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny[msg] { + input.evil + msg := "oh no" +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "oh no", results.GetFailed()[0].Description()) + assert.Equal(t, 
"/evil.lol", results.GetFailed()[0].Metadata().Range().GetFilename()) +} + +func Test_RegoScanning_WithDenyMetadata_ImpliedPath(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny[res] { + input.evil + res := { + "msg": "oh no", + "startline": 123, + "endline": 456, + } +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "oh no", results.GetFailed()[0].Description()) + assert.Equal(t, "/evil.lol", results.GetFailed()[0].Metadata().Range().GetFilename()) + assert.Equal(t, 123, results.GetFailed()[0].Metadata().Range().GetStartLine()) + assert.Equal(t, 456, results.GetFailed()[0].Metadata().Range().GetEndLine()) + +} + +func Test_RegoScanning_WithDenyMetadata_PersistedPath(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny[res] { + input.evil + res := { + "msg": "oh no", + "startline": 123, + "endline": 456, + "filepath": "/blah.txt", + } +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Equal(t, "oh no", results.GetFailed()[0].Description()) + assert.Equal(t, "/blah.txt", 
results.GetFailed()[0].Metadata().Range().GetFilename()) + assert.Equal(t, 123, results.GetFailed()[0].Metadata().Range().GetStartLine()) + assert.Equal(t, 456, results.GetFailed()[0].Metadata().Range().GetEndLine()) + +} + +func Test_RegoScanning_WithStaticMetadata(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +__rego_metadata__ := { + "id": "AA001", + "avd_id": "AVD-XX-9999", + "title": "This is a title", + "short_code": "short-code", + "severity": "LOW", + "type": "Dockerfile Security Check", + "description": "This is a description", + "recommended_actions": "This is a recommendation", + "url": "https://google.com", +} + +deny[res] { + input.evil + res := { + "msg": "oh no", + "startline": 123, + "endline": 456, + "filepath": "/blah.txt", + } +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + require.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + failure := results.GetFailed()[0] + + assert.Equal(t, "oh no", failure.Description()) + assert.Equal(t, "/blah.txt", failure.Metadata().Range().GetFilename()) + assert.Equal(t, 123, failure.Metadata().Range().GetStartLine()) + assert.Equal(t, 456, failure.Metadata().Range().GetEndLine()) + assert.Equal(t, "AVD-XX-9999", failure.Rule().AVDID) + assert.True(t, failure.Rule().HasID("AA001")) + assert.Equal(t, "This is a title", failure.Rule().Summary) + assert.Equal(t, severity.Low, failure.Rule().Severity) + assert.Equal(t, "This is a recommendation", failure.Rule().Resolution) + assert.Equal(t, "https://google.com", failure.Rule().Links[0]) + +} + +func Test_RegoScanning_WithMatchingInputSelector(t 
*testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +__rego_input__ := { + "selector": [{"type": "json"}], +} + +deny { + input.evil +} + +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} + +func Test_RegoScanning_WithNonMatchingInputSelector(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +__rego_input__ := { + "selector": [{"type": "testing"}], +} + +deny { + input.evil +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 0, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} + +func Test_RegoScanning_NoTracingByDefault(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, 
len(results.GetIgnored())) + + assert.Len(t, results.GetFailed()[0].Traces(), 0) +} + +func Test_RegoScanning_GlobalTracingEnabled(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + }) + + traceBuffer := bytes.NewBuffer([]byte{}) + + scanner := NewScanner(types.SourceJSON, options.ScannerWithTrace(traceBuffer)) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Len(t, results.GetFailed()[0].Traces(), 0) + assert.Greater(t, len(traceBuffer.Bytes()), 0) +} + +func Test_RegoScanning_PerResultTracingEnabled(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +deny { + input.evil +} +`, + }) + + scanner := NewScanner(types.SourceJSON, options.ScannerWithPerResultTracing(true)) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "evil": true, + }, + }) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) + + assert.Greater(t, len(results.GetFailed()[0].Traces()), 0) +} + +func Test_dynamicMetadata(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +__rego_metadata__ := { + "title" : sprintf("i am %s",[input.text]) +} + +deny { + input.text +} + +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + 
scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "text": "dynamic", + }, + }) + require.NoError(t, err) + assert.Equal(t, results[0].Rule().Summary, "i am dynamic") +} + +func Test_staticMetadata(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test + +__rego_metadata__ := { + "title" : "i am static" +} + +deny { + input.text +} + +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "text": "test", + }, + }) + require.NoError(t, err) + assert.Equal(t, results[0].Rule().Summary, "i am static") +} + +func Test_annotationMetadata(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": `# METADATA +# title: i am a title +# description: i am a description +# related_resources: +# - https://google.com +# custom: +# id: EG123 +# avd_id: AVD-EG-0123 +# severity: LOW +# recommended_action: have a cup of tea +package defsec.test + +deny { + input.text +} + +`, + "policies/test2.rego": `# METADATA +# title: i am another title +package defsec.test2 + +deny { + input.blah +} + +`, + }) + + scanner := NewScanner(types.SourceJSON) + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{ + Path: "/evil.lol", + Contents: map[string]interface{}{ + "text": "test", + }, + }) + require.NoError(t, err) + require.Len(t, results.GetFailed(), 1) + failure := results.GetFailed()[0].Rule() + assert.Equal(t, "i am a title", failure.Summary) + assert.Equal(t, "i am a description", failure.Explanation) + require.Len(t, failure.Links, 1) + assert.Equal(t, 
"https://google.com", failure.Links[0]) + assert.Equal(t, "AVD-EG-0123", failure.AVDID) + assert.Equal(t, severity.Low, failure.Severity) + assert.Equal(t, "have a cup of tea", failure.Resolution) +} + +func Test_RegoScanning_WithInvalidInputSchema(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": `# METADATA +# schemas: +# - input: schema["input"] +package defsec.test + +deny { + input.evil == "lol" +} +`, + }) + + scanner := NewScanner(types.SourceDockerfile) + scanner.SetRegoErrorLimit(0) // override to not allow any errors + assert.ErrorContains( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + "undefined ref: input.evil", + ) +} + +func Test_RegoScanning_WithValidInputSchema(t *testing.T) { + + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": `# METADATA +# schemas: +# - input: schema["input"] +package defsec.test + +deny { + input.Stages[0].Commands[0].Cmd == "lol" +} +`, + }) + + scanner := NewScanner(types.SourceDockerfile) + assert.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) +} + +func Test_RegoScanning_WithFilepathToSchema(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": `# METADATA +# schemas: +# - input: schema["dockerfile"] +package defsec.test + +deny { + input.evil == "lol" +} +`, + }) + scanner := NewScanner(types.SourceJSON) + scanner.SetRegoErrorLimit(0) // override to not allow any errors + assert.ErrorContains( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + "undefined ref: input.evil", + ) +} + +func Test_RegoScanning_CustomData(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test +import data.settings.DS123.foo_bar_baz + +deny { + not foo_bar_baz +} +`, + }) + + dataFS := CreateFS(t, map[string]string{ + "data/data.json": `{ + "settings": { + "DS123":{ + "foo_bar_baz":false + } + } +}`, + "data/junk.txt": 
"this file should be ignored", + }) + + scanner := NewScanner(types.SourceJSON) + scanner.SetDataFilesystem(dataFS) + scanner.SetDataDirs(".") + + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{}) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} + +func Test_RegoScanning_InvalidFS(t *testing.T) { + srcFS := CreateFS(t, map[string]string{ + "policies/test.rego": ` +package defsec.test +import data.settings.DS123.foo_bar_baz + +deny { + not foo_bar_baz +} +`, + }) + + dataFS := CreateFS(t, map[string]string{ + "data/data.json": `{ + "settings": { + "DS123":{ + "foo_bar_baz":false + } + } +}`, + "data/junk.txt": "this file should be ignored", + }) + + scanner := NewScanner(types.SourceJSON) + scanner.SetDataFilesystem(dataFS) + scanner.SetDataDirs("X://") + + require.NoError( + t, + scanner.LoadPolicies(false, false, srcFS, []string{"policies"}, nil), + ) + + results, err := scanner.ScanInput(context.TODO(), Input{}) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) + assert.Equal(t, 0, len(results.GetPassed())) + assert.Equal(t, 0, len(results.GetIgnored())) +} diff --git a/pkg/iac/rego/schemas/00_schema.go b/pkg/iac/rego/schemas/00_schema.go new file mode 100644 index 000000000000..e6674912fe58 --- /dev/null +++ b/pkg/iac/rego/schemas/00_schema.go @@ -0,0 +1,22 @@ +package schemas + +import _ "embed" + +type Schema string + +var ( + None Schema = "" + Anything Schema = `{}` + + //go:embed dockerfile.json + Dockerfile Schema + + //go:embed kubernetes.json + Kubernetes Schema + + //go:embed rbac.json + RBAC Schema + + //go:embed cloud.json + Cloud Schema +) diff --git a/pkg/iac/rego/schemas/builder.go b/pkg/iac/rego/schemas/builder.go new file mode 100644 index 000000000000..11b37d11304f --- /dev/null +++ 
b/pkg/iac/rego/schemas/builder.go @@ -0,0 +1,270 @@ +package schemas + +import ( + "fmt" + "reflect" + "strings" + + "github.com/aquasecurity/defsec/pkg/rego/convert" + "github.com/aquasecurity/defsec/pkg/state" +) + +type RawSchema struct { + Type string `json:"type"` // object + Properties map[string]Property `json:"properties,omitempty"` + Defs map[string]*Property `json:"definitions,omitempty"` +} + +type Property struct { + Type string `json:"type,omitempty"` + Ref string `json:"$ref,omitempty"` + Properties map[string]Property `json:"properties,omitempty"` + Items *Property `json:"items,omitempty"` +} + +type builder struct { + schema RawSchema +} + +func Build() (*RawSchema, error) { + + b := newBuilder() + + inputValue := reflect.ValueOf(state.State{}) + + err := b.fromInput(inputValue) + if err != nil { + return nil, err + } + + return &b.schema, nil +} + +func newBuilder() *builder { + return &builder{ + schema: RawSchema{ + Properties: nil, + Defs: nil, + }, + } +} + +func (b *builder) fromInput(inputValue reflect.Value) error { + + prop, err := b.readProperty("", nil, inputValue.Type(), 0) + if err != nil { + return err + } + if prop == nil { + return fmt.Errorf("property is nil") + } + b.schema.Properties = prop.Properties + b.schema.Type = prop.Type + return nil +} + +func refName(name string, parent, t reflect.Type) string { + if t.Name() == "" { // inline struct + return sanitise(parent.PkgPath() + "." + parent.Name() + "." + name) + } + return sanitise(t.PkgPath() + "." 
+ t.Name()) +} + +func sanitise(s string) string { + return strings.ReplaceAll(s, "/", ".") +} + +func (b *builder) readProperty(name string, parent, inputType reflect.Type, indent int) (*Property, error) { + + if inputType.Kind() == reflect.Ptr { + inputType = inputType.Elem() + } + + switch inputType.String() { + case "types.Metadata", "types.Range", "types.Reference": + return nil, nil + } + + if b.schema.Defs != nil { + _, ok := b.schema.Defs[refName(name, parent, inputType)] + if ok { + return &Property{ + Type: "object", + Ref: "#/definitions/" + refName(name, parent, inputType), + }, nil + } + } + + fmt.Println(strings.Repeat(" ", indent) + name) + + switch kind := inputType.Kind(); kind { + case reflect.Struct: + return b.readStruct(name, parent, inputType, indent) + case reflect.Slice: + return b.readSlice(name, parent, inputType, indent) + case reflect.String: + return &Property{ + Type: "string", + }, nil + case reflect.Int: + return &Property{ + Type: "integer", + }, nil + case reflect.Bool: + return &Property{ + Type: "boolean", + }, nil + case reflect.Float32, reflect.Float64: + return &Property{ + Type: "number", + }, nil + } + + switch inputType.Name() { + case "BoolValue": + return &Property{ + Type: "object", + Properties: map[string]Property{ + "value": { + Type: "boolean", + }, + }, + }, nil + case "IntValue": + return &Property{ + Type: "object", + Properties: map[string]Property{ + "value": { + Type: "integer", + }, + }, + }, nil + case "StringValue", "TimeValue", "BytesValue": + return &Property{ + Type: "object", + Properties: map[string]Property{ + "value": { + Type: "string", + }, + }, + }, nil + case "MapValue": + return &Property{ + Type: "object", + Properties: map[string]Property{ + "value": { + Type: "object", + }, + }, + }, nil + + } + + fmt.Printf("WARNING: unsupported type: %s (%s)\n", inputType.Name(), inputType) + return nil, nil +} + +var converterInterface = reflect.TypeOf((*convert.Converter)(nil)).Elem() + +func (b *builder) 
readStruct(name string, parent, inputType reflect.Type, indent int) (*Property, error) { + + if b.schema.Defs == nil { + b.schema.Defs = map[string]*Property{} + } + + def := &Property{ + Type: "object", + Properties: map[string]Property{}, + } + + if parent != nil { + b.schema.Defs[refName(name, parent, inputType)] = def + } + + if inputType.Implements(converterInterface) { + if inputType.Kind() == reflect.Ptr { + inputType = inputType.Elem() + } + returns := reflect.New(inputType).MethodByName("ToRego").Call(nil) + if err := b.readRego(def, name, parent, returns[0].Type(), returns[0].Interface(), indent); err != nil { + return nil, err + } + } else { + + for i := 0; i < inputType.NumField(); i++ { + field := inputType.Field(i) + prop, err := b.readProperty(field.Name, inputType, field.Type, indent+1) + if err != nil { + return nil, err + } + if prop == nil { + continue + } + key := strings.ToLower(field.Name) + if key == "metadata" { + continue + } + def.Properties[key] = *prop + } + } + + if parent == nil { + return def, nil + } + + return &Property{ + Type: "object", + Ref: "#/definitions/" + refName(name, parent, inputType), + }, nil +} + +func (b *builder) readSlice(name string, parent, inputType reflect.Type, indent int) (*Property, error) { + + items, err := b.readProperty(name, parent, inputType.Elem(), indent+1) + if err != nil { + return nil, err + } + + prop := &Property{ + Type: "array", + Items: items, + } + return prop, nil +} + +func (b *builder) readRego(def *Property, name string, parent reflect.Type, typ reflect.Type, raw interface{}, indent int) error { + + switch cast := raw.(type) { + case map[string]interface{}: + def.Type = "object" + for k, v := range cast { + child := &Property{ + Properties: map[string]Property{}, + } + if err := b.readRego(child, k, reflect.TypeOf(raw), reflect.TypeOf(v), v, indent+1); err != nil { + return err + } + def.Properties[k] = *child + } + case map[string]string: + def.Type = "object" + for k, v := range cast { 
+ child := &Property{ + Properties: map[string]Property{}, + } + if err := b.readRego(child, k, reflect.TypeOf(raw), reflect.TypeOf(v), v, indent+1); err != nil { + return err + } + def.Properties[k] = *child + } + default: + prop, err := b.readProperty(name, parent, typ, indent) + if err != nil { + return err + } + *def = *prop + } + + return nil + +} diff --git a/pkg/iac/rego/schemas/cloud.json b/pkg/iac/rego/schemas/cloud.json new file mode 100644 index 000000000000..d6ca8b87575f --- /dev/null +++ b/pkg/iac/rego/schemas/cloud.json @@ -0,0 +1,6818 @@ +{ + "type": "object", + "properties": { + "aws": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AWS" + }, + "azure": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.Azure" + }, + "cloudstack": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.CloudStack" + }, + "digitalocean": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.DigitalOcean" + }, + "github": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.GitHub" + }, + "google": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.Google" + }, + "kubernetes": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Kubernetes" + }, + "nifcloud": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.Nifcloud" + }, + "openstack": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.OpenStack" + }, + "oracle": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.Oracle" + } + }, + "definitions": { + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AWS": 
{ + "type": "object", + "properties": { + "accessanalyzer": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.AccessAnalyzer" + }, + "apigateway": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.APIGateway" + }, + "athena": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Athena" + }, + "cloudfront": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Cloudfront" + }, + "cloudtrail": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.CloudTrail" + }, + "cloudwatch": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.CloudWatch" + }, + "codebuild": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.CodeBuild" + }, + "config": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.config.Config" + }, + "documentdb": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.DocumentDB" + }, + "dynamodb": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.DynamoDB" + }, + "ec2": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.EC2" + }, + "ecr": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.ECR" + }, + "ecs": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ECS" + }, + "efs": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.efs.EFS" + }, + "eks": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.EKS" + }, + "elasticache": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.ElastiCache" + }, + "elasticsearch": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Elasticsearch" + }, + "elb": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.ELB" + }, + "emr": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.EMR" + }, + "iam": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.IAM" + }, + "kinesis": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Kinesis" + }, + "kms": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kms.KMS" + }, + "lambda": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Lambda" + }, + "meta": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.Meta" + }, + "mq": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.MQ" + }, + "msk": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.MSK" + }, + "neptune": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Neptune" + }, + "rds": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.RDS" + }, + "redshift": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Redshift" + }, + "s3": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.S3" 
+ }, + "sam": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SAM" + }, + "sns": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.SNS" + }, + "sqs": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.SQS" + }, + "ssm": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ssm.SSM" + }, + "workspaces": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.WorkSpaces" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AssumeRole": { + "type": "object", + "properties": { + "duration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "externalid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policyarns": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "rolearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sessionname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sourceidentity": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "tags": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.MapValue" + }, + "transitivetagkeys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AssumeRoleWithWebIdentity": { + "type": "object", + "properties": { + "duration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policyarns": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "rolearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sessionname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "webidentitytoken": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "webidentitytokenfile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.DefaultTags": { + "type": "object", + "properties": { + "tags": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.MapValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.IgnoreTags": { + "type": "object", + "properties": { + "keyprefixes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "keys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.Meta": { + "type": "object", + "properties": { + "tfproviders": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.TerraformProvider" + } + } + } + }, 
+ "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.TerraformProvider": { + "type": "object", + "properties": { + "accesskey": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "alias": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "allowedaccountsids": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "assumerole": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AssumeRole" + }, + "assumerolewithwebidentity": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.AssumeRoleWithWebIdentity" + }, + "customcabundle": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "defaulttags": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.DefaultTags" + }, + "ec2metadataserviceendpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "ec2metadataserviceendpointmode": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "endpoints": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.MapValue" + }, + "forbiddenaccountids": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "httpproxy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "ignoretags": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.IgnoreTags" + }, + "insecure": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "maxretries": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "profile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "region": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "retrymode": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "s3useast1regionalendpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "s3usepathstyle": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "secretkey": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sharedconfigfiles": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "sharedcredentialsfiles": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "skipcredentialsvalidation": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "skipmetadataapicheck": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "skipregionvalidation": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "skiprequestingaccountid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "stsregion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "token": { + "type": "object", + 
"$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "usedualstackendpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "usefipsendpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "version": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.AccessAnalyzer": { + "type": "object", + "properties": { + "analyzers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.Analyzer" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.Analyzer": { + "type": "object", + "properties": { + "active": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "arn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "findings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.Findings" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.accessanalyzer.Findings": { + "type": "object" + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.APIGateway": { + "type": "object", + "properties": { + "v1": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.APIGateway" + }, + "v2": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.APIGateway" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.API": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "resources": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Resource" + } + }, + "stages": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Stage" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.APIGateway": { + "type": "object", + "properties": { + "apis": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.API" + } + }, + "domainnames": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.DomainName" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.AccessLogging": { + "type": "object", + "properties": { + "cloudwatchloggrouparn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.DomainName": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "securitypolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Method": { + "type": "object", + "properties": { + "apikeyrequired": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "authorizationtype": { + 
"type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "httpmethod": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.RESTMethodSettings": { + "type": "object", + "properties": { + "cachedataencrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "cacheenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "method": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Resource": { + "type": "object", + "properties": { + "methods": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Method" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.Stage": { + "type": "object", + "properties": { + "accesslogging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.AccessLogging" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "restmethodsettings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v1.RESTMethodSettings" + } + }, + "xraytracingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.API": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "protocoltype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "stages": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.Stage" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.APIGateway": { + "type": "object", + "properties": { + "apis": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.API" + } + }, + "domainnames": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.DomainName" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.AccessLogging": { + "type": "object", + "properties": { + "cloudwatchloggrouparn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.DomainName": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "securitypolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.Stage": { + "type": "object", + "properties": { + "accesslogging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.apigateway.v2.AccessLogging" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Athena": { + "type": 
"object", + "properties": { + "databases": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Database" + } + }, + "workgroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Workgroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Database": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.EncryptionConfiguration" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.EncryptionConfiguration": { + "type": "object", + "properties": { + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.Workgroup": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.athena.EncryptionConfiguration" + }, + "enforceconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.CacheBehaviour": { + "type": "object", + "properties": { + "viewerprotocolpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Cloudfront": { + "type": "object", + "properties": { + "distributions": { + "type": "array", + "items": { + 
"type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Distribution" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Distribution": { + "type": "object", + "properties": { + "defaultcachebehaviour": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.CacheBehaviour" + }, + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Logging" + }, + "orderercachebehaviours": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.CacheBehaviour" + } + }, + "viewercertificate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.ViewerCertificate" + }, + "wafid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.Logging": { + "type": "object", + "properties": { + "bucket": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudfront.ViewerCertificate": { + "type": "object", + "properties": { + "cloudfrontdefaultcertificate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "minimumprotocolversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sslsupportmethod": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.CloudTrail": { + "type": "object", + "properties": { + "trails": { + "type": "array", + "items": { + "type": 
"object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.Trail" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.DataResource": { + "type": "object", + "properties": { + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "values": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.EventSelector": { + "type": "object", + "properties": { + "dataresources": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.DataResource" + } + }, + "readwritetype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.Trail": { + "type": "object", + "properties": { + "bucketname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "cloudwatchlogsloggrouparn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "enablelogfilevalidation": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "eventselectors": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudtrail.EventSelector" + } + }, + "islogging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "ismultiregion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.Alarm": { + "type": "object", + "properties": { + "alarmname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "dimensions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.AlarmDimension" + } + }, + "metricname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "metrics": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.MetricDataQuery" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.AlarmDimension": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "value": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.CloudWatch": { + "type": "object", + "properties": { + "alarms": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.Alarm" + } + }, + "loggroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.LogGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.LogGroup": { + "type": "object", + "properties": { + "arn": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "metricfilters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.MetricFilter" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "retentionindays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.MetricDataQuery": { + "type": "object", + "properties": { + "expression": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "id": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.cloudwatch.MetricFilter": { + "type": "object", + "properties": { + "filtername": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "filterpattern": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.ArtifactSettings": { + "type": "object", + "properties": { + "encryptionenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.CodeBuild": { + "type": "object", + "properties": { + "projects": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.Project" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.Project": { + "type": "object", + "properties": { + "artifactsettings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.ArtifactSettings" + }, + "secondaryartifactsettings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.codebuild.ArtifactSettings" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.config.Config": { + "type": "object", + "properties": { + "configurationaggregrator": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.config.ConfigurationAggregrator" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.config.ConfigurationAggregrator": { + "type": "object", + "properties": { + "sourceallregions": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.Cluster": { + "type": "object", + "properties": { + "backupretentionperiod": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "enabledlogexports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "identifier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.Instance" + } + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "storageencrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } 
+ }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.DocumentDB": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.Cluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.documentdb.Instance": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.DAXCluster": { + "type": "object", + "properties": { + "pointintimerecovery": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "serversideencryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.ServerSideEncryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.DynamoDB": { + "type": "object", + "properties": { + "daxclusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.DAXCluster" + } + }, + "tables": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.Table" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.ServerSideEncryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.Table": { + "type": "object", + "properties": { + "pointintimerecovery": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "serversideencryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.dynamodb.ServerSideEncryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.BlockDevice": { + "type": "object", + "properties": { + "encrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.EC2": { + "type": "object", + "properties": { + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Instance" + } + }, + "launchconfigurations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.LaunchConfiguration" + } + }, + "launchtemplates": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.LaunchTemplate" + } + }, + "networkacls": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.NetworkACL" + } + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroup" + } + }, + "subnets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Subnet" + } + }, + "volumes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Volume" + } + }, + "vpcs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.VPC" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Encryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Instance": { + "type": "object", + "properties": { + "ebsblockdevices": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.BlockDevice" + } + }, + "metadataoptions": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.MetadataOptions" + }, + "rootblockdevice": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.BlockDevice" + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroup" + } + }, + "userdata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.LaunchConfiguration": { + "type": "object", + "properties": { + "associatepublicip": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "ebsblockdevices": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.BlockDevice" + } + }, + "metadataoptions": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.MetadataOptions" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "rootblockdevice": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.BlockDevice" + }, + "userdata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.LaunchTemplate": { + "type": "object", + "properties": { + "instance": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Instance" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.MetadataOptions": { + "type": "object", + "properties": { + "httpendpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "httptokens": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.NetworkACL": { + "type": "object", + "properties": { + "isdefaultrule": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "rules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.NetworkACLRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.NetworkACLRule": { + "type": "object", + "properties": { + "action": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "cidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroup": { + "type": 
"object", + "properties": { + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "egressrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroupRule" + } + }, + "ingressrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroupRule" + } + }, + "isdefault": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "vpcid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroupRule": { + "type": "object", + "properties": { + "cidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Subnet": { + "type": "object", + "properties": { + "mappubliciponlaunch": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.VPC": { + "type": "object", + "properties": { + "flowlogsenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "id": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "isdefault": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.SecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Volume": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ec2.Encryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.ECR": { + "type": "object", + "properties": { + "repositories": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.Repository" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.Encryption": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.ImageScanning": { + "type": "object", + "properties": { + "scanonpush": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.Repository": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.Encryption" + }, + "imagescanning": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecr.ImageScanning" + }, + "imagetagsimmutable": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.Cluster": { + "type": "object", + "properties": { + "settings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ClusterSettings" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ClusterSettings": { + "type": "object", + "properties": { + "containerinsightsenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ContainerDefinition": { + "type": "object", + "properties": { + "cpu": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "environment": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.EnvVar" + } + }, + "essential": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "image": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "memory": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "portmappings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.PortMapping" + } + }, + "privileged": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ECS": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.Cluster" + } + }, + "taskdefinitions": 
{ + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.TaskDefinition" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.EFSVolumeConfiguration": { + "type": "object", + "properties": { + "transitencryptionenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.EnvVar": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.PortMapping": { + "type": "object", + "properties": { + "containerport": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "hostport": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.TaskDefinition": { + "type": "object", + "properties": { + "containerdefinitions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.ContainerDefinition" + } + }, + "volumes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.Volume" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.Volume": { + "type": "object", + "properties": { + "efsvolumeconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ecs.EFSVolumeConfiguration" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.efs.EFS": { + "type": "object", + "properties": { + "filesystems": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.efs.FileSystem" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.efs.FileSystem": { + "type": "object", + "properties": { + "encrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Cluster": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Encryption" + }, + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Logging" + }, + "publicaccesscidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "publicaccessenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.EKS": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Cluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Encryption": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "secrets": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.eks.Logging": { + "type": "object", + "properties": { + "api": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "audit": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "authenticator": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "controllermanager": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "scheduler": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.Cluster": { + "type": "object", + "properties": { + "engine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "nodetype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "snapshotretentionlimit": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.ElastiCache": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.Cluster" + } + }, + "replicationgroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.ReplicationGroup" + } + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.SecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.ReplicationGroup": { + "type": "object", + "properties": { + "atrestencryptionenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "transitencryptionenabled": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticache.SecurityGroup": { + "type": "object", + "properties": { + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.AtRestEncryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Domain": { + "type": "object", + "properties": { + "accesspolicies": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "atrestencryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.AtRestEncryption" + }, + "dedicatedmasterenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "domainname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "endpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Endpoint" + }, + "logpublishing": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.LogPublishing" + }, + "servicesoftwareoptions": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.ServiceSoftwareOptions" + }, + "transitencryption": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.TransitEncryption" + }, + "vpcid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Elasticsearch": { + "type": "object", + "properties": { + "domains": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Domain" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.Endpoint": { + "type": "object", + "properties": { + "enforcehttps": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "tlspolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.LogPublishing": { + "type": "object", + "properties": { + "auditenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "cloudwatchloggrouparn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.ServiceSoftwareOptions": { + "type": "object", + "properties": { + "currentversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "newversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "updateavailable": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "updatestatus": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elasticsearch.TransitEncryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.Action": { + "type": "object", + "properties": { + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.ELB": { + "type": "object", + "properties": { + "loadbalancers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.LoadBalancer" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.Listener": { + "type": "object", + "properties": { + "defaultactions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.Action" + } + }, + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "tlspolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.LoadBalancer": { + "type": "object", + "properties": { + "dropinvalidheaderfields": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "internal": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "listeners": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.elb.Listener" + } + }, + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.Cluster": { + "type": "object", + "properties": { + "settings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.ClusterSettings" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.ClusterSettings": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "releaselabel": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "servicerole": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.EMR": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.Cluster" + } + }, + "securityconfiguration": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.SecurityConfiguration" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.emr.SecurityConfiguration": { + "type": "object", + "properties": { + "configuration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.AccessKey": { + "type": "object", + "properties": { + "accesskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "active": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "creationdate": { + "type": "object", 
+ "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + }, + "lastaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Document": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "string" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Group": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "users": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.User" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.IAM": { + "type": "object", + "properties": { + "groups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Group" + } + }, + "passwordpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.PasswordPolicy" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "roles": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Role" + } + }, + "servercertificates": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.ServerCertificate" + } + }, + "users": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.User" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.MFADevice": { + "type": "object", + "properties": { + "isvirtual": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.PasswordPolicy": { + "type": "object", + "properties": { + "maxagedays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "minimumlength": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "requirelowercase": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "requirenumbers": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "requiresymbols": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "requireuppercase": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "reusepreventioncount": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy": { + "type": "object", + "properties": { + "builtin": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "document": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Document" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + 
} + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Role": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.ServerCertificate": { + "type": "object", + "properties": { + "expiration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.User": { + "type": "object", + "properties": { + "accesskeys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.AccessKey" + } + }, + "groups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Group" + } + }, + "lastaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + }, + "mfadevices": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.MFADevice" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Encryption": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "type": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Kinesis": { + "type": "object", + "properties": { + "streams": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Stream" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Stream": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kinesis.Encryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kms.KMS": { + "type": "object", + "properties": { + "keys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kms.Key" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.kms.Key": { + "type": "object", + "properties": { + "rotationenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "usage": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Function": { + "type": "object", + "properties": { + "permissions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Permission" + } + }, + "tracing": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Tracing" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Lambda": { + "type": "object", + "properties": { + "functions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Function" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Permission": { + "type": "object", + "properties": { + "principal": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sourcearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.lambda.Tracing": { + "type": "object", + "properties": { + "mode": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.Broker": { + "type": "object", + "properties": { + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.Logging" + }, + "publicaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.Logging": { + "type": "object", + "properties": { + "audit": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "general": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.MQ": { + "type": "object", + "properties": { + "brokers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.mq.Broker" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.BrokerLogging": { + "type": "object", + "properties": { + "cloudwatch": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.CloudwatchLogging" + }, + "firehose": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.FirehoseLogging" + 
}, + "s3": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.S3Logging" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.CloudwatchLogging": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.Cluster": { + "type": "object", + "properties": { + "encryptionatrest": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.EncryptionAtRest" + }, + "encryptionintransit": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.EncryptionInTransit" + }, + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.Logging" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.EncryptionAtRest": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyarn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.EncryptionInTransit": { + "type": "object", + "properties": { + "clientbroker": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.FirehoseLogging": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.Logging": { + "type": "object", + "properties": { + "broker": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.BrokerLogging" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.MSK": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.Cluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.msk.S3Logging": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Cluster": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Logging" + }, + "storageencrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Logging": { + "type": "object", + "properties": { + "audit": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Neptune": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.neptune.Cluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Classic": { + "type": "object", + "properties": { + "dbsecuritygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBSecurityGroup" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Cluster": { + "type": "object", + "properties": { + "availabilityzones": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "backupretentionperioddays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "deletionprotection": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Encryption" + }, + "engine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.ClusterInstance" + } + }, + "latestrestorabletime": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + }, + "performanceinsights": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.PerformanceInsights" + }, + "publicaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "replicationsourcearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "skipfinalsnapshot": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.ClusterInstance": { + "type": "object", + "properties": { + "clusteridentifier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "instance": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Instance" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBParameterGroupsList": { + "type": "object", + "properties": { + "dbparametergroupname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBSecurityGroup": { + "type": "object" + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBSnapshotAttributes": { + "type": "object", + "properties": { + "attributevalues": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Encryption": { + "type": "object", + "properties": { + "encryptstorage": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Instance": { + "type": "object", + "properties": { + "autominorversionupgrade": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "backupretentionperioddays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "dbinstancearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "dbinstanceidentifier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "dbparametergroups": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBParameterGroupsList" + } + }, + "deletionprotection": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enabledcloudwatchlogsexports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Encryption" + }, + "engine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "engineversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "iamauthenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "latestrestorabletime": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + }, + "multiaz": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "performanceinsights": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.PerformanceInsights" + }, + "publicaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "publiclyaccessible": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "readreplicadbinstanceidentifiers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "replicationsourcearn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "storageencrypted": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "taglist": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.TagList" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.ParameterGroups": { + "type": "object", + "properties": { + "dbparametergroupfamily": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "dbparametergroupname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "parameters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Parameters" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Parameters": { + "type": "object", + "properties": { + "parametername": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "parametervalue": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.PerformanceInsights": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.RDS": { + "type": "object", + "properties": { + "classic": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Classic" + }, + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Cluster" + } + }, + "instances": { + "type": 
"array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Instance" + } + }, + "parametergroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.ParameterGroups" + } + }, + "snapshots": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Snapshots" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.Snapshots": { + "type": "object", + "properties": { + "dbsnapshotarn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "dbsnapshotidentifier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "encrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "snapshotattributes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.DBSnapshotAttributes" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.rds.TagList": { + "type": "object" + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Cluster": { + "type": "object", + "properties": { + "allowversionupgrade": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "automatedsnapshotretentionperiod": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "clusteridentifier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "encryption": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Encryption" + }, + "endpoint": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.EndPoint" + }, + "loggingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "masterusername": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "nodetype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "numberofnodes": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "publiclyaccessible": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "subnetgroupname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "vpcid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.ClusterParameter": { + "type": "object", + "properties": { + "parametername": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "parametervalue": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Encryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.EndPoint": { + "type": "object", + "properties": { + 
"port": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Redshift": { + "type": "object", + "properties": { + "clusterparameters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.ClusterParameter" + } + }, + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.Cluster" + } + }, + "reservednodes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.ReservedNode" + } + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.SecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.ReservedNode": { + "type": "object", + "properties": { + "nodetype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.redshift.SecurityGroup": { + "type": "object", + "properties": { + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Bucket": { + "type": "object", + "properties": { + "accelerateconfigurationstatus": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "acl": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "bucketlocation": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "bucketpolicies": { + 
"type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Encryption" + }, + "lifecycleconfiguration": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Rules" + } + }, + "logging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Logging" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "objects": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Contents" + } + }, + "publicaccessblock": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.PublicAccessBlock" + }, + "versioning": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Versioning" + }, + "website": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Website" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Contents": { + "type": "object" + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Encryption": { + "type": "object", + "properties": { + "algorithm": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Logging": { + "type": "object", + "properties": { + "enabled": { 
+ "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "targetbucket": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.PublicAccessBlock": { + "type": "object", + "properties": { + "blockpublicacls": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "blockpublicpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "ignorepublicacls": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "restrictpublicbuckets": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Rules": { + "type": "object", + "properties": { + "status": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.S3": { + "type": "object", + "properties": { + "buckets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Bucket" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Versioning": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "mfadelete": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.s3.Website": { + "type": "object" + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.API": { + "type": "object", + "properties": { + "accesslogging": { + "type": "object", + 
"$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.AccessLogging" + }, + "domainconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.DomainConfiguration" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "restmethodsettings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.RESTMethodSettings" + }, + "tracingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.AccessLogging": { + "type": "object", + "properties": { + "cloudwatchloggrouparn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Application": { + "type": "object", + "properties": { + "location": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Location" + }, + "locationpath": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.DomainConfiguration": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "securitypolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Function": { + "type": "object", + "properties": { + "functionname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "managedpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "tracing": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.HttpAPI": { + "type": "object", + "properties": { + "accesslogging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.AccessLogging" + }, + "defaultroutesettings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.RouteSettings" + }, + "domainconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.DomainConfiguration" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Location": { + "type": "object", + "properties": { + "applicationid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "semanticversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.LoggingConfiguration": { + "type": "object", + "properties": { + "loggingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.RESTMethodSettings": { + "type": "object", + "properties": { + "cachedataencrypted": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "datatraceenabled": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "loggingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "metricsenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.RouteSettings": { + "type": "object", + "properties": { + "datatraceenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "detailedmetricsenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "loggingenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SAM": { + "type": "object", + "properties": { + "apis": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.API" + } + }, + "applications": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Application" + } + }, + "functions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.Function" + } + }, + "httpapis": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.HttpAPI" + } + }, + "simpletables": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SimpleTable" + } + }, + "statemachines": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.StateMachine" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SSESpecification": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "kmsmasterkeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SimpleTable": { + "type": "object", + "properties": { + "ssespecification": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.SSESpecification" + }, + "tablename": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.StateMachine": { + "type": "object", + "properties": { + "loggingconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.LoggingConfiguration" + }, + "managedpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "tracing": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.TracingConfiguration" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sam.TracingConfiguration": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.Encryption": { + "type": "object", + "properties": { 
+ "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.SNS": { + "type": "object", + "properties": { + "topics": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.Topic" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.Topic": { + "type": "object", + "properties": { + "arn": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sns.Encryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.Encryption": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "managedencryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.Queue": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.Encryption" + }, + "policies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.iam.Policy" + } + }, + "queueurl": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.SQS": { + "type": "object", + "properties": { + "queues": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.sqs.Queue" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ssm.SSM": { + "type": "object", + "properties": { + "secrets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ssm.Secret" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.ssm.Secret": { + "type": "object", + "properties": { + "kmskeyid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.Encryption": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.Volume": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.Encryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.WorkSpace": { + "type": "object", + "properties": { + "rootvolume": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.Volume" + }, + "uservolume": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.Volume" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.WorkSpaces": { + "type": "object", + "properties": { + "workspaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.aws.workspaces.WorkSpace" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.Azure": { + "type": "object", + "properties": { + "appservice": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.AppService" + }, + 
"authorization": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.Authorization" + }, + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.Compute" + }, + "container": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.Container" + }, + "database": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Database" + }, + "datafactory": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datafactory.DataFactory" + }, + "datalake": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datalake.DataLake" + }, + "keyvault": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.KeyVault" + }, + "monitor": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.Monitor" + }, + "network": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.Network" + }, + "securitycenter": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.SecurityCenter" + }, + "storage": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Storage" + }, + "synapse": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.synapse.Synapse" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.AppService": { + "type": "object", + "properties": { + "functionapps": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.FunctionApp" 
+ } + }, + "services": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.FunctionApp": { + "type": "object", + "properties": { + "httpsonly": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service": { + "type": "object", + "properties": { + "authentication": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Authentication" + }, + "enableclientcert": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "identity": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Identity" + }, + "site": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Site" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Authentication": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Identity": { + "type": "object", + "properties": { + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.appservice.Service.Site": { + "type": "object", + "properties": { + "enablehttp2": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "minimumtlsversion": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.Authorization": { + "type": "object", + "properties": { + "roledefinitions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.RoleDefinition" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.Permission": { + "type": "object", + "properties": { + "actions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.RoleDefinition": { + "type": "object", + "properties": { + "assignablescopes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "permissions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.authorization.Permission" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.Compute": { + "type": "object", + "properties": { + "linuxvirtualmachines": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.LinuxVirtualMachine" + } + }, + "manageddisks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.ManagedDisk" + } + }, + "windowsvirtualmachines": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.WindowsVirtualMachine" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.Encryption": { + "type": "object", + 
"properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.LinuxVirtualMachine": { + "type": "object", + "properties": { + "osprofilelinuxconfig": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.OSProfileLinuxConfig" + }, + "virtualmachine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.VirtualMachine" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.ManagedDisk": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.Encryption" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.OSProfileLinuxConfig": { + "type": "object", + "properties": { + "disablepasswordauthentication": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.VirtualMachine": { + "type": "object", + "properties": { + "customdata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.WindowsVirtualMachine": { + "type": "object", + "properties": { + "virtualmachine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.compute.VirtualMachine" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.AddonProfile": { + "type": "object", + "properties": { + "omsagent": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.OMSAgent" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.Container": { + "type": "object", + "properties": { + "kubernetesclusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.KubernetesCluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.KubernetesCluster": { + "type": "object", + "properties": { + "addonprofile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.AddonProfile" + }, + "apiserverauthorizedipranges": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "enableprivatecluster": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "networkprofile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.NetworkProfile" + }, + "rolebasedaccesscontrol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.RoleBasedAccessControl" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.NetworkProfile": { + "type": "object", + "properties": { + "networkpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.OMSAgent": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.container.RoleBasedAccessControl": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, 
+ "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Database": { + "type": "object", + "properties": { + "mariadbservers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MariaDBServer" + } + }, + "mssqlservers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MSSQLServer" + } + }, + "mysqlservers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MySQLServer" + } + }, + "postgresqlservers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.PostgreSQLServer" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.ExtendedAuditingPolicy": { + "type": "object", + "properties": { + "retentionindays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.FirewallRule": { + "type": "object", + "properties": { + "endip": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "startip": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MSSQLServer": { + "type": "object", + "properties": { + "extendedauditingpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.ExtendedAuditingPolicy" + } + }, + "securityalertpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.SecurityAlertPolicy" + } + }, + "server": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Server" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MariaDBServer": { + "type": "object", + "properties": { + "server": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Server" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.MySQLServer": { + "type": "object", + "properties": { + "server": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Server" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.PostgreSQLServer": { + "type": "object", + "properties": { + "config": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.PostgresSQLConfig" + }, + "server": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Server" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.PostgresSQLConfig": { + "type": "object", + "properties": { + "connectionthrottling": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logcheckpoints": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logconnections": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.SecurityAlertPolicy": { + "type": "object", + "properties": { + "disabledalerts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + 
} + }, + "emailaccountadmins": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "emailaddresses": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.Server": { + "type": "object", + "properties": { + "enablepublicnetworkaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enablesslenforcement": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "firewallrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.database.FirewallRule" + } + }, + "minimumtlsversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datafactory.DataFactory": { + "type": "object", + "properties": { + "datafactories": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datafactory.Factory" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datafactory.Factory": { + "type": "object", + "properties": { + "enablepublicnetwork": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datalake.DataLake": { + "type": "object", + "properties": { + "stores": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datalake.Store" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.datalake.Store": { + "type": "object", + "properties": 
{ + "enableencryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Key": { + "type": "object", + "properties": { + "expirydate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.KeyVault": { + "type": "object", + "properties": { + "vaults": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Vault" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.NetworkACLs": { + "type": "object", + "properties": { + "defaultaction": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Secret": { + "type": "object", + "properties": { + "contenttype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "expirydate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Vault": { + "type": "object", + "properties": { + "enablepurgeprotection": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "keys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Key" + } + }, + "networkacls": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.NetworkACLs" + }, + "secrets": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.keyvault.Secret" + } + }, + "softdeleteretentiondays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.LogProfile": { + "type": "object", + "properties": { + "categories": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "locations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "retentionpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.RetentionPolicy" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.Monitor": { + "type": "object", + "properties": { + "logprofiles": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.LogProfile" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.monitor.RetentionPolicy": { + "type": "object", + "properties": { + "days": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.Network": { + "type": "object", + "properties": { + "networkwatcherflowlogs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.NetworkWatcherFlowLog" + } + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.SecurityGroup" + 
} + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.NetworkWatcherFlowLog": { + "type": "object", + "properties": { + "retentionpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.RetentionPolicy" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.PortRange": { + "type": "object", + "properties": { + "end": { + "type": "integer" + }, + "start": { + "type": "integer" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.RetentionPolicy": { + "type": "object", + "properties": { + "days": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.SecurityGroup": { + "type": "object", + "properties": { + "rules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.SecurityGroupRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.SecurityGroupRule": { + "type": "object", + "properties": { + "allow": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "destinationaddresses": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "destinationports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.PortRange" + } + }, + "outbound": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "protocol": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sourceaddresses": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "sourceports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.network.PortRange" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.Contact": { + "type": "object", + "properties": { + "enablealertnotifications": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "phone": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.SecurityCenter": { + "type": "object", + "properties": { + "contacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.Contact" + } + }, + "subscriptions": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.SubscriptionPricing" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.securitycenter.SubscriptionPricing": { + "type": "object", + "properties": { + "tier": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Account": { + "type": "object", + "properties": { + "containers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Container" + } + }, + "enforcehttps": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "minimumtlsversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "networkrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.NetworkRule" + } + }, + "queueproperties": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.QueueProperties" + }, + "queues": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Queue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Container": { + "type": "object", + "properties": { + "publicaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.NetworkRule": { + "type": "object", + "properties": { + "allowbydefault": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "bypass": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Queue": { + "type": "object", + "properties": { + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.QueueProperties": { + "type": "object", + "properties": { + "enablelogging": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Storage": { + "type": "object", + "properties": { + 
"accounts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.storage.Account" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.synapse.Synapse": { + "type": "object", + "properties": { + "workspaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.synapse.Workspace" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.azure.synapse.Workspace": { + "type": "object", + "properties": { + "enablemanagedvirtualnetwork": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.CloudStack": { + "type": "object", + "properties": { + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.compute.Compute" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.compute.Compute": { + "type": "object", + "properties": { + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.compute.Instance" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.cloudstack.compute.Instance": { + "type": "object", + "properties": { + "userdata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.DigitalOcean": { + "type": "object", + "properties": { + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Compute" + }, + "spaces": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Spaces" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Compute": { + "type": "object", + "properties": { + "droplets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Droplet" + } + }, + "firewalls": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Firewall" + } + }, + "kubernetesclusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.KubernetesCluster" + } + }, + "loadbalancers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.LoadBalancer" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Droplet": { + "type": "object", + "properties": { + "sshkeys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.Firewall": { + "type": "object", + "properties": { + "inboundrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.InboundFirewallRule" + } + }, + "outboundrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.OutboundFirewallRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.ForwardingRule": { + "type": "object", + "properties": { + "entryprotocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.InboundFirewallRule": { + "type": "object", + "properties": { + "sourceaddresses": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.KubernetesCluster": { + "type": "object", + "properties": { + "autoupgrade": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "surgeupgrade": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.LoadBalancer": { + "type": "object", + "properties": { + "forwardingrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.ForwardingRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.compute.OutboundFirewallRule": { + "type": "object", + "properties": { + "destinationaddresses": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Bucket": { + "type": "object", + "properties": { + "acl": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "forcedestroy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "objects": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Object" + } + }, + 
"versioning": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Versioning" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Object": { + "type": "object", + "properties": { + "acl": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Spaces": { + "type": "object", + "properties": { + "buckets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Bucket" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.digitalocean.spaces.Versioning": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.BranchProtection": { + "type": "object", + "properties": { + "requiresignedcommits": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.EnvironmentSecret": { + "type": "object", + "properties": { + "encryptedvalue": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "environment": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "plaintextvalue": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "repository": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "secretname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.GitHub": { + "type": "object", + "properties": { + "branchprotections": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.BranchProtection" + } + }, + "environmentsecrets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.EnvironmentSecret" + } + }, + "repositories": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.Repository" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.github.Repository": { + "type": "object", + "properties": { + "archived": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "public": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "vulnerabilityalerts": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.Google": { + "type": "object", + "properties": { + "bigquery": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.BigQuery" + }, + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Compute" + }, + "dns": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.DNS" + }, + "gke": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.GKE" + }, + "iam": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.IAM" + }, + "kms": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.KMS" + }, + "sql": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.SQL" + }, + "storage": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.Storage" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.AccessGrant": { + "type": "object", + "properties": { + "domain": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "role": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "specialgroup": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.BigQuery": { + "type": "object", + "properties": { + "datasets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.Dataset" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.Dataset": { + "type": "object", + "properties": { + "accessgrants": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.bigquery.AccessGrant" + } + }, + "id": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Compute": { + "type": "object", + "properties": { + "disks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Disk" + } + }, + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Instance" + } + }, + "networks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Network" + } + }, + "projectmetadata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ProjectMetadata" + }, + "sslpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.SSLPolicy" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Disk": { + "type": "object", + "properties": { + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.DiskEncryption" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.DiskEncryption": { + "type": "object", + "properties": { + "kmskeylink": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "rawkey": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BytesValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.EgressRule": { + "type": "object", + "properties": { + "destinationranges": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "firewallrule": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.FirewallRule" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Firewall": { + "type": "object", + "properties": { + "egressrules": { + "type": "array", + "items": { + "type": 
"object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.EgressRule" + } + }, + "ingressrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.IngressRule" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sourcetags": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "targettags": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.FirewallRule": { + "type": "object", + "properties": { + "enforced": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "isallow": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "ports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + }, + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.IngressRule": { + "type": "object", + "properties": { + "firewallrule": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.FirewallRule" + }, + "sourceranges": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Instance": { + "type": "object", + "properties": { + "attacheddisks": { + "type": "array", + 
"items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Disk" + } + }, + "bootdisks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Disk" + } + }, + "canipforward": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enableprojectsshkeyblocking": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enableserialport": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "networkinterfaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.NetworkInterface" + } + }, + "osloginenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "serviceaccount": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ServiceAccount" + }, + "shieldedvm": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ShieldedVMConfig" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Network": { + "type": "object", + "properties": { + "firewall": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Firewall" + }, + "subnetworks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.SubNetwork" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.NetworkInterface": { + "type": "object", + 
"properties": { + "haspublicip": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "natip": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "network": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.Network" + }, + "subnetwork": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.SubNetwork" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ProjectMetadata": { + "type": "object", + "properties": { + "enableoslogin": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.SSLPolicy": { + "type": "object", + "properties": { + "minimumtlsversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "profile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ServiceAccount": { + "type": "object", + "properties": { + "email": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "isdefault": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "scopes": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.ShieldedVMConfig": { + "type": "object", + "properties": { + "integritymonitoringenabled": { + 
"type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "securebootenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "vtpmenabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.compute.SubNetwork": { + "type": "object", + "properties": { + "enableflowlogs": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "purpose": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.DNS": { + "type": "object", + "properties": { + "managedzones": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.ManagedZone" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.DNSSec": { + "type": "object", + "properties": { + "defaultkeyspecs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.KeySpecs" + } + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.KeySpecs": { + "type": "object", + "properties": { + "algorithm": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "keytype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.ManagedZone": { + "type": "object", + "properties": { + "dnssec": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.dns.DNSSec" + }, + "visibility": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.ClientCertificate": { + "type": "object", + "properties": { + "issuecertificate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.Cluster": { + "type": "object", + "properties": { + "enableautpilot": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enablelegacyabac": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enableshieldednodes": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "ipallocationpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.IPAllocationPolicy" + }, + "loggingservice": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "masterauth": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.MasterAuth" + }, + "masterauthorizednetworks": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.MasterAuthorizedNetworks" + }, + "monitoringservice": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "networkpolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NetworkPolicy" 
+ }, + "nodeconfig": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NodeConfig" + }, + "nodepools": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NodePool" + } + }, + "privatecluster": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.PrivateCluster" + }, + "removedefaultnodepool": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "resourcelabels": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.MapValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.GKE": { + "type": "object", + "properties": { + "clusters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.Cluster" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.IPAllocationPolicy": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.Management": { + "type": "object", + "properties": { + "enableautorepair": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "enableautoupgrade": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.MasterAuth": { + "type": "object", + "properties": { + "clientcertificate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.ClientCertificate" + }, + "password": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "username": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.MasterAuthorizedNetworks": { + "type": "object", + "properties": { + "cidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NetworkPolicy": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NodeConfig": { + "type": "object", + "properties": { + "enablelegacyendpoints": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "imagetype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "serviceaccount": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "workloadmetadataconfig": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.WorkloadMetadataConfig" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NodePool": { + "type": "object", + "properties": { + "management": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.Management" + }, + "nodeconfig": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.NodeConfig" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.PrivateCluster": { + "type": "object", + "properties": { + "enableprivatenodes": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.gke.WorkloadMetadataConfig": { + "type": "object", + "properties": { + "nodemetadata": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Binding": { + "type": "object", + "properties": { + "includesdefaultserviceaccount": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "members": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "role": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Folder": { + "type": "object", + "properties": { + "bindings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Binding" + } + }, + "folders": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Folder" + } + }, + "members": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Member" + } + }, + "projects": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Project" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.IAM": { + "type": "object", + "properties": { + "organizations": { + "type": "array", 
+ "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Organization" + } + }, + "workloadidentitypoolproviders": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.WorkloadIdentityPoolProvider" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Member": { + "type": "object", + "properties": { + "defaultserviceaccount": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "member": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "role": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Organization": { + "type": "object", + "properties": { + "bindings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Binding" + } + }, + "folders": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Folder" + } + }, + "members": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Member" + } + }, + "projects": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Project" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Project": { + "type": "object", + "properties": { + "autocreatenetwork": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "bindings": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Binding" + } + }, + "members": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Member" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.WorkloadIdentityPoolProvider": { + "type": "object", + "properties": { + "attributecondition": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "workloadidentitypoolid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "workloadidentitypoolproviderid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.KMS": { + "type": "object", + "properties": { + "keyrings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.KeyRing" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.Key": { + "type": "object", + "properties": { + "rotationperiodseconds": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.KeyRing": { + "type": "object", + "properties": { + "keys": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.kms.Key" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Backups": { + "type": "object", + "properties": { + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.DatabaseInstance": { + "type": "object", + "properties": 
{ + "databaseversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "isreplica": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "settings": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Settings" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Flags": { + "type": "object", + "properties": { + "containeddatabaseauthentication": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "crossdbownershipchaining": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "localinfile": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logcheckpoints": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logconnections": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logdisconnections": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "loglockwaits": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "logmindurationstatement": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "logminmessages": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "logtempfilesize": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.IPConfiguration": { + "type": "object", + "properties": { + "authorizednetworks": { + "type": "array", + "items": { + 
"type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.IPConfiguration.AuthorizedNetworks" + } + }, + "enableipv4": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "requiretls": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.IPConfiguration.AuthorizedNetworks": { + "type": "object", + "properties": { + "cidr": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.SQL": { + "type": "object", + "properties": { + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.DatabaseInstance" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Settings": { + "type": "object", + "properties": { + "backups": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Backups" + }, + "flags": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.Flags" + }, + "ipconfiguration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.sql.IPConfiguration" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.Bucket": { + "type": "object", + "properties": { + "bindings": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Binding" + } + }, + "enableuniformbucketlevelaccess": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "encryption": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.BucketEncryption" + }, + "location": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "members": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.iam.Member" + } + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.BucketEncryption": { + "type": "object", + "properties": { + "defaultkmskeyname": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.Storage": { + "type": "object", + "properties": { + "buckets": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.google.storage.Bucket" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Egress": { + "type": "object", + "properties": { + "destinationcidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "ports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Port" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Ingress": { + "type": "object", + "properties": { + "ports": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Port" + } + }, + "sourcecidrs": { + "type": "array", + "items": { + "type": "object", 
+ "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Kubernetes": { + "type": "object", + "properties": { + "networkpolicies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.NetworkPolicy" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.NetworkPolicy": { + "type": "object", + "properties": { + "spec": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.NetworkPolicySpec" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.NetworkPolicySpec": { + "type": "object", + "properties": { + "egress": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Egress" + }, + "ingress": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Ingress" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.kubernetes.Port": { + "type": "object", + "properties": { + "number": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.Nifcloud": { + "type": "object", + "properties": { + "computing": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.Computing" + }, + "dns": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.dns.DNS" + }, + "nas": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NAS" + }, + "network": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.Network" + }, + "rdb": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.RDB" + }, + "sslcertificate": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.sslcertificate.SSLCertificate" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.Computing": { + "type": "object", + "properties": { + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.Instance" + } + }, + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.SecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.Instance": { + "type": "object", + "properties": { + "networkinterfaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.NetworkInterface" + } + }, + "securitygroup": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.NetworkInterface": { + "type": "object", + "properties": { + "networkid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.SecurityGroup": { + "type": "object", + "properties": { + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "egressrules": { + "type": "array", + "items": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.SecurityGroupRule" + } + }, + "ingressrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.SecurityGroupRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.computing.SecurityGroupRule": { + "type": "object", + "properties": { + "cidr": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.dns.DNS": { + "type": "object", + "properties": { + "records": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.dns.Record" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.dns.Record": { + "type": "object", + "properties": { + "record": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "type": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NAS": { + "type": "object", + "properties": { + "nasinstances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NASInstance" + } + }, + "nassecuritygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NASSecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NASInstance": { + "type": "object", + "properties": { + "networkid": { + "type": "object", + "$ref": 
"#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.nas.NASSecurityGroup": { + "type": "object", + "properties": { + "cidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.ElasticLoadBalancer": { + "type": "object", + "properties": { + "listeners": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.ElasticLoadBalancerListener" + } + }, + "networkinterfaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.NetworkInterface" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.ElasticLoadBalancerListener": { + "type": "object", + "properties": { + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.LoadBalancer": { + "type": "object", + "properties": { + "listeners": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.LoadBalancerListener" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.LoadBalancerListener": { + "type": "object", + "properties": { + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "tlspolicy": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + 
}, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.Network": { + "type": "object", + "properties": { + "elasticloadbalancers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.ElasticLoadBalancer" + } + }, + "loadbalancers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.LoadBalancer" + } + }, + "routers": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.Router" + } + }, + "vpngateways": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.VpnGateway" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.NetworkInterface": { + "type": "object", + "properties": { + "isvipnetwork": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "networkid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.Router": { + "type": "object", + "properties": { + "networkinterfaces": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.NetworkInterface" + } + }, + "securitygroup": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.network.VpnGateway": { + "type": "object", + "properties": { + "securitygroup": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.DBInstance": { + "type": "object", + "properties": { + "backupretentionperioddays": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "engine": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "engineversion": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "networkid": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "publicaccess": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.DBSecurityGroup": { + "type": "object", + "properties": { + "cidrs": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + }, + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.RDB": { + "type": "object", + "properties": { + "dbinstances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.DBInstance" + } + }, + "dbsecuritygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.rdb.DBSecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.sslcertificate.SSLCertificate": { + "type": "object", + "properties": { + "servercertificates": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.sslcertificate.ServerCertificate" + } + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.providers.nifcloud.sslcertificate.ServerCertificate": { + "type": "object", + "properties": { + "expiration": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Compute": { + "type": "object", + "properties": { + "firewall": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Firewall" + }, + "instances": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Instance" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Firewall": { + "type": "object", + "properties": { + "allowrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.FirewallRule" + } + }, + "denyrules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.FirewallRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.FirewallRule": { + "type": "object", + "properties": { + "destination": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "destinationport": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "enabled": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "source": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "sourceport": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Instance": { + "type": "object", 
+ "properties": { + "adminpassword": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Networking": { + "type": "object", + "properties": { + "securitygroups": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.SecurityGroup" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.OpenStack": { + "type": "object", + "properties": { + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Compute" + }, + "networking": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.Networking" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.SecurityGroup": { + "type": "object", + "properties": { + "description": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "name": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "rules": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.SecurityGroupRule" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.openstack.SecurityGroupRule": { + "type": "object", + "properties": { + "cidr": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + }, + "ethertype": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "isingress": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue" + }, + "portmax": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + 
"portmin": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue" + }, + "protocol": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.AddressReservation": { + "type": "object", + "properties": { + "pool": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.Compute": { + "type": "object", + "properties": { + "addressreservations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.AddressReservation" + } + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.Oracle": { + "type": "object", + "properties": { + "compute": { + "type": "object", + "$ref": "#/definitions/jackfan.us.kg.aquasecurity.defsec.pkg.providers.oracle.Compute" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.types.BoolValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "boolean" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.types.BytesValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "string" + } + } + }, + 
"jackfan.us.kg.aquasecurity.defsec.pkg.types.IntValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "integer" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.types.MapValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "object" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.types.StringValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "string" + } + } + }, + "jackfan.us.kg.aquasecurity.defsec.pkg.types.TimeValue": { + "type": "object", + "properties": { + "endline": { + "type": "integer" + }, + "explicit": { + "type": "boolean" + }, + "filepath": { + "type": "string" + }, + "fskey": { + "type": "string" + }, + "managed": { + "type": "boolean" + }, + "resource": { + "type": "string" + }, + "sourceprefix": { + "type": "string" + }, + "startline": { + "type": "integer" + }, + "value": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/rego/schemas/dockerfile.json b/pkg/iac/rego/schemas/dockerfile.json new 
file mode 100644 index 000000000000..d769cb195bae --- /dev/null +++ b/pkg/iac/rego/schemas/dockerfile.json @@ -0,0 +1,70 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/aquasecurity/trivy-policies/blob/main/pkg/rego/schemas/dockerfile.json", + "type": "object", + "properties": { + "Stages": { + "type": "array", + "items": { + "$ref": "#/$defs/stage" + } + } + }, + "$defs": { + "stage": { + "type": "object", + "properties": { + "Name": { + "type": "string" + }, + "Commands": { + "type": "array", + "items": { + "$ref": "#/$defs/command" + } + } + } + }, + "command": { + "type": "object", + "properties": { + "Flags": { + "type": "array", + "items": { + "type": "string" + } + }, + "Value": { + "type": "array", + "items": { + "type": "string" + } + }, + "Cmd": { + "type": "string" + }, + "SubCmd": { + "type": "string" + }, + "Original": { + "type": "string" + }, + "Path": { + "type": "string" + }, + "JSON": { + "type": "boolean" + }, + "Stage": { + "type": "integer" + }, + "StartLine": { + "type": "integer" + }, + "EndLine": { + "type": "integer" + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/rego/schemas/kubernetes.json b/pkg/iac/rego/schemas/kubernetes.json new file mode 100644 index 000000000000..1975944b7790 --- /dev/null +++ b/pkg/iac/rego/schemas/kubernetes.json @@ -0,0 +1,51 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/aquasecurity/trivy-policies/blob/main/pkg/rego/schemas/kubernetes.json", + "type": "object", + "properties": { + "apiVersion": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "metadata": { + "type": "object" + }, + "spec": { + "type": "object" + }, + "rules": { + "type": "array", + "items": { + "type": "object", + "properties": { + "apiGroups": { + "type": "array", + "items": { + "type": "string" + } + }, + "resources": { + "type": "array", + "items": { + "type": "string" + } + }, + "resourceNames": { + 
"type": "array", + "items": { + "type": "string" + } + }, + "verbs": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/rego/schemas/rbac.json b/pkg/iac/rego/schemas/rbac.json new file mode 100644 index 000000000000..c251890f91fd --- /dev/null +++ b/pkg/iac/rego/schemas/rbac.json @@ -0,0 +1,51 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/aquasecurity/trivy-policies/blob/main/pkg/rego/schemas/rbac.json", + "type": "object", + "properties": { + "apiVersion": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "metadata": { + "type": "object" + }, + "spec": { + "type": "object" + }, + "rules": { + "type": "array", + "items": { + "type": "object", + "properties": { + "apiGroups": { + "type": "array", + "items": { + "type": "string" + } + }, + "resources": { + "type": "array", + "items": { + "type": "string" + } + }, + "resourceNames": { + "type": "array", + "items": { + "type": "string" + } + }, + "verbs": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/rego/schemas/schemas.go b/pkg/iac/rego/schemas/schemas.go new file mode 100644 index 000000000000..1ece03582bd4 --- /dev/null +++ b/pkg/iac/rego/schemas/schemas.go @@ -0,0 +1,16 @@ +package schemas + +import ( + "github.com/aquasecurity/defsec/pkg/types" +) + +var SchemaMap = map[types.Source]Schema{ + types.SourceDefsec: Cloud, + types.SourceCloud: Cloud, + types.SourceKubernetes: Kubernetes, + types.SourceRbac: Kubernetes, + types.SourceDockerfile: Dockerfile, + types.SourceTOML: Anything, + types.SourceYAML: Anything, + types.SourceJSON: Anything, +} diff --git a/pkg/iac/rego/store.go b/pkg/iac/rego/store.go new file mode 100644 index 000000000000..127b1d8dd647 --- /dev/null +++ b/pkg/iac/rego/store.go @@ -0,0 +1,48 @@ +package rego + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + 
"strings" + + "github.com/open-policy-agent/opa/loader" + "github.com/open-policy-agent/opa/storage" +) + +// initialise a store populated with OPA data files found in dataPaths +func initStore(dataFS fs.FS, dataPaths, namespaces []string) (storage.Store, error) { + // FilteredPaths will recursively find all file paths that contain a valid document + // extension from the given list of data paths. + allDocumentPaths, _ := loader.FilteredPathsFS(dataFS, dataPaths, func(abspath string, info os.FileInfo, depth int) bool { + if info.IsDir() { + return false // filter in, include + } + ext := strings.ToLower(filepath.Ext(info.Name())) + for _, filter := range []string{ + ".yaml", + ".yml", + ".json", + } { + if filter == ext { + return false // filter in, include + } + } + return true // filter out, exclude + }) + + documents, err := loader.NewFileLoader().WithFS(dataFS).All(allDocumentPaths) + if err != nil { + return nil, fmt.Errorf("load documents: %w", err) + } + + // pass all namespaces so that rego rule can refer to namespaces as data.namespaces + documents.Documents["namespaces"] = namespaces + + store, err := documents.Store() + if err != nil { + return nil, fmt.Errorf("get documents store: %w", err) + } + return store, nil +} diff --git a/pkg/iac/rego/testdata/policies/._sysfile.rego b/pkg/iac/rego/testdata/policies/._sysfile.rego new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/iac/rego/testdata/policies/invalid.rego b/pkg/iac/rego/testdata/policies/invalid.rego new file mode 100644 index 000000000000..a2ef3607bc70 --- /dev/null +++ b/pkg/iac/rego/testdata/policies/invalid.rego @@ -0,0 +1,8 @@ +# METADATA +# schemas: +# - input: schema["input"] +package defsec.test_invalid + +deny { + input.Stages[0].Commands[0].FooBarNothingBurger == "lol" +} diff --git a/pkg/iac/rego/testdata/policies/valid.rego b/pkg/iac/rego/testdata/policies/valid.rego new file mode 100644 index 000000000000..74a96afeec0c --- /dev/null +++ 
b/pkg/iac/rego/testdata/policies/valid.rego @@ -0,0 +1,8 @@ +# METADATA +# schemas: +# - input: schema["input"] +package defsec.test_valid + +deny { + input.Stages[0].Commands[0].Cmd == "lol" +} diff --git a/pkg/iac/rules/providers.go b/pkg/iac/rules/providers.go new file mode 100644 index 000000000000..60c976fd045b --- /dev/null +++ b/pkg/iac/rules/providers.go @@ -0,0 +1,169 @@ +package rules + +import ( + "encoding/json" + "strings" +) + +type Provider struct { + Name string `json:"name"` + Services []Service `json:"services"` +} + +type Service struct { + Name string `json:"name"` + Checks []Check `json:"checks"` +} + +type Check struct { + Name string `json:"name"` + Description string `json:"description"` +} + +func GetProvidersHierarchy() (providers map[string]map[string][]string) { + + registeredRules := GetRegistered() + + provs := make(map[string]map[string][]string) + + for _, rule := range registeredRules { + + cNames := make(map[string]bool) + pName := strings.ToLower(rule.GetRule().Provider.DisplayName()) + sName := strings.ToLower(rule.GetRule().Service) + cName := rule.GetRule().AVDID + + if _, ok := provs[pName]; !ok { + provs[pName] = make(map[string][]string) + } + + if _, ok := provs[pName][sName]; !ok { + provs[pName][sName] = make([]string, 0) + } + + if _, ok := cNames[cName]; !ok { + cNames[cName] = true + provs[pName][sName] = append(provs[pName][sName], cName) + } + } + + return provs +} + +func GetProviders() (providers []Provider) { + + registeredRules := GetRegistered() + + provs := make(map[string]map[string][]Check) + + for _, rule := range registeredRules { + + pName := strings.ToLower(rule.GetRule().Provider.DisplayName()) + sName := strings.ToLower(rule.GetRule().Service) + cName := rule.GetRule().AVDID + desc := rule.GetRule().Summary + + if _, ok := provs[pName]; !ok { + provs[pName] = make(map[string][]Check) + } + + if _, ok := provs[pName][sName]; !ok { + provs[pName][sName] = []Check{} + } + + provs[pName][sName] = 
append(provs[pName][sName], Check{ + Name: cName, + Description: desc, + }) + } + + for providerName, providerServices := range provs { + var services []Service + for serviceName, checks := range providerServices { + services = append(services, Service{ + Name: serviceName, + Checks: checks, + }) + } + + providers = append(providers, Provider{ + Name: providerName, + Services: services, + }) + } + + return providers +} + +func GetProvidersAsJson() ([]byte, error) { + + providers := GetProviders() + + return json.MarshalIndent(providers, "", " ") +} + +func GetProviderNames() []string { + + registeredRules := GetRegistered() + + providers := make(map[string]bool) + + for _, rule := range registeredRules { + + if _, ok := providers[rule.GetRule().Provider.DisplayName()]; !ok { + providers[rule.GetRule().Provider.DisplayName()] = true + } + + } + + var uniqueProviders []string + for p := range providers { + uniqueProviders = append(uniqueProviders, p) + } + + return uniqueProviders + +} + +func GetProviderServiceNames(providerName string) []string { + + registeredRules := GetRegistered() + + services := make(map[string]bool) + + for _, rule := range registeredRules { + + if !strings.EqualFold(providerName, rule.GetRule().Provider.DisplayName()) { + continue + } + + if _, ok := services[rule.GetRule().Service]; !ok { + services[rule.GetRule().Service] = true + } + + } + var uniqueServices []string + for p := range services { + uniqueServices = append(uniqueServices, p) + } + + return uniqueServices +} + +func GetProviderServiceCheckNames(providerName string, serviceName string) []string { + + registeredRules := GetRegistered() + + var checks []string + + for _, rule := range registeredRules { + + if !strings.EqualFold(providerName, rule.GetRule().Provider.DisplayName()) || + !strings.EqualFold(serviceName, rule.GetRule().Service) { + continue + } + + checks = append(checks, rule.GetRule().AVDID) + } + return checks +} diff --git a/pkg/iac/rules/register.go 
// Register adds the given rule to the shared internal rule registry and
// returns its registered handle, which can later be passed to Deregister.
func Register(rule scan.Rule) types.RegisteredRule {
	return rules.Register(rule)
}

// Deregister removes a previously registered rule from the registry.
func Deregister(rule types.RegisteredRule) {
	rules.Deregister(rule)
}

// GetRegistered returns the registered rules for the given frameworks by
// delegating to the internal registry's framework filter.
func GetRegistered(fw ...framework.Framework) []types.RegisteredRule {
	return rules.GetFrameworkRules(fw...)
}

// GetSpecRules returns the registered rules belonging to the named spec.
func GetSpecRules(spec string) []types.RegisteredRule {
	return rules.GetSpecRules(spec)
}
"github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/ecr" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/ecs" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/efs" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/eks" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/elasticache" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/elasticsearch" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/elb" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/emr" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/iam" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/kinesis" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/kms" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/lambda" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/mq" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/msk" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/neptune" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/rds" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/redshift" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/s3" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/sam" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/sns" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/sqs" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/ssm" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/workspaces" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/appservice" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/authorization" + _ 
"github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/compute" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/container" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/database" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/datafactory" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/datalake" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/keyvault" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/monitor" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/network" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/securitycenter" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/storage" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/azure/synapse" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/cloudstack/compute" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/digitalocean/compute" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/digitalocean/spaces" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/github/actions" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/github/branch_protections" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/github/repositories" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/bigquery" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/compute" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/dns" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/gke" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/iam" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/google/kms" + _ 
// init registers every bundled trivy-policies rule with the local rule
// registry so that they are visible via GetRegistered and related helpers.
func init() {
	for _, r := range trules.GetRules() {
		Register(r)
	}
}
// Decode unmarshals this node's parsed value into target, which must be
// a settable pointer; the work is delegated to decodeToValue.
func (n *node) Decode(target interface{}) error {
	v := reflect.ValueOf(target)
	return n.decodeToValue(v)
}

// Metadata returns a copy of the node's defsec metadata (range, reference
// and parent information recorded during parsing).
func (n *node) Metadata() types.Metadata {
	return *n.metadata
}
// decodeArray writes this array node's elements into v, which may be a
// fixed-size array (length must match), a slice (reallocated to fit), or
// an empty interface (populated with a fresh []interface{}).
func (n *node) decodeArray(v reflect.Value) error {

	length := len(n.content)

	// original remembers an interface target so the locally built slice
	// can be stored back into it once populated.
	var original reflect.Value

	switch v.Kind() {
	case reflect.Array:
		if v.Len() != length {
			return fmt.Errorf("invalid length")
		}
	case reflect.Slice:
		v.Set(reflect.MakeSlice(v.Type(), length, length))
	case reflect.Interface:
		original = v
		slice := reflect.ValueOf(make([]interface{}, length))
		v = reflect.New(slice.Type()).Elem()
		v.Set(slice)
	default:
		return fmt.Errorf("invalid target type")
	}

	elementType := v.Type().Elem()
	for i, nodeElement := range n.content {
		node := nodeElement.(*node)
		// Decode into an addressable temporary; pointer element types are
		// allocated first so the decode has somewhere to write.
		targetElement := reflect.New(elementType).Elem()
		addressable := targetElement
		if targetElement.Kind() == reflect.Ptr {
			targetElement.Set(reflect.New(elementType.Elem()))
		} else {
			addressable = targetElement.Addr()
		}
		if err := node.decodeToValue(addressable); err != nil {
			return err
		}
		v.Index(i).Set(targetElement)
	}

	// Store the built slice back into the interface target, if any.
	if original.IsValid() {
		original.Set(v)
	}

	return nil
}
a/pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go new file mode 100644 index 000000000000..dbdef3a3253d --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_boolean.go @@ -0,0 +1,18 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeBoolean(v reflect.Value) error { + switch v.Kind() { + case reflect.Bool: + v.SetBool(n.raw.(bool)) + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + default: + return fmt.Errorf("cannot decode boolean value to %s target", v.Kind()) + } + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go new file mode 100644 index 000000000000..57e657a7093a --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_meta_test.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type TestParent struct { + Child *TestChild `json:"child"` +} + +type TestChild struct { + Name string + Line int + Column int +} + +func (t *TestChild) UnmarshalJSONWithMetadata(node Node) error { + t.Line = node.Range().Start.Line + t.Column = node.Range().Start.Column + return node.Decode(&t.Name) +} + +func Test_DecodeWithMetadata(t *testing.T) { + example := []byte(` +{ + "child": "secret" +} +`) + var parent TestParent + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, 3, parent.Child.Line) + assert.Equal(t, 12, parent.Child.Column) + assert.Equal(t, "secret", parent.Child.Name) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go new file mode 100644 index 000000000000..2cc86b3c1bb7 --- /dev/null +++ 
b/pkg/iac/scanners/azure/arm/parser/armjson/decode_null.go @@ -0,0 +1,10 @@ +package armjson + +import ( + "reflect" +) + +func (n *node) decodeNull(v reflect.Value) error { + v.Set(reflect.Zero(v.Type())) + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go new file mode 100644 index 000000000000..653f6f1fbe06 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_number.go @@ -0,0 +1,46 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeNumber(v reflect.Value) error { + + switch v.Kind() { + case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: + if i64, ok := n.raw.(int64); ok { + v.SetInt(i64) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetInt(int64(f64)) + return nil + } + case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint: + if i64, ok := n.raw.(int64); ok { + v.SetUint(uint64(i64)) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetUint(uint64(f64)) + return nil + } + case reflect.Float32, reflect.Float64: + if i64, ok := n.raw.(int64); ok { + v.SetFloat(float64(i64)) + return nil + } + if f64, ok := n.raw.(float64); ok { + v.SetFloat(f64) + return nil + } + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + return nil + default: + return fmt.Errorf("cannot decode number value to %s target", v.Kind()) + } + + return fmt.Errorf("internal value is not numeric") +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go new file mode 100644 index 000000000000..516029b55deb --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go @@ -0,0 +1,122 @@ +package armjson + +import ( + "fmt" + "reflect" + "strings" +) + +func (n *node) decodeObject(v reflect.Value) error { + switch v.Kind() { + case reflect.Struct: + return n.decodeObjectToStruct(v) + 
// objectAsMap flattens this object node's content — stored as an
// alternating [key, value, key, value, ...] sequence — into a map from
// property name to value node. It fails when the node is not an object,
// when a key is not a string, or when a trailing key has no value; the
// latter two indicate parser bugs rather than bad input.
func (n *node) objectAsMap() (map[string]Node, error) {
	if n.kind != KindObject {
		return nil, fmt.Errorf("not an object")
	}
	properties := make(map[string]Node)
	contents := n.content
	for i := 0; i < len(contents); i += 2 {
		key := contents[i]
		if key.Kind() != KindString {
			return nil, fmt.Errorf("invalid object key - please report this bug")
		}
		keyStr := key.(*node).raw.(string)

		if i+1 >= len(contents) {
			return nil, fmt.Errorf("missing object value - please report this bug")
		}
		properties[keyStr] = contents[i+1]
	}
	return properties, nil
}
tagName == "" { + tagName = fv.Name + } + + value, ok := properties[tagName] + if !ok { + // TODO: should we zero this value? + continue + } + + subject := v.Field(i) + + // if fields are nil pointers, initialise them with values of the correct type + if subject.Kind() == reflect.Ptr { + if subject.IsNil() { + subject.Set(reflect.New(subject.Type().Elem())) + } + } else { + subject = subject.Addr() + } + + if err := value.(*node).decodeToValue(subject); err != nil { + return err + } + } + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go new file mode 100644 index 000000000000..c8f734b57024 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_string.go @@ -0,0 +1,19 @@ +package armjson + +import ( + "fmt" + "reflect" +) + +func (n *node) decodeString(v reflect.Value) error { + + switch v.Kind() { + case reflect.String: + v.SetString(n.raw.(string)) + case reflect.Interface: + v.Set(reflect.ValueOf(n.raw)) + default: + return fmt.Errorf("cannot decode string value to non-string target: %s", v.Kind()) + } + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/kind.go b/pkg/iac/scanners/azure/arm/parser/armjson/kind.go new file mode 100644 index 000000000000..82712cc89225 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/kind.go @@ -0,0 +1,14 @@ +package armjson + +type Kind uint8 + +const ( + KindUnknown Kind = iota + KindNull + KindNumber + KindString + KindBoolean + KindArray + KindObject + KindComment +) diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/node.go b/pkg/iac/scanners/azure/arm/parser/armjson/node.go new file mode 100644 index 000000000000..3c398d6ed29c --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/node.go @@ -0,0 +1,59 @@ +package armjson + +import "github.com/aquasecurity/defsec/pkg/types" + +type Node interface { + Comments() []Node + Range() Range + Decode(target interface{}) error + 
// Range reports the source span covered by this node. The end column is
// moved back by one because the parser's recorded end position points at
// the character AFTER the last consumed rune.
// NOTE(review): the -1 assumes the end column is never 1/0 at this point
// — confirm against the parser's column bookkeeping.
func (n *node) Range() Range {
	return Range{
		Start: n.start,
		End: Position{
			Column: n.end.Column - 1,
			Line:   n.end.Line,
		},
	}
}
// newNode allocates a node of the given kind anchored at the parser's
// current position and derives its metadata from the parent: same
// filename and filesystem, with the parent pointer set so defsec can walk
// the tree upwards. Both the node and its own metadata pointer are
// returned so callers can hand the metadata down to child parses.
// Note: n.end is still zero here; the range is corrected later by
// updateMetadata once the node has been fully consumed.
func (p *parser) newNode(k Kind, parentMetadata *types.Metadata) (*node, *types.Metadata) {
	n := &node{
		start: p.position,
		kind:  k,
	}
	metadata := types.NewMetadata(
		types.NewRange(parentMetadata.Range().GetFilename(), n.start.Line, n.end.Line, "", parentMetadata.Range().GetFS()),
		n.ref,
	)
	metadata.SetParentPtr(parentMetadata)
	n.metadata = &metadata
	return n, n.metadata
}
// parseArray consumes a JSON array from the stream, returning a
// KindArray node whose content holds one child node per element. Each
// element's ref is set to its positional form "[i]" so metadata
// references render as path[0], path[1], ...
func (p *parser) parseArray(parentMetadata *types.Metadata) (Node, error) {
	n, metadata := p.newNode(KindArray, parentMetadata)

	c, err := p.next()
	if err != nil {
		return nil, err
	}

	if c != '[' {
		return nil, p.makeError("expecting object delimiter")
	}
	if err := p.parseWhitespace(); err != nil {
		return nil, err
	}
	// we've hit the end of the object
	if p.swallowIfEqual(']') {
		n.end = p.position
		return n, nil
	}

	// for each element
	for {

		if err := p.parseWhitespace(); err != nil {
			return nil, err
		}

		val, err := p.parseElement(metadata)
		if err != nil {
			return nil, err
		}
		val.(*node).ref = fmt.Sprintf("[%d]", len(n.content))

		n.content = append(n.content, val)

		// we've hit the end of the array
		if p.swallowIfEqual(']') {
			n.end = p.position
			return n, nil
		}

		// anything other than a separator or terminator is malformed input
		if !p.swallowIfEqual(',') {
			return nil, p.makeError("unexpected character - expecting , or ]")
		}
	}
}
+++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go @@ -0,0 +1,46 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Array_Empty(t *testing.T) { + example := []byte(`[]`) + target := []int{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 0) +} + +func Test_Array_ToSlice(t *testing.T) { + example := []byte(`[1, 2, 3]`) + target := []int{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 3) + assert.EqualValues(t, []int{1, 2, 3}, target) +} + +func Test_Array_ToArray(t *testing.T) { + example := []byte(`[3, 2, 1]`) + target := [3]int{6, 6, 6} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target, 3) + assert.EqualValues(t, [3]int{3, 2, 1}, target) +} + +func Test_Array_ToInterface(t *testing.T) { + example := []byte(`{ "List": [1, 2, 3] }`) + target := struct { + List interface{} + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Len(t, target.List, 3) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go new file mode 100644 index 000000000000..3d97589acdcb --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "fmt" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var trueRunes = []rune("true") +var falseRunes = []rune("false") + +func (p *parser) parseBoolean(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindBoolean, parentMetadata) + + r, err := p.peeker.Peek() + if err != nil { + return nil, err + } + + if r == 't' { + for _, expected := range 
trueRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character in boolean value") + } + } + n.raw = true + n.end = p.position + return n, err + } + + for _, expected := range falseRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character in boolean value") + } + } + n.raw = false + n.end = p.position + return n, nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go new file mode 100644 index 000000000000..e1d44db6119c --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go @@ -0,0 +1,54 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Boolean_True(t *testing.T) { + example := []byte(`true`) + var output bool + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.True(t, output) +} + +func Test_Boolean_False(t *testing.T) { + example := []byte(`false`) + var output bool + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.False(t, output) +} + +func Test_Boolean_ToNonBoolPointer(t *testing.T) { + example := []byte(`false`) + var output string + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.Error(t, err) +} + +func Test_Bool_ToUninitialisedPointer(t *testing.T) { + example := []byte(`true`) + var str *string + metadata := types.NewTestMetadata() + err := Unmarshal(example, str, &metadata) + require.Error(t, err) + assert.Nil(t, str) +} + +func Test_Bool_ToInterface(t *testing.T) { + example := []byte(`true`) + var output interface{} + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.True(t, 
output.(bool)) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go new file mode 100644 index 000000000000..7f35078ae84e --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_comment.go @@ -0,0 +1,98 @@ +package armjson + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseComment(parentMetadata *types.Metadata) (Node, error) { + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + _, err := p.next() + if err != nil { + return nil, err + } + + b, err := p.next() + if err != nil { + return nil, err + } + + switch b { + case '/': + return p.parseLineComment(parentMetadata) + case '*': + return p.parseBlockComment(parentMetadata) + default: + return nil, p.makeError("expecting comment delimiter") + } +} + +func (p *parser) parseLineComment(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindComment, parentMetadata) + + var sb strings.Builder + for { + c, err := p.next() + if err != nil { + return nil, err + } + if c == '\n' { + p.position.Column = 1 + p.position.Line++ + break + } + sb.WriteRune(c) + } + + n.raw = sb.String() + n.end = p.position + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + return n, nil +} + +func (p *parser) parseBlockComment(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindComment, parentMetadata) + + var sb strings.Builder + + for { + c, err := p.next() + if err != nil { + return nil, err + } + if c == '*' { + c, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if c == '/' { + break + } + sb.WriteRune('*') + } else { + if c == '\n' { + p.position.Column = 1 + p.position.Line++ + } + sb.WriteRune(c) + } + } + + n.raw = sb.String() + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + return n, nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go 
b/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go new file mode 100644 index 000000000000..17c4014b83a3 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go @@ -0,0 +1,131 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_Complex(t *testing.T) { + target := make(map[string]interface{}) + input := `{ + "glossary": { + "title": "example glossary", + "GlossDiv": { + "title": "S", + "GlossList": { + "GlossEntry": { + "ID": "SGML", + "SortAs": "SGML", + "GlossTerm": "Standard Generalized Markup Language", + "Acronym": "SGML", + "Abbrev": "ISO 8879:1986", + "GlossDef": { + "para": "A meta-markup language, used to create markup languages such as DocBook.", + "GlossSeeAlso": ["GML", "XML"] + }, + "GlossSee": "markup" + } + } + } + } +}` + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal([]byte(input), &target, &metadata)) +} + +type Resource struct { + Line int + inner resourceInner +} + +type resourceInner struct { + Type string `json:"Type" yaml:"Type"` + Properties map[string]*Property `json:"Properties" yaml:"Properties"` +} + +func (r *Resource) UnmarshalJSONWithMetadata(node Node) error { + r.Line = node.Range().Start.Line + return node.Decode(&r.inner) +} + +type Parameter struct { + inner parameterInner +} + +type parameterInner struct { + Type string `json:"Type" yaml:"Type"` + Default interface{} `yaml:"Default"` +} + +func (p *Parameter) UnmarshalJSONWithMetadata(node Node) error { + return node.Decode(&p.inner) +} + +type Property struct { + Line int + inner propertyInner +} + +type CFType string + +type propertyInner struct { + Type CFType + Value interface{} `json:"Value" yaml:"Value"` +} + +func (p *Property) UnmarshalJSONWithMetadata(node Node) error { + p.Line = node.Range().Start.Line + return node.Decode(&p.inner) +} + +type Temp struct { + BucketName *Parameter + BucketKeyEnabled 
*Parameter +} + +type FileContext struct { + Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` + Resources map[string]*Resource `json:"Resources" yaml:"Resources"` +} + +func Test_CloudFormation(t *testing.T) { + var target FileContext + input := ` +{ + "Parameters": { + "BucketName": { + "Type": "String", + "Default": "naughty" + }, + "BucketKeyEnabled": { + "Type": "Boolean", + "Default": false + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketName": { + "Ref": "BucketName" + }, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "BucketKeyEnabled": { + "Ref": "BucketKeyEnabled" + } + } + ] + } + } + } + } +} +` + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal([]byte(input), &target, &metadata)) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go new file mode 100644 index 000000000000..36fa5d7370e1 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null.go @@ -0,0 +1,23 @@ +package armjson + +import ( + "fmt" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var nullRunes = []rune("null") + +func (p *parser) parseNull(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindNull, parentMetadata) + + for _, expected := range nullRunes { + if !p.swallowIfEqual(expected) { + return nil, fmt.Errorf("unexpected character") + } + } + n.raw = nil + n.end = p.position + return n, nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go new file mode 100644 index 000000000000..5fd343479c82 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_null_test.go @@ -0,0 +1,18 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/require" +) + +func Test_Null(t *testing.T) { + 
example := []byte(`null`) + var output string + ref := &output + metadata := types.NewTestMetadata() + err := Unmarshal(example, &ref, &metadata) + require.NoError(t, err) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go new file mode 100644 index 000000000000..ca544cecce35 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number.go @@ -0,0 +1,163 @@ +package armjson + +import ( + "fmt" + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseNumber(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindNumber, parentMetadata) + + var str string + + if p.swallowIfEqual('-') { + str = "-" + } + + integral, err := p.parseIntegral() + if err != nil { + return nil, err + } + fraction, err := p.parseFraction() + if err != nil { + return nil, err + } + exponent, err := p.parseExponent() + if err != nil { + return nil, err + } + + str = fmt.Sprintf("%s%s%s%s", str, integral, fraction, exponent) + n.end = p.position + + if fraction != "" || exponent != "" { + f, err := strconv.ParseFloat(str, 64) + if err != nil { + return nil, p.makeError("%s", err) + } + n.raw = f + return n, nil + } + + i, err := strconv.ParseInt(str, 10, 64) + if err != nil { + return nil, p.makeError("%s", err) + } + n.raw = i + + return n, nil +} + +func (p *parser) parseIntegral() (string, error) { + r, err := p.next() + if err != nil { + return "", err + } + if r == '0' { + r, _ := p.peeker.Peek() + if r >= '0' && r <= '9' { + return "", p.makeError("invalid number") + } + return "0", nil + } + + var sb strings.Builder + if r < '1' || r > '9' { + return "", p.makeError("invalid number") + } + sb.WriteRune(r) + + for { + r, err := p.next() + if err != nil { + return sb.String(), nil + } + if r < '0' || r > '9' { + return sb.String(), p.undo() + } + sb.WriteRune(r) + } +} + +func (p *parser) parseFraction() (string, error) { + r, err := 
p.next() + if err != nil { + return "", nil + } + if r != '.' { + return "", p.undo() + } + + var sb strings.Builder + sb.WriteRune('.') + + for { + r, err := p.next() + if err != nil { + break + } + if r < '0' || r > '9' { + if err := p.undo(); err != nil { + return "", err + } + break + } + sb.WriteRune(r) + } + + str := sb.String() + if str == "." { + return "", p.makeError("invalid number - missing digits after decimal point") + } + + return str, nil +} + +func (p *parser) parseExponent() (string, error) { + r, err := p.next() + if err != nil { + return "", nil + } + if r != 'e' && r != 'E' { + return "", p.undo() + } + + var sb strings.Builder + sb.WriteRune(r) + + r, err = p.next() + if err != nil { + return "", nil + } + hasDigits := r >= '0' && r <= '9' + if r != '-' && r != '+' && !hasDigits { + return "", p.undo() + } + + sb.WriteRune(r) + + for { + r, err := p.next() + if err != nil { + break + } + if r < '0' || r > '9' { + if err := p.undo(); err != nil { + return "", err + } + break + } + hasDigits = true + sb.WriteRune(r) + } + + if !hasDigits { + return "", p.makeError("invalid number - no digits in exponent") + } + + return sb.String(), nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go new file mode 100644 index 000000000000..237c3b918fe9 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go @@ -0,0 +1,178 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Number_IntToInt(t *testing.T) { + example := []byte(`123`) + var output int + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123, output) +} + +func Test_Number_IntToFloat(t *testing.T) { + example := []byte(`123`) + var output float64 + metadata := 
types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123.0, output) +} + +func Test_Number_FloatToFloat(t *testing.T) { + example := []byte(`123.456`) + var output float64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123.456, output) +} + +func Test_Number_FloatToInt(t *testing.T) { + example := []byte(`123.456`) + var output int + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, 123, output) +} + +func Test_Number_FloatWithExponent(t *testing.T) { + cases := []struct { + in string + out float64 + }{ + { + in: `123.456e10`, + out: 123.456e+10, + }, + { + in: `123e+1`, + out: 123e+1, + }, + { + in: `123e-2`, + out: 123e-2, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output float64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, test.out, output) + + }) + } +} + +func Test_Number_IntWithExponent(t *testing.T) { + cases := []struct { + in string + out int64 + }{ + { + in: `123e10`, + out: 123e+10, + }, + { + in: `123e+1`, + out: 123e+1, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output int64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, test.out, output) + + }) + } +} + +func Test_Number_Ints(t *testing.T) { + cases := []struct { + in string + out int64 + err bool + }{ + { + in: `123e10`, + out: 123e+10, + }, + { + in: `-1`, + out: -1, + }, + { + in: `1.0123`, + out: 1, + }, + { + in: `0`, + out: 0, + }, + { + in: `01`, + err: true, + }, + { + in: ``, + err: true, + }, + { + in: `+1`, + err: true, + }, + { + in: `e`, + err: true, + }, + + { + in: 
`.123`, + err: true, + }, + + { + in: `.`, + err: true, + }, + + { + in: `00`, + err: true, + }, + { + in: `-`, + err: true, + }, + } + for _, test := range cases { + t.Run(test.in, func(t *testing.T) { + example := []byte(test.in) + var output int64 + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + if test.err { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.out, output) + }) + } +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go new file mode 100644 index 000000000000..f87ffc439ac9 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go @@ -0,0 +1,143 @@ +package armjson + +import ( + "github.com/aquasecurity/defsec/pkg/types" +) + +func (p *parser) parseObject(parentMetadata *types.Metadata) (Node, error) { + + n, metadata := p.newNode(KindObject, parentMetadata) + + c, err := p.next() + if err != nil { + return nil, err + } + + if c != '{' { + return nil, p.makeError("expecting object delimiter") + } + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + // we've hit the end of the object + if p.swallowIfEqual('}') { + n.end = p.position + return n, nil + } + + var nextComments []Node + return p.iterateObject(nextComments, metadata, n) + +} + +// nolint: cyclop +func (p *parser) iterateObject(nextComments []Node, metadata *types.Metadata, n *node) (Node, error) { + for { + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + comments := make([]Node, len(nextComments)) + copy(comments, nextComments) + nextComments = nil + for { + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + } + comments = append(comments, comment) + } + + if comments != nil { + if err := p.parseWhitespace(); err != nil { + return nil, err 
+ } + } + + key, err := p.parseString(metadata) + if err != nil { + return nil, err + } + + if err := p.parseWhitespace(); err != nil { + return nil, err + } + + if !p.swallowIfEqual(':') { + return nil, p.makeError("invalid character, expecting ':'") + } + + val, err := p.parseElement(metadata) + if err != nil { + return nil, err + } + ref := key.(*node).raw.(string) + key.(*node).ref = ref + val.(*node).ref = ref + + for { + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + } + comments = append(comments, comment) + } + + // we've hit the end of the object + if p.swallowIfEqual('}') { + key.(*node).comments = comments + val.(*node).comments = comments + n.content = append(n.content, key, val) + n.end = p.position + return n, nil + } + + if !p.swallowIfEqual(',') { + return nil, p.makeError("unexpected character - expecting , or }") + } + + for { + if err := p.parseWhitespace(); err != nil { + return nil, err + } + peeked, err := p.peeker.Peek() + if err != nil { + return nil, err + } + if peeked != '/' { + break + } + comment, err := p.parseComment(metadata) + if err != nil { + return nil, err + } + if comment.Range().Start.Line > val.Range().End.Line { + nextComments = append(nextComments, comment) + } else { + comments = append(comments, comment) + } + } + + key.(*node).comments = comments + val.(*node).comments = comments + n.content = append(n.content, key, val) + + } +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go new file mode 100644 index 000000000000..56985ecbc805 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go @@ -0,0 +1,115 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func Test_Object(t *testing.T) { + example := []byte(`{ + "name": "testing", + "balance": 3.14 +}`) + target := struct { + Name string `json:"name"` + Balance float64 `json:"balance"` + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Equal(t, "testing", target.Name) + assert.Equal(t, 3.14, target.Balance) +} + +func Test_ObjectWithPointers(t *testing.T) { + example := []byte(`{ + "name": "testing", + "balance": 3.14 +}`) + target := struct { + Name *string `json:"name"` + Balance *float64 `json:"balance"` + }{} + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &target, &metadata)) + assert.Equal(t, "testing", *target.Name) + assert.Equal(t, 3.14, *target.Balance) +} + +type nestedParent struct { + Child *nestedChild + Name string +} + +type nestedChild struct { + Blah string `json:"secret"` +} + +func Test_ObjectWithPointerToNestedStruct(t *testing.T) { + example := []byte(`{ + "Child": { + "secret": "password" + }, + "Name": "testing" +}`) + + var parent nestedParent + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent.Name) + assert.Equal(t, "password", parent.Child.Blah) +} + +func Test_Object_ToMapStringInterface(t *testing.T) { + example := []byte(`{ + "Name": "testing" +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent["Name"]) +} + +func Test_Object_ToNestedMapStringInterfaceFromIAM(t *testing.T) { + example := []byte(` +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*", + "Condition": { + "Bool": { + "aws:MultiFactorAuthPresent": ["true"] + } + } + } + ] +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() 
+ require.NoError(t, Unmarshal(example, &parent, &metadata)) +} + +func Test_Object_ToNestedMapStringInterface(t *testing.T) { + example := []byte(`{ + "Child": { + "secret": "password" + }, + "Name": "testing" +}`) + + parent := make(map[string]interface{}) + metadata := types.NewTestMetadata() + require.NoError(t, Unmarshal(example, &parent, &metadata)) + assert.Equal(t, "testing", parent["Name"]) + child := parent["Child"].(map[string]interface{}) + assert.Equal(t, "password", child["secret"]) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go new file mode 100644 index 000000000000..7f4740cd459b --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string.go @@ -0,0 +1,91 @@ +package armjson + +import ( + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/types" +) + +var escapes = map[rune]string{ + '\\': "\\", + '/': "/", + '"': "\"", + 'n': "\n", + 'r': "\r", + 'b': "\b", + 'f': "\f", + 't': "\t", +} + +// nolint: cyclop +func (p *parser) parseString(parentMetadata *types.Metadata) (Node, error) { + + n, _ := p.newNode(KindString, parentMetadata) + + b, err := p.next() + if err != nil { + return nil, err + } + + if b != '"' { + return nil, p.makeError("expecting string delimiter") + } + + var sb strings.Builder + + var inEscape bool + var inHex bool + var hex []rune + + for { + c, err := p.next() + if err != nil { + return nil, err + } + // nolint: gocritic + if inHex { + switch { + case c >= 'a' && c <= 'f', c >= 'A' && c <= 'F', c >= '0' && c <= '9': + hex = append(hex, c) + if len(hex) == 4 { + inHex = false + char, err := strconv.Unquote("\\u" + string(hex)) + if err != nil { + return nil, p.makeError("invalid unicode character '%s'", err) + } + sb.WriteString(char) + hex = nil + } + default: + return nil, p.makeError("invalid hexedecimal escape sequence '\\%s%c'", string(hex), c) + } + } else if inEscape { + inEscape = false + if c == 'u' { + 
inHex = true + continue + } + seq, ok := escapes[c] + if !ok { + return nil, p.makeError("invalid escape sequence '\\%c'", c) + } + sb.WriteString(seq) + } else { + switch c { + case '\\': + inEscape = true + case '"': + n.raw = sb.String() + n.end = p.position + return n, nil + default: + if c < 0x20 || c > 0x10FFFF { + return nil, p.makeError("invalid unescaped character '0x%X'", c) + } + sb.WriteRune(c) + } + } + + } +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go new file mode 100644 index 000000000000..83c98cd859fc --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go @@ -0,0 +1,37 @@ +package armjson + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_String(t *testing.T) { + example := []byte(`"hello"`) + var output string + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, "hello", output) +} + +func Test_StringToUninitialisedPointer(t *testing.T) { + example := []byte(`"hello"`) + var str *string + metadata := types.NewTestMetadata() + err := Unmarshal(example, str, &metadata) + require.Error(t, err) + assert.Nil(t, str) +} + +func Test_String_ToInterface(t *testing.T) { + example := []byte(`"hello"`) + var output interface{} + metadata := types.NewTestMetadata() + err := Unmarshal(example, &output, &metadata) + require.NoError(t, err) + assert.Equal(t, "hello", output) +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go new file mode 100644 index 000000000000..ad5751147d3e --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_whitespace.go @@ -0,0 +1,29 @@ +package armjson + +import ( + "errors" + "io" +) + +func (p *parser) 
parseWhitespace() error { + for { + b, err := p.peeker.Peek() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + return err + } + switch b { + case 0x0d, 0x20, 0x09: + case 0x0a: + p.position.Column = 1 + p.position.Line++ + default: + return nil + } + if _, err := p.next(); err != nil { + return err + } + } +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/reader.go b/pkg/iac/scanners/azure/arm/parser/armjson/reader.go new file mode 100644 index 000000000000..e05769f02da9 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/reader.go @@ -0,0 +1,36 @@ +package armjson + +import ( + "bufio" + "io" +) + +type PeekReader struct { + underlying *bufio.Reader +} + +func NewPeekReader(reader io.Reader) *PeekReader { + return &PeekReader{ + underlying: bufio.NewReader(reader), + } +} + +func (r *PeekReader) Next() (rune, error) { + c, _, err := r.underlying.ReadRune() + return c, err +} + +func (r *PeekReader) Undo() error { + return r.underlying.UnreadRune() +} + +func (r *PeekReader) Peek() (rune, error) { + c, _, err := r.underlying.ReadRune() + if err != nil { + return 0, err + } + if err := r.underlying.UnreadRune(); err != nil { + return 0, err + } + return c, nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go new file mode 100644 index 000000000000..8017f30f9f98 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/reader_test.go @@ -0,0 +1,62 @@ +package armjson + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var input = `abcdefghijklmnopqrstuvwxyz` + +func Test_Peeker(t *testing.T) { + peeker := NewPeekReader(strings.NewReader(input)) + + var b rune + var err error + + for i := 0; i < 30; i++ { + b, err = peeker.Peek() + require.NoError(t, err) + assert.Equal(t, ('a'), b) + } + + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('a'), b) + + 
b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('b'), b) + + b, err = peeker.Peek() + require.NoError(t, err) + assert.Equal(t, ('c'), b) + + for i := 0; i < 5; i++ { + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, []rune(input)[2+i], b) + } + + b, err = peeker.Peek() + require.NoError(t, err) + assert.Equal(t, ('h'), b) + + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, ('h'), b) + for i := 0; i < 18; i++ { + b, err = peeker.Next() + require.NoError(t, err) + assert.Equal(t, []rune(input)[8+i], b) + } + + _, err = peeker.Peek() + require.Error(t, err) + + _, err = peeker.Next() + require.Error(t, err) + +} diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go b/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go new file mode 100644 index 000000000000..6e096a694d8a --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go @@ -0,0 +1,40 @@ +package armjson + +import ( + "bytes" + "io" + + "github.com/aquasecurity/defsec/pkg/types" +) + +type Unmarshaller interface { + UnmarshalJSONWithMetadata(node Node) error +} + +type MetadataReceiver interface { + SetMetadata(m *types.Metadata) +} + +func Unmarshal(data []byte, target interface{}, metadata *types.Metadata) error { + node, err := newParser(NewPeekReader(bytes.NewReader(data)), Position{1, 1}).parse(metadata) + if err != nil { + return err + } + if err := node.Decode(target); err != nil { + return err + } + + return nil +} + +func UnmarshalFromReader(r io.ReadSeeker, target interface{}, metadata *types.Metadata) error { + node, err := newParser(NewPeekReader(r), Position{1, 1}).parse(metadata) + if err != nil { + return err + } + if err := node.Decode(target); err != nil { + return err + } + + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/parser.go b/pkg/iac/scanners/azure/arm/parser/parser.go new file mode 100644 index 000000000000..a4c14cacb8d3 --- /dev/null +++ 
b/pkg/iac/scanners/azure/arm/parser/parser.go @@ -0,0 +1,194 @@ +package parser + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/resolver" +) + +type Parser struct { + targetFS fs.FS + skipRequired bool + debug debug.Logger +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "azure", "arm") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func New(targetFS fs.FS, opts ...options.ParserOption) *Parser { + p := &Parser{ + targetFS: targetFS, + } + for _, opt := range opts { + opt(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, dir string) ([]azure.Deployment, error) { + + var deployments []azure.Deployment + + if err := fs.WalkDir(p.targetFS, dir, func(path string, entry fs.DirEntry, err error) error { + if err != nil { + return err + } + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if entry.IsDir() { + return nil + } + if !p.Required(path) { + return nil + } + f, err := p.targetFS.Open(path) + if err != nil { + return err + } + defer f.Close() + deployment, err := p.parseFile(f, path) + if err != nil { + return err + } + deployments = append(deployments, *deployment) + return nil + }); err != nil { + return nil, err + } + + return deployments, nil +} + +func (p *Parser) Required(path string) bool { + if p.skipRequired { + return true + } + if !strings.HasSuffix(path, ".json") { + return false + } + data, err := fs.ReadFile(p.targetFS, path) + if err != nil { + return false + } + var template Template + root := types.NewMetadata( + types.NewRange(filepath.Base(path), 0, 0, "", 
p.targetFS), + "", + ) + if err := armjson.Unmarshal(data, &template, &root); err != nil { + p.debug.Log("Error scanning %s: %s", path, err) + return false + } + + if template.Schema.Kind != azure.KindString { + return false + } + + return strings.HasPrefix(template.Schema.AsString(), "https://schema.management.azure.com") +} + +func (p *Parser) parseFile(r io.Reader, filename string) (*azure.Deployment, error) { + var template Template + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + root := types.NewMetadata( + types.NewRange(filename, 0, 0, "", p.targetFS), + "", + ).WithInternal(resolver.NewResolver()) + + if err := armjson.Unmarshal(data, &template, &root); err != nil { + return nil, fmt.Errorf("failed to parse template: %w", err) + } + return p.convertTemplate(template), nil +} + +func (p *Parser) convertTemplate(template Template) *azure.Deployment { + + deployment := azure.Deployment{ + Metadata: template.Metadata, + TargetScope: azure.ScopeResourceGroup, // TODO: override from --resource-group? + Parameters: nil, + Variables: nil, + Resources: nil, + Outputs: nil, + } + + if r, ok := template.Metadata.Internal().(resolver.Resolver); ok { + r.SetDeployment(&deployment) + } + + // TODO: the references passed here should probably not be the name - maybe params.NAME.DefaultValue? 
+ for name, param := range template.Parameters { + deployment.Parameters = append(deployment.Parameters, azure.Parameter{ + Variable: azure.Variable{ + Name: name, + Value: param.DefaultValue, + }, + Default: param.DefaultValue, + Decorators: nil, + }) + } + + for name, variable := range template.Variables { + deployment.Variables = append(deployment.Variables, azure.Variable{ + Name: name, + Value: variable, + }) + } + + for name, output := range template.Outputs { + deployment.Outputs = append(deployment.Outputs, azure.Output{ + Name: name, + Value: output, + }) + } + + for _, resource := range template.Resources { + deployment.Resources = append(deployment.Resources, p.convertResource(resource)) + } + + return &deployment +} + +func (p *Parser) convertResource(input Resource) azure.Resource { + + var children []azure.Resource + + for _, child := range input.Resources { + children = append(children, p.convertResource(child)) + } + + resource := azure.Resource{ + Metadata: input.Metadata, + APIVersion: input.APIVersion, + Type: input.Type, + Kind: input.Kind, + Name: input.Name, + Location: input.Location, + Properties: input.Properties, + Resources: children, + } + + return resource +} diff --git a/pkg/iac/scanners/azure/arm/parser/parser_test.go b/pkg/iac/scanners/azure/arm/parser/parser_test.go new file mode 100644 index 000000000000..92fa57795e00 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/parser_test.go @@ -0,0 +1,338 @@ +package parser + +import ( + "context" + "io/fs" + "os" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/liamg/memoryfs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/resolver" +) + +func createMetadata(targetFS fs.FS, filename string, start, end int, ref string, parent *types.Metadata) 
types.Metadata { + child := types.NewMetadata(types.NewRange(filename, start, end, "", targetFS), ref) + if parent != nil { + child.SetParentPtr(parent) + } + return child +} + +func TestParser_Parse(t *testing.T) { + + filename := "example.json" + + targetFS := memoryfs.New() + + tests := []struct { + name string + input string + want func() azure.Deployment + wantDeployment bool + }{ + { + name: "invalid code", + input: `blah`, + wantDeployment: false, + }, + { + name: "basic param", + input: `{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + "contentVersion": "1.0.0.0", + "parameters": { + "storagePrefix": { + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + } + }, + "resources": [] +}`, + want: func() azure.Deployment { + + root := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) + metadata := createMetadata(targetFS, filename, 1, 13, "", &root) + parametersMetadata := createMetadata(targetFS, filename, 4, 11, "parameters", &metadata) + storageMetadata := createMetadata(targetFS, filename, 5, 10, "parameters.storagePrefix", ¶metersMetadata) + + return azure.Deployment{ + Metadata: metadata, + TargetScope: azure.ScopeResourceGroup, + Parameters: []azure.Parameter{ + { + Variable: azure.Variable{ + Name: "storagePrefix", + Value: azure.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + }, + Default: azure.NewValue("x", createMetadata(targetFS, filename, 7, 7, "parameters.storagePrefix.defaultValue", &storageMetadata)), + Decorators: nil, + }, + }, + } + }, + wantDeployment: true, + }, + { + name: "storageAccount", + input: `{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + "contentVersion": "1.0.0.0", + "parameters": {}, + "resources": [ +{ + "type": "Microsoft.Storage/storageAccounts", + "apiVersion": 
"2022-05-01", + "name": "myResource", + "location": "string", + "tags": { + "tagName1": "tagValue1", + "tagName2": "tagValue2" + }, + "sku": { + "name": "string" + }, + "kind": "string", + "extendedLocation": { + "name": "string", + "type": "EdgeZone" + }, + "identity": { + "type": "string", + "userAssignedIdentities": {} + }, + "properties": { + "allowSharedKeyAccess":false, + "customDomain": { + "name": "string", + "useSubDomainName":false, + "number": 123 + }, + "networkAcls": [ + { + "bypass": "AzureServices1" + }, + { + "bypass": "AzureServices2" + } + ] + } +} +] +}`, + want: func() azure.Deployment { + + rootMetadata := createMetadata(targetFS, filename, 0, 0, "", nil).WithInternal(resolver.NewResolver()) + fileMetadata := createMetadata(targetFS, filename, 1, 45, "", &rootMetadata) + resourcesMetadata := createMetadata(targetFS, filename, 5, 44, "resources", &fileMetadata) + + resourceMetadata := createMetadata(targetFS, filename, 6, 43, "resources[0]", &resourcesMetadata) + + propertiesMetadata := createMetadata(targetFS, filename, 27, 42, "resources[0].properties", &resourceMetadata) + + customDomainMetadata := createMetadata(targetFS, filename, 29, 33, "resources[0].properties.customDomain", &propertiesMetadata) + networkACLListMetadata := createMetadata(targetFS, filename, 34, 41, "resources[0].properties.networkAcls", &propertiesMetadata) + + networkACL0Metadata := createMetadata(targetFS, filename, 35, 37, "resources[0].properties.networkAcls[0]", &networkACLListMetadata) + networkACL1Metadata := createMetadata(targetFS, filename, 38, 40, "resources[0].properties.networkAcls[1]", &networkACLListMetadata) + + return azure.Deployment{ + Metadata: fileMetadata, + TargetScope: azure.ScopeResourceGroup, + Resources: []azure.Resource{ + { + Metadata: resourceMetadata, + APIVersion: azure.NewValue( + "2022-05-01", + createMetadata(targetFS, filename, 8, 8, "resources[0].apiVersion", &resourceMetadata), + ), + Type: azure.NewValue( + 
"Microsoft.Storage/storageAccounts", + createMetadata(targetFS, filename, 7, 7, "resources[0].type", &resourceMetadata), + ), + Kind: azure.NewValue( + "string", + createMetadata(targetFS, filename, 18, 18, "resources[0].kind", &resourceMetadata), + ), + Name: azure.NewValue( + "myResource", + createMetadata(targetFS, filename, 9, 9, "resources[0].name", &resourceMetadata), + ), + Location: azure.NewValue( + "string", + createMetadata(targetFS, filename, 10, 10, "resources[0].location", &resourceMetadata), + ), + Properties: azure.NewValue( + map[string]azure.Value{ + "allowSharedKeyAccess": azure.NewValue(false, createMetadata(targetFS, filename, 28, 28, "resources[0].properties.allowSharedKeyAccess", &propertiesMetadata)), + "customDomain": azure.NewValue( + map[string]azure.Value{ + "name": azure.NewValue("string", createMetadata(targetFS, filename, 30, 30, "resources[0].properties.customDomain.name", &customDomainMetadata)), + "useSubDomainName": azure.NewValue(false, createMetadata(targetFS, filename, 31, 31, "resources[0].properties.customDomain.useSubDomainName", &customDomainMetadata)), + "number": azure.NewValue(int64(123), createMetadata(targetFS, filename, 32, 32, "resources[0].properties.customDomain.number", &customDomainMetadata)), + }, customDomainMetadata), + "networkAcls": azure.NewValue( + []azure.Value{ + azure.NewValue( + map[string]azure.Value{ + "bypass": azure.NewValue("AzureServices1", createMetadata(targetFS, filename, 36, 36, "resources[0].properties.networkAcls[0].bypass", &networkACL0Metadata)), + }, + networkACL0Metadata, + ), + azure.NewValue( + map[string]azure.Value{ + "bypass": azure.NewValue("AzureServices2", createMetadata(targetFS, filename, 39, 39, "resources[0].properties.networkAcls[1].bypass", &networkACL1Metadata)), + }, + networkACL1Metadata, + ), + }, networkACLListMetadata), + }, + propertiesMetadata, + ), + }, + }, + } + }, + + wantDeployment: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + + require.NoError(t, targetFS.WriteFile(filename, []byte(tt.input), 0644)) + + p := New(targetFS, options.ParserWithDebug(os.Stderr)) + got, err := p.ParseFS(context.Background(), ".") + require.NoError(t, err) + + if !tt.wantDeployment { + assert.Len(t, got, 0) + return + } + + require.Len(t, got, 1) + want := tt.want() + g := got[0] + + require.Equal(t, want, g) + }) + } +} + +func Test_NestedResourceParsing(t *testing.T) { + + input := ` +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "environment": { + "type": "string", + "allowedValues": [ + "dev", + "test", + "prod" + ] + }, + "location": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Location for all resources." + } + }, + "storageAccountSkuName": { + "type": "string", + "defaultValue": "Standard_LRS" + }, + "storageAccountSkuTier": { + "type": "string", + "defaultValue": "Standard" + } + }, + "variables": { + "uniquePart": "[take(uniqueString(resourceGroup().id), 4)]", + "storageAccountName": "[concat('mystorageaccount', variables('uniquePart'), parameters('environment'))]", + "queueName": "myqueue" + }, + "resources": [ + { + "type": "Microsoft.Storage/storageAccounts", + "name": "[variables('storageAccountName')]", + "location": "[parameters('location')]", + "apiVersion": "2019-06-01", + "sku": { + "name": "[parameters('storageAccountSkuName')]", + "tier": "[parameters('storageAccountSkuTier')]" + }, + "kind": "StorageV2", + "properties": {}, + "resources": [ + { + "name": "[concat('default/', variables('queueName'))]", + "type": "queueServices/queues", + "apiVersion": "2019-06-01", + "dependsOn": [ + "[variables('storageAccountName')]" + ], + "properties": { + "metadata": {} + } + } + ] + } + ] +} +` + + targetFS := memoryfs.New() + + require.NoError(t, targetFS.WriteFile("nested.json", []byte(input), 0644)) + + p := New(targetFS, 
options.ParserWithDebug(os.Stderr)) + got, err := p.ParseFS(context.Background(), ".") + require.NoError(t, err) + require.Len(t, got, 1) + + deployment := got[0] + + require.Len(t, deployment.Resources, 1) + + storageAccountResource := deployment.Resources[0] + + require.Len(t, storageAccountResource.Resources, 1) + + queue := storageAccountResource.Resources[0] + + assert.Equal(t, "queueServices/queues", queue.Type.AsString()) +} + +// +// func Test_JsonFile(t *testing.T) { +// +// input, err := os.ReadFile("testdata/postgres.json") +// require.NoError(t, err) +// +// targetFS := memoryfs.New() +// +// require.NoError(t, targetFS.WriteFile("postgres.json", input, 0644)) +// +// p := New(targetFS, options.ParserWithDebug(os.Stderr)) +// got, err := p.ParseFS(context.Background(), ".") +// require.NoError(t, err) +// +// got[0].Resources[3].Name.Resolve() +// +// name := got[0].Resources[3].Name.AsString() +// assert.Equal(t, "myserver", name) +// +// } diff --git a/pkg/iac/scanners/azure/arm/parser/template.go b/pkg/iac/scanners/azure/arm/parser/template.go new file mode 100644 index 000000000000..955431bd9dd2 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/template.go @@ -0,0 +1,78 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + types2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" +) + +type Template struct { + Metadata types.Metadata `json:"-"` + Schema types2.Value `json:"$schema"` + ContentVersion types2.Value `json:"contentVersion"` + APIProfile types2.Value `json:"apiProfile"` + Parameters map[string]Parameter `json:"parameters"` + Variables map[string]types2.Value `json:"variables"` + Functions []Function `json:"functions"` + Resources []Resource `json:"resources"` + Outputs map[string]types2.Value `json:"outputs"` +} + +type Parameter struct { + Metadata types.Metadata + Type types2.Value `json:"type"` + DefaultValue 
types2.Value `json:"defaultValue"` + MaxLength types2.Value `json:"maxLength"` + MinLength types2.Value `json:"minLength"` +} + +type Function struct{} + +type Resource struct { + Metadata types.Metadata `json:"-"` + innerResource +} + +func (t *Template) SetMetadata(m *types.Metadata) { + t.Metadata = *m +} + +func (r *Resource) SetMetadata(m *types.Metadata) { + r.Metadata = *m +} + +func (p *Parameter) SetMetadata(m *types.Metadata) { + p.Metadata = *m +} + +type innerResource struct { + APIVersion types2.Value `json:"apiVersion"` + Type types2.Value `json:"type"` + Kind types2.Value `json:"kind"` + Name types2.Value `json:"name"` + Location types2.Value `json:"location"` + Tags types2.Value `json:"tags"` + Sku types2.Value `json:"sku"` + Properties types2.Value `json:"properties"` + Resources []Resource `json:"resources"` +} + +func (v *Resource) UnmarshalJSONWithMetadata(node armjson.Node) error { + + if err := node.Decode(&v.innerResource); err != nil { + return err + } + + v.Metadata = node.Metadata() + + for _, comment := range node.Comments() { + var str string + if err := comment.Decode(&str); err != nil { + return err + } + // TODO + // v.Metadata.Comments = append(v.Metadata.Comments, str) + } + + return nil +} diff --git a/pkg/iac/scanners/azure/arm/parser/template_test.go b/pkg/iac/scanners/azure/arm/parser/template_test.go new file mode 100644 index 000000000000..bc2083b9c2b5 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/template_test.go @@ -0,0 +1,60 @@ +package parser + +import ( + "os" + "path/filepath" + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + types2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_JSONUnmarshal(t *testing.T) { + data, err := os.ReadFile(filepath.Join("testdata", "example.json")) + require.NoError(t, err) + var 
target Template + metadata := types.NewTestMetadata() + require.NoError(t, armjson.Unmarshal(data, &target, &metadata)) + assert.Equal(t, + "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + target.Schema.AsString(), + ) + require.Len(t, target.Schema.Comments, 2) + assert.Equal(t, " wow this is a comment", target.Schema.Comments[0]) + assert.Equal(t, " another one", target.Schema.Comments[1]) + + assert.Equal(t, "1.0.0.0", target.ContentVersion.Raw()) + require.Len(t, target.ContentVersion.Comments, 1) + assert.Equal(t, " this version is great", target.ContentVersion.Comments[0]) + + require.Contains(t, target.Parameters, "storagePrefix") + prefix := target.Parameters["storagePrefix"] + /* + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + */ + assert.Equal(t, "string", prefix.Type.Raw()) + assert.Equal(t, types2.KindString, prefix.Type.Kind) + assert.Equal(t, 8, prefix.Type.Metadata.Range().GetStartLine()) + assert.Equal(t, 8, prefix.Type.Metadata.Range().GetEndLine()) + + assert.Equal(t, "x", prefix.DefaultValue.Raw()) + assert.Equal(t, types2.KindString, prefix.DefaultValue.Kind) + assert.Equal(t, 9, prefix.DefaultValue.Metadata.Range().GetStartLine()) + assert.Equal(t, 9, prefix.DefaultValue.Metadata.Range().GetEndLine()) + + assert.Equal(t, int64(11), prefix.MaxLength.Raw()) + assert.Equal(t, types2.KindNumber, prefix.MaxLength.Kind) + assert.Equal(t, 10, prefix.MaxLength.Metadata.Range().GetStartLine()) + assert.Equal(t, 10, prefix.MaxLength.Metadata.Range().GetEndLine()) + + assert.Equal(t, int64(3), prefix.MinLength.Raw()) + assert.Equal(t, types2.KindNumber, prefix.MinLength.Kind) + assert.Equal(t, 11, prefix.MinLength.Metadata.Range().GetStartLine()) + assert.Equal(t, 11, prefix.MinLength.Metadata.Range().GetEndLine()) +} diff --git a/pkg/iac/scanners/azure/arm/parser/testdata/example.json b/pkg/iac/scanners/azure/arm/parser/testdata/example.json new file mode 100644 index 
000000000000..9698ed1a0583 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/testdata/example.json @@ -0,0 +1,15 @@ +{ + // wow this is a comment + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", // another one + // this version is great + "contentVersion": "1.0.0.0", + "parameters": { + "storagePrefix": { + "type": "string", + "defaultValue": "x", + "maxLength": 11, + "minLength": 3 + } + }, + "resources": [] +} \ No newline at end of file diff --git a/pkg/iac/scanners/azure/arm/parser/testdata/postgres.json b/pkg/iac/scanners/azure/arm/parser/testdata/postgres.json new file mode 100644 index 000000000000..670733fdd308 --- /dev/null +++ b/pkg/iac/scanners/azure/arm/parser/testdata/postgres.json @@ -0,0 +1,73 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.10.61.36676", + "templateHash": "8074447630975889785" + } + }, + "resources": [ + { + "type": "Microsoft.DBforPostgreSQL/servers", + "apiVersion": "2017-12-01", + "name": "myPostgreSQLServer", + "location": "westus", + "identity": { + "type": "SystemAssigned" + }, + "properties": { + "administratorLogin": "myadmin", + "administratorLoginPassword": "myadminpassword", + "version": "9.6", + "sslEnforcement": "Enabled", + "storageProfile": { + "storageMB": 5120 + }, + "createMode": "Default", + "minimalTlsVersion": "1.2", + "publicNetworkAccess": "Enabled", + "FirewallRules": [ + { + "name": "AllowAllAzureIps", + "startIpAddress": "0.0.0.0/0" + } + ] + } + }, + { + "type": "Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": "2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'connection_throttling')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + }, + { + "type": 
"Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": "2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'log_checkpoints')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + }, + { + "type": "Microsoft.DBforPostgreSQL/servers/configurations", + "apiVersion": "2017-12-01", + "name": "[format('{0}/{1}', 'myPostgreSQLServer', 'log_connections')]", + "properties": { + "value": "OFF" + }, + "dependsOn": [ + "[resourceId('Microsoft.DBforPostgreSQL/servers', 'myPostgreSQLServer')]" + ] + } + ] +} \ No newline at end of file diff --git a/pkg/iac/scanners/azure/arm/scanner.go b/pkg/iac/scanners/azure/arm/scanner.go new file mode 100644 index 000000000000..8f4a26ef7cbe --- /dev/null +++ b/pkg/iac/scanners/azure/arm/scanner.go @@ -0,0 +1,187 @@ +package arm + +import ( + "context" + "fmt" + + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/internal/adapters/arm" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + scannerOptions []options.ScannerOption + parserOptions []options.ParserOption + debug debug.Logger + frameworks []framework.Framework + skipRequired bool + regoOnly bool + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool + policyDirs []string + policyReaders []io.Reader + 
regoScanner *rego.Scanner + spec string + sync.Mutex +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(regoOnly bool) { + s.regoOnly = regoOnly +} + +func New(opts ...options.ScannerOption) *Scanner { + scanner := &Scanner{ + scannerOptions: opts, + } + for _, opt := range opts { + opt(scanner) + } + return scanner +} + +func (s *Scanner) Name() string { + return "Azure ARM" +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "azure", "arm") + s.parserOptions = append(s.parserOptions, options.ParserWithDebug(writer)) +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetSkipRequiredCheck(skipRequired bool) { + s.skipRequired = skipRequired +} +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetTraceWriter(io.Writer) {} +func (s *Scanner) SetPerResultTracingEnabled(bool) {} +func (s *Scanner) SetDataDirs(...string) {} +func (s *Scanner) SetPolicyNamespaces(...string) {} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) error { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return nil + } + regoScanner := rego.NewScanner(types.SourceCloud, s.scannerOptions...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return err + } + s.regoScanner = regoScanner + return nil +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { + p := parser.New(fs, s.parserOptions...) + deployments, err := p.ParseFS(ctx, dir) + if err != nil { + return nil, err + } + if err := s.initRegoScanner(fs); err != nil { + return nil, err + } + + return s.scanDeployments(ctx, deployments, fs) +} + +func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deployment, f fs.FS) (scan.Results, error) { + + var results scan.Results + + for _, deployment := range deployments { + + result, err := s.scanDeployment(ctx, deployment, f) + if err != nil { + return nil, err + } + results = append(results, result...) + } + + return results, nil +} + +func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fs fs.FS) (scan.Results, error) { + var results scan.Results + deploymentState := s.adaptDeployment(ctx, deployment) + if !s.regoOnly { + for _, rule := range rules.GetRegistered(s.frameworks...) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + if rule.GetRule().RegoPackage != "" { + continue + } + ruleResults := rule.Evaluate(deploymentState) + s.debug.Log("Found %d results for %s", len(ruleResults), rule.GetRule().AVDID) + if len(ruleResults) > 0 { + results = append(results, ruleResults...) 
+ } + } + } + + regoResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ + Path: deployment.Metadata.Range().GetFilename(), + FS: fs, + Contents: deploymentState.ToRego(), + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + + return append(results, regoResults...), nil +} + +func (s *Scanner) adaptDeployment(ctx context.Context, deployment azure.Deployment) *state.State { + return arm.Adapt(ctx, deployment) +} diff --git a/pkg/iac/scanners/azure/deployment.go b/pkg/iac/scanners/azure/deployment.go new file mode 100644 index 000000000000..6df8b48d6b6a --- /dev/null +++ b/pkg/iac/scanners/azure/deployment.go @@ -0,0 +1,179 @@ +package azure + +import ( + "os" + + "github.com/aquasecurity/defsec/pkg/types" +) + +type Deployment struct { + Metadata types.Metadata + TargetScope Scope + Parameters []Parameter + Variables []Variable + Resources []Resource + Outputs []Output +} + +type Parameter struct { + Variable + Default Value + Decorators []Decorator +} + +type Variable struct { + Name string + Value Value +} + +type Output Variable + +type Resource struct { + Metadata types.Metadata + APIVersion Value + Type Value + Kind Value + Name Value + Location Value + Tags Value + Sku Value + Properties Value + Resources []Resource +} + +type PropertyBag struct { + Metadata types.Metadata + Data map[string]Value +} + +type Decorator struct { + Name string + Args []Value +} + +type Scope string + +const ( + ScopeResourceGroup Scope = "resourceGroup" +) + +func (d *Deployment) GetResourcesByType(t string) []Resource { + var resources []Resource + for _, r := range d.Resources { + if r.Type.AsString() == t { + resources = append(resources, r) + } + } + return resources +} + +func (r *Resource) GetResourcesByType(t string) []Resource { + var resources []Resource + for _, res := range r.Resources { + if res.Type.AsString() == t { + resources = append(resources, res) + } + } + return resources +} + +func (d *Deployment) GetParameter(parameterName 
string) interface{} { + + for _, parameter := range d.Parameters { + if parameter.Name == parameterName { + return parameter.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetVariable(variableName string) interface{} { + + for _, variable := range d.Variables { + if variable.Name == variableName { + return variable.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetEnvVariable(envVariableName string) interface{} { + + if envVariable, exists := os.LookupEnv(envVariableName); exists { + return envVariable + } + return nil +} + +func (d *Deployment) GetOutput(outputName string) interface{} { + + for _, output := range d.Outputs { + if output.Name == outputName { + return output.Value.Raw() + } + } + return nil +} + +func (d *Deployment) GetDeployment() interface{} { + + type template struct { + Schema string `json:"$schema"` + ContentVersion string `json:"contentVersion"` + Parameters map[string]interface{} `json:"parameters"` + Variables map[string]interface{} `json:"variables"` + Resources []interface{} `json:"resources"` + Outputs map[string]interface{} `json:"outputs"` + } + + type templateLink struct { + URI string `json:"uri"` + } + + type properties struct { + TemplateLink templateLink `json:"templateLink"` + Template template `json:"template"` + TemplateHash string `json:"templateHash"` + Parameters map[string]interface{} `json:"parameters"` + Mode string `json:"mode"` + ProvisioningState string `json:"provisioningState"` + } + + deploymentShell := struct { + Name string `json:"name"` + Properties properties `json:"properties"` + }{ + Name: "Placeholder Deployment", + Properties: properties{ + TemplateLink: templateLink{ + URI: "https://placeholder.com", + }, + Template: template{ + Schema: "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + ContentVersion: "", + Parameters: make(map[string]interface{}), + Variables: make(map[string]interface{}), + Resources: make([]interface{}, 0), + Outputs: 
make(map[string]interface{}), + }, + }, + } + + for _, parameter := range d.Parameters { + deploymentShell.Properties.Template.Parameters[parameter.Name] = parameter.Value.Raw() + } + + for _, variable := range d.Variables { + deploymentShell.Properties.Template.Variables[variable.Name] = variable.Value.Raw() + } + + for _, resource := range d.Resources { + deploymentShell.Properties.Template.Resources = append(deploymentShell.Properties.Template.Resources, resource) + } + + for _, output := range d.Outputs { + deploymentShell.Properties.Template.Outputs[output.Name] = output.Value.Raw() + } + + return deploymentShell +} diff --git a/pkg/iac/scanners/azure/expressions/lex.go b/pkg/iac/scanners/azure/expressions/lex.go new file mode 100644 index 000000000000..09eb7b819eff --- /dev/null +++ b/pkg/iac/scanners/azure/expressions/lex.go @@ -0,0 +1,203 @@ +package expressions + +import ( + "bufio" + "fmt" + "strconv" + "strings" +) + +type TokenType uint16 + +const ( + TokenName TokenType = iota + TokenOpenParen + TokenCloseParen + TokenComma + TokenDot + TokenLiteralString + TokenLiteralInteger + TokenLiteralFloat + TokenNewLine +) + +type Token struct { + Type TokenType + Data interface{} +} + +type lexer struct { + reader *bufio.Reader +} + +func lex(expression string) ([]Token, error) { + lexer := &lexer{ + reader: bufio.NewReader(strings.NewReader(expression)), + } + return lexer.Lex() +} + +func (l *lexer) unread() { + _ = l.reader.UnreadRune() +} + +func (l *lexer) read() (rune, error) { + r, _, err := l.reader.ReadRune() + return r, err +} + +func (l *lexer) Lex() ([]Token, error) { + var tokens []Token + + for { + r, err := l.read() + if err != nil { + break + } + + switch r { + case ' ', '\t', '\r': + continue + case '\n': + tokens = append(tokens, Token{Type: TokenNewLine}) + case '(': + tokens = append(tokens, Token{Type: TokenOpenParen}) + case ')': + tokens = append(tokens, Token{Type: TokenCloseParen}) + case ',': + tokens = append(tokens, Token{Type: 
TokenComma}) + case '.': + tokens = append(tokens, Token{Type: TokenDot}) + case '"', '\'': + token, err := l.lexString(r) + if err != nil { + return nil, fmt.Errorf("string parse error: %w", err) + } + tokens = append(tokens, token) + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + l.unread() + token, err := l.lexNumber() + if err != nil { + return nil, fmt.Errorf("number parse error: %w", err) + } + tokens = append(tokens, token) + default: + l.unread() + tokens = append(tokens, l.lexKeyword()) + } + } + + return tokens, nil +} + +func (l *lexer) lexString(terminator rune) (Token, error) { + var sb strings.Builder + for { + r, err := l.read() + if err != nil { + break + } + if r == '\\' { + r, err := l.readEscapedChar() + if err != nil { + return Token{}, fmt.Errorf("bad escape: %w", err) + } + sb.WriteRune(r) + continue + } + if r == terminator { + break + } + sb.WriteRune(r) + } + return Token{ + Type: TokenLiteralString, + Data: sb.String(), + }, nil +} + +func (l *lexer) readEscapedChar() (rune, error) { + r, err := l.read() + if err != nil { + return 0, fmt.Errorf("unexpected EOF") + } + switch r { + case 'n': + return '\n', nil + case 'r': + return '\r', nil + case 't': + return '\t', nil + case '"', '\'': + return r, nil + default: + return 0, fmt.Errorf("'%c' is not a supported escape sequence", r) + } +} + +func (l *lexer) lexNumber() (Token, error) { + + var sb strings.Builder + var decimal bool + +LOOP: + for { + r, err := l.read() + if err != nil { + break + } + switch r { + case '.': + decimal = true + sb.WriteRune('.') + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + sb.WriteRune(r) + default: + l.unread() + break LOOP + } + } + + raw := sb.String() + if decimal { + fl, err := strconv.ParseFloat(raw, 64) + if err != nil { + return Token{}, err + } + return Token{ + Type: TokenLiteralFloat, + Data: fl, + }, nil + } + + i, err := strconv.ParseInt(raw, 10, 64) + if err != nil { + return Token{}, err + } + return Token{ + Type: 
TokenLiteralInteger, + Data: i, + }, nil +} + +func (l *lexer) lexKeyword() Token { + var sb strings.Builder +LOOP: + for { + r, err := l.read() + if err != nil { + break + } + switch { + case r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z', r >= '0' && r <= '9', r == '_': + sb.WriteRune(r) + default: + l.unread() + break LOOP + } + } + return Token{ + Type: TokenName, + Data: sb.String(), + } +} diff --git a/pkg/iac/scanners/azure/expressions/node.go b/pkg/iac/scanners/azure/expressions/node.go new file mode 100644 index 000000000000..843357dd9a62 --- /dev/null +++ b/pkg/iac/scanners/azure/expressions/node.go @@ -0,0 +1,75 @@ +package expressions + +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/functions" +) + +type Node interface { + Evaluate(deploymentProvider functions.DeploymentData) interface{} +} + +type expressionValue struct { + val interface{} +} + +func (e expressionValue) Evaluate(deploymentProvider functions.DeploymentData) interface{} { + if f, ok := e.val.(expression); ok { + return f.Evaluate(deploymentProvider) + } + return e.val +} + +type expression struct { + name string + args []Node +} + +func (f expression) Evaluate(deploymentProvider functions.DeploymentData) interface{} { + args := make([]interface{}, len(f.args)) + for i, arg := range f.args { + args[i] = arg.Evaluate(deploymentProvider) + } + + return functions.Evaluate(deploymentProvider, f.name, args...) 
+} + +func NewExpressionTree(code string) (Node, error) { + tokens, err := lex(code) + if err != nil { + return nil, err + } + + // create a walker for the nodes + tw := newTokenWalker(tokens) + + // generate the root function + return newFunctionNode(tw), nil +} + +func newFunctionNode(tw *tokenWalker) Node { + funcNode := &expression{ + name: tw.pop().Data.(string), + } + + for tw.hasNext() { + token := tw.pop() + if token == nil { + break + } + + switch token.Type { + case TokenCloseParen: + return funcNode + case TokenName: + if tw.peek().Type == TokenOpenParen { + // this is a function, unwind 1 + tw.unPop() + funcNode.args = append(funcNode.args, newFunctionNode(tw)) + } + case TokenLiteralString, TokenLiteralInteger, TokenLiteralFloat: + funcNode.args = append(funcNode.args, expressionValue{token.Data}) + } + + } + return funcNode +} diff --git a/pkg/iac/scanners/azure/expressions/token_walker.go b/pkg/iac/scanners/azure/expressions/token_walker.go new file mode 100644 index 000000000000..d07a238d1bd9 --- /dev/null +++ b/pkg/iac/scanners/azure/expressions/token_walker.go @@ -0,0 +1,40 @@ +package expressions + +type tokenWalker struct { + tokens []Token + currentPosition int +} + +func newTokenWalker(tokens []Token) *tokenWalker { + return &tokenWalker{ + tokens: tokens, + currentPosition: 0, + } +} + +func (t *tokenWalker) peek() Token { + if t.currentPosition >= len(t.tokens) { + return Token{} + } + return t.tokens[t.currentPosition] +} + +func (t *tokenWalker) hasNext() bool { + return t.currentPosition+1 < len(t.tokens) +} + +func (t *tokenWalker) unPop() { + if t.currentPosition > 0 { + t.currentPosition-- + } +} + +func (t *tokenWalker) pop() *Token { + if !t.hasNext() { + return nil + } + + token := t.tokens[t.currentPosition] + t.currentPosition++ + return &token +} diff --git a/pkg/iac/scanners/azure/functions/add.go b/pkg/iac/scanners/azure/functions/add.go new file mode 100644 index 000000000000..9eb699e2eb9b --- /dev/null +++ 
b/pkg/iac/scanners/azure/functions/add.go @@ -0,0 +1,15 @@ +package functions + +func Add(args ...interface{}) interface{} { + + if len(args) != 2 { + return nil + } + + if a, ok := args[0].(int); ok { + if b, ok := args[1].(int); ok { + return a + b + } + } + return nil +} diff --git a/pkg/iac/scanners/azure/functions/add_test.go b/pkg/iac/scanners/azure/functions/add_test.go new file mode 100644 index 000000000000..b88e9b8ee1cc --- /dev/null +++ b/pkg/iac/scanners/azure/functions/add_test.go @@ -0,0 +1,38 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Add(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "Add with 1 and 2", + args: []interface{}{1, 2}, + expected: 3, + }, + { + name: "Add with 2 and 3", + args: []interface{}{2, 3}, + expected: 5, + }, + { + name: "Add with 3 and -4", + args: []interface{}{3, -4}, + expected: -1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Add(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/and.go b/pkg/iac/scanners/azure/functions/and.go new file mode 100644 index 000000000000..67070b5c2cb0 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/and.go @@ -0,0 +1,27 @@ +package functions + +func And(args ...interface{}) interface{} { + + if len(args) <= 1 { + return false + } + + arg0, ok := args[0].(bool) + if !ok { + return false + } + + benchmark := arg0 + + for _, arg := range args[1:] { + arg1, ok := arg.(bool) + if !ok { + return false + } + if benchmark != arg1 { + return false + } + + } + return true +} diff --git a/pkg/iac/scanners/azure/functions/and_test.go b/pkg/iac/scanners/azure/functions/and_test.go new file mode 100644 index 000000000000..6814e9288ca0 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/and_test.go @@ -0,0 +1,39 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_And(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "And with same 2 bools", + args: []interface{}{true, true}, + expected: true, + }, + { + name: "And with same 3 bools", + args: []interface{}{true, true, true}, + expected: true, + }, + { + name: "And with different 4 bools", + args: []interface{}{true, true, false, true}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := And(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/array.go b/pkg/iac/scanners/azure/functions/array.go new file mode 100644 index 000000000000..a1da05ef4fdc --- /dev/null +++ b/pkg/iac/scanners/azure/functions/array.go @@ -0,0 +1,29 @@ +package functions + +func Array(args ...interface{}) interface{} { + + if len(args) != 1 { + return "" + } + + switch ctype := args[0].(type) { + case int: + return []int{ctype} + case string: + return []string{ctype} + case map[string]interface{}: + var result []interface{} + for k, v := range ctype { + result = append(result, k, v) + } + return result + case interface{}: + switch ctype := ctype.(type) { + case []string: + return ctype + case []interface{}: + return ctype + } + } + return []interface{}{} +} diff --git a/pkg/iac/scanners/azure/functions/array_test.go b/pkg/iac/scanners/azure/functions/array_test.go new file mode 100644 index 000000000000..c4a376ea6080 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/array_test.go @@ -0,0 +1,44 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Array(t *testing.T) { + test := []struct { + name string + input []interface{} + expected interface{} + }{ + { + name: "array from an int", + input: []interface{}{1}, + expected: []int{1}, + }, + { + name: "array from a string", + input: []interface{}{"hello"}, + expected: []string{"hello"}, + }, + { + name: "array from a map", + input: []interface{}{map[string]interface{}{"hello": "world"}}, + expected: []interface{}{"hello", "world"}, + }, + { + name: "array from an slice", + input: []interface{}{ + []string{"hello", "world"}, + }, + expected: []string{"hello", "world"}, + }, + } + for _, tt := range test { + t.Run(tt.name, func(t *testing.T) { + actual := Array(tt.input...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/base64.go b/pkg/iac/scanners/azure/functions/base64.go new file mode 100644 index 000000000000..c3222e7675ec --- /dev/null +++ b/pkg/iac/scanners/azure/functions/base64.go @@ -0,0 +1,52 @@ +package functions + +import ( + "encoding/base64" + "encoding/json" +) + +func Base64(args ...interface{}) interface{} { + + if len(args) == 0 { + return nil + } + + input := args[0].(string) + + return base64.StdEncoding.EncodeToString([]byte(input)) +} + +func Base64ToString(args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + input := args[0].(string) + + result, err := base64.StdEncoding.DecodeString(input) + if err != nil { + return "" + } + return string(result) +} + +func Base64ToJson(args ...interface{}) interface{} { + + if len(args) == 0 { + return nil + } + + input := args[0].(string) + + decoded, err := base64.StdEncoding.DecodeString(input) + if err != nil { + return nil + } + + var result map[string]interface{} + + if err := json.Unmarshal(decoded, &result); err != nil { + return nil + } + return result +} diff --git a/pkg/iac/scanners/azure/functions/base64_test.go b/pkg/iac/scanners/azure/functions/base64_test.go new file mode 100644 index 000000000000..f557b277930c --- /dev/null +++ b/pkg/iac/scanners/azure/functions/base64_test.go @@ -0,0 +1,85 @@ +package functions + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Base64Call(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "simple base64 call", + args: []interface{}{ + "hello, world", + }, + expected: "aGVsbG8sIHdvcmxk", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Base64(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } + +} + +func Test_Base64ToStringCall(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "simple base64ToString call", + args: []interface{}{ + "aGVsbG8sIHdvcmxk", + }, + expected: "hello, world", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Base64ToString(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } + +} + +func Test_Base64ToJsonCall(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "simple base64ToJson call", + args: []interface{}{ + "eyJoZWxsbyI6ICJ3b3JsZCJ9", + }, + expected: `{"hello":"world"}`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Base64ToJson(tt.args...) + + actualContent, err := json.Marshal(actual) + require.NoError(t, err) + + assert.Equal(t, tt.expected, string(actualContent)) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/bool.go b/pkg/iac/scanners/azure/functions/bool.go new file mode 100644 index 000000000000..0221a5a4b8ee --- /dev/null +++ b/pkg/iac/scanners/azure/functions/bool.go @@ -0,0 +1,20 @@ +package functions + +import "strings" + +func Bool(args ...interface{}) interface{} { + if len(args) != 1 { + return false + } + + switch input := args[0].(type) { + case bool: + return input + case string: + input = strings.ToLower(input) + return input == "true" || input == "1" || input == "yes" || input == "on" + case int: + return input == 1 + } + return false +} diff --git a/pkg/iac/scanners/azure/functions/bool_test.go b/pkg/iac/scanners/azure/functions/bool_test.go new file mode 100644 index 000000000000..6c520a9380f8 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/bool_test.go @@ -0,0 +1,63 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Bool(t *testing.T) { + tests := []struct { + name string + args 
[]interface{} + expected bool + }{ + { + name: "Bool with true", + args: []interface{}{true}, + expected: true, + }, + { + name: "Bool with false", + args: []interface{}{false}, + expected: false, + }, + { + name: "Bool with 1", + args: []interface{}{1}, + expected: true, + }, + { + name: "Bool with 0", + args: []interface{}{0}, + expected: false, + }, + { + name: "Bool with true string", + args: []interface{}{"true"}, + expected: true, + }, + { + name: "Bool with false string", + args: []interface{}{"false"}, + expected: false, + }, + { + name: "Bool with 1 string", + args: []interface{}{"1"}, + expected: true, + }, + { + name: "Bool with 0 string", + args: []interface{}{"0"}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Bool(tt.args...) + assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/casing.go b/pkg/iac/scanners/azure/functions/casing.go new file mode 100644 index 000000000000..56a93bbd7a4b --- /dev/null +++ b/pkg/iac/scanners/azure/functions/casing.go @@ -0,0 +1,29 @@ +package functions + +import "strings" + +func ToLower(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + return strings.ToLower(input) +} + +func ToUpper(args ...interface{}) interface{} { + if len(args) != 1 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + return strings.ToUpper(input) +} diff --git a/pkg/iac/scanners/azure/functions/casing_test.go b/pkg/iac/scanners/azure/functions/casing_test.go new file mode 100644 index 000000000000..51c970e1765e --- /dev/null +++ b/pkg/iac/scanners/azure/functions/casing_test.go @@ -0,0 +1,71 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_ToLower(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "lowercase a string", 
+ args: []interface{}{ + "HELLO", + }, + expected: "hello", + }, + { + name: "lowercase a string with a non-string input", + args: []interface{}{ + 10, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := ToLower(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } + +} + +func Test_ToUpper(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "uppercase a string", + args: []interface{}{ + "hello", + }, + expected: "HELLO", + }, + { + name: "uppercase a string with a non-string input", + args: []interface{}{ + 10, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := ToUpper(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/iac/scanners/azure/functions/coalesce.go b/pkg/iac/scanners/azure/functions/coalesce.go new file mode 100644 index 000000000000..b7ec261450f7 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/coalesce.go @@ -0,0 +1,10 @@ +package functions + +func Coalesce(args ...interface{}) interface{} { + for _, arg := range args { + if arg != nil { + return arg + } + } + return nil +} diff --git a/pkg/iac/scanners/azure/functions/coalesce_test.go b/pkg/iac/scanners/azure/functions/coalesce_test.go new file mode 100644 index 000000000000..361914df64cd --- /dev/null +++ b/pkg/iac/scanners/azure/functions/coalesce_test.go @@ -0,0 +1,56 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Coalesce(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "coalesce with nil", + args: []interface{}{ + nil, + }, + expected: nil, + }, + { + name: "coalesce with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: "test", + }, + { + name: "coalesce with nil and string and int", + args: []interface{}{ + nil, + "test", + 1, + }, + 
expected: "test", + }, + { + name: "coalesce with nil and nil and array", + args: []interface{}{ + nil, + nil, + []interface{}{"a", "b", "c"}, + }, + expected: []interface{}{"a", "b", "c"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Coalesce(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/concat.go b/pkg/iac/scanners/azure/functions/concat.go new file mode 100644 index 000000000000..800db04be77d --- /dev/null +++ b/pkg/iac/scanners/azure/functions/concat.go @@ -0,0 +1,28 @@ +package functions + +import ( + "fmt" +) + +func Concat(args ...interface{}) interface{} { + + switch args[0].(type) { + case string: + var result string + for _, arg := range args { + result += fmt.Sprintf("%v", arg) + } + return result + case interface{}: + var result []interface{} + for _, arg := range args { + argArr, ok := arg.([]interface{}) + if !ok { + continue + } + result = append(result, argArr...) + } + return result + } + return "" +} diff --git a/pkg/iac/scanners/azure/functions/concat_test.go b/pkg/iac/scanners/azure/functions/concat_test.go new file mode 100644 index 000000000000..7b0c461c960d --- /dev/null +++ b/pkg/iac/scanners/azure/functions/concat_test.go @@ -0,0 +1,94 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_StringConcatenation(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "simple string concatenation", + args: []interface{}{ + "hello", + ", ", + "world", + "!", + }, + expected: "hello, world!", + }, + { + name: "string concatenation with non strings", + args: []interface{}{ + "pi to 3 decimal places is ", + 3.142, + }, + expected: "pi to 3 decimal places is 3.142", + }, + { + name: "string concatenation with multiple primitives", + args: []interface{}{ + "to say that ", + 3, + " is greater than ", + 5, + " would be ", + false, + }, + 
expected: "to say that 3 is greater than 5 would be false", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + concatenated := Concat(tt.args...) + require.Equal(t, tt.expected, concatenated) + }) + } +} + +func Test_ArrayConcatenation(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected []interface{} + }{ + { + name: "simple array concatenation", + args: []interface{}{ + []interface{}{1, 2, 3}, + []interface{}{4, 5, 6}, + }, + expected: []interface{}{1, 2, 3, 4, 5, 6}, + }, + { + name: "array concatenation with non arrays", + args: []interface{}{ + []interface{}{1, 2, 3}, + 4, + }, + expected: []interface{}{1, 2, 3}, + }, + { + name: "array concatenation with multiple primitives", + args: []interface{}{ + []interface{}{1, 2, 3}, + 4, + []interface{}{5, 6, 7}, + }, + expected: []interface{}{1, 2, 3, 5, 6, 7}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + concatenated := Concat(tt.args...) + require.Equal(t, tt.expected, concatenated) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/contains.go b/pkg/iac/scanners/azure/functions/contains.go new file mode 100644 index 000000000000..a067d63dfa85 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/contains.go @@ -0,0 +1,40 @@ +package functions + +import ( + "fmt" + "strings" +) + +func Contains(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + container := args[0] + itemToFind := args[1] + + switch cType := container.(type) { + case string: + switch iType := itemToFind.(type) { + case string: + return strings.Contains(strings.ToLower(cType), strings.ToLower(iType)) + case int, int32, int64, uint, uint32, uint64: + return strings.Contains(strings.ToLower(cType), fmt.Sprintf("%d", iType)) + } + case []interface{}: + for _, item := range cType { + if item == itemToFind { + return true + } + } + case map[string]interface{}: + for key := range cType { + if key == itemToFind { + return true 
+ } + } + } + + return false +} diff --git a/pkg/iac/scanners/azure/functions/contains_test.go b/pkg/iac/scanners/azure/functions/contains_test.go new file mode 100644 index 000000000000..e92f08fd5462 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/contains_test.go @@ -0,0 +1,95 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Contains(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "simple true string contains", + args: []interface{}{ + "hello, world", + "hell", + }, + expected: true, + }, + { + name: "simple false string contains", + args: []interface{}{ + "hello, world", + "help", + }, + expected: false, + }, + { + name: "simple true string contains with case sensitivity", + args: []interface{}{ + "hello, world", + "HELL", + }, + expected: true, + }, + { + name: "simple true string contains with number", + args: []interface{}{ + "You're my number 1", + 1, + }, + expected: true, + }, + { + name: "true object contains key", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "hello", + }, + expected: true, + }, + { + name: "false object contains key", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "world", + }, + expected: false, + }, + { + name: "true array contains value", + args: []interface{}{ + []interface{}{ + "hello", "world", + }, + "hello", + }, + expected: true, + }, + { + name: "false array contains value", + args: []interface{}{ + []interface{}{ + "hello", "world", + }, + "help", + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + doesContain := Contains(tt.args...) 
+ require.Equal(t, tt.expected, doesContain) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/copy_index.go b/pkg/iac/scanners/azure/functions/copy_index.go new file mode 100644 index 000000000000..aee090e79466 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/copy_index.go @@ -0,0 +1,25 @@ +package functions + +var loopCounter = map[string]int{} + +func CopyIndex(args ...interface{}) interface{} { + loopName := "default" + offset := 1 + if len(args) > 0 { + if providedLoopName, ok := args[0].(string); ok { + loopName = providedLoopName + } + } + if len(args) > 1 { + if providedOffset, ok := args[1].(int); ok { + offset = providedOffset + } + } + + if _, ok := loopCounter[loopName]; !ok { + loopCounter[loopName] = 0 + } + + loopCounter[loopName] += offset + return loopCounter[loopName] +} diff --git a/pkg/iac/scanners/azure/functions/copy_index_test.go b/pkg/iac/scanners/azure/functions/copy_index_test.go new file mode 100644 index 000000000000..041b258ca8cf --- /dev/null +++ b/pkg/iac/scanners/azure/functions/copy_index_test.go @@ -0,0 +1,52 @@ +package functions + +import "testing" + +func Test_CopyIndex(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "CopyIndex with 1", + args: []interface{}{}, + expected: 1, + }, + { + name: "CopyIndex with 2", + args: []interface{}{}, + expected: 2, + }, + { + name: "CopyIndex with 3", + args: []interface{}{}, + expected: 3, + }, + { + name: "CopyIndex with loopName", + args: []interface{}{"loop1"}, + expected: 1, + }, + { + name: "CopyIndex with same lo" + + "opName", + args: []interface{}{"loop1"}, + expected: 2, + }, + { + name: "CopyIndex with loopName", + args: []interface{}{"loop2", 10}, + expected: 10, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CopyIndex(tt.args...) 
+ if got != tt.expected { + t.Errorf("CopyIndex() = %v, want %v", got, tt.expected) + } + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/create_array.go b/pkg/iac/scanners/azure/functions/create_array.go new file mode 100644 index 000000000000..99f3558847a1 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/create_array.go @@ -0,0 +1,11 @@ +package functions + +func CreateArray(args ...interface{}) interface{} { + var result []interface{} + if len(args) == 0 { + return result + } + + result = append(result, args...) + return result +} diff --git a/pkg/iac/scanners/azure/functions/create_array_test.go b/pkg/iac/scanners/azure/functions/create_array_test.go new file mode 100644 index 000000000000..5e63074888cb --- /dev/null +++ b/pkg/iac/scanners/azure/functions/create_array_test.go @@ -0,0 +1,68 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_CreateArray(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "create array with strings", + args: []interface{}{ + "Hello", + "World", + }, + expected: []interface{}{"Hello", "World"}, + }, + { + name: "create array with ints", + + args: []interface{}{ + 1, 2, 3, + }, + expected: []interface{}{1, 2, 3}, + }, + { + name: "create array with arrays", + args: []interface{}{ + []interface{}{1, 2, 3}, + []interface{}{4, 5, 6}, + }, + expected: []interface{}{[]interface{}{1, 2, 3}, []interface{}{4, 5, 6}}, + }, + { + name: "create arrau with maps", + args: []interface{}{ + map[string]interface{}{ + "one": "a", + }, + map[string]interface{}{ + "two": "b", + }, + }, + expected: []interface{}{ + map[string]interface{}{ + "one": "a", + }, + map[string]interface{}{ + "two": "b", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := CreateArray(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/iac/scanners/azure/functions/create_object.go b/pkg/iac/scanners/azure/functions/create_object.go new file mode 100644 index 000000000000..b9fe2d29f6d2 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/create_object.go @@ -0,0 +1,21 @@ +package functions + +func CreateObject(args ...interface{}) interface{} { + obj := map[string]interface{}{} + if len(args) == 0 { + return obj + } + + // if there aren't even pairs then return an empty object + if len(args)%2 != 0 { + return obj + } + + for i := 0; i < len(args); i += 2 { + key := args[i].(string) + value := args[i+1] + obj[key] = value + } + + return obj +} diff --git a/pkg/iac/scanners/azure/functions/create_object_test.go b/pkg/iac/scanners/azure/functions/create_object_test.go new file mode 100644 index 000000000000..f695e38410fe --- /dev/null +++ b/pkg/iac/scanners/azure/functions/create_object_test.go @@ -0,0 +1,60 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_CreateObject(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "CreateObject with no args", + args: []interface{}{}, + expected: map[string]interface{}{}, + }, + { + name: "CreateObject with one arg", + args: []interface{}{"foo", "bar"}, + expected: map[string]interface{}{"foo": "bar"}, + }, + { + name: "CreateObject with two args", + args: []interface{}{"foo", "bar", "baz", "qux"}, + expected: map[string]interface{}{"foo": "bar", "baz": "qux"}, + }, + { + name: "CreateObject with three args", + args: []interface{}{"foo", "bar", "baz", 1, "quux", true}, + expected: map[string]interface{}{"foo": "bar", "baz": 1, "quux": true}, + }, + { + name: "CreateObject with odd number of args", + args: []interface{}{"foo", "bar", "baz"}, + expected: map[string]interface{}{}, + }, + { + name: "CreateObject with odd number of args", + args: []interface{}{"foo", "bar", "baz", 
[]string{"Hello", "World"}}, + expected: map[string]interface{}{ + "foo": "bar", + "baz": []string{ + "Hello", "World", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := CreateObject(tt.args...) + assert.Equal(t, tt.expected, got) + }) + } + +} diff --git a/pkg/iac/scanners/azure/functions/data_uri.go b/pkg/iac/scanners/azure/functions/data_uri.go new file mode 100644 index 000000000000..50f0835ee6ad --- /dev/null +++ b/pkg/iac/scanners/azure/functions/data_uri.go @@ -0,0 +1,36 @@ +package functions + +import ( + "fmt" + "strings" +) + +func DataUri(args ...interface{}) interface{} { + if len(args) == 0 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + + return fmt.Sprintf("data:text/plain;charset=utf8;base64,%s", Base64(input)) +} + +func DataUriToString(args ...interface{}) interface{} { + if len(args) == 0 { + return "" + } + + input, ok := args[0].(string) + if !ok { + return "" + } + parts := strings.Split(input, "base64,") + if len(parts) != 2 { + return "" + } + + return Base64ToString(parts[1]) +} diff --git a/pkg/iac/scanners/azure/functions/data_uri_test.go b/pkg/iac/scanners/azure/functions/data_uri_test.go new file mode 100644 index 000000000000..04f92249e093 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/data_uri_test.go @@ -0,0 +1,53 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_data_uri_from_string(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "data uri from string", + args: []interface{}{ + "Hello", + }, + expected: "data:text/plain;charset=utf8;base64,SGVsbG8=", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dataUri := DataUri(tt.args...) 
+ require.Equal(t, tt.expected, dataUri) + }) + } +} + +func Test_string_from_data_uri(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "data uri to string", + args: []interface{}{ + "data:;base64,SGVsbG8sIFdvcmxkIQ==", + }, + expected: "Hello, World!", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dataUri := DataUriToString(tt.args...) + require.Equal(t, tt.expected, dataUri) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/date_time_add.go b/pkg/iac/scanners/azure/functions/date_time_add.go new file mode 100644 index 000000000000..c3b902b08965 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/date_time_add.go @@ -0,0 +1,115 @@ +package functions + +import ( + "fmt" + "regexp" + "strconv" + "time" +) + +var pattern = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`) + +func DateTimeAdd(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + base, ok := args[0].(string) + if !ok { + return nil + } + + format := time.RFC3339 + if len(args) == 3 { + if providedFormat, ok := args[2].(string); ok { + format = convertFormat(providedFormat) + } + + } + + baseTime, err := time.Parse(format, base) + if err != nil { + return nil + } + + duration, err := parseISO8601(args[1].(string)) + if err != nil { + return nil + } + + timeDuration := duration.timeDuration() + baseTime = baseTime.Add(timeDuration) + + if ok { + return baseTime.Format(format) + } + + return baseTime.Format(time.RFC3339) +} + +type Iso8601Duration struct { + Y int + M int + W int + D int + // Time Component + TH int + TM int + TS int +} + +func parseISO8601(from string) (Iso8601Duration, error) { + var match []string + var d Iso8601Duration + + if pattern.MatchString(from) { + match = pattern.FindStringSubmatch(from) + } else { + return d, fmt.Errorf("could not parse duration string") + } + + for i, name := range
pattern.SubexpNames() { + part := match[i] + if i == 0 || name == "" || part == "" { + continue + } + + val, err := strconv.Atoi(part) + if err != nil { + return d, err + } + switch name { + case "year": + d.Y = val + case "month": + d.M = val + case "week": + d.W = val + case "day": + d.D = val + case "hour": + d.TH = val + case "minute": + d.TM = val + case "second": + d.TS = val + default: + return d, fmt.Errorf("unknown field %s", name) + } + } + + return d, nil +} + +func (d Iso8601Duration) timeDuration() time.Duration { + var dur time.Duration + dur += time.Duration(d.TH) * time.Hour + dur += time.Duration(d.TM) * time.Minute + dur += time.Duration(d.TS) * time.Second + dur += time.Duration(d.D) * 24 * time.Hour + dur += time.Duration(d.W) * 7 * 24 * time.Hour + dur += time.Duration(d.M) * 30 * 24 * time.Hour + dur += time.Duration(d.Y) * 365 * 24 * time.Hour + + return dur +} diff --git a/pkg/iac/scanners/azure/functions/date_time_epoch.go b/pkg/iac/scanners/azure/functions/date_time_epoch.go new file mode 100644 index 000000000000..9b1802573269 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/date_time_epoch.go @@ -0,0 +1,38 @@ +package functions + +import ( + "time" + + smithyTime "github.com/aws/smithy-go/time" +) + +func DateTimeFromEpoch(args ...interface{}) interface{} { + if len(args) != 1 { + return nil + } + + epoch, ok := args[0].(int) + if !ok { + return nil + } + + return smithyTime.ParseEpochSeconds(float64(epoch)).Format(time.RFC3339) +} + +func DateTimeToEpoch(args ...interface{}) interface{} { + if len(args) != 1 { + return nil + } + + dateTime, ok := args[0].(string) + if !ok { + return nil + } + + parsed, err := time.Parse(time.RFC3339, dateTime) + if err != nil { + return nil + } + + return int(parsed.Unix()) +} diff --git a/pkg/iac/scanners/azure/functions/date_time_epoch_test.go b/pkg/iac/scanners/azure/functions/date_time_epoch_test.go new file mode 100644 index 000000000000..6cdf7a0442bd --- /dev/null +++ 
b/pkg/iac/scanners/azure/functions/date_time_epoch_test.go @@ -0,0 +1,51 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_DateTimeFromEpoch(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "datetime from epoch", + args: []interface{}{ + 1683040573, + }, + expected: "2023-05-02T15:16:13Z", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeFromEpoch(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_DateTimeToEpoch(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "datetime to epoch", + args: []interface{}{ + "2023-05-02T15:16:13Z", + }, + expected: 1683040573, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeToEpoch(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/datetime_add_test.go b/pkg/iac/scanners/azure/functions/datetime_add_test.go new file mode 100644 index 000000000000..b5c09d04a742 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/datetime_add_test.go @@ -0,0 +1,72 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_DateTimeAdd(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + + { + name: "datetime add 1 years", + args: []interface{}{ + "2010-01-01T00:00:00Z", + "P1Y", + }, + expected: "2011-01-01T00:00:00Z", + }, + { + name: "datetime add 3 months", + args: []interface{}{ + "2010-01-01T00:00:00Z", + "P3M", + }, + expected: "2010-04-01T00:00:00Z", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := DateTimeAdd(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_ISO8601DurationParse(t *testing.T) { + tests := []struct { + name string + args string + expected Iso8601Duration + }{ + + { + name: "parse 1 year", + args: "P1Y", + expected: Iso8601Duration{Y: 1}, + }, + { + name: "parse 3 months", + args: "P3M", + expected: Iso8601Duration{M: 3}, + }, + { + name: "parse 12 hours", + args: "PT12H", + expected: Iso8601Duration{TH: 12}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := parseISO8601(tt.args) + require.NoError(t, err) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/deployment.go b/pkg/iac/scanners/azure/functions/deployment.go new file mode 100644 index 000000000000..afafb2b3587c --- /dev/null +++ b/pkg/iac/scanners/azure/functions/deployment.go @@ -0,0 +1,75 @@ +package functions + +type DeploymentData interface { + GetParameter(name string) interface{} + GetVariable(variableName string) interface{} + GetEnvVariable(envVariableName string) interface{} +} + +func Deployment(deploymentProvider DeploymentData, args ...interface{}) interface{} { + + /* + + { + "name": "", + "properties": { + "templateLink": { + "uri": "" + }, + "template": { + "$schema": "", + "contentVersion": "", + "parameters": {}, + "variables": {}, + "resources": [], + "outputs": {} + }, + "templateHash": "", + "parameters": {}, + "mode": "", + "provisioningState": "" + } + } + + */ + + return nil +} + +func Environment(envProvider DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + envVarName, ok := args[0].(string) + if !ok { + return nil + } + return envProvider.GetEnvVariable(envVarName) +} + +func Variables(varProvider DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + varName, ok := args[0].(string) + if !ok { + return nil + } + return varProvider.GetVariable(varName) +} + +func Parameters(paramProvider 
DeploymentData, args ...interface{}) interface{} { + if len(args) == 0 { + return nil + } + + paramName, ok := args[0].(string) + if !ok { + return nil + } + + return paramProvider.GetParameter(paramName) + +} diff --git a/pkg/iac/scanners/azure/functions/div.go b/pkg/iac/scanners/azure/functions/div.go new file mode 100644 index 000000000000..9de0dfb05f73 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/div.go @@ -0,0 +1,15 @@ +package functions + +func Div(args ...interface{}) interface{} { + + if len(args) != 2 { + return nil + } + + if a, ok := args[0].(int); ok { + if b, ok := args[1].(int); ok { + return a / b + } + } + return nil +} diff --git a/pkg/iac/scanners/azure/functions/div_test.go b/pkg/iac/scanners/azure/functions/div_test.go new file mode 100644 index 000000000000..49166190fb5d --- /dev/null +++ b/pkg/iac/scanners/azure/functions/div_test.go @@ -0,0 +1,38 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Div(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected int + }{ + { + name: "Div 2 by 1", + args: []interface{}{2, 1}, + expected: 2, + }, + { + name: "Div 4 by 2", + args: []interface{}{4, 2}, + expected: 2, + }, + { + name: "Div 6 by 2", + args: []interface{}{6, 2}, + expected: 3, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Div(tt.args...) 
+ assert.Equal(t, tt.expected, got) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/empty.go b/pkg/iac/scanners/azure/functions/empty.go new file mode 100644 index 000000000000..1dbe8396f7c3 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/empty.go @@ -0,0 +1,33 @@ +package functions + +func Empty(args ...interface{}) interface{} { + + if len(args) != 1 { + return false + } + + container := args[0] + + switch cType := container.(type) { + case string: + return cType == "" + case map[string]interface{}: + return len(cType) == 0 + case interface{}: + switch iType := cType.(type) { + case []string: + return len(iType) == 0 + case []bool: + return len(iType) == 0 + case []int: + return len(iType) == 0 + case []float64: + return len(iType) == 0 + case map[string]interface{}: + return len(iType) == 0 + } + + } + + return false +} diff --git a/pkg/iac/scanners/azure/functions/empty_test.go b/pkg/iac/scanners/azure/functions/empty_test.go new file mode 100644 index 000000000000..a21fb96cd8cd --- /dev/null +++ b/pkg/iac/scanners/azure/functions/empty_test.go @@ -0,0 +1,68 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Empty(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected bool + }{ + { + name: "string is empty", + args: []interface{}{ + "", + }, + expected: true, + }, + { + name: "string is not empty", + args: []interface{}{ + "hello, world", + }, + expected: false, + }, + { + name: "array is empty", + args: []interface{}{ + []string{}, + }, + expected: true, + }, + { + name: "array is not empty", + args: []interface{}{ + []string{"Hello", "World"}, + }, + expected: false, + }, + { + name: "map is empty", + args: []interface{}{ + map[string]interface{}{}, + }, + expected: true, + }, + { + name: "map is not empty", + args: []interface{}{ + map[string]interface{}{ + "hello": "world", + }, + "world", + }, + expected: false, + }, + } + + for _, tt := range tests { + 
// EndsWith implements the ARM template endsWith() function: true when
// the first string argument has the second string argument as a suffix.
// Wrong arity or non-string arguments evaluate to false.
func EndsWith(args ...interface{}) interface{} {
	if len(args) != 2 {
		return false
	}

	haystack, haystackOK := args[0].(string)
	suffix, suffixOK := args[1].(string)
	if !haystackOK || !suffixOK {
		return false
	}

	return strings.HasSuffix(haystack, suffix)
}
// Equals implements the ARM template equals() function. Two argument
// slices are equal when they have the same length and pairwise-equal
// elements; every other pair is compared directly with ==.
func Equals(args ...interface{}) interface{} {
	if len(args) != 2 {
		return false
	}

	left, leftIsSlice := args[0].([]interface{})
	right, rightIsSlice := args[1].([]interface{})
	if leftIsSlice && rightIsSlice {
		if len(left) != len(right) {
			return false
		}
		for i, v := range left {
			if v != right[i] {
				return false
			}
		}
		return true
	}

	return args[0] == args[1]
}
// First implements the ARM template first() function: the first
// character of a string or the first element of an array.
// Empty input, wrong arity, or an unsupported type yields "".
func First(args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	switch c := args[0].(type) {
	case string:
		if len(c) > 0 {
			return string(c[0])
		}
	case []interface{}:
		// JSON arrays decode to []interface{}; the typed-slice cases
		// below remain for callers passing concrete slices.
		if len(c) > 0 {
			return c[0]
		}
	case []string:
		if len(c) > 0 {
			return c[0]
		}
	case []bool:
		if len(c) > 0 {
			return c[0]
		}
	case []int:
		if len(c) > 0 {
			return c[0]
		}
	case []float64:
		if len(c) > 0 {
			return c[0]
		}
	}

	return ""
}
// Float implements the ARM template float() function, converting an
// int or a numeric string to a float64. Anything else — including an
// unparsable string or wrong arity — yields 0.0.
func Float(args ...interface{}) interface{} {
	if len(args) != 1 {
		return 0.0
	}

	switch v := args[0].(type) {
	case int:
		return float64(v)
	case string:
		if parsed, err := strconv.ParseFloat(v, 64); err == nil {
			return parsed
		}
	}

	return 0.0
}
// Format implements the ARM template format() function: the first
// argument is a template with "{0}", "{1}", … placeholders which are
// substituted with the remaining arguments.
func Format(args ...interface{}) interface{} {
	// Guard: calling format() with no arguments previously indexed
	// args[0] in the helper and panicked.
	if len(args) == 0 {
		return ""
	}

	formatter := generateFormatterString(args...)

	return fmt.Sprintf(formatter, args[1:]...)
}

// generateFormatterString rewrites "{N}" placeholders in the template
// (args[0]) into fmt verbs chosen from the type of each substitution
// argument. Placeholders whose argument has an unsupported type are
// left untouched. A non-string template yields "".
func generateFormatterString(args ...interface{}) string {

	formatter, ok := args[0].(string)
	if !ok {
		return ""
	}
	for i, arg := range args[1:] {
		placeholder := fmt.Sprintf("{%d}", i)
		switch arg.(type) {
		case string:
			formatter = strings.ReplaceAll(formatter, placeholder, "%s")
		case int, int32, int64, uint, uint32, uint64:
			formatter = strings.ReplaceAll(formatter, placeholder, "%d")
		case float64, float32:
			formatter = strings.ReplaceAll(formatter, placeholder, "%f")
		}
	}
	return formatter
}
+ assert.Equal(t, tt.expected, actual) + }) + } + +} diff --git a/pkg/iac/scanners/azure/functions/functions.go b/pkg/iac/scanners/azure/functions/functions.go new file mode 100644 index 000000000000..f4ed7815f485 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/functions.go @@ -0,0 +1,99 @@ +package functions + +var deploymentFuncs = map[string]func(dp DeploymentData, args ...interface{}) interface{}{ + "parameters": Parameters, + "deployment": Deployment, + "environment": Environment, + "variables": Variables, +} +var generalFuncs = map[string]func(...interface{}) interface{}{ + + "add": Add, + "and": And, + "array": Array, + "base64": Base64, + "base64ToJson": Base64ToJson, + "bool": Bool, + "coalesce": Coalesce, + "concat": Concat, + "contains": Contains, + "copyIndex": CopyIndex, + "createArray": CreateArray, + "createObject": CreateObject, + "dataUri": DataUri, + "dataUriToString": DataUriToString, + "dateTimeAdd": DateTimeAdd, + "dateTimeFromEpoch": DateTimeFromEpoch, + "dateTimeToEpoch": DateTimeToEpoch, + "div": Div, + "empty": Empty, + "endsWith": EndsWith, + "equals": Equals, + "extensionResourceId": ExtensionResourceID, + "false": False, + "float": Float, + "format": Format, + "greater": Greater, + "greaterOrEquals": GreaterOrEquals, + "guid": Guid, + "if": If, + "indexOf": IndexOf, + "int": Int, + "intersection": Intersection, + "items": Items, + "join": Join, + "lastIndexOf": LastIndexOf, + "length": Length, + "less": Less, + "lessOrEquals": LessOrEquals, + // "list": List, + "managementGroup": ManagementGroup, + "managementGroupResourceId": ManagementGroupResourceID, + "max": Max, + "min": Min, + "mod": Mod, + "mul": Mul, + "newGuid": NewGuid, + "not": Not, + "null": Null, + "or": Or, + "padLeft": PadLeft, + "pickZones": PickZones, + "range": Range, + "reference": Reference, + "replace": Replace, + "resourceGroup": ResourceGroup, + "resourceId": ResourceID, + "skip": Skip, + "split": Split, + "startsWith": StartsWith, + "string": String, + "sub": 
Sub, + "subscription": Subscription, + "subscriptionResourceId": SubscriptionResourceID, + "substring": SubString, + "tenant": Tenant, + "tenantResourceId": TenantResourceID, + "toLower": ToLower, + "toUpper": ToUpper, + "trim": Trim, + "true": True, + "union": Union, + "union:": Union, + "uniqueString": UniqueString, + "uri": Uri, + "utcNow": UTCNow, +} + +func Evaluate(deploymentProvider DeploymentData, name string, args ...interface{}) interface{} { + + if f, ok := deploymentFuncs[name]; ok { + return f(deploymentProvider, args...) + } + + if f, ok := generalFuncs[name]; ok { + return f(args...) + } + + return nil +} diff --git a/pkg/iac/scanners/azure/functions/greater.go b/pkg/iac/scanners/azure/functions/greater.go new file mode 100644 index 000000000000..24bf79834641 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/greater.go @@ -0,0 +1,47 @@ +package functions + +func Greater(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + switch arg0 := args[0].(type) { + case int: + arg1, ok := args[1].(int) + if ok { + return arg0 > arg1 + } + case string: + arg1, ok := args[1].(string) + if ok { + return arg0 > arg1 + } + } + + return false +} + +func GreaterOrEquals(args ...interface{}) interface{} { + + if len(args) != 2 { + return false + } + + switch arg0 := args[0].(type) { + case nil: + return args[1] == nil + case int: + arg1, ok := args[1].(int) + if ok { + return arg0 >= arg1 + } + case string: + arg1, ok := args[1].(string) + if ok { + return arg0 >= arg1 + } + } + + return false +} diff --git a/pkg/iac/scanners/azure/functions/greater_test.go b/pkg/iac/scanners/azure/functions/greater_test.go new file mode 100644 index 000000000000..8d3e1b21b25e --- /dev/null +++ b/pkg/iac/scanners/azure/functions/greater_test.go @@ -0,0 +1,119 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Greater(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected 
interface{} + }{ + + { + name: "greater with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: false, + }, + { + name: "greater with nil and nil", + args: []interface{}{ + nil, + nil, + }, + expected: false, + }, + { + name: "greater with string and string", + args: []interface{}{ + "test", + "test", + }, + expected: false, + }, + { + name: "greater with string and int", + args: []interface{}{ + "test", + 1, + }, + expected: false, + }, + { + name: "greater with int and int", + args: []interface{}{ + 1, + 1, + }, + expected: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Greater(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func Test_GreaterThanOrEqual(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + + { + name: "greater with nil and string", + args: []interface{}{ + nil, + "test", + }, + expected: false, + }, + { + name: "greater with nil and nil", + args: []interface{}{ + nil, + nil, + }, + expected: true, + }, + { + name: "greater with string and string", + args: []interface{}{ + "test", + "test", + }, + expected: true, + }, + { + name: "greater with string and int", + args: []interface{}{ + "test", + 1, + }, + expected: false, + }, + { + name: "greater with int and int", + args: []interface{}{ + 1, + 1, + }, + expected: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := GreaterOrEquals(tt.args...) 
+ assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/guid.go b/pkg/iac/scanners/azure/functions/guid.go new file mode 100644 index 000000000000..d54bbacb1beb --- /dev/null +++ b/pkg/iac/scanners/azure/functions/guid.go @@ -0,0 +1,44 @@ +package functions + +import ( + "crypto/sha256" + "strings" + + "github.com/google/uuid" +) + +func Guid(args ...interface{}) interface{} { + + if len(args) == 0 { + return "" + } + + hashParts := make([]string, len(args)) + for i, str := range args { + hashParts[i] = str.(string) + } + + guid, err := generateSeededGUID(hashParts...) + if err != nil { + return "" + } + + return guid.String() +} + +func generateSeededGUID(seedParts ...string) (uuid.UUID, error) { + var id uuid.UUID + + stringToHash := strings.Join(seedParts, "") + + hsha2 := sha256.Sum256([]byte(stringToHash)) + + copy(id[:], hsha2[:16]) + id[6] = (id[6] & 0x0f) | 0x40 // Version 4 + id[8] = (id[8] & 0x3f) | 0x80 // Variant is 10 + return id, nil +} + +func NewGuid(args ...interface{}) interface{} { + return uuid.NewString() +} diff --git a/pkg/iac/scanners/azure/functions/guid_test.go b/pkg/iac/scanners/azure/functions/guid_test.go new file mode 100644 index 000000000000..0e47e5383a54 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/guid_test.go @@ -0,0 +1,35 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Guid(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "guid from a string", + args: []interface{}{ + "hello", + }, + expected: "2cf24dba-5fb0-430e-a6e8-3b2ac5b9e29e", + }, + { + name: "guid from an string", + args: []interface{}{}, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + guid := Guid(tt.args...) 
// If implements the ARM template if() function: when the first argument
// is the boolean true it yields the second argument, otherwise the
// third. Wrong arity yields nil.
func If(args ...interface{}) interface{} {

	if len(args) != 3 {
		return nil
	}

	cond, isBool := args[0].(bool)
	if isBool && cond {
		return args[1]
	}
	return args[2]
}
// IndexOf implements the ARM template indexOf() function: the byte
// index of the first occurrence of the second string in the first, or
// -1 when absent, for non-string arguments, or on wrong arity.
func IndexOf(args ...interface{}) interface{} {

	if len(args) != 2 {
		return -1
	}

	haystack, haystackOK := args[0].(string)
	needle, needleOK := args[1].(string)
	if !haystackOK || !needleOK {
		return -1
	}

	return strings.Index(haystack, needle)
}
// Int implements the ARM template int() function, converting an int or
// an integer string to an int. Anything else — including a decimal or
// unparsable string, or wrong arity — yields 0.
func Int(args ...interface{}) interface{} {
	if len(args) != 1 {
		return 0
	}

	switch v := args[0].(type) {
	case int:
		return v
	case string:
		if parsed, err := strconv.Atoi(v); err == nil {
			return parsed
		}
	}

	return 0
}
// intersectionArray returns the elements present in every argument
// slice, sorted for deterministic output. Arguments that are not
// []interface{} are skipped; a non-slice first argument yields an empty
// result (previously the bare args[0].([]interface{}) assertion
// panicked). String elements sort lexicographically; the comparator is
// guarded so non-string elements no longer panic during sorting.
func intersectionArray(args ...interface{}) interface{} {
	result := []interface{}{}

	first, ok := args[0].([]interface{})
	if !ok {
		return result
	}

	hash := make(map[interface{}]bool)
	for _, item := range first {
		hash[item] = true
	}

	// Intersect each subsequent slice against the running set.
	for i := 1; i < len(args); i++ {
		workingHash := make(map[interface{}]bool)
		argArr, ok := args[i].([]interface{})
		if !ok {
			continue
		}
		for _, item := range argArr {
			if _, ok := hash[item]; ok {
				workingHash[item] = true
			}
		}
		hash = workingHash
	}

	for k := range hash {
		result = append(result, k)
	}

	sort.Slice(result, func(i, j int) bool {
		si, iOK := result[i].(string)
		sj, jOK := result[j].(string)
		if iOK && jOK {
			return si < sj
		}
		// Non-string elements keep their map-iteration order rather
		// than panicking on an unchecked string assertion.
		return false
	})

	return result
}

// intersectionMap returns the key/value pairs present, with equal
// values, in every argument map. Arguments that are not
// map[string]interface{} are skipped; a non-map first argument yields
// an empty result.
func intersectionMap(args ...interface{}) interface{} {
	hash := make(map[string]interface{})

	first, ok := args[0].(map[string]interface{})
	if !ok {
		return hash
	}
	for k, v := range first {
		hash[k] = v
	}

	for i := 1; i < len(args); i++ {
		workingHash := make(map[string]interface{})
		argMap, ok := args[i].(map[string]interface{})
		if !ok {
			continue
		}
		for k, v := range argMap {
			if existing, ok := hash[k]; ok && existing == v {
				workingHash[k] = v
			}
		}
		hash = workingHash
	}

	return hash
}
[]interface{}{"b", "c"}, + }, + { + name: "intersect two arrays with one empty", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{}, + }, + expected: []interface{}{}, + }, + { + name: "intersect two arrays with both empty", + args: []interface{}{ + []interface{}{}, + []interface{}{}, + }, + expected: []interface{}{}, + }, + { + name: "intersect two arrays with both nil", + args: []interface{}{ + nil, + nil, + }, + expected: []interface{}{}, + }, + { + name: "intersect two maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "b": "b", + "c": "c", + }, + }, + { + name: "intersect three maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + map[string]interface{}{ + "b": "b", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "b": "b", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Intersection(tt.args...) 
// Join implements the ARM template join() function: concatenates the
// string-slice first argument using the string second argument as the
// separator. Wrong arity or wrong types yield "".
func Join(args ...interface{}) interface{} {

	if len(args) != 2 {
		return ""
	}

	parts, partsOK := args[0].([]string)
	separator, sepOK := args[1].(string)
	if !partsOK || !sepOK {
		return ""
	}

	return strings.Join(parts, separator)
}
// JSON implements the ARM template json() function for object input:
// parses the string argument into a map. Wrong arity, a non-string
// argument, or unparsable input yields "".
func JSON(args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	raw, ok := args[0].(string)
	if !ok {
		return ""
	}

	var decoded map[string]interface{}
	if err := json.Unmarshal([]byte(raw), &decoded); err != nil {
		return ""
	}
	return decoded
}
// Last implements the ARM template last() function: the last character
// of a string or the last element of an array.
// Empty input, wrong arity, or an unsupported type yields "".
func Last(args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	switch c := args[0].(type) {
	case string:
		if len(c) > 0 {
			return string(c[len(c)-1])
		}
	case []interface{}:
		// JSON arrays decode to []interface{}; the typed-slice cases
		// below remain for callers passing concrete slices.
		if len(c) > 0 {
			return c[len(c)-1]
		}
	case []string:
		if len(c) > 0 {
			return c[len(c)-1]
		}
	case []bool:
		if len(c) > 0 {
			return c[len(c)-1]
		}
	case []int:
		if len(c) > 0 {
			return c[len(c)-1]
		}
	case []float64:
		if len(c) > 0 {
			return c[len(c)-1]
		}
	}

	return ""
}
that is there", + args: []interface{}{ + "Hello world!", + "l", + }, + expected: 9, + }, + { + name: "get last index of string that is there as well", + args: []interface{}{ + "Hello world!", + "world", + }, + expected: 6, + }, + { + name: "get last index of string that isn't there", + args: []interface{}{ + "Hello world!", + "planet!", + }, + expected: -1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := LastIndexOf(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/last_test.go b/pkg/iac/scanners/azure/functions/last_test.go new file mode 100644 index 000000000000..2ceafbf8a69a --- /dev/null +++ b/pkg/iac/scanners/azure/functions/last_test.go @@ -0,0 +1,51 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_Last(t *testing.T) { + test := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "last in empty string", + args: []interface{}{ + "", + }, + expected: "", + }, + { + name: "last in string", + args: []interface{}{ + "Hello", + }, + expected: "o", + }, + { + name: "last in empty slice", + args: []interface{}{ + []string{}, + }, + expected: "", + }, + { + name: "last in slice", + args: []interface{}{ + []string{"Hello", "World"}, + }, + expected: "World", + }, + } + + for _, tt := range test { + t.Run(tt.name, func(t *testing.T) { + actual := Last(tt.args...) 
// Length implements the ARM template length() function: the length of
// a string, array, or object. Wrong arity or an unsupported type
// yields 0. (The flat type switch below covers exactly the same
// concrete types as the original's nested switch.)
func Length(args ...interface{}) interface{} {

	if len(args) != 1 {
		return 0
	}

	switch v := args[0].(type) {
	case string:
		return len(v)
	case map[string]interface{}:
		return len(v)
	case []interface{}:
		return len(v)
	case []string:
		return len(v)
	case []bool:
		return len(v)
	case []int:
		return len(v)
	case []float64:
		return len(v)
	}

	return 0
}
// Less implements the ARM template less() function for ints and strings.
// It returns true only when both arguments share a comparable type and
// the first is strictly smaller; any other input yields false.
func Less(args ...interface{}) interface{} {
	if len(args) != 2 {
		return false
	}

	if a, ok := args[0].(int); ok {
		if b, ok := args[1].(int); ok {
			return a < b
		}
		return false
	}
	if a, ok := args[0].(string); ok {
		if b, ok := args[1].(string); ok {
			return a < b
		}
	}
	return false
}

// LessOrEquals implements the ARM template lessOrEquals() function:
// like Less, but also true on equality. Two nil arguments compare as
// equal.
func LessOrEquals(args ...interface{}) interface{} {
	if len(args) != 2 {
		return false
	}

	switch a := args[0].(type) {
	case nil:
		return args[1] == nil
	case int:
		if b, ok := args[1].(int); ok {
			return a <= b
		}
	case string:
		if b, ok := args[1].(string); ok {
			return a <= b
		}
	}
	return false
}
// Min implements the ARM template min() function. It accepts either a
// single []int argument or a variadic list of ints and returns the
// smallest value, or 0 when no usable ints are supplied.
//
// Fixes over the original: calling Min with no arguments no longer
// panics indexing args[0], and non-int variadic arguments are skipped
// instead of panicking on an unchecked type assertion.
func Min(args ...interface{}) interface{} {
	if len(args) == 0 {
		return 0
	}

	// A single []int argument is the "array form" of the function.
	if ints, ok := args[0].([]int); ok {
		return minInt(ints)
	}

	// Otherwise treat the arguments as a comma-separated list of ints.
	var ints []int
	for _, arg := range args {
		if n, ok := arg.(int); ok {
			ints = append(ints, n)
		}
	}
	return minInt(ints)
}

// minInt returns the smallest element of args, or 0 for an empty slice.
func minInt(args []int) int {
	if len(args) == 0 {
		return 0
	}
	lowest := args[0]
	for _, n := range args[1:] {
		if n < lowest {
			lowest = n
		}
	}
	return lowest
}
// Mod implements the ARM template mod() function: the remainder of the
// integer division of the first argument by the second. Non-int inputs
// or a wrong argument count yield 0.
//
// Fix: a zero divisor previously caused a runtime panic (integer divide
// by zero); it now yields 0.
func Mod(args ...interface{}) interface{} {
	if len(args) != 2 {
		return 0
	}

	a, aOK := args[0].(int)
	b, bOK := args[1].(int)
	if !aOK || !bOK {
		return 0
	}
	if b == 0 {
		// Guard: a % 0 panics in Go.
		return 0
	}
	return a % b
}
// Mul implements the ARM template mul() function: the product of two
// integers. Non-int inputs or a wrong argument count yield nil.
func Mul(args ...interface{}) interface{} {
	if len(args) != 2 {
		return nil
	}

	a, aOK := args[0].(int)
	b, bOK := args[1].(int)
	if !aOK || !bOK {
		return nil
	}
	return a * b
}
// Not implements the ARM template not() function: the logical negation
// of a boolean argument. A non-bool argument or a wrong argument count
// yields false.
func Not(args ...interface{}) interface{} {
	if len(args) != 1 {
		return false
	}

	b, ok := args[0].(bool)
	return ok && !b
}
// Or implements the ARM template or() function: true when any boolean
// argument is true. Fewer than two arguments, or a non-bool encountered
// before the first true, yields false.
func Or(args ...interface{}) interface{} {
	if len(args) <= 1 {
		return false
	}

	for _, raw := range args {
		switch v := raw.(type) {
		case bool:
			if v {
				return true
			}
		default:
			// Matches the original: a non-bool aborts evaluation.
			return false
		}
	}
	return false
}
// PadLeft implements the ARM template padLeft() function: it left-pads
// input with the padding string until the result is at least length
// characters. Inputs already at or beyond length are returned unchanged.
//
// Fixes over the original: an empty padding string no longer causes a
// divide-by-zero panic, and a padding string whose width does not evenly
// divide the gap is now trimmed so the result is exactly length
// characters (previously the result could come up short).
func PadLeft(args ...interface{}) interface{} {
	if len(args) != 3 {
		return ""
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}
	length, ok := args[1].(int)
	if !ok {
		return ""
	}
	pad, ok := args[2].(string)
	if !ok {
		return ""
	}

	if len(input) >= length || pad == "" {
		return input
	}

	gap := length - len(input)
	// Repeat enough copies to cover the gap, then trim to exact width.
	repeats := (gap + len(pad) - 1) / len(pad)
	return strings.Repeat(pad, repeats)[:gap] + input
}
// PickZones implements the ARM template pickZones() function. The real
// service returns availability zones for a resource type in a region;
// as a best effort we return [1..n], where n is the optional fourth
// argument (capped at 3, defaulting to 1).
//
// Fix: the fourth argument is now type-checked instead of asserted
// unconditionally, which panicked on non-int input.
func PickZones(args ...interface{}) interface{} {
	if len(args) < 3 {
		return nil
	}

	numOfZones := 1
	if len(args) > 3 {
		if n, ok := args[3].(int); ok {
			numOfZones = n
			if numOfZones > 3 {
				numOfZones = 3
			}
		}
	}

	var zones []int
	for i := 1; i <= numOfZones; i++ {
		zones = append(zones, i)
	}
	return zones
}
// Range implements the ARM template range() function: an array of
// count consecutive integers beginning at start. As per the ARM
// documentation, the count is capped at 10000.
//
// Fix: a negative count previously panicked inside make(); it now
// yields an empty array.
func Range(args ...interface{}) interface{} {
	if len(args) != 2 {
		return []interface{}{}
	}

	start, ok := args[0].(int)
	if !ok {
		return []int{}
	}
	count, ok := args[1].(int)
	if !ok {
		return []int{}
	}

	if count < 0 {
		count = 0
	}
	if count > 10000 {
		count = 10000
	}

	result := make([]int, count)
	for i := range result {
		result[i] = start + i
	}
	return result
}
// Replace implements the ARM template replace() function: every
// occurrence of the search string in the input is substituted with the
// replacement string. Non-string inputs or a wrong argument count
// yield "".
func Replace(args ...interface{}) interface{} {
	if len(args) != 3 {
		return ""
	}

	input, inputOK := args[0].(string)
	search, searchOK := args[1].(string)
	replacement, replacementOK := args[2].(string)
	if !inputOK || !searchOK || !replacementOK {
		return ""
	}

	return strings.ReplaceAll(input, search, replacement)
}
functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Replace(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "replace a string", + args: []interface{}{ + "hello", + "l", + "p", + }, + expected: "heppo", + }, + { + name: "replace a string with invalid replacement", + args: []interface{}{ + "hello", + "q", + "p", + }, + expected: "hello", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Replace(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/functions/resource.go b/pkg/iac/scanners/azure/functions/resource.go new file mode 100644 index 000000000000..7eacfaeccff1 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/resource.go @@ -0,0 +1,48 @@ +package functions + +import ( + "fmt" +) + +func ResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + var resourceID string + + for _, arg := range args { + resourceID += "/" + fmt.Sprintf("%v", arg) + } + + return resourceID +} + +func ExtensionResourceID(args ...interface{}) interface{} { + if len(args) < 3 { + return nil + } + + var resourceID string + + for _, arg := range args { + resourceID += "/" + fmt.Sprintf("%v", arg) + } + + return resourceID +} + +func ResourceGroup(args ...interface{}) interface{} { + return fmt.Sprintf(`{ +"id": "/subscriptions/%s/resourceGroups/PlaceHolderResourceGroup", +"name": "Placeholder Resource Group", +"type":"Microsoft.Resources/resourceGroups", +"location": "westus", +"managedBy": "%s", +"tags": { +}, +"properties": { + "provisioningState": "Succeeded +} +}`, subscriptionID, managingResourceID) +} diff --git a/pkg/iac/scanners/azure/functions/resource_test.go b/pkg/iac/scanners/azure/functions/resource_test.go new file mode 100644 index 000000000000..d6dac14b4184 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/resource_test.go @@ -0,0 +1,12 @@ +package 
functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_ResourceID(t *testing.T) { + assert.Equal(t, "/test1/test2", ResourceID("test1", "test2")) + assert.Equal(t, "/test1/123", ResourceID("test1", 123)) +} diff --git a/pkg/iac/scanners/azure/functions/scope.go b/pkg/iac/scanners/azure/functions/scope.go new file mode 100644 index 000000000000..dcd1676b1945 --- /dev/null +++ b/pkg/iac/scanners/azure/functions/scope.go @@ -0,0 +1,106 @@ +package functions + +import ( + "fmt" + + "github.com/google/uuid" +) + +var ( + tenantID = uuid.NewString() + groupID = uuid.NewString() + updaterID = uuid.NewString() + subscriptionID = uuid.NewString() + managingResourceID = uuid.NewString() +) + +func ManagementGroup(_ ...interface{}) interface{} { + + return fmt.Sprintf(`{ + "id": "/providers/Microsoft.Management/managementGroups/mgPlaceholder", + "name": "mgPlaceholder", + "properties": { + "details": { + "parent": { + "displayName": "Tenant Root Group", + "id": "/providers/Microsoft.Management/managementGroups/%[1]s", + "name": "%[1]s" + }, + "updatedBy": "%[2]s", + "updatedTime": "2020-07-23T21:05:52.661306Z", + "version": "1" + }, + "displayName": "Management PlaceHolder Group", + "tenantId": "%[3]s" + }, + "type": "/providers/Microsoft.Management/managementGroups" + } +`, groupID, updaterID, tenantID) +} + +func ManagementGroupResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return "" + } + + switch len(args) { + case 3: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s/%s", groupID, args[0], args[1], args[2]) + case 4: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s/%s", args[0], args[1], args[2], args[3]) + default: + return fmt.Sprintf("/providers/Microsoft.Management/managementGroups/%s/providers/%s/%s", groupID, args[0], args[1]) + } + +} + +func Subscription(_ ...interface{}) interface{} { + return fmt.Sprintf(`{ + "id": 
"/subscriptions/%[1]s", + "subscriptionId": "%[1]s", + "tenantId": "%[2]s", + "displayName": "Placeholder Subscription" +}`, subscriptionID, tenantID) +} + +func SubscriptionResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + switch len(args) { + + case 3: + return fmt.Sprintf("/subscriptions/%s/providers/%s/%s/%s", subscriptionID, args[0], args[1], args[2]) + case 4: + // subscription ID has been provided so use that + return fmt.Sprintf("/subscriptions/%s/providers/%s/%s/%s", args[0], args[1], args[2], args[3]) + default: + + return fmt.Sprintf("/subscriptions/%s/providers/%s/%s", subscriptionID, args[0], args[1]) + } +} + +func Tenant(_ ...interface{}) interface{} { + return fmt.Sprintf(`{ + "countryCode": "US", + "displayName": "Placeholder Tenant Name", + "id": "/tenants/%[1]s", + "tenantId": "%[1]s" + }`, tenantID) +} + +func TenantResourceID(args ...interface{}) interface{} { + if len(args) < 2 { + return nil + } + + switch len(args) { + case 3: + return fmt.Sprintf("/providers/%s/%s/%s", args[0], args[1], args[2]) + + default: + return fmt.Sprintf("/providers/%s/%s", args[0], args[1]) + } + +} diff --git a/pkg/iac/scanners/azure/functions/scope_test.go b/pkg/iac/scanners/azure/functions/scope_test.go new file mode 100644 index 000000000000..af84119e350e --- /dev/null +++ b/pkg/iac/scanners/azure/functions/scope_test.go @@ -0,0 +1,34 @@ +package functions + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_SubscriptionResourceID(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "scope resource id with subscription ID", + args: []interface{}{ + "4ec875a5-41a2-4837-88cf-4266466e65ed", + "Microsoft.Authorization/roleDefinitions", + "8e3af657-a8ff-443c-a75c-2fe8c4bcb635", + "b34282f6-5e3c-4306-8741-ebd7a871d187", + }, + expected: 
// Skip implements the ARM template skip() function: it returns the
// input string or array with the first count elements removed.
// Unsupported inputs or a wrong argument count yield "".
//
// Fixes over the original: array inputs were sliced without a bounds
// check and panicked when count exceeded the length (strings were
// already guarded), and a negative count panicked for every input type.
// Counts are now clamped to [0, len].
func Skip(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	count, ok := args[1].(int)
	if !ok {
		return ""
	}
	if count < 0 {
		count = 0
	}

	// clamp caps count at the container length so slicing cannot panic.
	clamp := func(n int) int {
		if count > n {
			return n
		}
		return count
	}

	switch input := args[0].(type) {
	case string:
		return input[clamp(len(input)):]
	case []int:
		return input[clamp(len(input)):]
	case []string:
		return input[clamp(len(input)):]
	case []bool:
		return input[clamp(len(input)):]
	case []float64:
		return input[clamp(len(input)):]
	case []interface{}:
		return input[clamp(len(input)):]
	}

	return ""
}
// Split implements the ARM template split() function. A single string
// separator delegates to strings.Split; an array of separators splits
// on any of them and drops empty fields. NOTE: as in the original, only
// the first byte of each separator in the array form is considered.
func Split(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	switch sep := args[1].(type) {
	case string:
		return strings.Split(input, sep)
	case []string:
		// Build the set of split runes from each separator's first byte.
		cutset := make(map[rune]int)
		for _, s := range sep {
			cutset[rune(s[0])] = 1
		}
		return strings.FieldsFunc(input, func(r rune) bool {
			return cutset[r] == 1
		})
	}
	return []string{}
}
// StartsWith implements the ARM template startsWith() function: true
// when the first string begins with the second. Non-string arguments or
// a wrong argument count yield false.
func StartsWith(args ...interface{}) interface{} {
	if len(args) != 2 {
		return false
	}

	haystack, haystackOK := args[0].(string)
	prefix, prefixOK := args[1].(string)
	if !haystackOK || !prefixOK {
		return false
	}

	return strings.HasPrefix(haystack, prefix)
}
// String implements the ARM template string() function: the argument
// converted to its string form. A wrong argument count yields "".
func String(args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	if s, ok := args[0].(string); ok {
		return s
	}
	// Fall back to fmt's default formatting for non-string values.
	return fmt.Sprintf("%v", args[0])
}
// Sub implements the ARM template sub() function: it returns the first
// int argument minus the second. A wrong argument count or non-int
// argument yields nil.
func Sub(args ...interface{}) interface{} {
	if len(args) != 2 {
		return nil
	}

	minuend, minuendOK := args[0].(int)
	subtrahend, subtrahendOK := args[1].(int)
	if !minuendOK || !subtrahendOK {
		return nil
	}

	return minuend - subtrahend
}
// SubString implements the ARM template substring() function: it returns
// length characters of the string starting at index start. When length is
// omitted it extends to the end of the string. Invalid arguments — wrong
// types, or indices outside the string (BUG FIX: negative start/length
// previously panicked on the slice expressions) — yield "".
func SubString(args ...interface{}) interface{} {
	if len(args) < 2 {
		return ""
	}

	input, ok := args[0].(string)
	if !ok {
		return ""
	}

	start, ok := args[1].(int)
	if !ok {
		return ""
	}

	// Default length: everything to the end of the string.
	length := len(input)
	if len(args) > 2 {
		if length, ok = args[2].(int); !ok {
			return ""
		}
	}

	// Guard against indices that would panic in the slice expressions below.
	if start < 0 || length < 0 || start > len(input) {
		return ""
	}

	if start+length > len(input) {
		return input[start:]
	}

	return input[start : start+length]
}
// Take implements the ARM template take() function: it returns the first
// count characters of a string, or the first count elements of a slice.
// Counts larger than the input return the input unchanged. Negative
// counts are clamped to zero (BUG FIX: a negative count previously
// panicked on the slice expressions). Unsupported input types yield "".
func Take(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	count, ok := args[1].(int)
	if !ok {
		return ""
	}
	if count < 0 {
		count = 0
	}

	// The original wrapped the slice cases in a redundant
	// `case interface{}` + inner switch; a single flat switch is equivalent.
	switch input := args[0].(type) {
	case string:
		if count > len(input) {
			return input
		}
		return input[:count]
	case []int:
		if count > len(input) {
			return input
		}
		return input[:count]
	case []string:
		if count > len(input) {
			return input
		}
		return input[:count]
	case []bool:
		if count > len(input) {
			return input
		}
		return input[:count]
	case []float64:
		if count > len(input) {
			return input
		}
		return input[:count]
	case []interface{}:
		if count > len(input) {
			return input
		}
		return input[:count]
	}

	return ""
}
// Trim implements the ARM template trim() function, removing leading and
// trailing whitespace from its single string argument. A wrong argument
// count or non-string argument yields "".
func Trim(args ...interface{}) interface{} {
	if len(args) != 1 {
		return ""
	}

	if s, ok := args[0].(string); ok {
		return strings.TrimSpace(s)
	}

	return ""
}
// Union implements the ARM template union() function. For maps it merges
// all map arguments (later keys win); for arrays it deduplicates elements
// across all array arguments. A single argument is returned unchanged; no
// arguments yield an empty array.
func Union(args ...interface{}) interface{} {
	if len(args) == 0 {
		return []interface{}{}
	}
	if len(args) == 1 {
		return args[0]
	}

	// The first argument decides whether this is a map union or an
	// array union, matching the original dispatch.
	switch args[0].(type) {
	case map[string]interface{}:
		return unionMap(args...)
	default:
		return unionArray(args...)
	}
}

// unionMap merges every map[string]interface{} argument into one map;
// keys from later arguments overwrite earlier ones.
func unionMap(args ...interface{}) interface{} {
	result := make(map[string]interface{})

	for _, arg := range args {
		if m, ok := arg.(map[string]interface{}); ok {
			for k, v := range m {
				result[k] = v
			}
		}
	}

	return result
}

// unionArray deduplicates the elements of every []interface{} argument.
// The result is sorted so output is deterministic; string elements sort
// lexically and precede non-string elements. BUG FIX: the original sort
// comparator asserted result[i].(string) and panicked whenever any
// element was not a string.
func unionArray(args ...interface{}) interface{} {
	seen := make(map[interface{}]bool)
	for _, arg := range args {
		if items, ok := arg.([]interface{}); ok {
			for _, item := range items {
				seen[item] = true
			}
		}
	}

	result := make([]interface{}, 0, len(seen))
	for k := range seen {
		result = append(result, k)
	}

	sort.Slice(result, func(i, j int) bool {
		a, aIsString := result[i].(string)
		b, bIsString := result[j].(string)
		switch {
		case aIsString && bIsString:
			return a < b
		case aIsString:
			return true
		default:
			return false
		}
	})

	return result
}
"testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Union(t *testing.T) { + tests := []struct { + name string + args []interface{} + expected interface{} + }{ + { + name: "union single array", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + }, + expected: []interface{}{"a", "b", "c"}, + }, + { + name: "union two arrays", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"b", "c", "d"}, + }, + expected: []interface{}{"a", "b", "c", "d"}, + }, + { + name: "union two arrays", + args: []interface{}{ + []interface{}{"a", "b", "c"}, + []interface{}{"b", "c", "d"}, + []interface{}{"b", "c", "d", "e"}, + }, + expected: []interface{}{"a", "b", "c", "d", "e"}, + }, + { + name: "union single maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + }, + { + name: "union two maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + "d": "d", + }, + }, + { + name: "union three maps", + args: []interface{}{ + map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "d": "d", + }, + map[string]interface{}{ + "b": "b", + "c": "c", + "e": "e", + }, + }, + expected: map[string]interface{}{ + "a": "a", + "b": "b", + "c": "c", + "d": "d", + "e": "e", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := Union(tt.args...) 
// UniqueString implements the ARM template uniqueString() function,
// producing a deterministic 13-character identifier from the concatenation
// of its string arguments. No arguments yield "".
//
// NOTE(review): sha256.New().Sum(data) appends the SHA-256 of an *empty*
// message to data rather than hashing data, so the output is mostly just
// the hex of the input itself. Preserved as-is because the committed tests
// pin these exact values — confirm whether sha256.Sum256 was intended.
// Also note the unchecked .(string) assertion panics on non-string args.
func UniqueString(args ...interface{}) interface{} {
	if len(args) == 0 {
		return ""
	}

	parts := make([]string, len(args))
	for i, arg := range args {
		parts[i] = arg.(string)
	}

	digest := sha256.New().Sum([]byte(strings.Join(parts, "")))
	return fmt.Sprintf("%x", digest)[:13]
}
// Uri implements the ARM template uri() function: it resolves a relative
// path against a base URI. A wrong argument count, a non-string argument
// (BUG FIX: the original used unchecked .(string) assertions and panicked
// on non-strings), or an unparseable base yields "".
func Uri(args ...interface{}) interface{} {
	if len(args) != 2 {
		return ""
	}

	base, ok := args[0].(string)
	if !ok {
		return ""
	}
	rel, ok := args[1].(string)
	if !ok {
		return ""
	}

	result, err := joinPath(base, rel)
	if err != nil {
		return ""
	}
	return result
}

// joinPath backports url.JoinPath until we're ready for Go 1.19: it parses
// base, appends the path elements, and cleans the result (so "../" segments
// are resolved).
func joinPath(base string, elem ...string) (string, error) {
	u, err := url.Parse(base)
	if err != nil {
		return "", err
	}
	elem = append([]string{u.EscapedPath()}, elem...)
	u.Path = path.Join(elem...)
	return u.String(), nil
}
// UTCNow implements the ARM template utcNow() function. With no arguments
// it returns the current UTC time in RFC3339; with one string argument it
// interprets it as a .NET-style date format and renders the current UTC
// time accordingly. More than one argument yields nil; a single non-string
// argument falls back to RFC3339.
func UTCNow(args ...interface{}) interface{} {
	if len(args) > 1 {
		return nil
	}

	if len(args) == 1 {
		if format, ok := args[0].(string); ok {
			return time.Now().UTC().Format(convertFormat(format))
		}
	}

	return time.Now().UTC().Format(time.RFC3339)
}

// convertFormat translates a .NET date/time format string into Go's
// reference-time layout. strings.NewReplacer substitutes in a single pass,
// preferring earlier (longer) patterns at each position, so the digits it
// emits are never re-substituted by later rules.
func convertFormat(format string) string {
	replacer := strings.NewReplacer(
		"yyyy", "2006",
		"yy", "06",
		"MMMM", "January",
		"MMM", "Jan",
		"MM", "01",
		"M", "1",
		"dd", "02",
		"d", "2",
		"HH", "15",
		// BUG FIX: .NET "H" is the 24-hour clock; it was mapped to Go "3",
		// which is the 12-hour clock. Go has no unpadded 24-hour verb, so
		// "15" (zero-padded) is the closest correct layout.
		"H", "15",
		"hh", "03",
		"h", "3",
		"mm", "04",
		"m", "4",
		"ss", "05",
		"s", "5",
		"tt", "PM",
		"t", "PM",
	)
	return replacer.Replace(format)
}
+package functions + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func Test_UTCNow(t *testing.T) { + + tests := []struct { + name string + args []interface{} + expected string + }{ + { + name: "utc now day", + args: []interface{}{ + "d", + }, + expected: fmt.Sprintf("%d", time.Now().UTC().Day()), + }, + { + name: "utc now date", + args: []interface{}{ + "yyyy-M-d", + }, + expected: fmt.Sprintf("%d-%d-%d", time.Now().UTC().Year(), time.Now().UTC().Month(), time.Now().UTC().Day()), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := UTCNow(tt.args...) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/pkg/iac/scanners/azure/resolver/resolver.go b/pkg/iac/scanners/azure/resolver/resolver.go new file mode 100644 index 000000000000..43c16b1fbe73 --- /dev/null +++ b/pkg/iac/scanners/azure/resolver/resolver.go @@ -0,0 +1,51 @@ +package resolver + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/expressions" +) + +type Resolver interface { + ResolveExpression(expression azure.Value) azure.Value + SetDeployment(d *azure.Deployment) +} + +func NewResolver() Resolver { + return &resolver{} +} + +type resolver struct { + deployment *azure.Deployment +} + +func (r *resolver) SetDeployment(d *azure.Deployment) { + r.deployment = d +} + +func (r *resolver) ResolveExpression(expression azure.Value) azure.Value { + if expression.Kind != azure.KindExpression { + return expression + } + if r.deployment == nil { + panic("cannot resolve expression on nil deployment") + } + code := expression.AsString() + + resolved, err := r.resolveExpressionString(code, expression.GetMetadata()) + if err != nil { + expression.Kind = azure.KindUnresolvable + return expression + } + return resolved +} + +func (r *resolver) resolveExpressionString(code string, metadata 
defsecTypes.Metadata) (azure.Value, error) { + et, err := expressions.NewExpressionTree(code) + if err != nil { + return azure.NullValue, err + } + + evaluatedValue := et.Evaluate(r.deployment) + return azure.NewValue(evaluatedValue, metadata), nil +} diff --git a/pkg/iac/scanners/azure/resolver/resolver_test.go b/pkg/iac/scanners/azure/resolver/resolver_test.go new file mode 100644 index 000000000000..9ca63f031b3d --- /dev/null +++ b/pkg/iac/scanners/azure/resolver/resolver_test.go @@ -0,0 +1,101 @@ +package resolver + +import ( + "testing" + "time" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" + "github.com/stretchr/testify/require" +) + +func Test_resolveFunc(t *testing.T) { + + tests := []struct { + name string + expr string + expected string + }{ + { + name: "simple format call", + expr: "format('{0}/{1}', 'myPostgreSQLServer', 'log_checkpoints')", + expected: "myPostgreSQLServer/log_checkpoints", + }, + { + name: "simple format call with numbers", + expr: "format('{0} + {1} = {2}', 1, 2, 3)", + expected: "1 + 2 = 3", + }, + { + name: "format with nested format", + expr: "format('{0} + {1} = {2}', format('{0}', 1), 2, 3)", + expected: "1 + 2 = 3", + }, + { + name: "format with multiple nested format", + expr: "format('{0} + {1} = {2}', format('{0}', 1), 2, format('{0}', 3))", + expected: "1 + 2 = 3", + }, + { + name: "format with nested base64", + expr: "format('the base64 of \"hello, world\" is {0}', base64('hello, world'))", + expected: "the base64 of \"hello, world\" is aGVsbG8sIHdvcmxk", + }, + { + name: "dateTimeAdd with add a day", + expr: "dateTimeAdd(utcNow('yyyy-MM-dd'), 'P1D', 'yyyy-MM-dd')", + expected: time.Now().UTC().AddDate(0, 0, 1).Format("2006-01-02"), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resolver := resolver{} + + resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) + require.NoError(t, err) + 
require.Equal(t, azure.KindString, resolvedValue.Kind) + + require.Equal(t, tt.expected, resolvedValue.AsString()) + }) + } +} + +func Test_resolveParameter(t *testing.T) { + tests := []struct { + name string + deployment *azure.Deployment + expr string + expected string + }{ + { + name: "format call with parameter", + deployment: &azure.Deployment{ + Parameters: []azure.Parameter{ + { + Variable: azure.Variable{ + Name: "dbName", + Value: azure.NewValue("myPostgreSQLServer", types.NewTestMetadata()), + }, + }, + }, + }, + expr: "format('{0}/{1}', parameters('dbName'), 'log_checkpoints')", + expected: "myPostgreSQLServer/log_checkpoints", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resolver := resolver{ + deployment: tt.deployment, + } + + resolvedValue, err := resolver.resolveExpressionString(tt.expr, types.NewTestMetadata()) + require.NoError(t, err) + require.Equal(t, azure.KindString, resolvedValue.Kind) + + require.Equal(t, tt.expected, resolvedValue.AsString()) + }) + } + +} diff --git a/pkg/iac/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go new file mode 100644 index 000000000000..bbcdfded6860 --- /dev/null +++ b/pkg/iac/scanners/azure/value.go @@ -0,0 +1,357 @@ +package azure + +import ( + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" + "k8s.io/utils/strings/slices" +) + +type EvalContext struct{} + +type Kind string + +const ( + KindUnresolvable Kind = "unresolvable" + KindNull Kind = "null" + KindBoolean Kind = "boolean" + KindString Kind = "string" + KindNumber Kind = "number" + KindObject Kind = "object" + KindArray Kind = "array" + KindExpression Kind = "expression" +) + +type Value struct { + types.Metadata + rLit interface{} + rMap map[string]Value + rArr []Value + Kind Kind + Comments []string +} + +var NullValue = Value{ + Kind: KindNull, +} + +func NewValue(value interface{}, metadata types.Metadata) 
Value { + + v := Value{ + Metadata: metadata, + } + + switch ty := value.(type) { + case []interface{}: + v.Kind = KindArray + for _, child := range ty { + if internal, ok := child.(Value); ok { + v.rArr = append(v.rArr, internal) + } else { + v.rArr = append(v.rArr, NewValue(child, metadata)) + } + } + case []Value: + v.Kind = KindArray + v.rArr = append(v.rArr, ty...) + + case map[string]interface{}: + v.Kind = KindObject + v.rMap = make(map[string]Value) + for key, val := range ty { + if internal, ok := val.(Value); ok { + v.rMap[key] = internal + } else { + v.rMap[key] = NewValue(val, metadata) + } + } + case map[string]Value: + v.Kind = KindObject + v.rMap = make(map[string]Value) + for key, val := range ty { + v.rMap[key] = val + } + case string: + v.Kind = KindString + v.rLit = ty + case int, int64, int32, float32, float64, int8, int16, uint8, uint16, uint32, uint64: + v.Kind = KindNumber + v.rLit = ty + case bool: + v.Kind = KindBoolean + v.rLit = ty + case nil: + v.Kind = KindNull + v.rLit = ty + default: + v.Kind = KindUnresolvable + v.rLit = ty + } + + return v +} + +func (v *Value) GetMetadata() types.Metadata { + return v.Metadata +} + +func (v *Value) UnmarshalJSONWithMetadata(node armjson.Node) error { + + v.updateValueKind(node) + + v.Metadata = node.Metadata() + + switch node.Kind() { + case armjson.KindArray: + err := v.unmarshallArray(node) + if err != nil { + return err + } + case armjson.KindObject: + err := v.unmarshalObject(node) + if err != nil { + return err + } + case armjson.KindString: + err := v.unmarshalString(node) + if err != nil { + return err + } + default: + if err := node.Decode(&v.rLit); err != nil { + return err + } + } + + for _, comment := range node.Comments() { + var str string + if err := comment.Decode(&str); err != nil { + return err + } + // remove `\r` from comment when running windows + str = strings.ReplaceAll(str, "\r", "") + + v.Comments = append(v.Comments, str) + } + return nil +} + +func (v *Value) 
unmarshalString(node armjson.Node) error { + var str string + if err := node.Decode(&str); err != nil { + return err + } + if strings.HasPrefix(str, "[") && !strings.HasPrefix(str, "[[") && strings.HasSuffix(str, "]") { + // function! + v.Kind = KindExpression + v.rLit = str[1 : len(str)-1] + } else { + v.rLit = str + } + return nil +} + +func (v *Value) unmarshalObject(node armjson.Node) error { + obj := make(map[string]Value) + for i := 0; i < len(node.Content()); i += 2 { + var key string + if err := node.Content()[i].Decode(&key); err != nil { + return err + } + var val Value + if err := val.UnmarshalJSONWithMetadata(node.Content()[i+1]); err != nil { + return err + } + obj[key] = val + } + v.rMap = obj + return nil +} + +func (v *Value) unmarshallArray(node armjson.Node) error { + var arr []Value + for _, child := range node.Content() { + var val Value + if err := val.UnmarshalJSONWithMetadata(child); err != nil { + return err + } + arr = append(arr, val) + } + v.rArr = arr + return nil +} + +func (v *Value) updateValueKind(node armjson.Node) { + switch node.Kind() { + case armjson.KindString: + v.Kind = KindString + case armjson.KindNumber: + v.Kind = KindNumber + case armjson.KindBoolean: + v.Kind = KindBoolean + case armjson.KindObject: + v.Kind = KindObject + case armjson.KindNull: + v.Kind = KindNull + case armjson.KindArray: + v.Kind = KindArray + default: + panic(node.Kind()) + } +} + +func (v Value) AsString() string { + v.Resolve() + + if v.Kind != KindString { + return "" + } + + return v.rLit.(string) +} + +func (v Value) AsBool() bool { + v.Resolve() + if v.Kind != KindBoolean { + return false + } + return v.rLit.(bool) +} + +func (v Value) AsInt() int { + v.Resolve() + if v.Kind != KindNumber { + return 0 + } + return int(v.rLit.(int64)) +} + +func (v Value) AsFloat() float64 { + v.Resolve() + if v.Kind != KindNumber { + return 0 + } + return v.rLit.(float64) +} + +func (v Value) AsIntValue(defaultValue int, metadata types.Metadata) types.IntValue 
{ + v.Resolve() + if v.Kind != KindNumber { + return types.Int(defaultValue, metadata) + } + return types.Int(v.AsInt(), metadata) +} + +func (v Value) AsBoolValue(defaultValue bool, metadata types.Metadata) types.BoolValue { + v.Resolve() + if v.Kind == KindString { + possibleValue := strings.ToLower(v.rLit.(string)) + if slices.Contains([]string{"true", "1", "yes", "on", "enabled"}, possibleValue) { + return types.Bool(true, metadata) + } + } + + if v.Kind != KindBoolean { + return types.Bool(defaultValue, metadata) + } + + return types.Bool(v.rLit.(bool), v.GetMetadata()) +} + +func (v Value) EqualTo(value interface{}) bool { + switch ty := value.(type) { + case string: + return v.AsString() == ty + default: + panic("not supported") + } +} + +func (v Value) AsStringValue(defaultValue string, metadata types.Metadata) types.StringValue { + v.Resolve() + if v.Kind != KindString { + return types.StringDefault(defaultValue, metadata) + } + return types.String(v.rLit.(string), v.Metadata) +} + +func (v Value) GetMapValue(key string) Value { + v.Resolve() + if v.Kind != KindObject { + return NullValue + } + return v.rMap[key] +} + +func (v Value) AsMap() map[string]Value { + v.Resolve() + if v.Kind != KindObject { + return nil + } + return v.rMap +} + +func (v Value) AsList() []Value { + v.Resolve() + if v.Kind != KindArray { + return nil + } + return v.rArr +} + +func (v Value) Raw() interface{} { + switch v.Kind { + case KindArray: + // TODO: recursively build raw array + return nil + case KindObject: + // TODO: recursively build raw object + return nil + default: + return v.rLit + } +} + +func (v *Value) Resolve() { + if v.Kind != KindExpression { + return + } + // if resolver, ok := v.Metadata.Internal().(Resolver); ok { + // *v = resolver.ResolveExpression(*v) + // } +} + +func (v Value) HasKey(key string) bool { + v.Resolve() + _, ok := v.rMap[key] + return ok +} + +func (v Value) AsTimeValue(metadata types.Metadata) types.TimeValue { + v.Resolve() + if v.Kind != 
KindString { + return types.Time(time.Time{}, metadata) + } + if v.Kind == KindNumber { + return types.Time(time.Unix(int64(v.AsFloat()), 0), metadata) + } + t, err := time.Parse(time.RFC3339, v.rLit.(string)) + if err != nil { + return types.Time(time.Time{}, metadata) + } + return types.Time(t, metadata) +} + +func (v Value) AsStringValuesList(defaultValue string) (stringValues []types.StringValue) { + v.Resolve() + if v.Kind != KindArray { + return + } + for _, item := range v.rArr { + stringValues = append(stringValues, item.AsStringValue(defaultValue, item.Metadata)) + } + + return stringValues +} diff --git a/pkg/iac/scanners/azure/value_test.go b/pkg/iac/scanners/azure/value_test.go new file mode 100644 index 000000000000..7b463722794e --- /dev/null +++ b/pkg/iac/scanners/azure/value_test.go @@ -0,0 +1,13 @@ +package azure + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/stretchr/testify/assert" +) + +func Test_ValueAsInt(t *testing.T) { + val := NewValue(int64(10), types.NewTestMetadata()) + assert.Equal(t, 10, val.AsInt()) +} diff --git a/pkg/iac/scanners/cloudformation/cftypes/types.go b/pkg/iac/scanners/cloudformation/cftypes/types.go new file mode 100644 index 000000000000..44d9c1fd2a93 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/cftypes/types.go @@ -0,0 +1,12 @@ +package cftypes + +type CfType string + +const ( + String CfType = "string" + Int CfType = "int" + Float64 CfType = "float64" + Bool CfType = "bool" + Map CfType = "map" + List CfType = "list" +) diff --git a/pkg/iac/scanners/cloudformation/parser/errors.go b/pkg/iac/scanners/cloudformation/parser/errors.go new file mode 100644 index 000000000000..655f137cd271 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/errors.go @@ -0,0 +1,24 @@ +package parser + +import ( + "fmt" +) + +type InvalidContentError struct { + source string + err error +} + +func NewErrInvalidContent(source string, err error) *InvalidContentError { + return 
&InvalidContentError{ + source: source, + err: err, + } +} +func (e *InvalidContentError) Error() string { + return fmt.Sprintf("Invalid content in file: %s. Error: %v", e.source, e.err) +} + +func (e *InvalidContentError) Reason() error { + return e.err +} diff --git a/pkg/iac/scanners/cloudformation/parser/file_context.go b/pkg/iac/scanners/cloudformation/parser/file_context.go new file mode 100644 index 000000000000..35f4483018f8 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/file_context.go @@ -0,0 +1,61 @@ +package parser + +import ( + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type SourceFormat string + +const ( + YamlSourceFormat SourceFormat = "yaml" + JsonSourceFormat SourceFormat = "json" +) + +type FileContexts []*FileContext + +type FileContext struct { + filepath string + lines []string + SourceFormat SourceFormat + Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` + Resources map[string]*Resource `json:"Resources" yaml:"Resources"` + Globals map[string]*Resource `json:"Globals" yaml:"Globals"` + Mappings map[string]interface{} `json:"Mappings,omitempty" yaml:"Mappings"` + Conditions map[string]Property `json:"Conditions,omitempty" yaml:"Conditions"` +} + +func (t *FileContext) GetResourceByLogicalID(name string) *Resource { + for n, r := range t.Resources { + if name == n { + return r + } + } + return nil +} + +func (t *FileContext) GetResourcesByType(names ...string) []*Resource { + var resources []*Resource + for _, r := range t.Resources { + for _, name := range names { + if name == r.Type() { + // + resources = append(resources, r) + } + } + } + return resources +} + +func (t *FileContext) Metadata() defsecTypes.Metadata { + rng := defsecTypes.NewRange(t.filepath, 1, len(t.lines), "", nil) + + return defsecTypes.NewMetadata(rng, NewCFReference("Template", rng).String()) +} + +func (t *FileContext) OverrideParameters(params map[string]any) { + for key := range t.Parameters { + if val, ok := 
params[key]; ok { + t.Parameters[key].UpdateDefault(val) + } + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/file_context_test.go b/pkg/iac/scanners/cloudformation/parser/file_context_test.go new file mode 100644 index 000000000000..bbf5db4ddc39 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/file_context_test.go @@ -0,0 +1,61 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFileContext_OverrideParameters(t *testing.T) { + tests := []struct { + name string + ctx FileContext + arg map[string]any + expected map[string]*Parameter + }{ + { + name: "happy", + ctx: FileContext{ + Parameters: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "String", + Default: "test", + }, + }, + "QueueName": { + inner: parameterInner{ + Type: "String", + }, + }, + }, + }, + arg: map[string]any{ + "BucketName": "test2", + "QueueName": "test", + "SomeKey": "some_value", + }, + expected: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "String", + Default: "test2", + }, + }, + "QueueName": { + inner: parameterInner{ + Type: "String", + Default: "test", + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.ctx.OverrideParameters(tt.arg) + assert.Equal(t, tt.expected, tt.ctx.Parameters) + }) + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_and.go b/pkg/iac/scanners/cloudformation/parser/fn_and.go new file mode 100644 index 000000000000..92787aa81969 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_and.go @@ -0,0 +1,38 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + +func ResolveAnd(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::And"].AsList() + + if len(refValue) < 2 { + return abortIntrinsic(property, "Fn::And should have at least 2 
values, returning original Property") + } + + results := make([]bool, len(refValue)) + for i := 0; i < len(refValue); i++ { + + r := false + if refValue[i].IsBool() { + r = refValue[i].AsBool() + } + + results[i] = r + } + + theSame := allSameStrings(results) + return property.deriveResolved(cftypes.Bool, theSame), true +} + +func allSameStrings(a []bool) bool { + for i := 1; i < len(a); i++ { + if a[i] != a[0] { + return false + } + } + return true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_and_test.go b/pkg/iac/scanners/cloudformation/parser/fn_and_test.go new file mode 100644 index 000000000000..6adfb94351ba --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_and_test.go @@ -0,0 +1,186 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_and_value(t *testing.T) { + + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + andProperty := &Property{ 
+ ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::And": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(andProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_and_value_not_the_same(t *testing.T) { + + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + andProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::And": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(andProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_base64.go 
b/pkg/iac/scanners/cloudformation/parser/fn_base64.go new file mode 100644 index 000000000000..ad94ed08d6e8 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_base64.go @@ -0,0 +1,19 @@ +package parser + +import ( + "encoding/base64" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveBase64(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Base64"].AsString() + + retVal := base64.StdEncoding.EncodeToString([]byte(refValue)) + + return property.deriveResolved(cftypes.String, retVal), true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_base64_test.go b/pkg/iac/scanners/cloudformation/parser/fn_base64_test.go new file mode 100644 index 000000000000..d1f31600a8c1 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_base64_test.go @@ -0,0 +1,35 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_base64_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Base64": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "HelloWorld", + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "SGVsbG9Xb3JsZA==", resolvedProperty.AsString()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go new file mode 100644 index 000000000000..a9786910f58b --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go @@ -0,0 +1,65 @@ +package parser + +import ( + "fmt" + 
"net" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/apparentlymart/go-cidr/cidr" +) + +func GetAzs(property *Property) (*Property, bool) { + return property.deriveResolved(cftypes.List, []*Property{ + property.deriveResolved(cftypes.String, "us-east-1a"), + property.deriveResolved(cftypes.String, "us-east-1a"), + property.deriveResolved(cftypes.String, "us-east-1a"), + }), true +} + +func GetCidr(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Cidr"] + if refValue.IsNotList() || len(refValue.AsList()) != 3 { + return abortIntrinsic(property, "Fn::Cidr expects a list of 3 attributes") + } + + listParts := refValue.AsList() + ipaddressProp := listParts[0] + ipAddress := "10.0.0.0/2" + if ipaddressProp.IsString() { + ipAddress = ipaddressProp.AsString() + } + count := listParts[1].AsInt() + bit := listParts[2].AsInt() + + ranges, err := calculateCidrs(ipAddress, count, bit, property) + if err != nil { + return abortIntrinsic(property, "Could not calculate the required ranges") + } + return property.deriveResolved(cftypes.List, ranges), true +} + +func calculateCidrs(ipaddress string, count int, bit int, original *Property) ([]*Property, error) { + + var cidrProperties []*Property + + _, network, err := net.ParseCIDR(ipaddress) + if err != nil { + return nil, err + } + + for i := 0; i < count; i++ { + next, err := cidr.Subnet(network, bit, i) + if err != nil { + return nil, fmt.Errorf("failed to create cidr blocks") + } + + cidrProperties = append(cidrProperties, original.deriveResolved(cftypes.String, next.String())) + } + + return cidrProperties, nil +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go new file mode 100644 index 000000000000..9a14029344a8 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin_test.go @@ -0,0 +1,63 @@ 
+package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_cidr_generator(t *testing.T) { + + original := &Property{ + ctx: nil, + name: "cidr", + comment: "", + Inner: PropertyInner{ + Type: "", + Value: nil, + }, + } + + ranges, err := calculateCidrs("10.1.0.0/16", 4, 4, original) + require.Nil(t, err) + require.Len(t, ranges, 4) + + results := make(map[int]string) + for i, property := range ranges { + value := property.AsString() + results[i] = value + } + + assert.Equal(t, "10.1.0.0/20", results[0]) + assert.Equal(t, "10.1.16.0/20", results[1]) + assert.Equal(t, "10.1.32.0/20", results[2]) + assert.Equal(t, "10.1.48.0/20", results[3]) +} + +func Test_cidr_generator_8_bits(t *testing.T) { + original := &Property{ + ctx: nil, + name: "cidr", + comment: "", + Inner: PropertyInner{ + Type: "", + Value: nil, + }, + } + + ranges, err := calculateCidrs("10.1.0.0/16", 4, 8, original) + require.Nil(t, err) + require.Len(t, ranges, 4) + + results := make(map[int]string) + for i, property := range ranges { + value := property.AsString() + results[i] = value + } + + assert.Equal(t, "10.1.0.0/24", results[0]) + assert.Equal(t, "10.1.1.0/24", results[1]) + assert.Equal(t, "10.1.2.0/24", results[2]) + assert.Equal(t, "10.1.3.0/24", results[3]) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_condition.go b/pkg/iac/scanners/cloudformation/parser/fn_condition.go new file mode 100644 index 000000000000..8d5c923936ab --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_condition.go @@ -0,0 +1,21 @@ +package parser + +func ResolveCondition(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refProp := property.AsMap()["Condition"] + if refProp.IsNotString() { + return nil, false + } + refValue := refProp.AsString() + + for k, prop := range property.ctx.Conditions { + if k == refValue { + return prop.resolveValue() 
+ } + } + + return nil, false +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_condition_test.go b/pkg/iac/scanners/cloudformation/parser/fn_condition_test.go new file mode 100644 index 000000000000..0bea529c280e --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_condition_test.go @@ -0,0 +1,98 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_condition_value(t *testing.T) { + + fctx := new(FileContext) + fctx.Conditions = map[string]Property{ + "SomeCondition": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some val", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some val", + }, + }, + }, + }, + }, + }, + }, + }, + "EnableVersioning": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Condition": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "SomeCondition", + }, + }, + }, + }, + }, + } + + property := &Property{ + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "EnableVersioning", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Enabled", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Suspended", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "Enabled", resolvedProperty.AsString()) +} diff --git 
a/pkg/iac/scanners/cloudformation/parser/fn_equals.go b/pkg/iac/scanners/cloudformation/parser/fn_equals.go new file mode 100644 index 000000000000..4043735849a2 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_equals.go @@ -0,0 +1,21 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveEquals(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Equals"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Equals should have exactly 2 values, returning original Property") + } + + propA, _ := refValue[0].resolveValue() + propB, _ := refValue[1].resolveValue() + return property.deriveResolved(cftypes.Bool, propA.EqualTo(propB.RawValue())), true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_equals_test.go b/pkg/iac/scanners/cloudformation/parser/fn_equals_test.go new file mode 100644 index 000000000000..e83d5e679343 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_equals_test.go @@ -0,0 +1,180 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_equals_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + 
require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_equals_value_to_false(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_equals_value_to_true_when_boolean(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_equals_value_when_one_is_a_reference(t *testing.T) { + + property := &Property{ + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "staging", + }, + }, + { + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, 
+ }, + }, + }, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Environment", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go new file mode 100644 index 000000000000..7767f0126456 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go @@ -0,0 +1,45 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveFindInMap(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::FindInMap"].AsList() + + if len(refValue) != 3 { + return abortIntrinsic(property, "Fn::FindInMap should have exactly 3 values, returning original Property") + } + + mapName := refValue[0].AsString() + topLevelKey := refValue[1].AsString() + secondaryLevelKey := refValue[2].AsString() + + if property.ctx == nil { + return abortIntrinsic(property, "the property does not have an attached context, returning original Property") + } + + m, ok := property.ctx.Mappings[mapName] + if !ok { + return abortIntrinsic(property, "could not find map %s, returning original Property") + } + + mapContents := m.(map[string]interface{}) + + k, ok := mapContents[topLevelKey] + if !ok { + return abortIntrinsic(property, "could not find %s in the %s map, returning original Property", topLevelKey, mapName) + } + + mapValues := k.(map[string]interface{}) + + if prop, ok := mapValues[secondaryLevelKey]; !ok { + return abortIntrinsic(property, "could not find a value for %s in %s, returning original Property", secondaryLevelKey, topLevelKey) + } else { + return 
property.deriveResolved(cftypes.String, prop), true + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go new file mode 100644 index 000000000000..bbfa372b7121 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go @@ -0,0 +1,100 @@ +package parser + +import ( + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_find_in_map_value(t *testing.T) { + + source := `--- +Parameters: + Environment: + Type: String + Default: production +Mappings: + CacheNodeTypes: + production: + NodeType: cache.t2.large + test: + NodeType: cache.t2.small + dev: + NodeType: cache.t2.micro +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: !FindInMap [ CacheNodeTypes, production, NodeType ] + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") + assert.Equal(t, "cache.t2.large", nodeTypeProp.Value()) +} + +func Test_resolve_find_in_map_with_nested_intrinsic_value(t *testing.T) { + + source := `--- +Parameters: + Environment: + Type: String + Default: dev +Mappings: + CacheNodeTypes: + production: + NodeType: cache.t2.large + test: + NodeType: cache.t2.small + dev: + NodeType: cache.t2.micro +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache 
Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: !FindInMap [ CacheNodeTypes, !Ref Environment, NodeType ] + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") + assert.Equal(t, "cache.t2.micro", nodeTypeProp.Value()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_get_attr.go b/pkg/iac/scanners/cloudformation/parser/fn_get_attr.go new file mode 100644 index 000000000000..f6754d16a9b3 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_get_attr.go @@ -0,0 +1,46 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveGetAtt(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValueProp := property.AsMap()["Fn::GetAtt"] + + var refValue []string + + if refValueProp.IsString() { + refValue = strings.Split(refValueProp.AsString(), ".") + } + + if refValueProp.IsList() { + for _, p := range refValueProp.AsList() { + refValue = append(refValue, p.AsString()) + } + } + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::GetAtt should have exactly 2 values, returning original Property") + } + + logicalId := refValue[0] + attribute := refValue[1] + + referencedResource := property.ctx.GetResourceByLogicalID(logicalId) + if referencedResource == nil || referencedResource.IsNil() { + return property.deriveResolved(cftypes.String, ""), true + } + + referencedProperty := referencedResource.GetProperty(attribute) + if 
referencedProperty.IsNil() { + return property.deriveResolved(cftypes.String, referencedResource.ID()), true + } + + return property.deriveResolved(referencedProperty.Type(), referencedProperty.RawValue()), true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go b/pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go new file mode 100644 index 000000000000..ebd52da035b0 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_get_attr_test.go @@ -0,0 +1,50 @@ +package parser + +import ( + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +func Test_resolve_get_attr_value(t *testing.T) { + + source := `--- +Resources: + ElasticacheSecurityGroup: + Type: 'AWS::EC2::SecurityGroup' + Properties: + GroupDescription: Elasticache Security Group + SecurityGroupIngress: + - IpProtocol: tcp + FromPort: 11211 + ToPort: 11211 + SourceSecurityGroupName: !Ref InstanceSecurityGroup + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: cache.t2.micro + NumCacheNodes: '1' + VpcSecurityGroupIds: + - !GetAtt + - ElasticacheSecurityGroup + - GroupId +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + sgProp := testRes.GetProperty("VpcSecurityGroupIds") + require.True(t, sgProp.IsNotNil()) + require.True(t, sgProp.IsList()) + + for _, property := range sgProp.AsList() { + resolved, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolved.IsNotNil()) + } + +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_if.go b/pkg/iac/scanners/cloudformation/parser/fn_if.go new file mode 100644 index 000000000000..d444952ff38a --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_if.go @@ -0,0 +1,40 @@ +package parser + +func ResolveIf(property *Property) (resolved *Property, success bool) { 
+ if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::If"].AsList() + + if len(refValue) != 3 { + return abortIntrinsic(property, "Fn::If should have exactly 3 values, returning original Property") + } + + condition, _ := refValue[0].resolveValue() + trueState, _ := refValue[1].resolveValue() + falseState, _ := refValue[2].resolveValue() + + conditionMet := false + + con, _ := condition.resolveValue() + if con.IsBool() { + conditionMet = con.AsBool() + } else if property.ctx.Conditions != nil && + condition.IsString() { + + condition := property.ctx.Conditions[condition.AsString()] + if condition.isFunction() { + con, _ := condition.resolveValue() + if con.IsBool() { + conditionMet = con.AsBool() + } + } + } + + if conditionMet { + return trueState, true + } else { + return falseState, true + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_if_test.go b/pkg/iac/scanners/cloudformation/parser/fn_if_test.go new file mode 100644 index 000000000000..6578c878776c --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_if_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_if_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := 
ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "foo", resolvedProperty.String()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_join.go b/pkg/iac/scanners/cloudformation/parser/fn_join.go new file mode 100644 index 000000000000..e1d39dc702f7 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_join.go @@ -0,0 +1,34 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveJoin(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Join"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Join should have exactly 2 values, returning original Property") + } + + joiner := refValue[0].AsString() + items := refValue[1].AsList() + + var itemValues []string + for _, item := range items { + resolved, success := item.resolveValue() + if success { + itemValues = append(itemValues, resolved.AsString()) + } + } + + joined := strings.Join(itemValues, joiner) + + return property.deriveResolved(cftypes.String, joined), true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_join_test.go b/pkg/iac/scanners/cloudformation/parser/fn_join_test.go new file mode 100644 index 000000000000..da2a3158165c --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_join_test.go @@ -0,0 +1,152 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_join_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Join": { + Inner: PropertyInner{ 
+ Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part1", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part2", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "s3::part1::part2", resolvedProperty.AsString()) +} + +func Test_resolve_join_value_with_reference(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, + }, + }, + }, + name: "EnvironmentBucket", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Join": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "part1", + }, + }, + { + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "Environment": { + inner: parameterInner{ + Type: "string", + Default: "staging", + }, + }, + }, + }, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Environment", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "s3::part1::staging", 
resolvedProperty.AsString()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_length.go b/pkg/iac/scanners/cloudformation/parser/fn_length.go new file mode 100644 index 000000000000..62fb7297de26 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_length.go @@ -0,0 +1,24 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + +func ResolveLength(property *Property) (*Property, bool) { + if !property.isFunction() { + return property, true + } + + val := property.AsMap()["Fn::Length"] + if val.IsList() { + return property.deriveResolved(cftypes.Int, val.Len()), true + } else if val.IsMap() { + resolved, _ := val.resolveValue() + + if resolved.IsList() { + return property.deriveResolved(cftypes.Int, resolved.Len()), true + } + return resolved, false + } + + return property, false + +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_length_test.go b/pkg/iac/scanners/cloudformation/parser/fn_length_test.go new file mode 100644 index 000000000000..aa916ad0a972 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_length_test.go @@ -0,0 +1,99 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/require" +) + +func Test_ResolveLength_WhenPropIsArray(t *testing.T) { + prop := &Property{ + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Length": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: 1, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "IntParameter", + }, + }, + }, + }, + }, + }, + }, + } + resolved, ok := ResolveIntrinsicFunc(prop) + require.True(t, ok) + require.True(t, resolved.IsInt()) + require.Equal(t, 2, resolved.AsInt()) +} + +func Test_ResolveLength_WhenPropIsIntrinsicFunction(t *testing.T) { + fctx := &FileContext{ + Parameters: 
map[string]*Parameter{ + "SomeParameter": { + inner: parameterInner{ + Type: "string", + Default: "a|b|c|d", + }, + }, + }, + } + prop := &Property{ + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Length": { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Split": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "|", + }, + }, + { + ctx: fctx, + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "SomeParameter", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + resolved, ok := ResolveIntrinsicFunc(prop) + require.True(t, ok) + require.True(t, resolved.IsInt()) + require.Equal(t, 4, resolved.AsInt()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_not.go b/pkg/iac/scanners/cloudformation/parser/fn_not.go new file mode 100644 index 000000000000..831102154482 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_not.go @@ -0,0 +1,23 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + +func ResolveNot(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Not"].AsList() + + if len(refValue) != 1 { + return abortIntrinsic(property, "Fn::No should have at only 1 values, returning original Property") + } + + funcToInvert, _ := refValue[0].resolveValue() + + if funcToInvert.IsBool() { + return property.deriveResolved(cftypes.Bool, !funcToInvert.AsBool()), true + } + + return property, false +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_not_test.go b/pkg/iac/scanners/cloudformation/parser/fn_not_test.go new file mode 100644 index 000000000000..5bddef0af732 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_not_test.go @@ -0,0 
+1,124 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_not_value(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + notProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Not": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(notProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_not_value_when_true(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + + notProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Not": { + 
Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(notProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_or.go b/pkg/iac/scanners/cloudformation/parser/fn_or.go new file mode 100644 index 000000000000..0aca35f456c0 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_or.go @@ -0,0 +1,39 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + +func ResolveOr(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Or"].AsList() + + if len(refValue) < 2 { + return abortIntrinsic(property, "Fn::Or should have at least 2 values, returning original Property") + } + + results := make([]bool, len(refValue)) + for i := 0; i < len(refValue); i++ { + + r := false + if refValue[i].IsBool() { + r = refValue[i].AsBool() + } + + results[i] = r + } + + atleastOne := atleastOne(results) + return property.deriveResolved(cftypes.Bool, atleastOne), true +} + +func atleastOne(a []bool) bool { + for _, b := range a { + if b { + return true + } + } + + return false +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_or_test.go b/pkg/iac/scanners/cloudformation/parser/fn_or_test.go new file mode 100644 index 000000000000..1f7bd1995433 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_or_test.go @@ -0,0 +1,184 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_or_value(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, 
"", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + orProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Or": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(orProperty) + require.True(t, success) + + assert.True(t, resolvedProperty.IsTrue()) +} + +func Test_resolve_or_value_when_neither_true(t *testing.T) { + property1 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + }, + }, + }, + }, + }, + } + + property2 := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: 
cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bar", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }, + } + orProperty := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Or": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + property1, + property2, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(orProperty) + require.True(t, success) + + assert.False(t, resolvedProperty.IsTrue()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_ref.go b/pkg/iac/scanners/cloudformation/parser/fn_ref.go new file mode 100644 index 000000000000..a14740dff91a --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_ref.go @@ -0,0 +1,54 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveReference(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refProp := property.AsMap()["Ref"] + if refProp.IsNotString() { + return property, false + } + refValue := refProp.AsString() + + if pseudo, ok := pseudoParameters[refValue]; ok { + return property.deriveResolved(pseudo.t, pseudo.val), true + } + + if property.ctx == nil { + return property, false + } + + var param *Parameter + for k := range property.ctx.Parameters { + if k == refValue { + param = property.ctx.Parameters[k] + resolvedType := param.Type() + + switch param.Default().(type) { + case bool: + resolvedType = cftypes.Bool + case string: + resolvedType = cftypes.String + case int: + resolvedType = cftypes.Int + } + + resolved = property.deriveResolved(resolvedType, param.Default()) 
+ return resolved, true + } + } + + for k := range property.ctx.Resources { + if k == refValue { + res := property.ctx.Resources[k] + resolved = property.deriveResolved(cftypes.String, res.ID()) + break + } + } + return resolved, true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_ref_test.go b/pkg/iac/scanners/cloudformation/parser/fn_ref_test.go new file mode 100644 index 000000000000..0cb5d2b995ee --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_ref_test.go @@ -0,0 +1,89 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_referenced_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + Parameters: map[string]*Parameter{ + "BucketName": { + inner: parameterInner{ + Type: "string", + Default: "someBucketName", + }, + }, + }, + }, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Ref": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "BucketName", + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + + assert.Equal(t, "someBucketName", resolvedProperty.AsString()) +} + +func Test_property_value_correct_when_not_reference(t *testing.T) { + + property := &Property{ + ctx: &FileContext{ + filepath: "", + }, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.String, + Value: "someBucketName", + }, + } + + // should fail when trying to resolve function that is not in fact a function + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.False(t, success) + + assert.Equal(t, "someBucketName", resolvedProperty.AsString()) +} + +func 
Test_resolve_ref_with_pseudo_value(t *testing.T) { + source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: !Join [":", ["aws", !Ref AWS::Region, "key" ]] +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + keyNameProp := testRes.GetProperty("KeyName") + require.NotNil(t, keyNameProp) + + assert.Equal(t, "aws:eu-west-1:key", keyNameProp.AsString()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_select.go b/pkg/iac/scanners/cloudformation/parser/fn_select.go new file mode 100644 index 000000000000..c528223a2325 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_select.go @@ -0,0 +1,41 @@ +package parser + +import ( + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveSelect(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Select"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Select should have exactly 2 values, returning original Property") + } + + index := refValue[0] + list := refValue[1] + + if index.IsNotInt() { + if index.IsConvertableTo(cftypes.Int) { + // + index = index.ConvertTo(cftypes.Int) + } else { + return abortIntrinsic(property, "index on property [%s] should be an int, returning original Property", property.name) + } + } + + if list.IsNotList() { + return abortIntrinsic(property, "list on property [%s] should be a list, returning original Property", property.name) + } + + listItems := list.AsList() + + if len(listItems) <= index.AsInt() { + return nil, false + } + + return listItems[index.AsInt()], true +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_select_test.go b/pkg/iac/scanners/cloudformation/parser/fn_select_test.go new file mode 100644 index 
000000000000..92b634457b2d --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_select_test.go @@ -0,0 +1,77 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_select_value(t *testing.T) { + + source := `--- +Parameters: + EngineIndex: + Type: Integer + Default: 1 +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: !Select [ !Ref EngineIndex, [memcached, redis ]] + CacheNodeType: cache.t2.micro + NumCacheNodes: '1' +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ElasticacheCluster") + assert.NotNil(t, testRes) + + engineProp := testRes.GetProperty("Engine") + require.True(t, engineProp.IsNotNil()) + require.True(t, engineProp.IsString()) + + require.Equal(t, "redis", engineProp.AsString()) +} + +func Test_SelectPseudoListParam(t *testing.T) { + src := `--- +Resources: + myASGrpOne: + Type: AWS::AutoScaling::AutoScalingGroup + Version: "2009-05-15" + Properties: + AvailabilityZones: + - "us-east-1a" + LaunchConfigurationName: + Ref: MyLaunchConfiguration + MinSize: "0" + MaxSize: "0" + NotificationConfigurations: + - TopicARN: + Fn::Select: + - "1" + - Ref: AWS::NotificationARNs + NotificationTypes: + - autoscaling:EC2_INSTANCE_LAUNCH + - autoscaling:EC2_INSTANCE_LAUNCH_ERROR + +` + + ctx := createTestFileContext(t, src) + require.NotNil(t, ctx) + + resource := ctx.GetResourceByLogicalID("myASGrpOne") + require.NotNil(t, resource) + + notification := resource.GetProperty("NotificationConfigurations") + require.True(t, notification.IsNotNil()) + require.True(t, notification.IsList()) + first := notification.AsList()[0] + require.True(t, first.IsMap()) + topic, ok := first.AsMap()["TopicARN"] + require.True(t, ok) + require.Equal(t, "notification::arn::2", topic.AsString()) + +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_split.go 
b/pkg/iac/scanners/cloudformation/parser/fn_split.go new file mode 100644 index 000000000000..fc14d63e93a1 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_split.go @@ -0,0 +1,44 @@ +package parser + +import ( + "strings" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveSplit(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Split"].AsList() + + if len(refValue) != 2 { + return abortIntrinsic(property, "Fn::Split should have exactly 2 values, returning original Property") + } + + delimiterProp := refValue[0] + splitProp := refValue[1] + + if !splitProp.IsString() || !delimiterProp.IsString() { + abortIntrinsic(property, "Fn::Split requires two strings as input, returning original Property") + + } + + propertyList := createPropertyList(splitProp, delimiterProp, property) + + return property.deriveResolved(cftypes.List, propertyList), true +} + +func createPropertyList(splitProp *Property, delimiterProp *Property, parent *Property) []*Property { + + splitString := splitProp.AsString() + delimiter := delimiterProp.AsString() + + splits := strings.Split(splitString, delimiter) + var props []*Property + for _, split := range splits { + props = append(props, parent.deriveResolved(cftypes.String, split)) + } + return props +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_split_test.go b/pkg/iac/scanners/cloudformation/parser/fn_split_test.go new file mode 100644 index 000000000000..f3e73f2c3d4a --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_split_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "testing" +) + +/* + Fn::Split: ["::", "s3::bucket::to::split"] + +*/ + +func 
Test_resolve_split_value(t *testing.T) { + + property := &Property{ + ctx: &FileContext{}, + name: "BucketName", + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Split": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "::", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "s3::bucket::to::split", + }, + }, + }, + }, + }, + }, + }, + } + + resolvedProperty, success := ResolveIntrinsicFunc(property) + require.True(t, success) + assert.True(t, resolvedProperty.IsNotNil()) + assert.True(t, resolvedProperty.IsList()) + listContents := resolvedProperty.AsList() + assert.Len(t, listContents, 4) + +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_sub.go b/pkg/iac/scanners/cloudformation/parser/fn_sub.go new file mode 100644 index 000000000000..514680f95ad5 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_sub.go @@ -0,0 +1,71 @@ +package parser + +import ( + "fmt" + "strconv" + "strings" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func ResolveSub(property *Property) (resolved *Property, success bool) { + if !property.isFunction() { + return property, true + } + + refValue := property.AsMap()["Fn::Sub"] + + if refValue.IsString() { + return resolveStringSub(refValue, property), true + } + + if refValue.IsList() { + return resolveMapSub(refValue, property) + } + + return property, false +} + +func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { + refValues := refValue.AsList() + if len(refValues) != 2 { + return abortIntrinsic(original, "Fn::Sub with list expects 2 values, returning original property") + } + + workingString := refValues[0].AsString() + components := refValues[1].AsMap() + + for k, v := range components { + replacement := "[failed to resolve]" + switch v.Type() { + case cftypes.Map: + 
resolved, _ := ResolveIntrinsicFunc(v) + replacement = resolved.AsString() + case cftypes.String: + replacement = v.AsString() + case cftypes.Int: + replacement = strconv.Itoa(v.AsInt()) + case cftypes.Bool: + replacement = fmt.Sprintf("%v", v.AsBool()) + case cftypes.List: + var parts []string + for _, p := range v.AsList() { + parts = append(parts, p.String()) + } + replacement = fmt.Sprintf("[%s]", strings.Join(parts, ", ")) + } + workingString = strings.ReplaceAll(workingString, fmt.Sprintf("${%s}", k), replacement) + } + + return original.deriveResolved(cftypes.String, workingString), true +} + +func resolveStringSub(refValue *Property, original *Property) *Property { + workingString := refValue.AsString() + + for k, param := range pseudoParameters { + workingString = strings.ReplaceAll(workingString, fmt.Sprintf("${%s}", k), fmt.Sprintf("%v", param.getRawValue())) + } + + return original.deriveResolved(cftypes.String, workingString) +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_sub_test.go b/pkg/iac/scanners/cloudformation/parser/fn_sub_test.go new file mode 100644 index 000000000000..5ab98a59692b --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/fn_sub_test.go @@ -0,0 +1,103 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_resolve_sub_value(t *testing.T) { + source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + UserData: + !Sub | + #!/bin/bash -xe + yum update -y aws-cfn-bootstrap + /opt/aws/bin/cfn-init -v --stack ${AWS::StackName} --resource LaunchConfig --configsets wordpress_install --region ${AWS::Region} + /opt/aws/bin/cfn-signal -e $? 
--stack ${AWS::StackName} --resource WebServerGroup --region ${AWS::Region} +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + userDataProp := testRes.GetProperty("UserData") + require.NotNil(t, userDataProp) + + assert.Equal(t, "#!/bin/bash -xe\nyum update -y aws-cfn-bootstrap\n/opt/aws/bin/cfn-init -v --stack cfsec-test-stack --resource LaunchConfig --configsets wordpress_install --region eu-west-1\n/opt/aws/bin/cfn-signal -e $? --stack cfsec-test-stack --resource WebServerGroup --region eu-west-1\n", userDataProp.AsString()) +} + +func Test_resolve_sub_value_with_base64(t *testing.T) { + + source := `--- +Resources: + TestInstance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-79fd7eee" + KeyName: "testkey" + UserData: + Fn::Base64: + !Sub | + #!/bin/bash -xe + yum update -y aws-cfn-bootstrap + /opt/aws/bin/cfn-init -v --stack ${AWS::StackName} --resource LaunchConfig --configsets wordpress_install --region ${AWS::Region} + /opt/aws/bin/cfn-signal -e $? 
--stack ${AWS::StackName} --resource WebServerGroup --region ${AWS::Region}` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestInstance") + require.NotNil(t, testRes) + + userDataProp := testRes.GetProperty("UserData") + require.NotNil(t, userDataProp) + + assert.Equal(t, "IyEvYmluL2Jhc2ggLXhlCnl1bSB1cGRhdGUgLXkgYXdzLWNmbi1ib290c3RyYXAKL29wdC9hd3MvYmluL2Nmbi1pbml0IC12IC0tc3RhY2sgY2ZzZWMtdGVzdC1zdGFjayAtLXJlc291cmNlIExhdW5jaENvbmZpZyAtLWNvbmZpZ3NldHMgd29yZHByZXNzX2luc3RhbGwgLS1yZWdpb24gZXUtd2VzdC0xCi9vcHQvYXdzL2Jpbi9jZm4tc2lnbmFsIC1lICQ/IC0tc3RhY2sgY2ZzZWMtdGVzdC1zdGFjayAtLXJlc291cmNlIFdlYlNlcnZlckdyb3VwIC0tcmVnaW9uIGV1LXdlc3QtMQ==", userDataProp.AsString()) +} + +func Test_resolve_sub_value_with_map(t *testing.T) { + + source := `--- +Parameters: + RootDomainName: + Type: String + Default: somedomain.com +Resources: + TestDistribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + DefaultCacheBehavior: + TargetOriginId: target + ViewerProtocolPolicy: https-only + Enabled: true + Origins: + - DomainName: + !Sub + - www.${Domain} + - { Domain: !Ref RootDomainName } + Id: somedomain1 + + +` + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("TestDistribution") + require.NotNil(t, testRes) + + originsList := testRes.GetProperty("DistributionConfig.Origins") + + domainNameProp := originsList.AsList()[0].GetProperty("DomainName") + require.NotNil(t, domainNameProp) + + assert.Equal(t, "www.somedomain.com", domainNameProp.AsString()) + +} diff --git a/pkg/iac/scanners/cloudformation/parser/intrinsics.go b/pkg/iac/scanners/cloudformation/parser/intrinsics.go new file mode 100644 index 000000000000..d455fd3d5c6e --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/intrinsics.go @@ -0,0 +1,101 @@ +package parser + +import ( + "fmt" + "strings" + + "gopkg.in/yaml.v3" +) + +var intrinsicFuncs map[string]func(property 
*Property) (*Property, bool) + +func init() { + intrinsicFuncs = map[string]func(property *Property) (*Property, bool){ + "Ref": ResolveReference, + "Fn::Base64": ResolveBase64, + "Fn::Equals": ResolveEquals, + "Fn::Join": ResolveJoin, + "Fn::Split": ResolveSplit, + "Fn::Sub": ResolveSub, + "Fn::FindInMap": ResolveFindInMap, + "Fn::Select": ResolveSelect, + "Fn::GetAtt": ResolveGetAtt, + "Fn::GetAZs": GetAzs, + "Fn::Cidr": GetCidr, + "Fn::ImportValue": ImportPlaceholder, + "Fn::If": ResolveIf, + "Fn::And": ResolveAnd, + "Fn::Or": ResolveOr, + "Fn::Not": ResolveNot, + "Fn::Length": ResolveLength, + "Condition": ResolveCondition, + } +} + +func ImportPlaceholder(property *Property) (*Property, bool) { + property.unresolved = true + return property, false +} + +func PassthroughResolution(property *Property) (*Property, bool) { + return property, false +} + +func IsIntrinsicFunc(node *yaml.Node) bool { + if node == nil || node.Tag == "" { + return false + } + + nodeTag := strings.TrimPrefix(node.Tag, "!") + if nodeTag != "Ref" && nodeTag != "Condition" { + nodeTag = fmt.Sprintf("Fn::%s", nodeTag) + } + for tag := range intrinsicFuncs { + + if nodeTag == tag { + return true + } + } + return false +} + +func IsIntrinsic(key string) bool { + for tag := range intrinsicFuncs { + if tag == key { + return true + } + } + return false +} + +func ResolveIntrinsicFunc(property *Property) (*Property, bool) { + if property == nil { + return nil, false + } + if !property.IsMap() { + return property, false + } + + for funcName := range property.AsMap() { + if fn := intrinsicFuncs[funcName]; fn != nil { + // + return fn(property) + } + } + return property, false +} + +func getIntrinsicTag(tag string) string { + tag = strings.TrimPrefix(tag, "!") + switch tag { + case "Ref", "Contains": + return tag + default: + return fmt.Sprintf("Fn::%s", tag) + } +} + +func abortIntrinsic(property *Property, msg string, components ...string) (*Property, bool) { + // + return property, false +} diff 
--git a/pkg/iac/scanners/cloudformation/parser/intrinsics_test.go b/pkg/iac/scanners/cloudformation/parser/intrinsics_test.go new file mode 100644 index 000000000000..a69e04dd0fba --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/intrinsics_test.go @@ -0,0 +1,45 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v3" +) + +func Test_is_intrinsic_returns_expected(t *testing.T) { + + testCases := []struct { + nodeTag string + expectedResult bool + }{ + { + nodeTag: "!Ref", + expectedResult: true, + }, + { + nodeTag: "!Join", + expectedResult: true, + }, + { + nodeTag: "!Sub", + expectedResult: true, + }, + { + nodeTag: "!Equals", + expectedResult: true, + }, + { + nodeTag: "!Equal", + expectedResult: false, + }, + } + + for _, tt := range testCases { + n := &yaml.Node{ + Tag: tt.nodeTag, + } + assert.Equal(t, tt.expectedResult, IsIntrinsicFunc(n)) + } + +} diff --git a/pkg/iac/scanners/cloudformation/parser/parameter.go b/pkg/iac/scanners/cloudformation/parser/parameter.go new file mode 100644 index 000000000000..2007ca65b2b0 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/parameter.go @@ -0,0 +1,129 @@ +package parser + +import ( + "bytes" + "encoding/json" + "fmt" + "strconv" + "strings" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +type Parameter struct { + inner parameterInner +} + +type parameterInner struct { + Type string `yaml:"Type"` + Default interface{} `yaml:"Default"` +} + +func (p *Parameter) UnmarshalYAML(node *yaml.Node) error { + return node.Decode(&p.inner) +} + +func (p *Parameter) UnmarshalJSONWithMetadata(node jfather.Node) error { + return node.Decode(&p.inner) +} + +func (p *Parameter) Type() cftypes.CfType { + switch p.inner.Type { + case "Boolean": + return cftypes.Bool + case "String": + return cftypes.String + case "Integer": + return cftypes.Int + default: + return 
cftypes.String + } +} + +func (p *Parameter) Default() interface{} { + return p.inner.Default +} + +func (p *Parameter) UpdateDefault(inVal interface{}) { + passedVal := inVal.(string) + + switch p.inner.Type { + case "Boolean": + p.inner.Default, _ = strconv.ParseBool(passedVal) + case "String": + p.inner.Default = passedVal + case "Integer": + p.inner.Default, _ = strconv.Atoi(passedVal) + default: + p.inner.Default = passedVal + } +} + +type Parameters map[string]any + +func (p *Parameters) Merge(other Parameters) { + for k, v := range other { + (*p)[k] = v + } +} + +func (p *Parameters) UnmarshalJSON(data []byte) error { + (*p) = make(Parameters) + + if len(data) == 0 { + return nil + } + + switch { + case data[0] == '{' && data[len(data)-1] == '}': // object + // CodePipeline like format + var params struct { + Params map[string]any `json:"Parameters"` + } + + if err := json.Unmarshal(data, ¶ms); err != nil { + return err + } + + (*p) = params.Params + case data[0] == '[' && data[len(data)-1] == ']': // array + { + // Original format + var params []string + + if err := json.Unmarshal(data, ¶ms); err == nil { + for _, param := range params { + parts := strings.Split(param, "=") + if len(parts) != 2 { + return fmt.Errorf("invalid key-value parameter: %q", param) + } + (*p)[parts[0]] = parts[1] + } + return nil + } + + // CloudFormation like format + var cfparams []struct { + ParameterKey string `json:"ParameterKey"` + ParameterValue string `json:"ParameterValue"` + } + + d := json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&cfparams); err != nil { + return err + } + + for _, param := range cfparams { + (*p)[param.ParameterKey] = param.ParameterValue + } + } + default: + return fmt.Errorf("unsupported parameters format") + } + + return nil +} diff --git a/pkg/iac/scanners/cloudformation/parser/parameters_test.go b/pkg/iac/scanners/cloudformation/parser/parameters_test.go new file mode 100644 index 000000000000..703f07f5fe12 
--- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/parameters_test.go @@ -0,0 +1,89 @@ +package parser + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParameters_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + source string + expected Parameters + wantErr bool + }{ + { + name: "original format", + source: `[ + "Key1=Value1", + "Key2=Value2" + ]`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + { + name: "CloudFormation like format", + source: `[ + { + "ParameterKey": "Key1", + "ParameterValue": "Value1" + }, + { + "ParameterKey": "Key2", + "ParameterValue": "Value2" + } + ]`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + { + name: "CloudFormation like format, with unknown fields", + source: `[ + { + "ParameterKey": "Key1", + "ParameterValue": "Value1" + }, + { + "ParameterKey": "Key2", + "ParameterValue": "Value2", + "UsePreviousValue": true + } + ]`, + wantErr: true, + }, + { + name: "CodePipeline like format", + source: `{ + "Parameters": { + "Key1": "Value1", + "Key2": "Value2" + } + }`, + expected: map[string]any{ + "Key1": "Value1", + "Key2": "Value2", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var params Parameters + + err := json.Unmarshal([]byte(tt.source), ¶ms) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, tt.expected, params) + }) + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go new file mode 100644 index 000000000000..f4d31ff565d9 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -0,0 +1,236 @@ +package parser + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + 
"github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/iac/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool + parameterFiles []string + parameters map[string]any + overridedParameters Parameters + configsFS fs.FS +} + +func WithParameters(params map[string]any) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.parameters = params + } + } +} + +func WithParameterFiles(files ...string) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.parameterFiles = files + } + } +} + +func WithConfigsFS(fsys fs.FS) options.ParserOption { + return func(cp options.ConfigurableParser) { + if p, ok := cp.(*Parser); ok { + p.configsFS = fsys + } + } +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "cloudformation", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, fsys fs.FS, dir string) (FileContexts, error) { + var contexts FileContexts + if err := fs.WalkDir(fsys, filepath.ToSlash(dir), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + + if !p.Required(fsys, path) { + p.debug.Log("not a CloudFormation file, skipping %s", path) + return nil + } + + c, err := p.ParseFile(ctx, fsys, path) + if err != nil { + p.debug.Log("Error parsing file '%s': %s", path, err) + return nil + } + contexts = append(contexts, c) + return nil + }); err != nil { + return nil, err + } + return contexts, nil +} + 
+func (p *Parser) Required(fs fs.FS, path string) bool { + if p.skipRequired { + return true + } + + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return false + } + defer func() { _ = f.Close() }() + if data, err := io.ReadAll(f); err == nil { + return detection.IsType(path, bytes.NewReader(data), detection.FileTypeCloudFormation) + } + return false + +} + +func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (context *FileContext, err error) { + defer func() { + if e := recover(); e != nil { + err = fmt.Errorf("panic during parse: %s", e) + } + }() + + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + if p.configsFS == nil { + p.configsFS = fsys + } + + if err := p.parseParams(); err != nil { + return nil, fmt.Errorf("failed to parse parameters file: %w", err) + } + + sourceFmt := YamlSourceFormat + if strings.HasSuffix(strings.ToLower(path), ".json") { + sourceFmt = JsonSourceFormat + } + + f, err := fsys.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + + content, err := io.ReadAll(f) + if err != nil { + return nil, err + } + + lines := strings.Split(string(content), "\n") + + context = &FileContext{ + filepath: path, + lines: lines, + SourceFormat: sourceFmt, + } + + if strings.HasSuffix(strings.ToLower(path), ".json") { + if err := jfather.Unmarshal(content, context); err != nil { + return nil, NewErrInvalidContent(path, err) + } + } else { + if err := yaml.Unmarshal(content, context); err != nil { + return nil, NewErrInvalidContent(path, err) + } + } + + context.OverrideParameters(p.overridedParameters) + + context.lines = lines + context.SourceFormat = sourceFmt + context.filepath = path + + p.debug.Log("Context loaded from source %s", path) + + // the context must be set to conditions before resources + for _, c := range context.Conditions { + c.setContext(context) + } + + for name, r := range context.Resources { + r.ConfigureResource(name, 
fsys, path, context) + } + + return context, nil +} + +func (p *Parser) parseParams() error { + if p.overridedParameters != nil { // parameters have already been parsed + return nil + } + + params := make(Parameters) + + var errs []error + + for _, path := range p.parameterFiles { + if parameters, err := p.parseParametersFile(path); err != nil { + errs = append(errs, err) + } else { + params.Merge(parameters) + } + } + + if len(errs) != 0 { + return errors.Join(errs...) + } + + params.Merge(p.parameters) + + p.overridedParameters = params + return nil +} + +func (p *Parser) parseParametersFile(path string) (Parameters, error) { + f, err := p.configsFS.Open(path) + if err != nil { + return nil, fmt.Errorf("parameters file %q open error: %w", path, err) + } + + var parameters Parameters + if err := json.NewDecoder(f).Decode(¶meters); err != nil { + return nil, err + } + return parameters, nil +} diff --git a/pkg/iac/scanners/cloudformation/parser/parser_test.go b/pkg/iac/scanners/cloudformation/parser/parser_test.go new file mode 100644 index 000000000000..5862d4757186 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/parser_test.go @@ -0,0 +1,374 @@ +package parser + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/test/testutil" +) + +func parseFile(t *testing.T, source string, name string) (FileContexts, error) { + tmp, err := os.MkdirTemp(os.TempDir(), "defsec") + require.NoError(t, err) + defer func() { _ = os.RemoveAll(tmp) }() + require.NoError(t, os.WriteFile(filepath.Join(tmp, name), []byte(source), 0600)) + fs := os.DirFS(tmp) + return New().ParseFS(context.TODO(), fs, ".") +} + +func Test_parse_yaml(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: naughty + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + 
Properties: + BucketName: naughty + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: + Ref: EncryptBucket` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) + + bucket, ok := file.Resources["S3Bucket"] + require.True(t, ok, "S3Bucket resource should be available") + assert.Equal(t, "cf.yaml", bucket.Range().GetFilename()) + assert.Equal(t, 10, bucket.Range().GetStartLine()) + assert.Equal(t, 17, bucket.Range().GetEndLine()) +} + +func Test_parse_json(t *testing.T) { + source := `{ + "Parameters": { + "BucketName": { + "Type": "String", + "Default": "naughty" + }, + "BucketKeyEnabled": { + "Type": "Boolean", + "Default": false + } + }, + "Resources": { + "S3Bucket": { + "Type": "AWS::S3::Bucket", + "properties": { + "BucketName": { + "Ref": "BucketName" + }, + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "BucketKeyEnabled": { + "Ref": "BucketKeyEnabled" + } + } + ] + } + } + } + } +} +` + + files, err := parseFile(t, source, "cf.json") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) +} + +func Test_parse_yaml_with_map_ref(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: referencedBucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: + Ref: EncryptBucket` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + file := files[0] + + assert.Len(t, file.Resources, 1) + assert.Len(t, file.Parameters, 2) + + res := file.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + refProp := res.GetProperty("BucketName") + 
assert.False(t, refProp.IsNil()) + assert.Equal(t, "referencedBucket", refProp.AsString()) +} + +func Test_parse_yaml_with_intrinsic_functions(t *testing.T) { + + source := `--- +Parameters: + BucketName: + Type: String + Default: somebucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: !Ref BucketName + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: false +` + + files, err := parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + ctx := files[0] + + assert.Len(t, ctx.Resources, 1) + assert.Len(t, ctx.Parameters, 2) + + res := ctx.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + refProp := res.GetProperty("BucketName") + assert.False(t, refProp.IsNil()) + assert.Equal(t, "somebucket", refProp.AsString()) +} + +func createTestFileContext(t *testing.T, source string) *FileContext { + contexts, err := parseFile(t, source, "main.yaml") + require.NoError(t, err) + require.Len(t, contexts, 1) + return contexts[0] +} + +func Test_parse_yaml_use_condition_in_resource(t *testing.T) { + source := `--- +AWSTemplateFormatVersion: "2010-09-09" +Description: some description +Parameters: + ServiceName: + Type: String + Description: The service name + EnvName: + Type: String + Description: Optional environment name to prefix all resources with + Default: "" + +Conditions: + SuffixResources: !Not [!Equals [!Ref EnvName, ""]] + +Resources: + ErrorTimedOutMetricFilter: + Type: AWS::Logs::MetricFilter + Properties: + FilterPattern: '?ERROR ?error ?Error ?"timed out"' # If log contains one of these error words or timed out + LogGroupName: + !If [ + SuffixResources, + !Sub "/aws/lambda/${ServiceName}-${EnvName}", + !Sub "/aws/lambda/${ServiceName}", + ] + MetricTransformations: + - MetricName: !Sub "${ServiceName}-ErrorLogCount" + MetricNamespace: market-LogMetrics + MetricValue: 1 + DefaultValue: 0 +` + + files, err := 
parseFile(t, source, "cf.yaml") + require.NoError(t, err) + assert.Len(t, files, 1) + ctx := files[0] + + assert.Len(t, ctx.Parameters, 2) + assert.Len(t, ctx.Conditions, 1) + assert.Len(t, ctx.Resources, 1) + + res := ctx.GetResourceByLogicalID("ErrorTimedOutMetricFilter") + assert.NotNil(t, res) + + refProp := res.GetProperty("LogGroupName") + assert.False(t, refProp.IsNil()) + assert.Equal(t, "/aws/lambda/${ServiceName}", refProp.AsString()) +} + +func TestParse_WithParameters(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "main.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: test-queue + KmsMasterKeyId: !Ref KmsMasterKeyId + `, + }) + + params := map[string]any{ + "KmsMasterKeyId": "some_id", + } + p := New(WithParameters(params)) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 1) + + file := files[0] + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, kmsProp.IsNil()) + assert.Equal(t, "some_id", kmsProp.AsString()) +} + +func TestParse_WithParameterFiles(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: test-queue + KmsMasterKeyId: !Ref KmsMasterKeyId +`, + "params.json": `[ + { + "ParameterKey": "KmsMasterKeyId", + "ParameterValue": "some_id" + } +] + `, + }) + + p := New(WithParameterFiles("params.json")) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 1) + + file := files[0] + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, kmsProp.IsNil()) + 
assert.Equal(t, "some_id", kmsProp.AsString()) +} + +func TestParse_WithConfigFS(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "queue.yaml": `AWSTemplateFormatVersion: 2010-09-09 +Parameters: + KmsMasterKeyId: + Type: String +Resources: + TestQueue: + Type: 'AWS::SQS::Queue' + Properties: + QueueName: testqueue + KmsMasterKeyId: !Ref KmsMasterKeyId +`, + "bucket.yaml": `AWSTemplateFormatVersion: '2010-09-09' +Description: Bucket +Parameters: + BucketName: + Type: String +Resources: + S3Bucket: + Type: AWS::S3::Bucket + Properties: + BucketName: !Ref BucketName +`, + }) + + configFS := testutil.CreateFS(t, map[string]string{ + "/workdir/parameters/queue.json": `[ + { + "ParameterKey": "KmsMasterKeyId", + "ParameterValue": "some_id" + } + ] + `, + "/workdir/parameters/s3.json": `[ + { + "ParameterKey": "BucketName", + "ParameterValue": "testbucket" + } + ]`, + }) + + p := New( + WithParameterFiles("/workdir/parameters/queue.json", "/workdir/parameters/s3.json"), + WithConfigsFS(configFS), + ) + + files, err := p.ParseFS(context.TODO(), fs, ".") + require.NoError(t, err) + require.Len(t, files, 2) + + for _, file := range files { + if strings.Contains(file.filepath, "queue") { + res := file.GetResourceByLogicalID("TestQueue") + assert.NotNil(t, res) + + kmsProp := res.GetProperty("KmsMasterKeyId") + assert.False(t, kmsProp.IsNil()) + assert.Equal(t, "some_id", kmsProp.AsString()) + } else if strings.Contains(file.filepath, "s3") { + res := file.GetResourceByLogicalID("S3Bucket") + assert.NotNil(t, res) + + bucketNameProp := res.GetProperty("BucketName") + assert.False(t, bucketNameProp.IsNil()) + assert.Equal(t, "testbucket", bucketNameProp.AsString()) + } + } + +} diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go new file mode 100644 index 000000000000..2de7c937808d --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -0,0 +1,428 @@ +package parser + +import ( 
+ "encoding/json" + "io/fs" + "strconv" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" +) + +type EqualityOptions = int + +const ( + IgnoreCase EqualityOptions = iota +) + +type Property struct { + ctx *FileContext + name string + comment string + rng defsecTypes.Range + parentRange defsecTypes.Range + Inner PropertyInner + logicalId string + unresolved bool +} + +type PropertyInner struct { + Type cftypes.CfType + Value interface{} `json:"Value" yaml:"Value"` +} + +func (p *Property) Comment() string { + return p.comment +} + +func (p *Property) setName(name string) { + p.name = name + if p.Type() == cftypes.Map { + for n, subProp := range p.AsMap() { + if subProp == nil { + continue + } + subProp.setName(n) + } + } +} + +func (p *Property) setContext(ctx *FileContext) { + p.ctx = ctx + + if p.IsMap() { + for _, subProp := range p.AsMap() { + if subProp == nil { + continue + } + subProp.setContext(ctx) + } + } + + if p.IsList() { + for _, subProp := range p.AsList() { + subProp.setContext(ctx) + } + } +} + +func (p *Property) setFileAndParentRange(target fs.FS, filepath string, parentRange defsecTypes.Range) { + p.rng = defsecTypes.NewRange(filepath, p.rng.GetStartLine(), p.rng.GetEndLine(), p.rng.GetSourcePrefix(), target) + p.parentRange = parentRange + + switch p.Type() { + case cftypes.Map: + for _, subProp := range p.AsMap() { + if subProp == nil { + continue + } + subProp.setFileAndParentRange(target, filepath, parentRange) + } + case cftypes.List: + for _, subProp := range p.AsList() { + if subProp == nil { + continue + } + subProp.setFileAndParentRange(target, filepath, parentRange) + } + } +} + +func (p *Property) UnmarshalYAML(node *yaml.Node) error { + p.rng = defsecTypes.NewRange("", node.Line, calculateEndLine(node), "", nil) + + p.comment = node.LineComment + return 
setPropertyValueFromYaml(node, &p.Inner) +} + +func (p *Property) UnmarshalJSONWithMetadata(node jfather.Node) error { + p.rng = defsecTypes.NewRange("", node.Range().Start.Line, node.Range().End.Line, "", nil) + return setPropertyValueFromJson(node, &p.Inner) +} + +func (p *Property) Type() cftypes.CfType { + return p.Inner.Type +} + +func (p *Property) Range() defsecTypes.Range { + return p.rng +} + +func (p *Property) Metadata() defsecTypes.Metadata { + base := p + if p.isFunction() { + if resolved, ok := p.resolveValue(); ok { + base = resolved + } + } + ref := NewCFReferenceWithValue(p.parentRange, *base, p.logicalId) + return defsecTypes.NewMetadata(p.Range(), ref.String()) +} + +func (p *Property) MetadataWithValue(resolvedValue *Property) defsecTypes.Metadata { + ref := NewCFReferenceWithValue(p.parentRange, *resolvedValue, p.logicalId) + return defsecTypes.NewMetadata(p.Range(), ref.String()) +} + +func (p *Property) isFunction() bool { + if p == nil { + return false + } + if p.Type() == cftypes.Map { + for n := range p.AsMap() { + return IsIntrinsic(n) + } + } + return false +} + +func (p *Property) RawValue() interface{} { + return p.Inner.Value +} + +func (p *Property) AsRawStrings() ([]string, error) { + + if len(p.ctx.lines) < p.rng.GetEndLine() { + return p.ctx.lines, nil + } + return p.ctx.lines[p.rng.GetStartLine()-1 : p.rng.GetEndLine()], nil +} + +func (p *Property) resolveValue() (*Property, bool) { + if !p.isFunction() || p.IsUnresolved() { + return p, true + } + + resolved, ok := ResolveIntrinsicFunc(p) + if ok { + return resolved, true + } + + p.unresolved = true + return p, false +} + +func (p *Property) GetStringProperty(path string, defaultValue ...string) defsecTypes.StringValue { + defVal := "" + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + if p.IsUnresolved() { + return defsecTypes.StringUnresolvable(p.Metadata()) + } + + prop := p.GetProperty(path) + if prop.IsNotString() { + return p.StringDefault(defVal) + } + return 
prop.AsStringValue() +} + +func (p *Property) StringDefault(defaultValue string) defsecTypes.StringValue { + return defsecTypes.StringDefault(defaultValue, p.Metadata()) +} + +func (p *Property) GetBoolProperty(path string, defaultValue ...bool) defsecTypes.BoolValue { + defVal := false + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + if p.IsUnresolved() { + return defsecTypes.BoolUnresolvable(p.Metadata()) + } + + prop := p.GetProperty(path) + + if prop.isFunction() { + prop, _ = prop.resolveValue() + } + + if prop.IsNotBool() { + return p.inferBool(prop, defVal) + } + return prop.AsBoolValue() +} + +func (p *Property) GetIntProperty(path string, defaultValue ...int) defsecTypes.IntValue { + defVal := 0 + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + if p.IsUnresolved() { + return defsecTypes.IntUnresolvable(p.Metadata()) + } + + prop := p.GetProperty(path) + + if prop.IsNotInt() { + return p.IntDefault(defVal) + } + return prop.AsIntValue() +} + +func (p *Property) BoolDefault(defaultValue bool) defsecTypes.BoolValue { + return defsecTypes.BoolDefault(defaultValue, p.Metadata()) +} + +func (p *Property) IntDefault(defaultValue int) defsecTypes.IntValue { + return defsecTypes.IntDefault(defaultValue, p.Metadata()) +} + +func (p *Property) GetProperty(path string) *Property { + + pathParts := strings.Split(path, ".") + + first := pathParts[0] + property := p + + if p.isFunction() { + property, _ = p.resolveValue() + } + + if property.IsNotMap() { + return nil + } + + for n, p := range property.AsMap() { + if n == first { + property = p + break + } + } + + if len(pathParts) == 1 || property == nil { + return property + } + + if nestedProperty := property.GetProperty(strings.Join(pathParts[1:], ".")); nestedProperty != nil { + if nestedProperty.isFunction() { + resolved, _ := nestedProperty.resolveValue() + return resolved + } else { + return nestedProperty + } + } + + return &Property{} +} + +func (p *Property) deriveResolved(propType 
cftypes.CfType, propValue interface{}) *Property { + return &Property{ + ctx: p.ctx, + name: p.name, + comment: p.comment, + rng: p.rng, + parentRange: p.parentRange, + logicalId: p.logicalId, + Inner: PropertyInner{ + Type: propType, + Value: propValue, + }, + } +} + +func (p *Property) ParentRange() defsecTypes.Range { + return p.parentRange +} + +func (p *Property) inferBool(prop *Property, defaultValue bool) defsecTypes.BoolValue { + if prop.IsString() { + if prop.EqualTo("true", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("yes", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("1", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("false", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("no", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("0", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + } + + if prop.IsInt() { + if prop.EqualTo(0) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo(1) { + return defsecTypes.Bool(true, prop.Metadata()) + } + } + + return p.BoolDefault(defaultValue) +} + +func (p *Property) String() string { + r := "" + switch p.Type() { + case cftypes.String: + r = p.AsString() + case cftypes.Int: + r = strconv.Itoa(p.AsInt()) + } + return r +} + +func (p *Property) SetLogicalResource(id string) { + p.logicalId = id + + if p.isFunction() { + return + } + + if p.IsMap() { + for _, subProp := range p.AsMap() { + if subProp == nil { + continue + } + subProp.SetLogicalResource(id) + } + } + + if p.IsList() { + for _, subProp := range p.AsList() { + subProp.SetLogicalResource(id) + } + } + +} + +func (p *Property) GetJsonBytes(squashList ...bool) []byte { + if p.IsNil() { + return []byte{} + } + lines, err := p.AsRawStrings() + if err != nil { + return nil + } + if p.ctx.SourceFormat == JsonSourceFormat { + return 
[]byte(strings.Join(lines, " ")) + } + + if len(squashList) > 0 { + lines[0] = strings.Replace(lines[0], "-", " ", 1) + } + + lines = removeLeftMargin(lines) + + yamlContent := strings.Join(lines, "\n") + var body interface{} + if err := yaml.Unmarshal([]byte(yamlContent), &body); err != nil { + return nil + } + jsonBody := convert(body) + policyJson, err := json.Marshal(jsonBody) + if err != nil { + return nil + } + return policyJson +} + +func (p *Property) GetJsonBytesAsString(squashList ...bool) string { + return string(p.GetJsonBytes(squashList...)) +} + +func removeLeftMargin(lines []string) []string { + if len(lines) == 0 { + return lines + } + prefixSpace := len(lines[0]) - len(strings.TrimLeft(lines[0], " ")) + + for i, line := range lines { + if len(line) >= prefixSpace { + lines[i] = line[prefixSpace:] + } + } + return lines +} + +func convert(input interface{}) interface{} { + switch x := input.(type) { + case map[interface{}]interface{}: + outpMap := map[string]interface{}{} + for k, v := range x { + outpMap[k.(string)] = convert(v) + } + return outpMap + case []interface{}: + for i, v := range x { + x[i] = convert(v) + } + } + return input +} diff --git a/pkg/iac/scanners/cloudformation/parser/property_conversion.go b/pkg/iac/scanners/cloudformation/parser/property_conversion.go new file mode 100644 index 000000000000..d286fa4dd797 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/property_conversion.go @@ -0,0 +1,129 @@ +package parser + +import ( + "fmt" + "os" + "strconv" + "strings" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func (p *Property) IsConvertableTo(conversionType cftypes.CfType) bool { + switch conversionType { + case cftypes.Int: + return p.isConvertableToInt() + case cftypes.Bool: + return p.isConvertableToBool() + case cftypes.String: + return p.isConvertableToString() + } + return false +} + +func (p *Property) isConvertableToString() bool { + switch p.Type() { + case cftypes.Map: + 
return false + case cftypes.List: + for _, p := range p.AsList() { + if !p.IsString() { + return false + } + } + } + return true +} + +func (p *Property) isConvertableToBool() bool { + switch p.Type() { + case cftypes.String: + return p.EqualTo("true", IgnoreCase) || p.EqualTo("false", IgnoreCase) || + p.EqualTo("1", IgnoreCase) || p.EqualTo("0", IgnoreCase) + + case cftypes.Int: + return p.EqualTo(1) || p.EqualTo(0) + } + return false +} + +func (p *Property) isConvertableToInt() bool { + switch p.Type() { + case cftypes.String: + if _, err := strconv.Atoi(p.AsString()); err == nil { + return true + } + case cftypes.Bool: + return true + } + return false +} + +func (p *Property) ConvertTo(conversionType cftypes.CfType) *Property { + + if !p.IsConvertableTo(conversionType) { + _, _ = fmt.Fprintf(os.Stderr, "property of type %s cannot be converted to %s\n", p.Type(), conversionType) + return p + } + switch conversionType { + case cftypes.Int: + return p.convertToInt() + case cftypes.Bool: + return p.convertToBool() + case cftypes.String: + return p.convertToString() + } + return p +} + +func (p *Property) convertToString() *Property { + switch p.Type() { + case cftypes.Int: + return p.deriveResolved(cftypes.String, strconv.Itoa(p.AsInt())) + case cftypes.Bool: + return p.deriveResolved(cftypes.String, fmt.Sprintf("%v", p.AsBool())) + case cftypes.List: + var parts []string + for _, property := range p.AsList() { + parts = append(parts, property.AsString()) + } + return p.deriveResolved(cftypes.String, fmt.Sprintf("[%s]", strings.Join(parts, ", "))) + } + return p +} + +func (p *Property) convertToBool() *Property { + switch p.Type() { + case cftypes.String: + if p.EqualTo("true", IgnoreCase) || p.EqualTo("1") { + return p.deriveResolved(cftypes.Bool, true) + } + if p.EqualTo("false", IgnoreCase) || p.EqualTo("0") { + return p.deriveResolved(cftypes.Bool, false) + } + case cftypes.Int: + if p.EqualTo(1) { + return p.deriveResolved(cftypes.Bool, true) + } + if 
p.EqualTo(0) { + return p.deriveResolved(cftypes.Bool, false) + } + } + return p +} + +func (p *Property) convertToInt() *Property { + // + switch p.Type() { + case cftypes.String: + if val, err := strconv.Atoi(p.AsString()); err == nil { + return p.deriveResolved(cftypes.Int, val) + } + case cftypes.Bool: + if p.IsTrue() { + return p.deriveResolved(cftypes.Int, 1) + } + return p.deriveResolved(cftypes.Int, 0) + } + return p +} diff --git a/pkg/iac/scanners/cloudformation/parser/property_helpers.go b/pkg/iac/scanners/cloudformation/parser/property_helpers.go new file mode 100644 index 000000000000..86c50dcc9639 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers.go @@ -0,0 +1,267 @@ +package parser + +import ( + "strconv" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" +) + +func (p *Property) IsNil() bool { + return p == nil || p.Inner.Value == nil +} + +func (p *Property) IsNotNil() bool { + return !p.IsUnresolved() && !p.IsNil() +} + +func (p *Property) Is(t cftypes.CfType) bool { + if p.IsNil() || p.IsUnresolved() { + return false + } + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.Is(t) + } + } + return p.Inner.Type == t +} + +func (p *Property) IsString() bool { + return p.Is(cftypes.String) +} + +func (p *Property) IsNotString() bool { + return !p.IsUnresolved() && !p.IsString() +} + +func (p *Property) IsInt() bool { + return p.Is(cftypes.Int) +} + +func (p *Property) IsNotInt() bool { + return !p.IsUnresolved() && !p.IsInt() +} + +func (p *Property) IsMap() bool { + if p.IsNil() || p.IsUnresolved() { + return false + } + return p.Inner.Type == cftypes.Map +} + +func (p *Property) IsNotMap() bool { + return !p.IsUnresolved() && !p.IsMap() +} + +func (p *Property) IsList() bool { + return p.Is(cftypes.List) +} + +func (p *Property) IsNotList() bool { + return !p.IsUnresolved() && 
!p.IsList() +} + +func (p *Property) IsBool() bool { + return p.Is(cftypes.Bool) +} + +func (p *Property) IsUnresolved() bool { + return p != nil && p.unresolved +} + +func (p *Property) IsNotBool() bool { + return !p.IsUnresolved() && !p.IsBool() +} + +func (p *Property) AsString() string { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsString() + } + return "" + } + if p.IsNil() { + return "" + } + if !p.IsString() { + return "" + } + + return p.Inner.Value.(string) +} + +func (p *Property) AsStringValue() defsecTypes.StringValue { + if p.unresolved { + return defsecTypes.StringUnresolvable(p.Metadata()) + } + return defsecTypes.StringExplicit(p.AsString(), p.Metadata()) +} + +func (p *Property) AsInt() int { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsInt() + } + return 0 + } + if p.IsNotInt() { + if p.isConvertableToInt() { + return p.convertToInt().AsInt() + } + return 0 + } + + return p.Inner.Value.(int) +} + +func (p *Property) AsIntValue() defsecTypes.IntValue { + if p.unresolved { + return defsecTypes.IntUnresolvable(p.Metadata()) + } + return defsecTypes.IntExplicit(p.AsInt(), p.Metadata()) +} + +func (p *Property) AsBool() bool { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsBool() + } + return false + } + if !p.IsBool() { + return false + } + return p.Inner.Value.(bool) +} + +func (p *Property) AsBoolValue() defsecTypes.BoolValue { + if p.unresolved { + return defsecTypes.BoolUnresolvable(p.Metadata()) + } + return defsecTypes.Bool(p.AsBool(), p.Metadata()) +} + +func (p *Property) AsMap() map[string]*Property { + val, ok := p.Inner.Value.(map[string]*Property) + if !ok { + return nil + } + return val +} + +func (p *Property) AsList() []*Property { + if p.isFunction() { + if prop, success := p.resolveValue(); success && prop != p { + return prop.AsList() + } + return []*Property{} + } + + 
if list, ok := p.Inner.Value.([]*Property); ok { + return list + } + return nil +} + +func (p *Property) Len() int { + return len(p.AsList()) +} + +func (p *Property) EqualTo(checkValue interface{}, equalityOptions ...EqualityOptions) bool { + var ignoreCase bool + for _, option := range equalityOptions { + if option == IgnoreCase { + ignoreCase = true + } + } + + switch checkerVal := checkValue.(type) { + case string: + if p.IsNil() { + return false + } + + if p.Inner.Type == cftypes.String || p.IsString() { + if ignoreCase { + return strings.EqualFold(p.AsString(), checkerVal) + } + return p.AsString() == checkerVal + } else if p.Inner.Type == cftypes.Int || p.IsInt() { + if val, err := strconv.Atoi(checkerVal); err == nil { + return p.AsInt() == val + } + } + return false + case bool: + if p.Inner.Type == cftypes.Bool || p.IsBool() { + return p.AsBool() == checkerVal + } + case int: + if p.Inner.Type == cftypes.Int || p.IsInt() { + return p.AsInt() == checkerVal + } + } + + return false + +} + +func (p *Property) IsTrue() bool { + if p.IsNil() || !p.IsBool() { + return false + } + + return p.AsBool() +} + +func (p *Property) IsEmpty() bool { + + if p.IsNil() { + return true + } + if p.IsUnresolved() { + return false + } + + switch p.Inner.Type { + case cftypes.String: + return p.AsString() == "" + case cftypes.List, cftypes.Map: + return len(p.AsList()) == 0 + default: + return false + } +} + +func (p *Property) Contains(checkVal interface{}) bool { + if p == nil || p.IsNil() { + return false + } + + switch p.Type() { + case cftypes.List: + for _, p := range p.AsList() { + if p.EqualTo(checkVal) { + return true + } + } + case cftypes.Map: + if _, ok := checkVal.(string); !ok { + return false + } + for key := range p.AsMap() { + if key == checkVal.(string) { + return true + } + } + case cftypes.String: + if _, ok := checkVal.(string); !ok { + return false + } + return strings.Contains(p.AsString(), checkVal.(string)) + } + return false +} diff --git 
a/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go new file mode 100644 index 000000000000..1fa1885a408b --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go @@ -0,0 +1,195 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/assert" +) + +func newProp(inner PropertyInner) *Property { + return &Property{ + name: "test_prop", + ctx: &FileContext{}, + rng: types.NewRange("testfile", 1, 1, "", nil), + Inner: inner, + } +} + +func Test_EqualTo(t *testing.T) { + tests := []struct { + name string + property *Property + checkValue interface{} + opts []EqualityOptions + isEqual bool + }{ + { + name: "prop is nil", + property: nil, + checkValue: "some value", + isEqual: false, + }, + { + name: "compare strings", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "is str", + }), + checkValue: "is str", + isEqual: true, + }, + { + name: "compare strings ignoring case", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "is str", + }), + opts: []EqualityOptions{IgnoreCase}, + checkValue: "Is StR", + isEqual: true, + }, + { + name: "strings ate not equal", + property: newProp(PropertyInner{ + Type: cftypes.String, + Value: "some value", + }), + checkValue: "some other value", + isEqual: false, + }, + { + name: "compare prop with a int represented by a string", + property: newProp(PropertyInner{ + Type: cftypes.Int, + Value: 147, + }), + checkValue: "147", + isEqual: true, + }, + { + name: "compare ints", + property: newProp(PropertyInner{ + Type: cftypes.Int, + Value: 701, + }), + checkValue: 701, + isEqual: true, + }, + { + name: "compare bools", + property: newProp(PropertyInner{ + Type: cftypes.Bool, + Value: true, + }), + checkValue: true, + isEqual: true, + }, + { + name: "prop is 
string fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: false, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "bad", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "some value", + }, + }, + }, + }, + }, + }, + }), + checkValue: "some value", + isEqual: true, + }, + { + name: "prop is int fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: true, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: 121, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Int, + Value: -1, + }, + }, + }, + }, + }, + }, + }), + checkValue: 121, + isEqual: true, + }, + { + name: "prop is bool fn", + property: newProp(PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::Equals": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "foo", + }, + }, + }, + }, + }, + }, + }), + checkValue: true, + isEqual: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.isEqual, tt.property.EqualTo(tt.checkValue, tt.opts...)) + }) + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go new file mode 100644 index 000000000000..3775026678a3 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go @@ -0,0 +1,46 @@ +package parser + +import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + +type pseudoParameter struct { + t 
cftypes.CfType + val interface{} + raw interface{} +} + +var pseudoParameters = map[string]pseudoParameter{ + "AWS::AccountId": {t: cftypes.String, val: "123456789012"}, + "AWS::NotificationARNs": { + t: cftypes.List, + val: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "notification::arn::1", + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "notification::arn::2", + }, + }, + }, + raw: []string{"notification::arn::1", "notification::arn::2"}, + }, + "AWS::NoValue": {t: cftypes.String, val: ""}, + "AWS::Partition": {t: cftypes.String, val: "aws"}, + "AWS::Region": {t: cftypes.String, val: "eu-west-1"}, + "AWS::StackId": {t: cftypes.String, val: "arn:aws:cloudformation:eu-west-1:stack/ID"}, + "AWS::StackName": {t: cftypes.String, val: "cfsec-test-stack"}, + "AWS::URLSuffix": {t: cftypes.String, val: "amazonaws.com"}, +} + +func (p pseudoParameter) getRawValue() interface{} { + switch p.t { + case cftypes.List: + return p.raw + default: + return p.val + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go new file mode 100644 index 000000000000..281bf9083a14 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go @@ -0,0 +1,36 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Raw(t *testing.T) { + tests := []struct { + name string + key string + expected interface{} + }{ + { + name: "parameter with a string type value", + key: "AWS::AccountId", + expected: "123456789012", + }, + { + name: "a parameter with a list type value", + key: "AWS::NotificationARNs", + expected: []string{"notification::arn::1", "notification::arn::2"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if val, ok := pseudoParameters[tt.key]; ok { + assert.Equal(t, tt.expected, val.getRawValue()) + } else { + t.Fatal("unexpected parameter 
key") + } + }) + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/reference.go b/pkg/iac/scanners/cloudformation/parser/reference.go new file mode 100644 index 000000000000..2ff10058d868 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/reference.go @@ -0,0 +1,58 @@ +package parser + +import ( + "fmt" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" +) + +type CFReference struct { + logicalId string + resourceRange defsecTypes.Range + resolvedValue Property +} + +func NewCFReference(id string, resourceRange defsecTypes.Range) CFReference { + return CFReference{ + logicalId: id, + resourceRange: resourceRange, + } +} + +func NewCFReferenceWithValue(resourceRange defsecTypes.Range, resolvedValue Property, logicalId string) CFReference { + return CFReference{ + resourceRange: resourceRange, + resolvedValue: resolvedValue, + logicalId: logicalId, + } +} + +func (cf CFReference) String() string { + return cf.resourceRange.String() +} + +func (cf CFReference) LogicalID() string { + return cf.logicalId +} + +func (cf CFReference) ResourceRange() defsecTypes.Range { + return cf.resourceRange +} + +func (cf CFReference) PropertyRange() defsecTypes.Range { + if cf.resolvedValue.IsNotNil() { + return cf.resolvedValue.Range() + } + return defsecTypes.Range{} +} + +func (cf CFReference) DisplayValue() string { + if cf.resolvedValue.IsNotNil() { + return fmt.Sprintf("%v", cf.resolvedValue.RawValue()) + } + return "" +} + +func (cf *CFReference) Comment() string { + return cf.resolvedValue.Comment() +} diff --git a/pkg/iac/scanners/cloudformation/parser/resource.go b/pkg/iac/scanners/cloudformation/parser/resource.go new file mode 100644 index 000000000000..1258ac3fd5c0 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/resource.go @@ -0,0 +1,211 @@ +package parser + +import ( + "io/fs" + "strings" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + + "github.com/liamg/jfather" + "gopkg.in/yaml.v3" +) + +type Resource struct { 
// Resource is a single logical resource decoded from a CloudFormation
// template, together with the source range it was parsed from.
type Resource struct {
	ctx     *FileContext
	rng     defsecTypes.Range
	id      string
	comment string
	Inner   ResourceInner
}

// ResourceInner holds the decoded "Type" and "Properties" sections of a
// resource, for both JSON and YAML templates.
type ResourceInner struct {
	Type       string               `json:"Type" yaml:"Type"`
	Properties map[string]*Property `json:"Properties" yaml:"Properties"`
}

// ConfigureResource wires a freshly decoded resource up with its logical
// ID, source filesystem/path, and owning file context. It must be called
// before the resource's metadata or properties are used.
func (r *Resource) ConfigureResource(id string, target fs.FS, filepath string, ctx *FileContext) {
	r.setId(id)
	r.setFile(target, filepath)
	r.setContext(ctx)
}

// setId records the logical resource ID and pushes each property's map key
// down into the property as its name.
func (r *Resource) setId(id string) {
	r.id = id

	for n, p := range r.properties() {
		p.setName(n)
	}
}

// setFile rebuilds the resource's range against the real filename and
// filesystem (decoding only knows line numbers), then propagates the file
// info to all top-level properties.
func (r *Resource) setFile(target fs.FS, filepath string) {
	r.rng = defsecTypes.NewRange(filepath, r.rng.GetStartLine(), r.rng.GetEndLine(), r.rng.GetSourcePrefix(), target)

	for _, p := range r.Inner.Properties {
		p.setFileAndParentRange(target, filepath, r.rng)
	}
}

// setContext attaches the owning file context to the resource and all of
// its properties, tagging each property with the logical resource ID.
func (r *Resource) setContext(ctx *FileContext) {
	r.ctx = ctx

	for _, p := range r.Inner.Properties {
		p.SetLogicalResource(r.id)
		p.setContext(ctx)
	}
}

// UnmarshalYAML decodes the resource body and captures its source range.
// value.Line-1 is used for the start line — presumably to point at the
// resource's key line above the value node (TODO confirm).
func (r *Resource) UnmarshalYAML(value *yaml.Node) error {
	r.rng = defsecTypes.NewRange("", value.Line-1, calculateEndLine(value), "", nil)
	r.comment = value.LineComment
	return value.Decode(&r.Inner)
}

// UnmarshalJSONWithMetadata decodes the resource body from JSON while
// preserving the node's start/end lines.
func (r *Resource) UnmarshalJSONWithMetadata(node jfather.Node) error {
	r.rng = defsecTypes.NewRange("", node.Range().Start.Line, node.Range().End.Line, "", nil)
	return node.Decode(&r.Inner)
}

// ID returns the logical ID of the resource.
func (r *Resource) ID() string {
	return r.id
}

// Type returns the CloudFormation resource type, e.g. "AWS::S3::Bucket".
func (r *Resource) Type() string {
	return r.Inner.Type
}

// Range returns the source range the resource spans.
func (r *Resource) Range() defsecTypes.Range {
	return r.rng
}

// SourceFormat reports whether the owning template was YAML or JSON.
func (r *Resource) SourceFormat() SourceFormat {
	return r.ctx.SourceFormat
}

// Metadata builds defsec metadata from the resource's range and a
// reference string derived from its logical ID.
func (r *Resource) Metadata() defsecTypes.Metadata {
	return defsecTypes.NewMetadata(r.Range(), NewCFReference(r.id, r.rng).String())
}

// properties returns the decoded top-level properties map.
func (r *Resource) properties() map[string]*Property {
	return r.Inner.Properties
}

// IsNil reports whether the resource was never configured with an ID.
func (r *Resource) IsNil() bool {
	return r.id == ""
}
{ + + pathParts := strings.Split(path, ".") + + first := pathParts[0] + property := &Property{} + + for n, p := range r.properties() { + if n == first { + property = p + break + } + } + + if len(pathParts) == 1 || property.IsNil() { + if property.isFunction() { + resolved, _ := property.resolveValue() + return resolved + } + return property + } + + if nestedProperty := property.GetProperty(strings.Join(pathParts[1:], ".")); nestedProperty != nil { + return nestedProperty + } + + return &Property{} +} + +func (r *Resource) GetStringProperty(path string, defaultValue ...string) defsecTypes.StringValue { + defVal := "" + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotString() { + return r.StringDefault(defVal) + } + return prop.AsStringValue() +} + +func (r *Resource) GetBoolProperty(path string, defaultValue ...bool) defsecTypes.BoolValue { + defVal := false + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotBool() { + return r.inferBool(prop, defVal) + } + return prop.AsBoolValue() +} + +func (r *Resource) GetIntProperty(path string, defaultValue ...int) defsecTypes.IntValue { + defVal := 0 + if len(defaultValue) > 0 { + defVal = defaultValue[0] + } + + prop := r.GetProperty(path) + + if prop.IsNotInt() { + return r.IntDefault(defVal) + } + return prop.AsIntValue() +} + +func (r *Resource) StringDefault(defaultValue string) defsecTypes.StringValue { + return defsecTypes.StringDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) BoolDefault(defaultValue bool) defsecTypes.BoolValue { + return defsecTypes.BoolDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) IntDefault(defaultValue int) defsecTypes.IntValue { + return defsecTypes.IntDefault(defaultValue, r.Metadata()) +} + +func (r *Resource) inferBool(prop *Property, defaultValue bool) defsecTypes.BoolValue { + if prop.IsString() { + if prop.EqualTo("true", IgnoreCase) { + return 
defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("yes", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("1", IgnoreCase) { + return defsecTypes.Bool(true, prop.Metadata()) + } + if prop.EqualTo("false", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("no", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo("0", IgnoreCase) { + return defsecTypes.Bool(false, prop.Metadata()) + } + } + + if prop.IsInt() { + if prop.EqualTo(0) { + return defsecTypes.Bool(false, prop.Metadata()) + } + if prop.EqualTo(1) { + return defsecTypes.Bool(true, prop.Metadata()) + } + } + + return r.BoolDefault(defaultValue) +} diff --git a/pkg/iac/scanners/cloudformation/parser/resource_test.go b/pkg/iac/scanners/cloudformation/parser/resource_test.go new file mode 100644 index 000000000000..89d2448954e6 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/parser/resource_test.go @@ -0,0 +1,75 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" + "github.com/stretchr/testify/require" +) + +func Test_GetProperty_PropIsFunction(t *testing.T) { + resource := Resource{ + Inner: ResourceInner{ + Type: "AWS::S3::Bucket", + Properties: map[string]*Property{ + "BucketName": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "mybucket", + }, + }, + "VersioningConfiguration": { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Fn::If": { + Inner: PropertyInner{ + Type: cftypes.List, + Value: []*Property{ + { + Inner: PropertyInner{ + Type: cftypes.Bool, + Value: false, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Status": { + Inner: PropertyInner{ + Type: cftypes.String, + Value: "Enabled", + }, + }, + }, + }, + }, + { + Inner: PropertyInner{ + Type: cftypes.Map, + Value: map[string]*Property{ + "Status": { + Inner: 
// setPropertyValueFromJson fills propertyData with the cftypes type and
// decoded value of a JSON node, recursing into objects and arrays via
// *Property decoding.
func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error {

	switch node.Kind() {

	case jfather.KindNumber:
		// JSON numbers are decoded uniformly as float64.
		propertyData.Type = cftypes.Float64
		return node.Decode(&propertyData.Value)
	case jfather.KindBoolean:
		propertyData.Type = cftypes.Bool
		return node.Decode(&propertyData.Value)
	case jfather.KindString:
		propertyData.Type = cftypes.String
		return node.Decode(&propertyData.Value)
	case jfather.KindObject:
		var childData map[string]*Property
		if err := node.Decode(&childData); err != nil {
			return err
		}
		propertyData.Type = cftypes.Map
		propertyData.Value = childData
		return nil
	case jfather.KindArray:
		var childData []*Property
		if err := node.Decode(&childData); err != nil {
			return err
		}
		propertyData.Type = cftypes.List
		propertyData.Value = childData
		return nil
	default:
		// Best effort: treat any other node kind as a string.
		propertyData.Type = cftypes.String
		return node.Decode(&propertyData.Value)
	}

}
// createNode appends a copy of node to newContent: scalar nodes become
// plain !!str scalar nodes, container nodes are wrapped in a new node whose
// tag is normalised from its kind. Used when rewriting intrinsic-function
// short forms (!Ref, !Sub, ...) into their long-form map representation.
func createNode(node *yaml.Node, newContent []*yaml.Node) []*yaml.Node {
	if node.Content == nil {
		newContent = append(newContent, &yaml.Node{
			Tag:   "!!str",
			Value: node.Value,
			Kind:  yaml.ScalarNode,
		})
	} else {

		newNode := &yaml.Node{
			Content: node.Content,
			Kind:    node.Kind,
		}

		switch node.Kind {
		case yaml.SequenceNode:
			newNode.Tag = "!!seq"
		case yaml.MappingNode:
			newNode.Tag = "!!map"
		case yaml.ScalarNode:
			// NOTE(review): a scalar node with non-nil Content falls through
			// here leaving newNode.Tag empty, while the default branch copies
			// node.Tag — confirm this asymmetry is intentional.
		default:
			newNode.Tag = node.Tag
		}
		newContent = append(newContent, newNode)
	}
	return newContent
}

// calculateEndLine walks to the last (deepest) descendant of node and
// returns its line, i.e. the final source line the node spans.
func calculateEndLine(node *yaml.Node) int {
	if node.Content == nil {
		return node.Line
	}

	return calculateEndLine(node.Content[len(node.Content)-1])

}
"github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + + adapter "github.com/aquasecurity/trivy/internal/adapters/cloudformation" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +func WithParameters(params map[string]any) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithParameters(params)) + } + } +} + +func WithParameterFiles(files ...string) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithParameterFiles(files...)) + } + } +} + +func WithConfigsFS(fsys fs.FS) options.ScannerOption { + return func(cs options.ConfigurableScanner) { + if s, ok := cs.(*Scanner); ok { + s.addParserOptions(parser.WithConfigsFS(fsys)) + } + } +} + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + regoOnly bool + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool + options []options.ScannerOption + parserOptions []options.ParserOption + frameworks []framework.Framework + spec string + sync.Mutex +} + +func (s *Scanner) addParserOptions(opt options.ParserOption) { + s.parserOptions = append(s.parserOptions, opt) +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) 
// SetUseEmbeddedLibraries toggles loading of the embedded rego libraries.
func (s *Scanner) SetUseEmbeddedLibraries(b bool) {
	s.loadEmbeddedLibraries = b
}

// SetRegoOnly, when true, skips the built-in Go rules and runs only rego
// policies.
func (s *Scanner) SetRegoOnly(regoOnly bool) {
	s.regoOnly = regoOnly
}

// Name returns the scanner's display name.
func (s *Scanner) Name() string {
	return "CloudFormation"
}

// SetPolicyReaders supplies additional rego policies as raw readers.
func (s *Scanner) SetPolicyReaders(readers []io.Reader) {
	s.policyReaders = readers
}

// SetSkipRequiredCheck disables the file-type detection check in the parser.
func (s *Scanner) SetSkipRequiredCheck(skip bool) {
	s.skipRequired = skip
}

// SetDebugWriter routes scanner debug logging to the given writer.
func (s *Scanner) SetDebugWriter(writer io.Writer) {
	s.debug = debug.New(writer, "cloudformation", "scanner")
}

// SetPolicyDirs sets the directories rego policies are loaded from.
func (s *Scanner) SetPolicyDirs(dirs ...string) {
	s.policyDirs = dirs
}

func (s *Scanner) SetPolicyFilesystem(_ fs.FS) {
	// handled by rego when option is passed on
}

func (s *Scanner) SetDataFilesystem(_ fs.FS) {
	// handled by rego when option is passed on
}

// The remaining setters satisfy options.ConfigurableScanner but are no-ops
// for this scanner.
func (s *Scanner) SetRegoErrorLimit(_ int) {}

func (s *Scanner) SetTraceWriter(_ io.Writer)        {}
func (s *Scanner) SetPerResultTracingEnabled(_ bool) {}
func (s *Scanner) SetDataDirs(_ ...string)           {}
func (s *Scanner) SetPolicyNamespaces(_ ...string)   {}

// New creates a new Scanner. Scanner options are applied first so that any
// parser options they queue (and the final skipRequired value) are in place
// before the parser is constructed.
func New(opts ...options.ScannerOption) *Scanner {
	s := &Scanner{
		options: opts,
	}
	for _, opt := range opts {
		opt(s)
	}
	s.addParserOptions(options.ParserWithSkipRequiredCheck(s.skipRequired))
	s.parser = parser.New(s.parserOptions...)
	return s
}
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results scan.Results, err error) { + + contexts, err := s.parser.ParseFS(ctx, fs, dir) + if err != nil { + return nil, err + } + + if len(contexts) == 0 { + return nil, nil + } + + regoScanner, err := s.initRegoScanner(fs) + if err != nil { + return nil, err + } + + for _, cfCtx := range contexts { + if cfCtx == nil { + continue + } + fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + if err != nil { + return nil, err + } + results = append(results, fileResults...) + } + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + cfCtx, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + + regoScanner, err := s.initRegoScanner(fs) + if err != nil { + return nil, err + } + + results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", fs, false) + + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} + +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fs fs.FS) (results scan.Results, err error) { + state := adapter.Adapt(*cfCtx) + if state == nil { + return nil, nil + } + if !s.regoOnly { + for _, rule := range rules.GetRegistered(s.frameworks...) 
{ + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + if rule.GetRule().RegoPackage != "" { + continue + } + evalResult := rule.Evaluate(state) + if len(evalResult) > 0 { + s.debug.Log("Found %d results for %s", len(evalResult), rule.GetRule().AVDID) + for _, scanResult := range evalResult { + + ref := scanResult.Metadata().Reference() + + if ref == "" && scanResult.Metadata().Parent() != nil { + ref = scanResult.Metadata().Parent().Reference() + } + + description := getDescription(scanResult, ref) + scanResult.OverrideDescription(description) + results = append(results, scanResult) + } + } + } + } + regoResults, err := regoScanner.ScanInput(ctx, rego.Input{ + Path: cfCtx.Metadata().Range().GetFilename(), + FS: fs, + Contents: state.ToRego(), + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + return append(results, regoResults...), nil +} + +func getDescription(scanResult scan.Result, ref string) string { + switch scanResult.Status() { + case scan.StatusPassed: + return fmt.Sprintf("Resource '%s' passed check: %s", ref, scanResult.Rule().Summary) + case scan.StatusIgnored: + return fmt.Sprintf("Resource '%s' had check ignored: %s", ref, scanResult.Rule().Summary) + default: + return scanResult.Description() + } +} diff --git a/pkg/iac/scanners/cloudformation/scanner_test.go b/pkg/iac/scanners/cloudformation/scanner_test.go new file mode 100644 index 000000000000..1fab8452e98c --- /dev/null +++ b/pkg/iac/scanners/cloudformation/scanner_test.go @@ -0,0 +1,103 @@ +package cloudformation + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/test/testutil" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + 
"/code/main.yaml": `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: public-bucket + +`, + "/rules/rule.rego": `package builtin.dockerfile.DS006 + +__rego_metadata__ := { + "id": "DS006", + "avd_id": "AVD-DS-0006", + "title": "COPY '--from' referring to the current image", + "short_code": "no-self-referencing-copy-from", + "version": "v1.0.0", + "severity": "CRITICAL", + "type": "Dockerfile Security Check", + "description": "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + "recommended_actions": "Change the '--from' so that it will not refer to itself", + "url": "https://docs.docker.com/develop/develop-images/multistage-build/", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[res] { + res := { + "msg": "oh no", + "filepath": "code/main.yaml", + "startline": 6, + "endline": 6, + } +} + +`, + }) + + scanner := New(options.ScannerWithPolicyDirs("rules"), options.ScannerWithRegoOnly(true)) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "cloud", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil), + }, + RegoPackage: "data.builtin.dockerfile.DS006", + Frameworks: 
// Test_basic_cloudformation_scanning checks that scanning the example
// bucket template with embedded policies produces at least one failure.
func Test_basic_cloudformation_scanning(t *testing.T) {
	cfScanner := cloudformation.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true))

	results, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".")
	require.NoError(t, err)

	assert.Greater(t, len(results.GetFailed()), 0)
}

// NOTE(review): this test is currently byte-identical to
// Test_basic_cloudformation_scanning above — despite its name it asserts
// nothing about specific expected errors. Confirm whether it should scan a
// different fixture or assert specific failures.
func Test_cloudformation_scanning_has_expected_errors(t *testing.T) {
	cfScanner := cloudformation.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true))

	results, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".")
	require.NoError(t, err)

	assert.Greater(t, len(results.GetFailed()), 0)
}
+ + _, err := cfScanner.ScanFS(context.TODO(), os.DirFS("./examples/bucket"), ".") + require.NoError(t, err) + + // check debug is as expected + assert.Greater(t, len(debugWriter.String()), 0) +} diff --git a/pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml b/pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml new file mode 100644 index 000000000000..21f1c25042b0 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/test/examples/bucket/bucket.yaml @@ -0,0 +1,24 @@ +--- +AWSTemplateFormatVersion: "2010-09-09" +Description: An example Stack for a bucket +Parameters: + BucketName: + Type: String + Default: naughty-bucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + PublicAccessBlockConfiguration: + BlockPublicAcls: false + BlockPublicPolicy: false + IgnorePublicAcls: true + RestrictPublicBuckets: false + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: !Ref EncryptBucket diff --git a/pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml b/pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml new file mode 100644 index 000000000000..ec5e8a8d7661 --- /dev/null +++ b/pkg/iac/scanners/cloudformation/test/examples/ignores/bucket_with_ignores.yaml @@ -0,0 +1,24 @@ +--- +AWSTemplateFormatVersion: "2010-09-09" +Description: An example Stack for a bucket +Parameters: + BucketName: + Type: String + Default: naughty-bucket + EncryptBucket: + Type: Boolean + Default: false +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: + Ref: BucketName + PublicAccessBlockConfiguration: + BlockPublicAcls: false + BlockPublicPolicy: false # cfsec:ignore:AVD-AWS-0087 + IgnorePublicAcls: true + RestrictPublicBuckets: false + BucketEncryption: + ServerSideEncryptionConfiguration: + - BucketKeyEnabled: !Ref EncryptBucket diff --git 
a/pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml b/pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml new file mode 100644 index 000000000000..5b927457762b --- /dev/null +++ b/pkg/iac/scanners/cloudformation/test/examples/roles/roles.yml @@ -0,0 +1,51 @@ +Resources: + LambdaAPIRole: + Type: "AWS::IAM::Role" + Properties: + RoleName: "${self:service}-${self:provider.stage}-LambdaAPI" + Policies: + - PolicyName: "${self:service}-${self:provider.stage}-lambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "logs:CreateLogStream" + - "logs:CreateLogGroup" + - "logs:PutLogEvents" + Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/${self:service}-${self:provider.stage}*:*" + - !If + - EnableCrossAccountSnsPublish + - PolicyName: "${self:service}-${self:provider.stage}-asngen-sns-publish" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "SNS:Publish" + Resource: + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-PurchaseOrder.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Vendor.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Customer.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Manufacturer.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-ManufacturerItem.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-Item.fifo" + - !Sub "arn:aws:sns:${self:provider.region}:${self:provider.itopia_account_id}:${self:provider.stage}-*-VendorItem.fifo" + - !Ref "AWS::NoValue" + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + 
- "lambda.amazonaws.com" + Action: + - "sts:AssumeRole" + + + + +Conditions: + EnableCrossAccountSnsPublish: !Equals + - ${env:ALLOW_SNS_PUBLISH, true} + - true diff --git a/pkg/iac/scanners/dockerfile/parser/parser.go b/pkg/iac/scanners/dockerfile/parser/parser.go new file mode 100644 index 000000000000..6f86e0419031 --- /dev/null +++ b/pkg/iac/scanners/dockerfile/parser/parser.go @@ -0,0 +1,151 @@ +package parser + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/providers/dockerfile" + "github.com/moby/buildkit/frontend/dockerfile/instructions" + "github.com/moby/buildkit/frontend/dockerfile/parser" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "dockerfile", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new Dockerfile parser +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]*dockerfile.Dockerfile, error) { + + files := make(map[string]*dockerfile.Dockerfile) + if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if !p.Required(path) { + return nil + } + df, err := p.ParseFile(ctx, target, path) + if err != nil { + // TODO add debug for parse errors + return nil + } + files[path] = df + return nil + }); err != nil { + return nil, err + } + 
return files, nil +} + +// ParseFile parses Dockerfile content from the provided filesystem path. +func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (*dockerfile.Dockerfile, error) { + f, err := fs.Open(filepath.ToSlash(path)) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + return p.parse(path, f) +} + +func (p *Parser) Required(path string) bool { + if p.skipRequired { + return true + } + return detection.IsType(path, nil, detection.FileTypeDockerfile) +} + +func (p *Parser) parse(path string, r io.Reader) (*dockerfile.Dockerfile, error) { + parsed, err := parser.Parse(r) + if err != nil { + return nil, fmt.Errorf("dockerfile parse error: %w", err) + } + + var parsedFile dockerfile.Dockerfile + var stage dockerfile.Stage + var stageIndex int + fromValue := "args" + for _, child := range parsed.AST.Children { + child.Value = strings.ToLower(child.Value) + + instr, err := instructions.ParseInstruction(child) + if err != nil { + return nil, fmt.Errorf("process dockerfile instructions: %w", err) + } + + if _, ok := instr.(*instructions.Stage); ok { + if len(stage.Commands) > 0 { + parsedFile.Stages = append(parsedFile.Stages, stage) + } + if fromValue != "args" { + stageIndex++ + } + fromValue = strings.TrimSpace(strings.TrimPrefix(child.Original, "FROM ")) + stage = dockerfile.Stage{ + Name: fromValue, + } + } + + cmd := dockerfile.Command{ + Cmd: child.Value, + Original: child.Original, + Flags: child.Flags, + Stage: stageIndex, + Path: path, + StartLine: child.StartLine, + EndLine: child.EndLine, + } + + if child.Next != nil && len(child.Next.Children) > 0 { + cmd.SubCmd = child.Next.Children[0].Value + child = child.Next.Children[0] + } + + cmd.JSON = child.Attributes["json"] + for n := child.Next; n != nil; n = n.Next { + cmd.Value = append(cmd.Value, n.Value) + } + + stage.Commands = append(stage.Commands, cmd) + + } + if len(stage.Commands) > 0 { + parsedFile.Stages = append(parsedFile.Stages, stage) + } + + 
return &parsedFile, nil +} diff --git a/pkg/iac/scanners/dockerfile/parser/parser_test.go b/pkg/iac/scanners/dockerfile/parser/parser_test.go new file mode 100644 index 000000000000..04a45ea4695d --- /dev/null +++ b/pkg/iac/scanners/dockerfile/parser/parser_test.go @@ -0,0 +1,56 @@ +package parser + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Parser(t *testing.T) { + input := `FROM ubuntu:18.04 +COPY . /app +RUN make /app +CMD python /app/app.py +` + + df, err := New().parse("Dockerfile", strings.NewReader(input)) + require.NoError(t, err) + + assert.Equal(t, 1, len(df.Stages)) + + require.Len(t, df.Stages, 1) + + assert.Equal(t, "ubuntu:18.04", df.Stages[0].Name) + commands := df.Stages[0].Commands + assert.Equal(t, 4, len(commands)) + + // FROM ubuntu:18.04 + assert.Equal(t, "from", commands[0].Cmd) + assert.Equal(t, "ubuntu:18.04", commands[0].Value[0]) + assert.Equal(t, "Dockerfile", commands[0].Path) + assert.Equal(t, 1, commands[0].StartLine) + assert.Equal(t, 1, commands[0].EndLine) + + // COPY . /app + assert.Equal(t, "copy", commands[1].Cmd) + assert.Equal(t, ". 
/app", strings.Join(commands[1].Value, " ")) + assert.Equal(t, "Dockerfile", commands[1].Path) + assert.Equal(t, 2, commands[1].StartLine) + assert.Equal(t, 2, commands[1].EndLine) + + // RUN make /app + assert.Equal(t, "run", commands[2].Cmd) + assert.Equal(t, "make /app", commands[2].Value[0]) + assert.Equal(t, "Dockerfile", commands[2].Path) + assert.Equal(t, 3, commands[2].StartLine) + assert.Equal(t, 3, commands[2].EndLine) + + // CMD python /app/app.py + assert.Equal(t, "cmd", commands[3].Cmd) + assert.Equal(t, "python /app/app.py", commands[3].Value[0]) + assert.Equal(t, "Dockerfile", commands[3].Path) + assert.Equal(t, 4, commands[3].StartLine) + assert.Equal(t, 4, commands[3].EndLine) +} diff --git a/pkg/iac/scanners/dockerfile/scanner.go b/pkg/iac/scanners/dockerfile/scanner.go new file mode 100644 index 000000000000..a9d7c6f6f951 --- /dev/null +++ b/pkg/iac/scanners/dockerfile/scanner.go @@ -0,0 +1,182 @@ +package dockerfile + +import ( + "context" + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/types" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption + frameworks []framework.Framework + spec string + sync.Mutex + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s *Scanner) 
SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Dockerfile" +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "dockerfile", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyNamespaces(_ ...string) { + // handled by rego later - nothing to do for now... 
+} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetRegoErrorLimit(_ int) { + // handled by rego when option is passed on +} + +func NewScanner(opts ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + files, err := s.parser.ParseFS(ctx, fs, path) + if err != nil { + return nil, err + } + + if len(files) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, dfile := range files { + inputs = append(inputs, rego.Input{ + Path: path, + FS: fs, + Contents: dfile.ToRego(), + }) + } + + results, err := s.scanRego(ctx, fs, inputs...) + if err != nil { + return nil, err + } + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + dockerfile, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + s.debug.Log("Scanning %s...", path) + return s.scanRego(ctx, fs, rego.Input{ + Path: path, + Contents: dockerfile.ToRego(), + }) +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + + regoScanner := rego.NewScanner(types.SourceDockerfile, s.options...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) scanRego(ctx context.Context, srcFS fs.FS, inputs ...rego.Input) (scan.Results, error) { + regoScanner, err := s.initRegoScanner(srcFS) + if err != nil { + return nil, err + } + results, err := regoScanner.ScanInput(ctx, inputs...) + if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", srcFS, false) + return results, nil +} diff --git a/pkg/iac/scanners/dockerfile/scanner_test.go b/pkg/iac/scanners/dockerfile/scanner_test.go new file mode 100644 index 000000000000..991283d45e4d --- /dev/null +++ b/pkg/iac/scanners/dockerfile/scanner_test.go @@ -0,0 +1,638 @@ +package dockerfile + +import ( + "bytes" + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" + "github.com/aquasecurity/trivy/test/testutil" +) + +const DS006PolicyWithDockerfileSchema = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["dockerfile"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { + copies := docker.stage_copies[stage] + + copy := copies[_] + flag := copy.Flags[_] + contains(flag, "--from=") + parts := split(flag, "=") + + is_alias_current_from_alias(stage.Name, parts[1]) + args := parts[1] + output := { + "args": args, + "cmd": copy, + } +} + +is_alias_current_from_alias(current_name, current_alias) = allow { + current_name_lower := lower(current_name) + current_alias_lower := lower(current_alias) + + #expecting stage name as "myimage:tag as dep" + [_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + + alias == current_alias + + allow = true +} + +deny[res] { + output := get_alias_from_copy[_] + msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) + res := result.new(msg, output.cmd) +} +` + +const DS006PolicyWithMyFancyDockerfileSchema = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["myfancydockerfile"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { +copies := docker.stage_copies[stage] + +copy := copies[_] +flag := copy.Flags[_] +contains(flag, "--from=") +parts := split(flag, "=") + +is_alias_current_from_alias(stage.Name, parts[1]) +args := parts[1] +output := { +"args": args, +"cmd": copy, +} +} + +is_alias_current_from_alias(current_name, current_alias) = allow { +current_name_lower := lower(current_name) +current_alias_lower := lower(current_alias) + +#expecting stage name as "myimage:tag as dep" +[_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + +alias == current_alias + +allow = true +} + +deny[res] { +output := get_alias_from_copy[_] +msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) +res := result.new(msg, output.cmd) +} +` + +const DS006PolicyWithOldSchemaSelector = `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." 
+# scope: package +# schemas: +# - input: schema["input"] +# related_resources: +# - https://docs.docker.com/develop/develop-images/multistage-build/ +# custom: +# id: DS006 +# avd_id: AVD-DS-0006 +# severity: CRITICAL +# short_code: no-self-referencing-copy-from +# recommended_action: "Change the '--from' so that it will not refer to itself" +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 + +import data.lib.docker + +get_alias_from_copy[output] { + copies := docker.stage_copies[stage] + + copy := copies[_] + flag := copy.Flags[_] + contains(flag, "--from=") + parts := split(flag, "=") + + is_alias_current_from_alias(stage.Name, parts[1]) + args := parts[1] + output := { + "args": args, + "cmd": copy, + } +} + +is_alias_current_from_alias(current_name, current_alias) = allow { + current_name_lower := lower(current_name) + current_alias_lower := lower(current_alias) + + #expecting stage name as "myimage:tag as dep" + [_, alias] := regex.split(` + "`\\s+as\\s+`" + `, current_name_lower) + + alias == current_alias + + allow = true +} + +deny[res] { + output := get_alias_from_copy[_] + msg := sprintf("'COPY --from' should not mention current alias '%s' since it is impossible to copy from itself", [output.args]) + res := result.new(msg, output.cmd) +} +` +const DS006LegacyWithOldStyleMetadata = `package builtin.dockerfile.DS006 + +__rego_metadata__ := { + "id": "DS006", + "avd_id": "AVD-DS-0006", + "title": "COPY '--from' referring to the current image", + "short_code": "no-self-referencing-copy-from", + "version": "v1.0.0", + "severity": "CRITICAL", + "type": "Dockerfile Security Check", + "description": "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + "recommended_actions": "Change the '--from' so that it will not refer to itself", + "url": "https://docs.docker.com/develop/develop-images/multistage-build/", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": 
"dockerfile"}], +} + +deny[res] { + res := { + "msg": "oh no", + "filepath": "code/Dockerfile", + "startline": 1, + "endline": 1, + } +}` + +func Test_BasicScanLegacyRegoMetadata(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "/code/Dockerfile": `FROM ubuntu +USER root +`, + "/rules/rule.rego": DS006LegacyWithOldStyleMetadata, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + metadata := failure.Metadata() + assert.Equal(t, 1, metadata.Range().GetStartLine()) + assert.Equal(t, 1, metadata.Range().GetEndLine()) + assert.Equal(t, "code/Dockerfile", metadata.Range().GetFilename()) + + assert.Equal( + t, + scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "dockerfile", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.dockerfile.DS006", + Frameworks: map[framework.Framework][]string{}, + }, + results.GetFailed()[0].Rule(), + ) + + actualCode, err := results.GetFailed()[0].GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 1, + Content: "FROM ubuntu", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) +} 
+ +func Test_BasicScanNewRegoMetadata(t *testing.T) { + var testCases = []struct { + name string + inputRegoPolicy string + expectedError string + expectedInputTraceLogs string + expectedOutputTraceLogs string + }{ + { + name: "old schema selector schema.input", + inputRegoPolicy: DS006PolicyWithOldSchemaSelector, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + "Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + "sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector schema.dockerfile", + inputRegoPolicy: DS006PolicyWithDockerfileSchema, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + 
"Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + "sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector with custom schema.myfancydockerfile", + inputRegoPolicy: DS006PolicyWithMyFancyDockerfileSchema, + expectedInputTraceLogs: `REGO INPUT: +{ + "path": "code/Dockerfile", + "contents": { + "Stages": [ + { + "Commands": [ + { + "Cmd": "from", + "EndLine": 1, + "Flags": [], + "JSON": false, + "Original": "FROM golang:1.7.3 as dep", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 1, + "SubCmd": "", + "Value": [ + "golang:1.7.3", + "as", + "dep" + ] + }, + { + "Cmd": "copy", + "EndLine": 2, + "Flags": [ + "--from=dep" + ], + "JSON": false, + "Original": "COPY --from=dep /binary /", + "Path": "code/Dockerfile", + "Stage": 0, + "StartLine": 2, + "SubCmd": "", + "Value": [ + "/binary", + "/" + ] + } + ], + "Name": "golang:1.7.3 as dep" + } + ] + } +} +END REGO INPUT +`, + expectedOutputTraceLogs: `REGO RESULTSET: +[ + { + "expressions": [ + { + "value": [ + { + "endline": 2, + "explicit": false, + "filepath": "code/Dockerfile", + "fskey": "", + "managed": true, + "msg": "'COPY --from' should not mention current alias 'dep' since it is impossible to copy from itself", + "parent": null, + "resource": "", + 
"sourceprefix": "", + "startline": 2 + } + ], + "text": "data.builtin.dockerfile.DS006.deny", + "location": { + "row": 1, + "col": 1 + } + } + ] + } +] +END REGO RESULTSET + +`, + }, + { + name: "new schema selector but invalid", + inputRegoPolicy: `# METADATA +# title: "COPY '--from' referring to the current image" +# description: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself." +# scope: package +# schemas: +# - input: schema["spooky-schema"] +# custom: +# input: +# selector: +# - type: dockerfile +package builtin.dockerfile.DS006 +deny[res]{ +res := true +}`, + expectedError: `1 error occurred: rules/rule.rego:12: rego_type_error: undefined schema: schema["spooky-schema"]`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + regoMap := make(map[string]string) + libs, err := rego.LoadEmbeddedLibraries() + require.NoError(t, err) + for name, library := range libs { + regoMap["/rules/"+name] = library.String() + } + regoMap["/code/Dockerfile"] = `FROM golang:1.7.3 as dep +COPY --from=dep /binary /` + regoMap["/rules/rule.rego"] = tc.inputRegoPolicy + regoMap["/rules/schemas/myfancydockerfile.json"] = string(schemas.Dockerfile) // just use the same for testing + fs := testutil.CreateFS(t, regoMap) + + var traceBuf bytes.Buffer + var debugBuf bytes.Buffer + + scanner := NewScanner( + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithTrace(&traceBuf), + options.ScannerWithDebug(&debugBuf), + options.ScannerWithRegoErrorLimits(0), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + if tc.expectedError != "" && err != nil { + require.Equal(t, tc.expectedError, err.Error(), tc.name) + } else { + require.NoError(t, err) + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + metadata := failure.Metadata() + assert.Equal(t, 2, metadata.Range().GetStartLine()) + assert.Equal(t, 2, metadata.Range().GetEndLine()) + assert.Equal(t, 
"code/Dockerfile", metadata.Range().GetFilename()) + + assert.Equal( + t, + scan.Rule{ + AVDID: "AVD-DS-0006", + Aliases: []string{"DS006"}, + ShortCode: "no-self-referencing-copy-from", + Summary: "COPY '--from' referring to the current image", + Explanation: "COPY '--from' should not mention the current FROM alias, since it is impossible to copy from itself.", + Impact: "", + Resolution: "Change the '--from' so that it will not refer to itself", + Provider: "dockerfile", + Service: "general", + Links: []string{"https://docs.docker.com/develop/develop-images/multistage-build/"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.dockerfile.DS006", + Frameworks: map[framework.Framework][]string{}, + }, + results.GetFailed()[0].Rule(), + ) + + actualCode, err := results.GetFailed()[0].GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 2, + Content: "COPY --from=dep /binary /", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) + + // assert logs + assert.Contains(t, traceBuf.String(), tc.expectedInputTraceLogs, traceBuf.String()) + assert.Contains(t, traceBuf.String(), tc.expectedOutputTraceLogs, traceBuf.String()) + } + }) + } + +} diff --git a/pkg/iac/scanners/helm/options.go b/pkg/iac/scanners/helm/options.go new file mode 100644 index 000000000000..b2ec6ddf987d --- /dev/null +++ b/pkg/iac/scanners/helm/options.go @@ -0,0 +1,51 @@ +package helm + +import ( + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" +) + +type ConfigurableHelmScanner interface { + options.ConfigurableScanner + AddParserOptions(options ...options.ParserOption) +} + +func ScannerWithValuesFile(paths 
...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithValuesFile(paths...)) + } + } +} + +func ScannerWithValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithValues(values...)) + } + } +} + +func ScannerWithFileValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithFileValues(values...)) + } + } +} + +func ScannerWithStringValues(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithStringValues(values...)) + } + } +} + +func ScannerWithAPIVersions(values ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithAPIVersions(values...)) + } + } +} diff --git a/pkg/iac/scanners/helm/parser/option.go b/pkg/iac/scanners/helm/parser/option.go new file mode 100644 index 000000000000..6cf79009615d --- /dev/null +++ b/pkg/iac/scanners/helm/parser/option.go @@ -0,0 +1,52 @@ +package parser + +import "github.com/aquasecurity/defsec/pkg/scanners/options" + +type ConfigurableHelmParser interface { + options.ConfigurableParser + SetValuesFile(...string) + SetValues(...string) + SetFileValues(...string) + SetStringValues(...string) + SetAPIVersions(...string) +} + +func OptionWithValuesFile(paths ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetValuesFile(paths...) 
+		}
+	}
+}
+
+func OptionWithValues(values ...string) options.ParserOption {
+	return func(p options.ConfigurableParser) {
+		if helmParser, ok := p.(ConfigurableHelmParser); ok {
+			helmParser.SetValues(values...)
+		}
+	}
+}
+
+func OptionWithFileValues(values ...string) options.ParserOption {
+	return func(p options.ConfigurableParser) {
+		if helmParser, ok := p.(ConfigurableHelmParser); ok {
+			helmParser.SetFileValues(values...)
+		}
+	}
+}
+
+func OptionWithStringValues(values ...string) options.ParserOption {
+	return func(p options.ConfigurableParser) {
+		if helmParser, ok := p.(ConfigurableHelmParser); ok {
+			helmParser.SetStringValues(values...)
+		}
+	}
+}
+
+func OptionWithAPIVersions(values ...string) options.ParserOption {
+	return func(p options.ConfigurableParser) {
+		if helmParser, ok := p.(ConfigurableHelmParser); ok {
+			helmParser.SetAPIVersions(values...)
+		}
+	}
+}
diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go
new file mode 100644
index 000000000000..cc417e13e16d
--- /dev/null
+++ b/pkg/iac/scanners/helm/parser/parser.go
@@ -0,0 +1,322 @@
+package parser
+
+import (
+	"bytes"
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"regexp"
+	"sort"
+	"strings"
+
+	"gopkg.in/yaml.v3"
+
+	"github.com/aquasecurity/defsec/pkg/debug"
+	"github.com/google/uuid"
+	"helm.sh/helm/v3/pkg/action"
+	"helm.sh/helm/v3/pkg/chart"
+	"helm.sh/helm/v3/pkg/chart/loader"
+	"helm.sh/helm/v3/pkg/release"
+	"helm.sh/helm/v3/pkg/releaseutil"
+
+	"github.com/aquasecurity/defsec/pkg/scanners/options"
+	"github.com/aquasecurity/trivy/pkg/iac/detection"
+)
+
+var manifestNameRegex = regexp.MustCompile("# Source: [^/]+/(.+)")
+
+type Parser struct {
+	helmClient   *action.Install
+	rootPath     string
+	ChartSource  string
+	filepaths    []string
+	debug        debug.Logger
+	skipRequired bool
+	workingFS    fs.FS
+	valuesFiles  []string
+	values       []string
+	fileValues   []string
+	stringValues []string
+	apiVersions  []string
+}
+
+type ChartFile struct { + TemplateFilePath string + ManifestContent string +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "helm", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +func (p *Parser) SetValuesFile(s ...string) { + p.valuesFiles = s +} + +func (p *Parser) SetValues(values ...string) { + p.values = values +} + +func (p *Parser) SetFileValues(values ...string) { + p.fileValues = values +} + +func (p *Parser) SetStringValues(values ...string) { + p.stringValues = values +} + +func (p *Parser) SetAPIVersions(values ...string) { + p.apiVersions = values +} + +func New(path string, options ...options.ParserOption) *Parser { + + client := action.NewInstall(&action.Configuration{}) + client.DryRun = true // don't do anything + client.Replace = true // skip name check + client.ClientOnly = true // don't try to talk to a cluster + + p := &Parser{ + helmClient: client, + ChartSource: path, + } + + for _, option := range options { + option(p) + } + + if p.apiVersions != nil { + p.helmClient.APIVersions = p.apiVersions + } + + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) error { + p.workingFS = target + + if err := fs.WalkDir(p.workingFS, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + + if !p.required(path, p.workingFS) { + return nil + } + + if detection.IsArchive(path) { + tarFS, err := p.addTarToFS(path) + if errors.Is(err, errSkipFS) { + // an unpacked Chart already exists + return nil + } else if err != nil { + return fmt.Errorf("failed to add tar %q to FS: %w", path, err) + } + + targetPath := filepath.Dir(path) + if targetPath == "" { + targetPath = "." 
+ } + + if err := p.ParseFS(ctx, tarFS, targetPath); err != nil { + return fmt.Errorf("parse tar FS error: %w", err) + } + return nil + } else { + return p.addPaths(path) + } + }); err != nil { + return fmt.Errorf("walk dir error: %w", err) + } + + return nil +} + +func (p *Parser) addPaths(paths ...string) error { + for _, path := range paths { + if _, err := fs.Stat(p.workingFS, path); err != nil { + return err + } + + if strings.HasSuffix(path, "Chart.yaml") && p.rootPath == "" { + if err := p.extractChartName(path); err != nil { + return err + } + p.rootPath = filepath.Dir(path) + } + p.filepaths = append(p.filepaths, path) + } + return nil +} + +func (p *Parser) extractChartName(chartPath string) error { + + chart, err := p.workingFS.Open(chartPath) + if err != nil { + return err + } + defer func() { _ = chart.Close() }() + + var chartContent map[string]interface{} + if err := yaml.NewDecoder(chart).Decode(&chartContent); err != nil { + // the chart likely has the name templated and so cannot be parsed as yaml - use a temporary name + if dir := filepath.Dir(chartPath); dir != "" && dir != "." 
{ + p.helmClient.ReleaseName = dir + } else { + p.helmClient.ReleaseName = uuid.NewString() + } + return nil + } + + if name, ok := chartContent["name"]; !ok { + return fmt.Errorf("could not extract the chart name from %s", chartPath) + } else { + p.helmClient.ReleaseName = fmt.Sprintf("%v", name) + } + return nil +} + +func (p *Parser) RenderedChartFiles() ([]ChartFile, error) { + + tempDir, err := os.MkdirTemp(os.TempDir(), "defsec") + if err != nil { + return nil, err + } + + if err := p.writeBuildFiles(tempDir); err != nil { + return nil, err + } + + workingChart, err := loadChart(tempDir) + if err != nil { + return nil, err + } + + workingRelease, err := p.getRelease(workingChart) + if err != nil { + return nil, err + } + + var manifests bytes.Buffer + _, _ = fmt.Fprintln(&manifests, strings.TrimSpace(workingRelease.Manifest)) + + splitManifests := releaseutil.SplitManifests(manifests.String()) + manifestsKeys := make([]string, 0, len(splitManifests)) + for k := range splitManifests { + manifestsKeys = append(manifestsKeys, k) + } + return p.getRenderedManifests(manifestsKeys, splitManifests), nil +} + +func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { + opts := &ValueOptions{ + ValueFiles: p.valuesFiles, + Values: p.values, + FileValues: p.fileValues, + StringValues: p.stringValues, + } + + vals, err := opts.MergeValues() + if err != nil { + return nil, err + } + r, err := p.helmClient.RunWithContext(context.Background(), chart, vals) + if err != nil { + return nil, err + } + + if r == nil { + return nil, fmt.Errorf("there is nothing in the release") + } + return r, nil +} + +func loadChart(tempFs string) (*chart.Chart, error) { + loadedChart, err := loader.Load(tempFs) + if err != nil { + return nil, err + } + + if req := loadedChart.Metadata.Dependencies; req != nil { + if err := action.CheckDependencies(loadedChart, req); err != nil { + return nil, err + } + } + + return loadedChart, nil +} + +func (*Parser) 
getRenderedManifests(manifestsKeys []string, splitManifests map[string]string) []ChartFile { + sort.Sort(releaseutil.BySplitManifestsOrder(manifestsKeys)) + var manifestsToRender []ChartFile + for _, manifestKey := range manifestsKeys { + manifest := splitManifests[manifestKey] + submatch := manifestNameRegex.FindStringSubmatch(manifest) + if len(submatch) == 0 { + continue + } + manifestsToRender = append(manifestsToRender, ChartFile{ + TemplateFilePath: getManifestPath(manifest), + ManifestContent: manifest, + }) + } + return manifestsToRender +} + +func getManifestPath(manifest string) string { + lines := strings.Split(manifest, "\n") + if len(lines) == 0 { + return "unknown.yaml" + } + manifestFilePathParts := strings.SplitN(strings.TrimPrefix(lines[0], "# Source: "), "/", 2) + if len(manifestFilePathParts) > 1 { + return manifestFilePathParts[1] + } + return manifestFilePathParts[0] +} + +func (p *Parser) writeBuildFiles(tempFs string) error { + for _, path := range p.filepaths { + content, err := fs.ReadFile(p.workingFS, path) + if err != nil { + return err + } + workingPath := strings.TrimPrefix(path, p.rootPath) + workingPath = filepath.Join(tempFs, workingPath) + if err := os.MkdirAll(filepath.Dir(workingPath), os.ModePerm); err != nil { + return err + } + if err := os.WriteFile(workingPath, content, os.ModePerm); err != nil { + return err + } + } + return nil +} + +func (p *Parser) required(path string, workingFS fs.FS) bool { + if p.skipRequired { + return true + } + content, err := fs.ReadFile(workingFS, path) + if err != nil { + return false + } + + return detection.IsType(path, bytes.NewReader(content), detection.FileTypeHelm) +} diff --git a/pkg/iac/scanners/helm/parser/parser_tar.go b/pkg/iac/scanners/helm/parser/parser_tar.go new file mode 100644 index 000000000000..ad3abdad82ca --- /dev/null +++ b/pkg/iac/scanners/helm/parser/parser_tar.go @@ -0,0 +1,110 @@ +package parser + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "errors" + "fmt" 
// errSkipFS signals that the archive should not be unpacked because the
// unarchived chart already exists next to it in the filesystem.
var errSkipFS = errors.New("skip parse FS")

// addTarToFS unpacks the (optionally gzip-compressed) tar archive at path
// into a clone of the working filesystem, placing entries next to the
// archive, and returns the clone with the archive itself removed.
// It returns errSkipFS when the extracted chart is already present alongside
// the archive (see the in-loop comment below).
//
// NOTE(review): header.Name is used as-is when building write paths; an
// archive entry containing ".." could escape the target directory within the
// in-memory FS - confirm entries are sanitized upstream.
func (p *Parser) addTarToFS(path string) (fs.FS, error) {
	tarFS := memoryfs.CloneFS(p.workingFS)

	file, err := tarFS.Open(path)
	if err != nil {
		return nil, fmt.Errorf("failed to open tar: %w", err)
	}
	defer file.Close()

	var tr *tar.Reader

	// gzip-compressed archives (.tgz / .tar.gz) are transparently decompressed
	if detection.IsZip(path) {
		zipped, err := gzip.NewReader(file)
		if err != nil {
			return nil, fmt.Errorf("failed to create gzip reader: %w", err)
		}
		defer zipped.Close()
		tr = tar.NewReader(zipped)
	} else {
		tr = tar.NewReader(file)
	}

	// only inspect the first entry for an already-extracted chart
	checkExistedChart := true

	for {
		header, err := tr.Next()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, fmt.Errorf("failed to get next entry: %w", err)
		}

		if checkExistedChart {
			// Do not add archive files to FS if the chart already exists
			// This can happen when the source chart is located next to an archived chart (with the `helm package` command)
			// The first level folder in the archive is equal to the Chart name
			if _, err := tarFS.Stat(filepath.Dir(path) + "/" + filepath.Dir(header.Name)); err == nil {
				return nil, errSkipFS
			}
			checkExistedChart = false
		}

		// get the individual path and extract to the current directory
		entryPath := header.Name

		switch header.Typeflag {
		case tar.TypeDir:
			if err := tarFS.MkdirAll(entryPath, os.FileMode(header.Mode)); err != nil && !errors.Is(err, fs.ErrExist) {
				return nil, err
			}
		case tar.TypeReg:
			// regular files land next to the archive, under the chart folder
			writePath := filepath.Dir(path) + "/" + entryPath
			p.debug.Log("Unpacking tar entry %s", writePath)

			// best-effort: parent may already exist
			_ = tarFS.MkdirAll(filepath.Dir(writePath), fs.ModePerm)

			buf, err := copyChunked(tr, 1024)
			if err != nil {
				return nil, err
			}

			p.debug.Log("writing file contents to %s", writePath)
			if err := tarFS.WriteFile(writePath, buf.Bytes(), fs.ModePerm); err != nil {
				return nil, fmt.Errorf("write file error: %w", err)
			}
		default:
			// symlinks, hardlinks, devices etc. are rejected outright
			return nil, fmt.Errorf("header type %q is not supported", header.Typeflag)
		}
	}

	// the archive itself must not be re-parsed once extracted
	if err := tarFS.Remove(path); err != nil {
		return nil, fmt.Errorf("failed to remove tar from FS: %w", err)
	}

	return tarFS, nil
}

// copyChunked reads src to EOF into a buffer, chunkSize bytes at a time.
// Functionally this matches io.Copy; the chunked form is presumably here to
// satisfy decompression-bomb linters (gosec G110) - TODO confirm.
func copyChunked(src io.Reader, chunkSize int64) (*bytes.Buffer, error) {
	buf := new(bytes.Buffer)
	for {
		if _, err := io.CopyN(buf, src, chunkSize); err != nil {
			// a short final chunk surfaces as EOF; its bytes are already in buf
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, fmt.Errorf("failed to copy: %w", err)
		}
	}

	return buf, nil
}

// TestParseFS verifies that parsing a directory containing both a source
// chart and its packaged archive keeps only the source chart's files
// (the archive is skipped via errSkipFS).
func TestParseFS(t *testing.T) {
	t.Run("source chart is located next to an same archived chart", func(t *testing.T) {
		p := New(".")
		require.NoError(t, p.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", "chart-and-archived-chart")), "."))

		expectedFiles := []string{
			"my-chart/Chart.yaml",
			"my-chart/templates/pod.yaml",
		}
		assert.Equal(t, expectedFiles, p.filepaths)
	})
}
zyzD*jC;wk{YUh8YRCoCQU$FH*#+XlM$$uC>YWPUehRKmH+)=?13u+GV=yHE&{Lf~5 zmH=41C7Y`RYNjU*&yj|@nP|QUvWA`mWU@4rkRIg@%W(f?6aOLuz%rXp>P9Surk1T#ZMS}lA(hQ(&2!SPAr%epUd_KhI5>FQ{0aa7 N|NoRUvhx58002m6!sGw| literal 0 HcmV?d00001 diff --git a/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml new file mode 100644 index 000000000000..767f748a8d59 --- /dev/null +++ b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: my-chart +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff --git a/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml new file mode 100644 index 000000000000..3649247c1bb1 --- /dev/null +++ b/pkg/iac/scanners/helm/parser/testdata/chart-and-archived-chart/my-chart/templates/pod.yaml @@ -0,0 +1,21 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment + labels: + app: nginx +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 \ No newline at end of file diff --git a/pkg/iac/scanners/helm/parser/vals.go b/pkg/iac/scanners/helm/parser/vals.go new file mode 100644 index 000000000000..300dad819730 --- /dev/null +++ b/pkg/iac/scanners/helm/parser/vals.go @@ -0,0 +1,114 @@ +package parser + +import ( + "fmt" + "io" + "net/url" + "os" + "strings" + + "gopkg.in/yaml.v3" + "helm.sh/helm/v3/pkg/getter" + "helm.sh/helm/v3/pkg/strvals" +) + +type ValueOptions struct { + ValueFiles []string + StringValues []string + Values []string + FileValues []string +} + +// MergeValues merges values from files specified via -f/--values and directly +// via --set, 
--set-string, or --set-file, marshaling them to YAML +func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { + base := map[string]interface{}{} + + // User specified a values files via -f/--values + for _, filePath := range opts.ValueFiles { + currentMap := map[string]interface{}{} + + bytes, err := readFile(filePath) + if err != nil { + return nil, err + } + + if err := yaml.Unmarshal(bytes, ¤tMap); err != nil { + return nil, fmt.Errorf("failed to parse %s: %w", filePath, err) + } + // Merge with the previous map + base = mergeMaps(base, currentMap) + } + + // User specified a value via --set + for _, value := range opts.Values { + if err := strvals.ParseInto(value, base); err != nil { + return nil, fmt.Errorf("failed parsing --set data, %w", err) + } + } + + // User specified a value via --set-string + for _, value := range opts.StringValues { + if err := strvals.ParseIntoString(value, base); err != nil { + return nil, fmt.Errorf("failed parsing --set-string data %w", err) + } + } + + // User specified a value via --set-file + for _, value := range opts.FileValues { + reader := func(rs []rune) (interface{}, error) { + bytes, err := readFile(string(rs)) + if err != nil { + return nil, err + } + return string(bytes), err + } + if err := strvals.ParseIntoFile(value, base, reader); err != nil { + return nil, fmt.Errorf("failed parsing --set-file data: %w", err) + } + } + + return base, nil +} + +func mergeMaps(a, b map[string]interface{}) map[string]interface{} { + out := make(map[string]interface{}, len(a)) + for k, v := range a { + out[k] = v + } + for k, v := range b { + if v, ok := v.(map[string]interface{}); ok { + if bv, ok := out[k]; ok { + if bv, ok := bv.(map[string]interface{}); ok { + out[k] = mergeMaps(bv, v) + continue + } + } + } + out[k] = v + } + return out +} + +// readFile load a file from stdin, the local directory, or a remote file with a url. 
+func readFile(filePath string) ([]byte, error) { + if strings.TrimSpace(filePath) == "-" { + return io.ReadAll(os.Stdin) + } + u, _ := url.Parse(filePath) + + // FIXME: maybe someone handle other protocols like ftp. + if u.Scheme == "http" || u.Scheme == "https" { + g, err := getter.NewHTTPGetter() + if err != nil { + return nil, err + } + data, err := g.Get(filePath, getter.WithURL(filePath)) + if err != nil { + return nil, err + } + return data.Bytes(), err + } else { + return os.ReadFile(filePath) + } +} diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go new file mode 100644 index 000000000000..a4235b6ea279 --- /dev/null +++ b/pkg/iac/scanners/helm/scanner.go @@ -0,0 +1,219 @@ +package helm + +import ( + "context" + "fmt" + "io" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/liamg/memoryfs" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" + kparser "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + policyDirs []string + dataDirs []string + debug debug.Logger + options []options.ScannerOption + parserOptions []options.ParserOption + policyReaders []io.Reader + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool + policyFS fs.FS + skipRequired bool + frameworks []framework.Framework + spec string +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s *Scanner) SetFrameworks(frameworks 
[]framework.Framework) { + s.frameworks = frameworks +} + +// New creates a new Scanner +func New(options ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: options, + } + + for _, option := range options { + option(s) + } + return s +} + +func (s *Scanner) AddParserOptions(options ...options.ParserOption) { + s.parserOptions = append(s.parserOptions, options...) +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Helm" +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "helm", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { + // handled by rego later - nothing to do for now... +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(dirs ...string) { + s.dataDirs = dirs +} + +func (s *Scanner) SetPolicyNamespaces(namespaces ...string) { + // handled by rego later - nothing to do for now... 
+} + +func (s *Scanner) SetPolicyFilesystem(policyFS fs.FS) { + s.policyFS = policyFS +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) {} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.Results, error) { + + var results []scan.Result + if err := fs.WalkDir(target, path, func(path string, d fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + if detection.IsArchive(path) { + if scanResults, err := s.getScanResults(path, ctx, target); err != nil { + return err + } else { + results = append(results, scanResults...) + } + } + + if strings.HasSuffix(path, "Chart.yaml") { + if scanResults, err := s.getScanResults(filepath.Dir(path), ctx, target); err != nil { + return err + } else { + results = append(results, scanResults...) + } + } + + return nil + }); err != nil { + return nil, err + } + + return results, nil + +} + +func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) (results []scan.Result, err error) { + helmParser := parser.New(path, s.parserOptions...) + + if err := helmParser.ParseFS(ctx, target, path); err != nil { + return nil, err + } + + chartFiles, err := helmParser.RenderedChartFiles() + if err != nil { // not valid helm, maybe some other yaml etc., abort + return nil, nil + } + + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) 
+ policyFS := target + if s.policyFS != nil { + policyFS = s.policyFS + } + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, policyFS, s.policyDirs, s.policyReaders); err != nil { + return nil, fmt.Errorf("policies load: %w", err) + } + for _, file := range chartFiles { + s.debug.Log("Processing rendered chart file: %s", file.TemplateFilePath) + + manifests, err := kparser.New().Parse(strings.NewReader(file.ManifestContent), file.TemplateFilePath) + if err != nil { + return nil, fmt.Errorf("unmarshal yaml: %w", err) + } + for _, manifest := range manifests { + fileResults, err := regoScanner.ScanInput(ctx, rego.Input{ + Path: file.TemplateFilePath, + Contents: manifest, + FS: target, + }) + if err != nil { + return nil, fmt.Errorf("scanning error: %w", err) + } + + if len(fileResults) > 0 { + renderedFS := memoryfs.New() + if err := renderedFS.MkdirAll(filepath.Dir(file.TemplateFilePath), fs.ModePerm); err != nil { + return nil, err + } + if err := renderedFS.WriteLazyFile(file.TemplateFilePath, func() (io.Reader, error) { + return strings.NewReader(file.ManifestContent), nil + }, fs.ModePerm); err != nil { + return nil, err + } + fileResults.SetSourceAndFilesystem(helmParser.ChartSource, renderedFS, detection.IsArchive(helmParser.ChartSource)) + } + + results = append(results, fileResults...) + } + + } + return results, nil +} diff --git a/pkg/iac/scanners/helm/test/mysql/.helmignore b/pkg/iac/scanners/helm/test/mysql/.helmignore new file mode 100644 index 000000000000..f0c131944441 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/.helmignore @@ -0,0 +1,21 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj diff --git a/pkg/iac/scanners/helm/test/mysql/Chart.lock b/pkg/iac/scanners/helm/test/mysql/Chart.lock new file mode 100644 index 000000000000..2a6356005c25 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/Chart.lock @@ -0,0 +1,6 @@ +dependencies: +- name: common + repository: https://charts.bitnami.com/bitnami + version: 1.11.1 +digest: sha256:a000bcd4d4cdd813c67d633b5523b4a4cd478fb95f1cae665d9b0ba5c45b40e2 +generated: "2022-02-16T22:19:57.971058445Z" diff --git a/pkg/iac/scanners/helm/test/mysql/Chart.yaml b/pkg/iac/scanners/helm/test/mysql/Chart.yaml new file mode 100644 index 000000000000..7d5f5c6ce834 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/Chart.yaml @@ -0,0 +1,28 @@ +annotations: + category: Database +apiVersion: v2 +appVersion: 8.0.28 +dependencies: +- name: common + repository: https://charts.bitnami.com/bitnami + tags: + - bitnami-common + version: 1.x.x +description: MySQL is a fast, reliable, scalable, and easy to use open source relational + database system. Designed to handle mission-critical, heavy-load production applications. 
+home: https://github.com/bitnami/charts/tree/master/bitnami/mysql +icon: https://bitnami.com/assets/stacks/mysql/img/mysql-stack-220x234.png +keywords: +- mysql +- database +- sql +- cluster +- high availability +maintainers: +- email: containers@bitnami.com + name: Bitnami +name: mysql +sources: +- https://github.com/bitnami/bitnami-docker-mysql +- https://mysql.com +version: 8.8.26 diff --git a/pkg/iac/scanners/helm/test/mysql/README.md b/pkg/iac/scanners/helm/test/mysql/README.md new file mode 100644 index 000000000000..b03fa495893f --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/README.md @@ -0,0 +1,491 @@ + + +# MySQL packaged by Bitnami + +MySQL is a fast, reliable, scalable, and easy to use open source relational database system. Designed to handle mission-critical, heavy-load production applications. + +[Overview of MySQL](http://www.mysql.com) + +Trademarks: This software listing is packaged by Bitnami. The respective trademarks mentioned in the offering are owned by the respective companies, and use of them does not imply any affiliation or endorsement. + +## TL;DR + +```bash +$ helm repo add bitnami https://charts.bitnami.com/bitnami +$ helm install my-release bitnami/mysql +``` + +## Introduction + +This chart bootstraps a [MySQL](https://github.com/bitnami/bitnami-docker-mysql) replication cluster deployment on a [Kubernetes](https://kubernetes.io) cluster using the [Helm](https://helm.sh) package manager. + +Bitnami charts can be used with [Kubeapps](https://kubeapps.com/) for deployment and management of Helm Charts in clusters. This Helm chart has been tested on top of [Bitnami Kubernetes Production Runtime](https://kubeprod.io/) (BKPR). Deploy BKPR to get automated TLS certificates, logging and monitoring for your applications. 
+ +## Prerequisites + +- Kubernetes 1.19+ +- Helm 3.2.0+ +- PV provisioner support in the underlying infrastructure + +## Installing the Chart + +To install the chart with the release name `my-release`: + +```bash +$ helm repo add bitnami https://charts.bitnami.com/bitnami +$ helm install my-release bitnami/mysql +``` + +These commands deploy MySQL on the Kubernetes cluster in the default configuration. The [Parameters](#parameters) section lists the parameters that can be configured during installation. + +> **Tip**: List all releases using `helm list` + +## Uninstalling the Chart + +To uninstall/delete the `my-release` deployment: + +```bash +$ helm delete my-release +``` + +The command removes all the Kubernetes components associated with the chart and deletes the release. + +## Parameters + +### Global parameters + +| Name | Description | Value | +| ------------------------- | ----------------------------------------------- | ----- | +| `global.imageRegistry` | Global Docker image registry | `""` | +| `global.imagePullSecrets` | Global Docker registry secret names as an array | `[]` | +| `global.storageClass` | Global StorageClass for Persistent Volume(s) | `""` | + + +### Common parameters + +| Name | Description | Value | +| ------------------------ | --------------------------------------------------------------------------------------------------------- | --------------- | +| `nameOverride` | String to partially override common.names.fullname template (will maintain the release name) | `""` | +| `fullnameOverride` | String to fully override common.names.fullname template | `""` | +| `clusterDomain` | Cluster domain | `cluster.local` | +| `commonAnnotations` | Common annotations to add to all MySQL resources (sub-charts are not considered). Evaluated as a template | `{}` | +| `commonLabels` | Common labels to add to all MySQL resources (sub-charts are not considered). 
Evaluated as a template | `{}` | +| `extraDeploy` | Array with extra yaml to deploy with the chart. Evaluated as a template | `[]` | +| `schedulerName` | Use an alternate scheduler, e.g. "stork". | `""` | +| `diagnosticMode.enabled` | Enable diagnostic mode (all probes will be disabled and the command will be overridden) | `false` | +| `diagnosticMode.command` | Command to override all containers in the deployment | `["sleep"]` | +| `diagnosticMode.args` | Args to override all containers in the deployment | `["infinity"]` | + + +### MySQL common parameters + +| Name | Description | Value | +| -------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------- | +| `image.registry` | MySQL image registry | `docker.io` | +| `image.repository` | MySQL image repository | `bitnami/mysql` | +| `image.tag` | MySQL image tag (immutable tags are recommended) | `8.0.28-debian-10-r0` | +| `image.pullPolicy` | MySQL image pull policy | `IfNotPresent` | +| `image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `image.debug` | Specify if debug logs should be enabled | `false` | +| `architecture` | MySQL architecture (`standalone` or `replication`) | `standalone` | +| `auth.rootPassword` | Password for the `root` user. Ignored if existing secret is provided | `""` | +| `auth.database` | Name for a custom database to create | `my_database` | +| `auth.username` | Name for a custom user to create | `""` | +| `auth.password` | Password for the new user. Ignored if existing secret is provided | `""` | +| `auth.replicationUser` | MySQL replication user | `replicator` | +| `auth.replicationPassword` | MySQL replication user password. Ignored if existing secret is provided | `""` | +| `auth.existingSecret` | Use existing secret for password details. 
The secret has to contain the keys `mysql-root-password`, `mysql-replication-password` and `mysql-password` | `""` | +| `auth.forcePassword` | Force users to specify required passwords | `false` | +| `auth.usePasswordFiles` | Mount credentials as files instead of using an environment variable | `false` | +| `auth.customPasswordFiles` | Use custom password files when `auth.usePasswordFiles` is set to `true`. Define path for keys `root` and `user`, also define `replicator` if `architecture` is set to `replication` | `{}` | +| `initdbScripts` | Dictionary of initdb scripts | `{}` | +| `initdbScriptsConfigMap` | ConfigMap with the initdb scripts (Note: Overrides `initdbScripts`) | `""` | + + +### MySQL Primary parameters + +| Name | Description | Value | +| -------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | ------------------- | +| `primary.command` | Override default container command on MySQL Primary container(s) (useful when using custom images) | `[]` | +| `primary.args` | Override default container args on MySQL Primary container(s) (useful when using custom images) | `[]` | +| `primary.hostAliases` | Deployment pod host aliases | `[]` | +| `primary.configuration` | Configure MySQL Primary with a custom my.cnf file | `""` | +| `primary.existingConfigmap` | Name of existing ConfigMap with MySQL Primary configuration. | `""` | +| `primary.updateStrategy` | Update strategy type for the MySQL primary statefulset | `RollingUpdate` | +| `primary.rollingUpdatePartition` | Partition update strategy for MySQL Primary statefulset | `""` | +| `primary.podAnnotations` | Additional pod annotations for MySQL primary pods | `{}` | +| `primary.podAffinityPreset` | MySQL primary pod affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `primary.podAntiAffinityPreset` | MySQL primary pod anti-affinity preset. 
Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `soft` | +| `primary.nodeAffinityPreset.type` | MySQL primary node affinity preset type. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `primary.nodeAffinityPreset.key` | MySQL primary node label key to match Ignored if `primary.affinity` is set. | `""` | +| `primary.nodeAffinityPreset.values` | MySQL primary node label values to match. Ignored if `primary.affinity` is set. | `[]` | +| `primary.affinity` | Affinity for MySQL primary pods assignment | `{}` | +| `primary.nodeSelector` | Node labels for MySQL primary pods assignment | `{}` | +| `primary.tolerations` | Tolerations for MySQL primary pods assignment | `[]` | +| `primary.podSecurityContext.enabled` | Enable security context for MySQL primary pods | `true` | +| `primary.podSecurityContext.fsGroup` | Group ID for the mounted volumes' filesystem | `1001` | +| `primary.containerSecurityContext.enabled` | MySQL primary container securityContext | `true` | +| `primary.containerSecurityContext.runAsUser` | User ID for the MySQL primary container | `1001` | +| `primary.resources.limits` | The resources limits for MySQL primary containers | `{}` | +| `primary.resources.requests` | The requested resources for MySQL primary containers | `{}` | +| `primary.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `primary.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `5` | +| `primary.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `primary.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `primary.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `primary.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | +| `primary.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `primary.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe 
| `5` | +| `primary.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| `primary.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `primary.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `primary.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `primary.startupProbe.enabled` | Enable startupProbe | `true` | +| `primary.startupProbe.initialDelaySeconds` | Initial delay seconds for startupProbe | `15` | +| `primary.startupProbe.periodSeconds` | Period seconds for startupProbe | `10` | +| `primary.startupProbe.timeoutSeconds` | Timeout seconds for startupProbe | `1` | +| `primary.startupProbe.failureThreshold` | Failure threshold for startupProbe | `10` | +| `primary.startupProbe.successThreshold` | Success threshold for startupProbe | `1` | +| `primary.customLivenessProbe` | Override default liveness probe for MySQL primary containers | `{}` | +| `primary.customReadinessProbe` | Override default readiness probe for MySQL primary containers | `{}` | +| `primary.customStartupProbe` | Override default startup probe for MySQL primary containers | `{}` | +| `primary.extraFlags` | MySQL primary additional command line flags | `""` | +| `primary.extraEnvVars` | Extra environment variables to be set on MySQL primary containers | `[]` | +| `primary.extraEnvVarsCM` | Name of existing ConfigMap containing extra env vars for MySQL primary containers | `""` | +| `primary.extraEnvVarsSecret` | Name of existing Secret containing extra env vars for MySQL primary containers | `""` | +| `primary.persistence.enabled` | Enable persistence on MySQL primary replicas using a `PersistentVolumeClaim`. 
If false, use emptyDir | `true` | +| `primary.persistence.existingClaim` | Name of an existing `PersistentVolumeClaim` for MySQL primary replicas | `""` | +| `primary.persistence.storageClass` | MySQL primary persistent volume storage Class | `""` | +| `primary.persistence.annotations` | MySQL primary persistent volume claim annotations | `{}` | +| `primary.persistence.accessModes` | MySQL primary persistent volume access Modes | `["ReadWriteOnce"]` | +| `primary.persistence.size` | MySQL primary persistent volume size | `8Gi` | +| `primary.persistence.selector` | Selector to match an existing Persistent Volume | `{}` | +| `primary.extraVolumes` | Optionally specify extra list of additional volumes to the MySQL Primary pod(s) | `[]` | +| `primary.extraVolumeMounts` | Optionally specify extra list of additional volumeMounts for the MySQL Primary container(s) | `[]` | +| `primary.initContainers` | Add additional init containers for the MySQL Primary pod(s) | `[]` | +| `primary.sidecars` | Add additional sidecar containers for the MySQL Primary pod(s) | `[]` | +| `primary.service.type` | MySQL Primary K8s service type | `ClusterIP` | +| `primary.service.port` | MySQL Primary K8s service port | `3306` | +| `primary.service.nodePort` | MySQL Primary K8s service node port | `""` | +| `primary.service.clusterIP` | MySQL Primary K8s service clusterIP IP | `""` | +| `primary.service.loadBalancerIP` | MySQL Primary loadBalancerIP if service type is `LoadBalancer` | `""` | +| `primary.service.externalTrafficPolicy` | Enable client source IP preservation | `Cluster` | +| `primary.service.loadBalancerSourceRanges` | Addresses that are allowed when MySQL Primary service is LoadBalancer | `[]` | +| `primary.service.annotations` | Provide any additional annotations which may be required | `{}` | +| `primary.pdb.enabled` | Enable/disable a Pod Disruption Budget creation for MySQL primary pods | `false` | +| `primary.pdb.minAvailable` | Minimum number/percentage of MySQL primary 
pods that should remain scheduled | `1` | +| `primary.pdb.maxUnavailable` | Maximum number/percentage of MySQL primary pods that may be made unavailable | `""` | +| `primary.podLabels` | MySQL Primary pod label. If labels are same as commonLabels , this will take precedence | `{}` | + + +### MySQL Secondary parameters + +| Name | Description | Value | +| ---------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------- | +| `secondary.replicaCount` | Number of MySQL secondary replicas | `1` | +| `secondary.hostAliases` | Deployment pod host aliases | `[]` | +| `secondary.command` | Override default container command on MySQL Secondary container(s) (useful when using custom images) | `[]` | +| `secondary.args` | Override default container args on MySQL Secondary container(s) (useful when using custom images) | `[]` | +| `secondary.configuration` | Configure MySQL Secondary with a custom my.cnf file | `""` | +| `secondary.existingConfigmap` | Name of existing ConfigMap with MySQL Secondary configuration. | `""` | +| `secondary.updateStrategy` | Update strategy type for the MySQL secondary statefulset | `RollingUpdate` | +| `secondary.rollingUpdatePartition` | Partition update strategy for MySQL Secondary statefulset | `""` | +| `secondary.podAnnotations` | Additional pod annotations for MySQL secondary pods | `{}` | +| `secondary.podAffinityPreset` | MySQL secondary pod affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` | `""` | +| `secondary.podAntiAffinityPreset` | MySQL secondary pod anti-affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` | `soft` | +| `secondary.nodeAffinityPreset.type` | MySQL secondary node affinity preset type. Ignored if `secondary.affinity` is set. 
Allowed values: `soft` or `hard` | `""` | +| `secondary.nodeAffinityPreset.key` | MySQL secondary node label key to match Ignored if `secondary.affinity` is set. | `""` | +| `secondary.nodeAffinityPreset.values` | MySQL secondary node label values to match. Ignored if `secondary.affinity` is set. | `[]` | +| `secondary.affinity` | Affinity for MySQL secondary pods assignment | `{}` | +| `secondary.nodeSelector` | Node labels for MySQL secondary pods assignment | `{}` | +| `secondary.tolerations` | Tolerations for MySQL secondary pods assignment | `[]` | +| `secondary.podSecurityContext.enabled` | Enable security context for MySQL secondary pods | `true` | +| `secondary.podSecurityContext.fsGroup` | Group ID for the mounted volumes' filesystem | `1001` | +| `secondary.containerSecurityContext.enabled` | MySQL secondary container securityContext | `true` | +| `secondary.containerSecurityContext.runAsUser` | User ID for the MySQL secondary container | `1001` | +| `secondary.resources.limits` | The resources limits for MySQL secondary containers | `{}` | +| `secondary.resources.requests` | The requested resources for MySQL secondary containers | `{}` | +| `secondary.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `secondary.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `5` | +| `secondary.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `secondary.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `secondary.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `secondary.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | +| `secondary.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `secondary.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe | `5` | +| `secondary.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| 
`secondary.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `secondary.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `secondary.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `secondary.startupProbe.enabled` | Enable startupProbe | `true` | +| `secondary.startupProbe.initialDelaySeconds` | Initial delay seconds for startupProbe | `15` | +| `secondary.startupProbe.periodSeconds` | Period seconds for startupProbe | `10` | +| `secondary.startupProbe.timeoutSeconds` | Timeout seconds for startupProbe | `1` | +| `secondary.startupProbe.failureThreshold` | Failure threshold for startupProbe | `15` | +| `secondary.startupProbe.successThreshold` | Success threshold for startupProbe | `1` | +| `secondary.customLivenessProbe` | Override default liveness probe for MySQL secondary containers | `{}` | +| `secondary.customReadinessProbe` | Override default readiness probe for MySQL secondary containers | `{}` | +| `secondary.customStartupProbe` | Override default startup probe for MySQL secondary containers | `{}` | +| `secondary.extraFlags` | MySQL secondary additional command line flags | `""` | +| `secondary.extraEnvVars` | An array to add extra environment variables on MySQL secondary containers | `[]` | +| `secondary.extraEnvVarsCM` | Name of existing ConfigMap containing extra env vars for MySQL secondary containers | `""` | +| `secondary.extraEnvVarsSecret` | Name of existing Secret containing extra env vars for MySQL secondary containers | `""` | +| `secondary.persistence.enabled` | Enable persistence on MySQL secondary replicas using a `PersistentVolumeClaim` | `true` | +| `secondary.persistence.storageClass` | MySQL secondary persistent volume storage Class | `""` | +| `secondary.persistence.annotations` | MySQL secondary persistent volume claim annotations | `{}` | +| `secondary.persistence.accessModes` | MySQL secondary persistent volume access Modes | 
`["ReadWriteOnce"]` | +| `secondary.persistence.size` | MySQL secondary persistent volume size | `8Gi` | +| `secondary.persistence.selector` | Selector to match an existing Persistent Volume | `{}` | +| `secondary.extraVolumes` | Optionally specify extra list of additional volumes to the MySQL secondary pod(s) | `[]` | +| `secondary.extraVolumeMounts` | Optionally specify extra list of additional volumeMounts for the MySQL secondary container(s) | `[]` | +| `secondary.initContainers` | Add additional init containers for the MySQL secondary pod(s) | `[]` | +| `secondary.sidecars` | Add additional sidecar containers for the MySQL secondary pod(s) | `[]` | +| `secondary.service.type` | MySQL secondary Kubernetes service type | `ClusterIP` | +| `secondary.service.port` | MySQL secondary Kubernetes service port | `3306` | +| `secondary.service.nodePort` | MySQL secondary Kubernetes service node port | `""` | +| `secondary.service.clusterIP` | MySQL secondary Kubernetes service clusterIP IP | `""` | +| `secondary.service.loadBalancerIP` | MySQL secondary loadBalancerIP if service type is `LoadBalancer` | `""` | +| `secondary.service.externalTrafficPolicy` | Enable client source IP preservation | `Cluster` | +| `secondary.service.loadBalancerSourceRanges` | Addresses that are allowed when MySQL secondary service is LoadBalancer | `[]` | +| `secondary.service.annotations` | Provide any additional annotations which may be required | `{}` | +| `secondary.pdb.enabled` | Enable/disable a Pod Disruption Budget creation for MySQL secondary pods | `false` | +| `secondary.pdb.minAvailable` | Minimum number/percentage of MySQL secondary pods that should remain scheduled | `1` | +| `secondary.pdb.maxUnavailable` | Maximum number/percentage of MySQL secondary pods that may be made unavailable | `""` | +| `secondary.podLabels` | Additional pod labels for MySQL secondary pods | `{}` | + + +### RBAC parameters + +| Name | Description | Value | +| ---------------------------- | 
------------------------------------------------------ | ------- | +| `serviceAccount.create` | Enable the creation of a ServiceAccount for MySQL pods | `true` | +| `serviceAccount.name` | Name of the created ServiceAccount | `""` | +| `serviceAccount.annotations` | Annotations for MySQL Service Account | `{}` | +| `rbac.create` | Whether to create & use RBAC resources or not | `false` | + + +### Network Policy + +| Name | Description | Value | +| ------------------------------------------ | --------------------------------------------------------------------------------------------------------------- | ------- | +| `networkPolicy.enabled` | Enable creation of NetworkPolicy resources | `false` | +| `networkPolicy.allowExternal` | The Policy model to apply. | `true` | +| `networkPolicy.explicitNamespacesSelector` | A Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed to MySQL | `{}` | + + +### Volume Permissions parameters + +| Name | Description | Value | +| ------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | ----------------------- | +| `volumePermissions.enabled` | Enable init container that changes the owner and group of the persistent volume(s) mountpoint to `runAsUser:fsGroup` | `false` | +| `volumePermissions.image.registry` | Init container volume-permissions image registry | `docker.io` | +| `volumePermissions.image.repository` | Init container volume-permissions image repository | `bitnami/bitnami-shell` | +| `volumePermissions.image.tag` | Init container volume-permissions image tag (immutable tags are recommended) | `10-debian-10-r312` | +| `volumePermissions.image.pullPolicy` | Init container volume-permissions image pull policy | `IfNotPresent` | +| `volumePermissions.image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `volumePermissions.resources` | Init container 
volume-permissions resources | `{}` | + + +### Metrics parameters + +| Name | Description | Value | +| -------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | ------------------------- | +| `metrics.enabled` | Start a side-car prometheus exporter | `false` | +| `metrics.image.registry` | Exporter image registry | `docker.io` | +| `metrics.image.repository` | Exporter image repository | `bitnami/mysqld-exporter` | +| `metrics.image.tag` | Exporter image tag (immutable tags are recommended) | `0.13.0-debian-10-r216` | +| `metrics.image.pullPolicy` | Exporter image pull policy | `IfNotPresent` | +| `metrics.image.pullSecrets` | Specify docker-registry secret names as an array | `[]` | +| `metrics.service.type` | Kubernetes service type for MySQL Prometheus Exporter | `ClusterIP` | +| `metrics.service.port` | MySQL Prometheus Exporter service port | `9104` | +| `metrics.service.annotations` | Prometheus exporter service annotations | `{}` | +| `metrics.extraArgs.primary` | Extra args to be passed to mysqld_exporter on Primary pods | `[]` | +| `metrics.extraArgs.secondary` | Extra args to be passed to mysqld_exporter on Secondary pods | `[]` | +| `metrics.resources.limits` | The resources limits for MySQL prometheus exporter containers | `{}` | +| `metrics.resources.requests` | The requested resources for MySQL prometheus exporter containers | `{}` | +| `metrics.livenessProbe.enabled` | Enable livenessProbe | `true` | +| `metrics.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `120` | +| `metrics.livenessProbe.periodSeconds` | Period seconds for livenessProbe | `10` | +| `metrics.livenessProbe.timeoutSeconds` | Timeout seconds for livenessProbe | `1` | +| `metrics.livenessProbe.failureThreshold` | Failure threshold for livenessProbe | `3` | +| `metrics.livenessProbe.successThreshold` | Success threshold for livenessProbe | `1` | 
+| `metrics.readinessProbe.enabled` | Enable readinessProbe | `true` | +| `metrics.readinessProbe.initialDelaySeconds` | Initial delay seconds for readinessProbe | `30` | +| `metrics.readinessProbe.periodSeconds` | Period seconds for readinessProbe | `10` | +| `metrics.readinessProbe.timeoutSeconds` | Timeout seconds for readinessProbe | `1` | +| `metrics.readinessProbe.failureThreshold` | Failure threshold for readinessProbe | `3` | +| `metrics.readinessProbe.successThreshold` | Success threshold for readinessProbe | `1` | +| `metrics.serviceMonitor.enabled` | Create ServiceMonitor Resource for scraping metrics using PrometheusOperator | `false` | +| `metrics.serviceMonitor.namespace` | Specify the namespace in which the serviceMonitor resource will be created | `""` | +| `metrics.serviceMonitor.interval` | Specify the interval at which metrics should be scraped | `30s` | +| `metrics.serviceMonitor.scrapeTimeout` | Specify the timeout after which the scrape is ended | `""` | +| `metrics.serviceMonitor.relabellings` | Specify Metric Relabellings to add to the scrape endpoint | `[]` | +| `metrics.serviceMonitor.honorLabels` | Specify honorLabels parameter to add the scrape endpoint | `false` | +| `metrics.serviceMonitor.additionalLabels` | Used to pass Labels that are used by the Prometheus installed in your cluster to select Service Monitors to work with | `{}` | + + +The above parameters map to the env variables defined in [bitnami/mysql](https://github.com/bitnami/bitnami-docker-mysql). For more information please refer to the [bitnami/mysql](https://github.com/bitnami/bitnami-docker-mysql) image documentation. + +Specify each parameter using the `--set key=value[,key=value]` argument to `helm install`. For example, + +```bash +$ helm install my-release \ + --set auth.rootPassword=secretpassword,auth.database=app_database \ + bitnami/mysql +``` + +The above command sets the MySQL `root` account password to `secretpassword`. 
Additionally it creates a database named `app_database`. + +> NOTE: Once this chart is deployed, it is not possible to change the application's access credentials, such as usernames or passwords, using Helm. To change these application credentials after deployment, delete any persistent volumes (PVs) used by the chart and re-deploy it, or use the application's built-in administrative tools if available. + +Alternatively, a YAML file that specifies the values for the parameters can be provided while installing the chart. For example, + +```bash +$ helm install my-release -f values.yaml bitnami/mysql +``` + +> **Tip**: You can use the default [values.yaml](values.yaml) + +## Configuration and installation details + +### [Rolling VS Immutable tags](https://docs.bitnami.com/containers/how-to/understand-rolling-tags-containers/) + +It is strongly recommended to use immutable tags in a production environment. This ensures your deployment does not change automatically if the same tag is updated with a different image. + +Bitnami will release a new chart updating its containers if a new version of the main container, significant changes, or critical vulnerabilities exist. + +### Use a different MySQL version + +To modify the application version used in this chart, specify a different version of the image using the `image.tag` parameter and/or a different repository using the `image.repository` parameter. Refer to the [chart documentation for more information on these parameters and how to use them with images from a private registry](https://docs.bitnami.com/kubernetes/infrastructure/mysql/configuration/change-image-version/). + +### Customize a new MySQL instance + +The [Bitnami MySQL](https://github.com/bitnami/bitnami-docker-mysql) image allows you to use your custom scripts to initialize a fresh instance. Custom scripts may be specified using the `initdbScripts` parameter. 
Alternatively, an external ConfigMap may be created with all the initialization scripts and the ConfigMap passed to the chart via the `initdbScriptsConfigMap` parameter. Note that this will override the `initdbScripts` parameter. + +The allowed extensions are `.sh`, `.sql` and `.sql.gz`. + +These scripts are treated differently depending on their extension. While `.sh` scripts are executed on all the nodes, `.sql` and `.sql.gz` scripts are only executed on the primary nodes. This is because `.sh` scripts support conditional tests to identify the type of node they are running on, while such tests are not supported in `.sql` or `.sql.gz` files. + +Refer to the [chart documentation for more information and a usage example](http://docs.bitnami.com/kubernetes/infrastructure/mysql/configuration/customize-new-instance/). + +### Sidecars and Init Containers + +If you have a need for additional containers to run within the same pod as MySQL, you can do so via the `sidecars` config parameter. Simply define your container according to the Kubernetes container spec. + +```yaml +sidecars: + - name: your-image-name + image: your-image + imagePullPolicy: Always + ports: + - name: portname + containerPort: 1234 +``` + +Similarly, you can add extra init containers using the `initContainers` parameter. + +```yaml +initContainers: + - name: your-image-name + image: your-image + imagePullPolicy: Always + ports: + - name: portname + containerPort: 1234 +``` + +## Persistence + +The [Bitnami MySQL](https://github.com/bitnami/bitnami-docker-mysql) image stores the MySQL data and configurations at the `/bitnami/mysql` path of the container. + +The chart mounts a [Persistent Volume](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) volume at this location. The volume is created using dynamic volume provisioning by default. An existing PersistentVolumeClaim can also be defined for this purpose. 
+ +If you encounter errors when working with persistent volumes, refer to our [troubleshooting guide for persistent volumes](https://docs.bitnami.com/kubernetes/faq/troubleshooting/troubleshooting-persistence-volumes/). + +## Network Policy + +To enable network policy for MySQL, install [a networking plugin that implements the Kubernetes NetworkPolicy spec](https://kubernetes.io/docs/tasks/administer-cluster/declare-network-policy#before-you-begin), and set `networkPolicy.enabled` to `true`. + +For Kubernetes v1.5 & v1.6, you must also turn on NetworkPolicy by setting the DefaultDeny namespace annotation. Note: this will enforce policy for _all_ pods in the namespace: + +```console +$ kubectl annotate namespace default "net.beta.kubernetes.io/network-policy={\"ingress\":{\"isolation\":\"DefaultDeny\"}}" +``` + +With NetworkPolicy enabled, traffic will be limited to just port 3306. + +For more precise policy, set `networkPolicy.allowExternal=false`. This will only allow pods with the generated client label to connect to MySQL. +This label will be displayed in the output of a successful install. + +## Pod affinity + +This chart allows you to set your custom affinity using the `XXX.affinity` parameter(s). Find more information about Pod affinity in the [Kubernetes documentation](https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity). + +As an alternative, you can use the preset configurations for pod affinity, pod anti-affinity, and node affinity available at the [bitnami/common](https://github.com/bitnami/charts/tree/master/bitnami/common#affinities) chart. To do so, set the `XXX.podAffinityPreset`, `XXX.podAntiAffinityPreset`, or `XXX.nodeAffinityPreset` parameters. + +## Troubleshooting + +Find more information about how to deal with common errors related to Bitnami's Helm charts in [this troubleshooting guide](https://docs.bitnami.com/general/how-to/troubleshoot-helm-chart-issues). 
+ +## Upgrading + +It's necessary to set the `auth.rootPassword` parameter when upgrading for readiness/liveness probes to work properly. When you install this chart for the first time, some notes will be displayed providing the credentials you must use under the 'Administrator credentials' section. Please note down the password and run the command below to upgrade your chart: + +```bash +$ helm upgrade my-release bitnami/mysql --set auth.rootPassword=[ROOT_PASSWORD] +``` + +| Note: you need to substitute the placeholder _[ROOT_PASSWORD]_ with the value obtained in the installation notes. + +### To 8.0.0 + +- Several parameters were renamed or disappeared in favor of new ones on this major version: + - The terms *master* and *slave* have been replaced by the terms *primary* and *secondary*. Therefore, parameters prefixed with `master` or `slave` are now prefixed with `primary` or `secondary`, respectively. + - Credentials parameters are reorganized under the `auth` parameter. + - `replication.enabled` parameter is deprecated in favor of `architecture` parameter that accepts two values: `standalone` and `replication`. +- Chart labels were adapted to follow the [Helm charts standard labels](https://helm.sh/docs/chart_best_practices/labels/#standard-labels). +- This version also introduces `bitnami/common`, a [library chart](https://helm.sh/docs/topics/library_charts/#helm) as a dependency. More documentation about this new utility could be found [here](https://github.com/bitnami/charts/tree/master/bitnami/common#bitnami-common-library-chart). Please, make sure that you have updated the chart dependencies before executing any upgrade. + +Consequences: + +- Backwards compatibility is not guaranteed. To upgrade to `8.0.0`, install a new release of the MySQL chart, and migrate the data from your previous release. 
You have 2 alternatives to do so: + - Create a backup of the database, and restore it on the new release using tools such as [mysqldump](https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html). + - Reuse the PVC used to hold the master data on your previous release. To do so, use the `primary.persistence.existingClaim` parameter. The following example assumes that the release name is `mysql`: + +```bash +$ helm install mysql bitnami/mysql --set auth.rootPassword=[ROOT_PASSWORD] --set primary.persistence.existingClaim=[EXISTING_PVC] +``` + +| Note: you need to substitute the placeholder _[EXISTING_PVC]_ with the name of the PVC used on your previous release, and _[ROOT_PASSWORD]_ with the root password used in your previous release. + +### To 7.0.0 + +[On November 13, 2020, Helm v2 support formally ended](https://github.com/helm/charts#status-of-the-project). This major version is the result of the required changes applied to the Helm Chart to be able to incorporate the different features added in Helm v3 and to be consistent with the Helm project itself regarding the Helm v2 EOL. + +[Learn more about this change and related upgrade considerations](https://docs.bitnami.com/kubernetes/infrastructure/mysql/administration/upgrade-helm3/). + +### To 3.0.0 + +Backwards compatibility is not guaranteed unless you modify the labels used on the chart's deployments. +Use the workaround below to upgrade from versions previous to 3.0.0. The following example assumes that the release name is mysql: + +```console +$ kubectl delete statefulset mysql-master --cascade=false +$ kubectl delete statefulset mysql-slave --cascade=false +``` + +## License + +Copyright © 2022 Bitnami + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore b/pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore new file mode 100644 index 000000000000..50af03172541 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml b/pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml new file mode 100644 index 000000000000..87226649a57c --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/Chart.yaml @@ -0,0 +1,23 @@ +annotations: + category: Infrastructure +apiVersion: v2 +appVersion: 1.11.1 +description: A Library Helm Chart for grouping common logic between bitnami charts. + This chart is not deployable by itself. 
+home: https://github.com/bitnami/charts/tree/master/bitnami/common +icon: https://bitnami.com/downloads/logos/bitnami-mark.png +keywords: +- common +- helper +- template +- function +- bitnami +maintainers: +- email: containers@bitnami.com + name: Bitnami +name: common +sources: +- https://github.com/bitnami/charts +- https://www.bitnami.com/ +type: library +version: 1.11.1 diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/README.md b/pkg/iac/scanners/helm/test/mysql/charts/common/README.md new file mode 100644 index 000000000000..da84c426d0db --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/README.md @@ -0,0 +1,345 @@ +# Bitnami Common Library Chart + +A [Helm Library Chart](https://helm.sh/docs/topics/library_charts/#helm) for grouping common logic between bitnami charts. + +## TL;DR + +```yaml +dependencies: + - name: common + version: 0.x.x + repository: https://charts.bitnami.com/bitnami +``` + +```bash +$ helm dependency update +``` + +```yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "common.names.fullname" . }} +data: + myvalue: "Hello World" +``` + +## Introduction + +This chart provides common template helpers which can be used to develop new charts using the [Helm](https://helm.sh) package manager. + +Bitnami charts can be used with [Kubeapps](https://kubeapps.com/) for deployment and management of Helm Charts in clusters. This Helm chart has been tested on top of [Bitnami Kubernetes Production Runtime](https://kubeprod.io/) (BKPR). Deploy BKPR to get automated TLS certificates, logging and monitoring for your applications. + +## Prerequisites + +- Kubernetes 1.19+ +- Helm 3.2.0+ + +## Parameters + +The following table lists the helpers available in the library which are scoped in different sections. 
+ +### Affinities + +| Helper identifier | Description | Expected Input | +|-------------------------------|------------------------------------------------------|------------------------------------------------| +| `common.affinities.node.soft` | Return a soft nodeAffinity definition | `dict "key" "FOO" "values" (list "BAR" "BAZ")` | +| `common.affinities.node.hard` | Return a hard nodeAffinity definition | `dict "key" "FOO" "values" (list "BAR" "BAZ")` | +| `common.affinities.pod.soft` | Return a soft podAffinity/podAntiAffinity definition | `dict "component" "FOO" "context" $` | +| `common.affinities.pod.hard` | Return a hard podAffinity/podAntiAffinity definition | `dict "component" "FOO" "context" $` | + +### Capabilities + +| Helper identifier | Description | Expected Input | +|------------------------------------------------|------------------------------------------------------------------------------------------------|-------------------| +| `common.capabilities.kubeVersion` | Return the target Kubernetes version (using client default if .Values.kubeVersion is not set). | `.` Chart context | +| `common.capabilities.cronjob.apiVersion` | Return the appropriate apiVersion for cronjob. | `.` Chart context | +| `common.capabilities.deployment.apiVersion` | Return the appropriate apiVersion for deployment. | `.` Chart context | +| `common.capabilities.statefulset.apiVersion` | Return the appropriate apiVersion for statefulset. | `.` Chart context | +| `common.capabilities.ingress.apiVersion` | Return the appropriate apiVersion for ingress. | `.` Chart context | +| `common.capabilities.rbac.apiVersion` | Return the appropriate apiVersion for RBAC resources. | `.` Chart context | +| `common.capabilities.crd.apiVersion` | Return the appropriate apiVersion for CRDs. | `.` Chart context | +| `common.capabilities.policy.apiVersion` | Return the appropriate apiVersion for podsecuritypolicy. 
| `.` Chart context | +| `common.capabilities.networkPolicy.apiVersion` | Return the appropriate apiVersion for networkpolicy. | `.` Chart context | +| `common.capabilities.supportsHelmVersion` | Returns true if the used Helm version is 3.3+ | `.` Chart context | + +### Errors + +| Helper identifier | Description | Expected Input | +|-----------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| `common.errors.upgrade.passwords.empty` | It will ensure required passwords are given when we are upgrading a chart. If `validationErrors` is not empty it will throw an error and will stop the upgrade action. | `dict "validationErrors" (list $validationError00 $validationError01) "context" $` | + +### Images + +| Helper identifier | Description | Expected Input | +|-----------------------------|------------------------------------------------------|---------------------------------------------------------------------------------------------------------| +| `common.images.image` | Return the proper and full image name | `dict "imageRoot" .Values.path.to.the.image "global" $`, see [ImageRoot](#imageroot) for the structure. 
| +| `common.images.pullSecrets` | Return the proper Docker Image Registry Secret Names (deprecated: use common.images.renderPullSecrets instead) | `dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "global" .Values.global` | +| `common.images.renderPullSecrets` | Return the proper Docker Image Registry Secret Names (evaluates values as templates) | `dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "context" $` | + +### Ingress + +| Helper identifier | Description | Expected Input | +|-------------------------------------------|-------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.ingress.backend` | Generate a proper Ingress backend entry depending on the API version | `dict "serviceName" "foo" "servicePort" "bar"`, see the [Ingress deprecation notice](https://kubernetes.io/blog/2019/07/18/api-deprecations-in-1-16/) for the syntax differences | +| `common.ingress.supportsPathType` | Prints "true" if the pathType field is supported | `.` Chart context | +| `common.ingress.supportsIngressClassname` | Prints "true" if the ingressClassname field is supported | `.` Chart context | +| `common.ingress.certManagerRequest` | Prints "true" if required cert-manager annotations for TLS signed certificates are set in the Ingress annotations | `dict "annotations" .Values.path.to.the.ingress.annotations` | + +### Labels + +| Helper identifier | Description | Expected Input | +|-----------------------------|------------------------------------------------------|-------------------| +| `common.labels.standard` | Return Kubernetes standard labels | `.` Chart context | +| `common.labels.matchLabels` | Return the proper Docker Image Registry Secret Names | `.` Chart context | + +### 
Names + +| Helper identifier | Description | Expected Input | +|-------------------------|------------------------------------------------------------|-------------------| +| `common.names.name` | Expand the name of the chart or use `.Values.nameOverride` | `.` Chart context | +| `common.names.fullname` | Create a default fully qualified app name. | `.` Chart context | +| `common.names.chart` | Chart name plus version | `.` Chart context | + +### Secrets + +| Helper identifier | Description | Expected Input | +|---------------------------|--------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.secrets.name` | Generate the name of the secret. | `dict "existingSecret" .Values.path.to.the.existingSecret "defaultNameSuffix" "mySuffix" "context" $` see [ExistingSecret](#existingsecret) for the structure. | +| `common.secrets.key` | Generate secret key. | `dict "existingSecret" .Values.path.to.the.existingSecret "key" "keyName"` see [ExistingSecret](#existingsecret) for the structure. | +| `common.passwords.manage` | Generate secret password or retrieve one if already created. | `dict "secret" "secret-name" "key" "keyName" "providedValues" (list "path.to.password1" "path.to.password2") "length" 10 "strong" false "chartName" "chartName" "context" $`, length, strong and chartNAme fields are optional. | +| `common.secrets.exists` | Returns whether a previous generated secret already exists. 
| `dict "secret" "secret-name" "context" $` | + +### Storage + +| Helper identifier | Description | Expected Input | +|-------------------------------|---------------------------------------|---------------------------------------------------------------------------------------------------------------------| +| `common.storage.class` | Return the proper Storage Class | `dict "persistence" .Values.path.to.the.persistence "global" $`, see [Persistence](#persistence) for the structure. | + +### TplValues + +| Helper identifier | Description | Expected Input | +|---------------------------|----------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.tplvalues.render` | Renders a value that contains template | `dict "value" .Values.path.to.the.Value "context" $`, value is the value should rendered as template, context frequently is the chart context `$` or `.` | + +### Utils + +| Helper identifier | Description | Expected Input | +|--------------------------------|------------------------------------------------------------------------------------------|------------------------------------------------------------------------| +| `common.utils.fieldToEnvVar` | Build environment variable name given a field. | `dict "field" "my-password"` | +| `common.utils.secret.getvalue` | Print instructions to get a secret value. 
| `dict "secret" "secret-name" "field" "secret-value-field" "context" $` | +| `common.utils.getValueFromKey` | Gets a value from `.Values` object given its key path | `dict "key" "path.to.key" "context" $` | +| `common.utils.getKeyFromList` | Returns first `.Values` key with a defined value or first of the list if all non-defined | `dict "keys" (list "path.to.key1" "path.to.key2") "context" $` | + +### Validations + +| Helper identifier | Description | Expected Input | +|--------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `common.validations.values.single.empty` | Validate a value must not be empty. | `dict "valueKey" "path.to.value" "secret" "secret.name" "field" "my-password" "subchart" "subchart" "context" $` secret, field and subchart are optional. In case they are given, the helper will generate a how to get instruction. See [ValidateValue](#validatevalue) | +| `common.validations.values.multiple.empty` | Validate a multiple values must not be empty. It returns a shared error for all the values. | `dict "required" (list $validateValueConf00 $validateValueConf01) "context" $`. See [ValidateValue](#validatevalue) | +| `common.validations.values.mariadb.passwords` | This helper will ensure required password for MariaDB are not empty. It returns a shared error for all the values. | `dict "secret" "mariadb-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use mariadb chart and the helper. 
| +| `common.validations.values.postgresql.passwords` | This helper will ensure required password for PostgreSQL are not empty. It returns a shared error for all the values. | `dict "secret" "postgresql-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use postgresql chart and the helper. | +| `common.validations.values.redis.passwords` | This helper will ensure required password for Redis™ are not empty. It returns a shared error for all the values. | `dict "secret" "redis-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use redis chart and the helper. | +| `common.validations.values.cassandra.passwords` | This helper will ensure required password for Cassandra are not empty. It returns a shared error for all the values. | `dict "secret" "cassandra-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use cassandra chart and the helper. | +| `common.validations.values.mongodb.passwords` | This helper will ensure required password for MongoDB® are not empty. It returns a shared error for all the values. | `dict "secret" "mongodb-secret" "subchart" "true" "context" $` subchart field is optional and could be true or false it depends on where you will use mongodb chart and the helper. | + +### Warnings + +| Helper identifier | Description | Expected Input | +|------------------------------|----------------------------------|------------------------------------------------------------| +| `common.warnings.rollingTag` | Warning about using rolling tag. | `ImageRoot` see [ImageRoot](#imageroot) for the structure. 
| + +## Special input schemas + +### ImageRoot + +```yaml +registry: + type: string + description: Docker registry where the image is located + example: docker.io + +repository: + type: string + description: Repository and image name + example: bitnami/nginx + +tag: + type: string + description: image tag + example: 1.16.1-debian-10-r63 + +pullPolicy: + type: string + description: Specify an imagePullPolicy. Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent' + +pullSecrets: + type: array + items: + type: string + description: Optionally specify an array of imagePullSecrets (evaluated as templates). + +debug: + type: boolean + description: Set to true if you would like to see extra information on logs + example: false + +## An instance would be: +# registry: docker.io +# repository: bitnami/nginx +# tag: 1.16.1-debian-10-r63 +# pullPolicy: IfNotPresent +# debug: false +``` + +### Persistence + +```yaml +enabled: + type: boolean + description: Whether to enable persistence. + example: true + +storageClass: + type: string + description: Ghost data Persistent Volume Storage Class, If set to "-", storageClassName: "" which disables dynamic provisioning. + example: "-" + +accessMode: + type: string + description: Access mode for the Persistent Volume Storage. + example: ReadWriteOnce + +size: + type: string + description: Size of the Persistent Volume Storage. + example: 8Gi + +path: + type: string + description: Path to be persisted. + example: /bitnami + +## An instance would be: +# enabled: true +# storageClass: "-" +# accessMode: ReadWriteOnce +# size: 8Gi +# path: /bitnami +``` + +### ExistingSecret + +```yaml +name: + type: string + description: Name of the existing secret. + example: mySecret +keyMapping: + description: Mapping between the expected key name and the name of the key in the existing secret. 
+ type: object + +## An instance would be: +# name: mySecret +# keyMapping: +# password: myPasswordKey +``` + +#### Example of use + +When we store sensitive data for a deployment in a secret, some times we want to give to users the possibility of using theirs existing secrets. + +```yaml +# templates/secret.yaml +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "common.names.fullname" . }} + labels: + app: {{ include "common.names.fullname" . }} +type: Opaque +data: + password: {{ .Values.password | b64enc | quote }} + +# templates/dpl.yaml +--- +... + env: + - name: PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "common.secrets.name" (dict "existingSecret" .Values.existingSecret "context" $) }} + key: {{ include "common.secrets.key" (dict "existingSecret" .Values.existingSecret "key" "password") }} +... + +# values.yaml +--- +name: mySecret +keyMapping: + password: myPasswordKey +``` + +### ValidateValue + +#### NOTES.txt + +```console +{{- $validateValueConf00 := (dict "valueKey" "path.to.value00" "secret" "secretName" "field" "password-00") -}} +{{- $validateValueConf01 := (dict "valueKey" "path.to.value01" "secret" "secretName" "field" "password-01") -}} + +{{ include "common.validations.values.multiple.empty" (dict "required" (list $validateValueConf00 $validateValueConf01) "context" $) }} +``` + +If we force those values to be empty we will see some alerts + +```console +$ helm install test mychart --set path.to.value00="",path.to.value01="" + 'path.to.value00' must not be empty, please add '--set path.to.value00=$PASSWORD_00' to the command. To get the current value: + + export PASSWORD_00=$(kubectl get secret --namespace default secretName -o jsonpath="{.data.password-00}" | base64 --decode) + + 'path.to.value01' must not be empty, please add '--set path.to.value01=$PASSWORD_01' to the command. 
To get the current value: + + export PASSWORD_01=$(kubectl get secret --namespace default secretName -o jsonpath="{.data.password-01}" | base64 --decode) +``` + +## Upgrading + +### To 1.0.0 + +[On November 13, 2020, Helm v2 support was formally finished](https://github.com/helm/charts#status-of-the-project), this major version is the result of the required changes applied to the Helm Chart to be able to incorporate the different features added in Helm v3 and to be consistent with the Helm project itself regarding the Helm v2 EOL. + +**What changes were introduced in this major version?** + +- Previous versions of this Helm Chart use `apiVersion: v1` (installable by both Helm 2 and 3), this Helm Chart was updated to `apiVersion: v2` (installable by Helm 3 only). [Here](https://helm.sh/docs/topics/charts/#the-apiversion-field) you can find more information about the `apiVersion` field. +- Use `type: library`. [Here](https://v3.helm.sh/docs/faq/#library-chart-support) you can find more information. 
+- The different fields present in the *Chart.yaml* file have been ordered alphabetically in a homogeneous way for all the Bitnami Helm Charts + +**Considerations when upgrading to this version** + +- If you want to upgrade to this version from a previous one installed with Helm v3, you shouldn't face any issues +- If you want to upgrade to this version using Helm v2, this scenario is not supported as this version doesn't support Helm v2 anymore +- If you installed the previous version with Helm v2 and want to upgrade to this version with Helm v3, please refer to the [official Helm documentation](https://helm.sh/docs/topics/v2_v3_migration/#migration-use-cases) about migrating from Helm v2 to v3 + +**Useful links** + +- https://docs.bitnami.com/tutorials/resolve-helm2-helm3-post-migration-issues/ +- https://helm.sh/docs/topics/v2_v3_migration/ +- https://helm.sh/blog/migrate-from-helm-v2-to-helm-v3/ + +## License + +Copyright © 2022 Bitnami + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl new file mode 100644 index 000000000000..189ea403d558 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_affinities.tpl @@ -0,0 +1,102 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Return a soft nodeAffinity definition +{{ include "common.affinities.nodes.soft" (dict "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes.soft" -}} +preferredDuringSchedulingIgnoredDuringExecution: + - preference: + matchExpressions: + - key: {{ .key }} + operator: In + values: + {{- range .values }} + - {{ . | quote }} + {{- end }} + weight: 1 +{{- end -}} + +{{/* +Return a hard nodeAffinity definition +{{ include "common.affinities.nodes.hard" (dict "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes.hard" -}} +requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: {{ .key }} + operator: In + values: + {{- range .values }} + - {{ . | quote }} + {{- end }} +{{- end -}} + +{{/* +Return a nodeAffinity definition +{{ include "common.affinities.nodes" (dict "type" "soft" "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.nodes" -}} + {{- if eq .type "soft" }} + {{- include "common.affinities.nodes.soft" . -}} + {{- else if eq .type "hard" }} + {{- include "common.affinities.nodes.hard" . 
-}} + {{- end -}} +{{- end -}} + +{{/* +Return a soft podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods.soft" (dict "component" "FOO" "extraMatchLabels" .Values.extraMatchLabels "context" $) -}} +*/}} +{{- define "common.affinities.pods.soft" -}} +{{- $component := default "" .component -}} +{{- $extraMatchLabels := default (dict) .extraMatchLabels -}} +preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchLabels: {{- (include "common.labels.matchLabels" .context) | nindent 10 }} + {{- if not (empty $component) }} + {{ printf "app.kubernetes.io/component: %s" $component }} + {{- end }} + {{- range $key, $value := $extraMatchLabels }} + {{ $key }}: {{ $value | quote }} + {{- end }} + namespaces: + - {{ .context.Release.Namespace | quote }} + topologyKey: kubernetes.io/hostname + weight: 1 +{{- end -}} + +{{/* +Return a hard podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods.hard" (dict "component" "FOO" "extraMatchLabels" .Values.extraMatchLabels "context" $) -}} +*/}} +{{- define "common.affinities.pods.hard" -}} +{{- $component := default "" .component -}} +{{- $extraMatchLabels := default (dict) .extraMatchLabels -}} +requiredDuringSchedulingIgnoredDuringExecution: + - labelSelector: + matchLabels: {{- (include "common.labels.matchLabels" .context) | nindent 8 }} + {{- if not (empty $component) }} + {{ printf "app.kubernetes.io/component: %s" $component }} + {{- end }} + {{- range $key, $value := $extraMatchLabels }} + {{ $key }}: {{ $value | quote }} + {{- end }} + namespaces: + - {{ .context.Release.Namespace | quote }} + topologyKey: kubernetes.io/hostname +{{- end -}} + +{{/* +Return a podAffinity/podAntiAffinity definition +{{ include "common.affinities.pods" (dict "type" "soft" "key" "FOO" "values" (list "BAR" "BAZ")) -}} +*/}} +{{- define "common.affinities.pods" -}} + {{- if eq .type "soft" }} + {{- include "common.affinities.pods.soft" . 
-}} + {{- else if eq .type "hard" }} + {{- include "common.affinities.pods.hard" . -}} + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl new file mode 100644 index 000000000000..b94212bbe77c --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_capabilities.tpl @@ -0,0 +1,128 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Return the target Kubernetes version +*/}} +{{- define "common.capabilities.kubeVersion" -}} +{{- if .Values.global }} + {{- if .Values.global.kubeVersion }} + {{- .Values.global.kubeVersion -}} + {{- else }} + {{- default .Capabilities.KubeVersion.Version .Values.kubeVersion -}} + {{- end -}} +{{- else }} +{{- default .Capabilities.KubeVersion.Version .Values.kubeVersion -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for poddisruptionbudget. +*/}} +{{- define "common.capabilities.policy.apiVersion" -}} +{{- if semverCompare "<1.21-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "policy/v1beta1" -}} +{{- else -}} +{{- print "policy/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for networkpolicy. +*/}} +{{- define "common.capabilities.networkPolicy.apiVersion" -}} +{{- if semverCompare "<1.7-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for cronjob. +*/}} +{{- define "common.capabilities.cronjob.apiVersion" -}} +{{- if semverCompare "<1.21-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "batch/v1beta1" -}} +{{- else -}} +{{- print "batch/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for deployment. 
+*/}} +{{- define "common.capabilities.deployment.apiVersion" -}} +{{- if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else -}} +{{- print "apps/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for statefulset. +*/}} +{{- define "common.capabilities.statefulset.apiVersion" -}} +{{- if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "apps/v1beta1" -}} +{{- else -}} +{{- print "apps/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for ingress. +*/}} +{{- define "common.capabilities.ingress.apiVersion" -}} +{{- if .Values.ingress -}} +{{- if .Values.ingress.apiVersion -}} +{{- .Values.ingress.apiVersion -}} +{{- else if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "networking.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end }} +{{- else if semverCompare "<1.14-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "extensions/v1beta1" -}} +{{- else if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "networking.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "networking.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for RBAC resources. +*/}} +{{- define "common.capabilities.rbac.apiVersion" -}} +{{- if semverCompare "<1.17-0" (include "common.capabilities.kubeVersion" .) -}} +{{- print "rbac.authorization.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "rbac.authorization.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Return the appropriate apiVersion for CRDs. +*/}} +{{- define "common.capabilities.crd.apiVersion" -}} +{{- if semverCompare "<1.19-0" (include "common.capabilities.kubeVersion" .) 
-}} +{{- print "apiextensions.k8s.io/v1beta1" -}} +{{- else -}} +{{- print "apiextensions.k8s.io/v1" -}} +{{- end -}} +{{- end -}} + +{{/* +Returns true if the used Helm version is 3.3+. +A way to check the used Helm version was not introduced until version 3.3.0 with .Capabilities.HelmVersion, which contains an additional "{}}" structure. +This check is introduced as a regexMatch instead of {{ if .Capabilities.HelmVersion }} because checking for the key HelmVersion in <3.3 results in an "interface not found" error. +**To be removed when the catalog's minimum Helm version is 3.3** +*/}} +{{- define "common.capabilities.supportsHelmVersion" -}} +{{- if regexMatch "{(v[0-9])*[^}]*}}$" (.Capabilities | toString ) }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl new file mode 100644 index 000000000000..a79cc2e322e0 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_errors.tpl @@ -0,0 +1,23 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Throw an error when upgrading using empty passwords values that must not be empty. + +Usage: +{{- $validationError00 := include "common.validations.values.single.empty" (dict "valueKey" "path.to.password00" "secret" "secretName" "field" "password-00") -}} +{{- $validationError01 := include "common.validations.values.single.empty" (dict "valueKey" "path.to.password01" "secret" "secretName" "field" "password-01") -}} +{{ include "common.errors.upgrade.passwords.empty" (dict "validationErrors" (list $validationError00 $validationError01) "context" $) }} + +Required password params: + - validationErrors - String - Required. List of validation strings to be returned, if it is empty it won't throw an error. + - context - Context - Required. Parent context. 
+*/}} +{{- define "common.errors.upgrade.passwords.empty" -}} + {{- $validationErrors := join "" .validationErrors -}} + {{- if and $validationErrors .context.Release.IsUpgrade -}} + {{- $errorString := "\nPASSWORDS ERROR: You must provide your current passwords when upgrading the release." -}} + {{- $errorString = print $errorString "\n Note that even after reinstallation, old credentials may be needed as they may be kept in persistent volume claims." -}} + {{- $errorString = print $errorString "\n Further information can be obtained at https://docs.bitnami.com/general/how-to/troubleshoot-helm-chart-issues/#credential-errors-while-upgrading-chart-releases" -}} + {{- $errorString = print $errorString "\n%s" -}} + {{- printf $errorString $validationErrors | fail -}} + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl new file mode 100644 index 000000000000..42ffbc7227eb --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_images.tpl @@ -0,0 +1,75 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Return the proper image name +{{ include "common.images.image" ( dict "imageRoot" .Values.path.to.the.image "global" $) }} +*/}} +{{- define "common.images.image" -}} +{{- $registryName := .imageRoot.registry -}} +{{- $repositoryName := .imageRoot.repository -}} +{{- $tag := .imageRoot.tag | toString -}} +{{- if .global }} + {{- if .global.imageRegistry }} + {{- $registryName = .global.imageRegistry -}} + {{- end -}} +{{- end -}} +{{- if $registryName }} +{{- printf "%s/%s:%s" $registryName $repositoryName $tag -}} +{{- else -}} +{{- printf "%s:%s" $repositoryName $tag -}} +{{- end -}} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names (deprecated: use common.images.renderPullSecrets instead) +{{ include "common.images.pullSecrets" ( dict "images" (list .Values.path.to.the.image1, 
.Values.path.to.the.image2) "global" .Values.global) }} +*/}} +{{- define "common.images.pullSecrets" -}} + {{- $pullSecrets := list }} + + {{- if .global }} + {{- range .global.imagePullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- range .images -}} + {{- range .pullSecrets -}} + {{- $pullSecrets = append $pullSecrets . -}} + {{- end -}} + {{- end -}} + + {{- if (not (empty $pullSecrets)) }} +imagePullSecrets: + {{- range $pullSecrets }} + - name: {{ . }} + {{- end }} + {{- end }} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names evaluating values as templates +{{ include "common.images.renderPullSecrets" ( dict "images" (list .Values.path.to.the.image1, .Values.path.to.the.image2) "context" $) }} +*/}} +{{- define "common.images.renderPullSecrets" -}} + {{- $pullSecrets := list }} + {{- $context := .context }} + + {{- if $context.Values.global }} + {{- range $context.Values.global.imagePullSecrets -}} + {{- $pullSecrets = append $pullSecrets (include "common.tplvalues.render" (dict "value" . "context" $context)) -}} + {{- end -}} + {{- end -}} + + {{- range .images -}} + {{- range .pullSecrets -}} + {{- $pullSecrets = append $pullSecrets (include "common.tplvalues.render" (dict "value" . "context" $context)) -}} + {{- end -}} + {{- end -}} + + {{- if (not (empty $pullSecrets)) }} +imagePullSecrets: + {{- range $pullSecrets }} + - name: {{ . }} + {{- end }} + {{- end }} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl new file mode 100644 index 000000000000..8caf73a61082 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_ingress.tpl @@ -0,0 +1,68 @@ +{{/* vim: set filetype=mustache: */}} + +{{/* +Generate backend entry that is compatible with all Kubernetes API versions. 
+ +Usage: +{{ include "common.ingress.backend" (dict "serviceName" "backendName" "servicePort" "backendPort" "context" $) }} + +Params: + - serviceName - String. Name of an existing service backend + - servicePort - String/Int. Port name (or number) of the service. It will be translated to different yaml depending if it is a string or an integer. + - context - Dict - Required. The context for the template evaluation. +*/}} +{{- define "common.ingress.backend" -}} +{{- $apiVersion := (include "common.capabilities.ingress.apiVersion" .context) -}} +{{- if or (eq $apiVersion "extensions/v1beta1") (eq $apiVersion "networking.k8s.io/v1beta1") -}} +serviceName: {{ .serviceName }} +servicePort: {{ .servicePort }} +{{- else -}} +service: + name: {{ .serviceName }} + port: + {{- if typeIs "string" .servicePort }} + name: {{ .servicePort }} + {{- else if or (typeIs "int" .servicePort) (typeIs "float64" .servicePort) }} + number: {{ .servicePort | int }} + {{- end }} +{{- end -}} +{{- end -}} + +{{/* +Print "true" if the API pathType field is supported +Usage: +{{ include "common.ingress.supportsPathType" . }} +*/}} +{{- define "common.ingress.supportsPathType" -}} +{{- if (semverCompare "<1.18-0" (include "common.capabilities.kubeVersion" .)) -}} +{{- print "false" -}} +{{- else -}} +{{- print "true" -}} +{{- end -}} +{{- end -}} + +{{/* +Returns true if the ingressClassname field is supported +Usage: +{{ include "common.ingress.supportsIngressClassname" . }} +*/}} +{{- define "common.ingress.supportsIngressClassname" -}} +{{- if semverCompare "<1.18-0" (include "common.capabilities.kubeVersion" .) 
-}} +{{- print "false" -}} +{{- else -}} +{{- print "true" -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if cert-manager required annotations for TLS signed +certificates are set in the Ingress annotations +Ref: https://cert-manager.io/docs/usage/ingress/#supported-annotations +Usage: +{{ include "common.ingress.certManagerRequest" ( dict "annotations" .Values.path.to.the.ingress.annotations ) }} +*/}} +{{- define "common.ingress.certManagerRequest" -}} +{{ if or (hasKey .annotations "cert-manager.io/cluster-issuer") (hasKey .annotations "cert-manager.io/issuer") }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl new file mode 100644 index 000000000000..252066c7e2b3 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_labels.tpl @@ -0,0 +1,18 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Kubernetes standard labels +*/}} +{{- define "common.labels.standard" -}} +app.kubernetes.io/name: {{ include "common.names.name" . }} +helm.sh/chart: {{ include "common.names.chart" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + +{{/* +Labels to use on deploy.spec.selector.matchLabels and svc.spec.selector +*/}} +{{- define "common.labels.matchLabels" -}} +app.kubernetes.io/name: {{ include "common.names.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl new file mode 100644 index 000000000000..cf0323171f39 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_names.tpl @@ -0,0 +1,52 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. 
+*/}} +{{- define "common.names.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "common.names.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "common.names.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create a default fully qualified dependency name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
+Usage: +{{ include "common.names.dependency.fullname" (dict "chartName" "dependency-chart-name" "chartValues" .Values.dependency-chart "context" $) }} +*/}} +{{- define "common.names.dependency.fullname" -}} +{{- if .chartValues.fullnameOverride -}} +{{- .chartValues.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .chartName .chartValues.nameOverride -}} +{{- if contains $name .context.Release.Name -}} +{{- .context.Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .context.Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl new file mode 100644 index 000000000000..a1afc1195996 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_secrets.tpl @@ -0,0 +1,131 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Generate secret name. + +Usage: +{{ include "common.secrets.name" (dict "existingSecret" .Values.path.to.the.existingSecret "defaultNameSuffix" "mySuffix" "context" $) }} + +Params: + - existingSecret - ExistingSecret/String - Optional. The path to the existing secrets in the values.yaml given by the user + to be used instead of the default one. Allows for it to be of type String (just the secret name) for backwards compatibility. + +info: https://github.com/bitnami/charts/tree/master/bitnami/common#existingsecret + - defaultNameSuffix - String - Optional. It is used only if we have several secrets in the same deployment. + - context - Dict - Required. The context for the template evaluation. 
+*/}} +{{- define "common.secrets.name" -}} +{{- $name := (include "common.names.fullname" .context) -}} + +{{- if .defaultNameSuffix -}} +{{- $name = printf "%s-%s" $name .defaultNameSuffix | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{- with .existingSecret -}} +{{- if not (typeIs "string" .) -}} +{{- with .name -}} +{{- $name = . -}} +{{- end -}} +{{- else -}} +{{- $name = . -}} +{{- end -}} +{{- end -}} + +{{- printf "%s" $name -}} +{{- end -}} + +{{/* +Generate secret key. + +Usage: +{{ include "common.secrets.key" (dict "existingSecret" .Values.path.to.the.existingSecret "key" "keyName") }} + +Params: + - existingSecret - ExistingSecret/String - Optional. The path to the existing secrets in the values.yaml given by the user + to be used instead of the default one. Allows for it to be of type String (just the secret name) for backwards compatibility. + +info: https://github.com/bitnami/charts/tree/master/bitnami/common#existingsecret + - key - String - Required. Name of the key in the secret. +*/}} +{{- define "common.secrets.key" -}} +{{- $key := .key -}} + +{{- if .existingSecret -}} + {{- if not (typeIs "string" .existingSecret) -}} + {{- if .existingSecret.keyMapping -}} + {{- $key = index .existingSecret.keyMapping $.key -}} + {{- end -}} + {{- end }} +{{- end -}} + +{{- printf "%s" $key -}} +{{- end -}} + +{{/* +Generate secret password or retrieve one if already created. + +Usage: +{{ include "common.secrets.passwords.manage" (dict "secret" "secret-name" "key" "keyName" "providedValues" (list "path.to.password1" "path.to.password2") "length" 10 "strong" false "chartName" "chartName" "context" $) }} + +Params: + - secret - String - Required - Name of the 'Secret' resource where the password is stored. + - key - String - Required - Name of the key in the secret. + - providedValues - List - Required - The path to the validating value in the values.yaml, e.g: "mysql.password". Will pick first parameter with a defined value. 
+ - length - int - Optional - Length of the generated random password. + - strong - Boolean - Optional - Whether to add symbols to the generated random password. + - chartName - String - Optional - Name of the chart used when said chart is deployed as a subchart. + - context - Context - Required - Parent context. +*/}} +{{- define "common.secrets.passwords.manage" -}} + +{{- $password := "" }} +{{- $subchart := "" }} +{{- $chartName := default "" .chartName }} +{{- $passwordLength := default 10 .length }} +{{- $providedPasswordKey := include "common.utils.getKeyFromList" (dict "keys" .providedValues "context" $.context) }} +{{- $providedPasswordValue := include "common.utils.getValueFromKey" (dict "key" $providedPasswordKey "context" $.context) }} +{{- $secretData := (lookup "v1" "Secret" $.context.Release.Namespace .secret).data }} +{{- if $secretData }} + {{- if hasKey $secretData .key }} + {{- $password = index $secretData .key }} + {{- else }} + {{- printf "\nPASSWORDS ERROR: The secret \"%s\" does not contain the key \"%s\"\n" .secret .key | fail -}} + {{- end -}} +{{- else if $providedPasswordValue }} + {{- $password = $providedPasswordValue | toString | b64enc | quote }} +{{- else }} + + {{- if .context.Values.enabled }} + {{- $subchart = $chartName }} + {{- end -}} + + {{- $requiredPassword := dict "valueKey" $providedPasswordKey "secret" .secret "field" .key "subchart" $subchart "context" $.context -}} + {{- $requiredPasswordError := include "common.validations.values.single.empty" $requiredPassword -}} + {{- $passwordValidationErrors := list $requiredPasswordError -}} + {{- include "common.errors.upgrade.passwords.empty" (dict "validationErrors" $passwordValidationErrors "context" $.context) -}} + + {{- if .strong }} + {{- $subStr := list (lower (randAlpha 1)) (randNumeric 1) (upper (randAlpha 1)) | join "_" }} + {{- $password = randAscii $passwordLength }} + {{- $password = regexReplaceAllLiteral "\\W" $password "@" | substr 5 $passwordLength }} + {{- 
$password = printf "%s%s" $subStr $password | toString | shuffle | b64enc | quote }} + {{- else }} + {{- $password = randAlphaNum $passwordLength | b64enc | quote }} + {{- end }} +{{- end -}} +{{- printf "%s" $password -}} +{{- end -}} + +{{/* +Returns whether a previous generated secret already exists + +Usage: +{{ include "common.secrets.exists" (dict "secret" "secret-name" "context" $) }} + +Params: + - secret - String - Required - Name of the 'Secret' resource where the password is stored. + - context - Context - Required - Parent context. +*/}} +{{- define "common.secrets.exists" -}} +{{- $secret := (lookup "v1" "Secret" $.context.Release.Namespace .secret) }} +{{- if $secret }} + {{- true -}} +{{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl new file mode 100644 index 000000000000..60e2a844f6eb --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_storage.tpl @@ -0,0 +1,23 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Return the proper Storage Class +{{ include "common.storage.class" ( dict "persistence" .Values.path.to.the.persistence "global" $) }} +*/}} +{{- define "common.storage.class" -}} + +{{- $storageClass := .persistence.storageClass -}} +{{- if .global -}} + {{- if .global.storageClass -}} + {{- $storageClass = .global.storageClass -}} + {{- end -}} +{{- end -}} + +{{- if $storageClass -}} + {{- if (eq "-" $storageClass) -}} + {{- printf "storageClassName: \"\"" -}} + {{- else }} + {{- printf "storageClassName: %s" $storageClass -}} + {{- end -}} +{{- end -}} + +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl new file mode 100644 index 000000000000..2db166851bb5 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_tplvalues.tpl @@ -0,0 +1,13 @@ 
+{{/* vim: set filetype=mustache: */}} +{{/* +Renders a value that contains template. +Usage: +{{ include "common.tplvalues.render" ( dict "value" .Values.path.to.the.Value "context" $) }} +*/}} +{{- define "common.tplvalues.render" -}} + {{- if typeIs "string" .value }} + {{- tpl .value .context }} + {{- else }} + {{- tpl (.value | toYaml) .context }} + {{- end }} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl new file mode 100644 index 000000000000..ea083a249f80 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_utils.tpl @@ -0,0 +1,62 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Print instructions to get a secret value. +Usage: +{{ include "common.utils.secret.getvalue" (dict "secret" "secret-name" "field" "secret-value-field" "context" $) }} +*/}} +{{- define "common.utils.secret.getvalue" -}} +{{- $varname := include "common.utils.fieldToEnvVar" . -}} +export {{ $varname }}=$(kubectl get secret --namespace {{ .context.Release.Namespace | quote }} {{ .secret }} -o jsonpath="{.data.{{ .field }}}" | base64 --decode) +{{- end -}} + +{{/* +Build env var name given a field +Usage: +{{ include "common.utils.fieldToEnvVar" dict "field" "my-password" }} +*/}} +{{- define "common.utils.fieldToEnvVar" -}} + {{- $fieldNameSplit := splitList "-" .field -}} + {{- $upperCaseFieldNameSplit := list -}} + + {{- range $fieldNameSplit -}} + {{- $upperCaseFieldNameSplit = append $upperCaseFieldNameSplit ( upper . ) -}} + {{- end -}} + + {{ join "_" $upperCaseFieldNameSplit }} +{{- end -}} + +{{/* +Gets a value from .Values given +Usage: +{{ include "common.utils.getValueFromKey" (dict "key" "path.to.key" "context" $) }} +*/}} +{{- define "common.utils.getValueFromKey" -}} +{{- $splitKey := splitList "." 
.key -}} +{{- $value := "" -}} +{{- $latestObj := $.context.Values -}} +{{- range $splitKey -}} + {{- if not $latestObj -}} + {{- printf "please review the entire path of '%s' exists in values" $.key | fail -}} + {{- end -}} + {{- $value = ( index $latestObj . ) -}} + {{- $latestObj = $value -}} +{{- end -}} +{{- printf "%v" (default "" $value) -}} +{{- end -}} + +{{/* +Returns first .Values key with a defined value or first of the list if all non-defined +Usage: +{{ include "common.utils.getKeyFromList" (dict "keys" (list "path.to.key1" "path.to.key2") "context" $) }} +*/}} +{{- define "common.utils.getKeyFromList" -}} +{{- $key := first .keys -}} +{{- $reverseKeys := reverse .keys }} +{{- range $reverseKeys }} + {{- $value := include "common.utils.getValueFromKey" (dict "key" . "context" $.context ) }} + {{- if $value -}} + {{- $key = . }} + {{- end -}} +{{- end -}} +{{- printf "%s" $key -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl new file mode 100644 index 000000000000..ae10fa41ee7d --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/_warnings.tpl @@ -0,0 +1,14 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Warning about using rolling tag. +Usage: +{{ include "common.warnings.rollingTag" .Values.path.to.the.imageRoot }} +*/}} +{{- define "common.warnings.rollingTag" -}} + +{{- if and (contains "bitnami/" .repository) (not (.tag | toString | regexFind "-r\\d+$|sha256:")) }} +WARNING: Rolling tag detected ({{ .repository }}:{{ .tag }}), please note that it is strongly recommended to avoid using rolling tags in a production environment. 
++info https://docs.bitnami.com/containers/how-to/understand-rolling-tags-containers/ +{{- end }} + +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl new file mode 100644 index 000000000000..ded1ae3bcad7 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_cassandra.tpl @@ -0,0 +1,72 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate Cassandra required passwords are not empty. + +Usage: +{{ include "common.validations.values.cassandra.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where Cassandra values are stored, e.g: "cassandra-passwords-secret" + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.cassandra.passwords" -}} + {{- $existingSecret := include "common.cassandra.values.existingSecret" . -}} + {{- $enabled := include "common.cassandra.values.enabled" . -}} + {{- $dbUserPrefix := include "common.cassandra.values.key.dbUser" . -}} + {{- $valueKeyPassword := printf "%s.password" $dbUserPrefix -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "cassandra-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.cassandra.values.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. 
Default: false +*/}} +{{- define "common.cassandra.values.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.cassandra.dbUser.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.dbUser.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled cassandra. + +Usage: +{{ include "common.cassandra.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.cassandra.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.cassandra.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key dbUser + +Usage: +{{ include "common.cassandra.values.key.dbUser" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether Cassandra is used as subchart or not. Default: false +*/}} +{{- define "common.cassandra.values.key.dbUser" -}} + {{- if .subchart -}} + cassandra.dbUser + {{- else -}} + dbUser + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl new file mode 100644 index 000000000000..b6906ff77b72 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mariadb.tpl @@ -0,0 +1,103 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate MariaDB required passwords are not empty. + +Usage: +{{ include "common.validations.values.mariadb.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where MariaDB values are stored, e.g: "mysql-passwords-secret" + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.validations.values.mariadb.passwords" -}} + {{- $existingSecret := include "common.mariadb.values.auth.existingSecret" . -}} + {{- $enabled := include "common.mariadb.values.enabled" . -}} + {{- $architecture := include "common.mariadb.values.architecture" . -}} + {{- $authPrefix := include "common.mariadb.values.key.auth" . -}} + {{- $valueKeyRootPassword := printf "%s.rootPassword" $authPrefix -}} + {{- $valueKeyUsername := printf "%s.username" $authPrefix -}} + {{- $valueKeyPassword := printf "%s.password" $authPrefix -}} + {{- $valueKeyReplicationPassword := printf "%s.replicationPassword" $authPrefix -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" $valueKeyRootPassword "secret" .secret "field" "mariadb-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- $valueUsername := include "common.utils.getValueFromKey" (dict "key" $valueKeyUsername "context" .context) }} + {{- if not (empty $valueUsername) -}} + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "mariadb-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq $architecture "replication") -}} + {{- $requiredReplicationPassword := dict "valueKey" $valueKeyReplicationPassword "secret" .secret "field" "mariadb-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicationPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.mariadb.values.auth.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. 
Whether MariaDB is used as subchart or not. Default: false +*/}} +{{- define "common.mariadb.values.auth.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.mariadb.auth.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.auth.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled mariadb. + +Usage: +{{ include "common.mariadb.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.mariadb.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.mariadb.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for architecture + +Usage: +{{ include "common.mariadb.values.architecture" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. Default: false +*/}} +{{- define "common.mariadb.values.architecture" -}} + {{- if .subchart -}} + {{- .context.Values.mariadb.architecture -}} + {{- else -}} + {{- .context.Values.architecture -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key auth + +Usage: +{{ include "common.mariadb.values.key.auth" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.mariadb.values.key.auth" -}} + {{- if .subchart -}} + mariadb.auth + {{- else -}} + auth + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl new file mode 100644 index 000000000000..a071ea4d3127 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_mongodb.tpl @@ -0,0 +1,108 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate MongoDB® required passwords are not empty. + +Usage: +{{ include "common.validations.values.mongodb.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where MongoDB® values are stored, e.g: "mongodb-passwords-secret" + - subchart - Boolean - Optional. Whether MongoDB® is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.mongodb.passwords" -}} + {{- $existingSecret := include "common.mongodb.values.auth.existingSecret" . -}} + {{- $enabled := include "common.mongodb.values.enabled" . -}} + {{- $authPrefix := include "common.mongodb.values.key.auth" . -}} + {{- $architecture := include "common.mongodb.values.architecture" . 
-}} + {{- $valueKeyRootPassword := printf "%s.rootPassword" $authPrefix -}} + {{- $valueKeyUsername := printf "%s.username" $authPrefix -}} + {{- $valueKeyDatabase := printf "%s.database" $authPrefix -}} + {{- $valueKeyPassword := printf "%s.password" $authPrefix -}} + {{- $valueKeyReplicaSetKey := printf "%s.replicaSetKey" $authPrefix -}} + {{- $valueKeyAuthEnabled := printf "%s.enabled" $authPrefix -}} + + {{- $authEnabled := include "common.utils.getValueFromKey" (dict "key" $valueKeyAuthEnabled "context" .context) -}} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") (eq $authEnabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" $valueKeyRootPassword "secret" .secret "field" "mongodb-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- $valueUsername := include "common.utils.getValueFromKey" (dict "key" $valueKeyUsername "context" .context) }} + {{- $valueDatabase := include "common.utils.getValueFromKey" (dict "key" $valueKeyDatabase "context" .context) }} + {{- if and $valueUsername $valueDatabase -}} + {{- $requiredPassword := dict "valueKey" $valueKeyPassword "secret" .secret "field" "mongodb-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq $architecture "replicaset") -}} + {{- $requiredReplicaSetKey := dict "valueKey" $valueKeyReplicaSetKey "secret" .secret "field" "mongodb-replica-set-key" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicaSetKey -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.mongodb.values.auth.existingSecret" (dict "context" $) }} +Params: + - subchart - Boolean - Optional. 
Whether MongoDb is used as subchart or not. Default: false +*/}} +{{- define "common.mongodb.values.auth.existingSecret" -}} + {{- if .subchart -}} + {{- .context.Values.mongodb.auth.existingSecret | quote -}} + {{- else -}} + {{- .context.Values.auth.existingSecret | quote -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled mongodb. + +Usage: +{{ include "common.mongodb.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.mongodb.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.mongodb.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key auth + +Usage: +{{ include "common.mongodb.values.key.auth" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MongoDB® is used as subchart or not. Default: false +*/}} +{{- define "common.mongodb.values.key.auth" -}} + {{- if .subchart -}} + mongodb.auth + {{- else -}} + auth + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for architecture + +Usage: +{{ include "common.mongodb.values.architecture" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether MariaDB is used as subchart or not. 
Default: false +*/}} +{{- define "common.mongodb.values.architecture" -}} + {{- if .subchart -}} + {{- .context.Values.mongodb.architecture -}} + {{- else -}} + {{- .context.Values.architecture -}} + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl new file mode 100644 index 000000000000..164ec0d01252 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_postgresql.tpl @@ -0,0 +1,129 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate PostgreSQL required passwords are not empty. + +Usage: +{{ include "common.validations.values.postgresql.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where postgresql values are stored, e.g: "postgresql-passwords-secret" + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.postgresql.passwords" -}} + {{- $existingSecret := include "common.postgresql.values.existingSecret" . -}} + {{- $enabled := include "common.postgresql.values.enabled" . -}} + {{- $valueKeyPostgresqlPassword := include "common.postgresql.values.key.postgressPassword" . -}} + {{- $valueKeyPostgresqlReplicationEnabled := include "common.postgresql.values.key.replicationPassword" . -}} + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + {{- $requiredPostgresqlPassword := dict "valueKey" $valueKeyPostgresqlPassword "secret" .secret "field" "postgresql-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPostgresqlPassword -}} + + {{- $enabledReplication := include "common.postgresql.values.enabled.replication" . 
-}} + {{- if (eq $enabledReplication "true") -}} + {{- $requiredPostgresqlReplicationPassword := dict "valueKey" $valueKeyPostgresqlReplicationEnabled "secret" .secret "field" "postgresql-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPostgresqlReplicationPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to decide whether evaluate global values. + +Usage: +{{ include "common.postgresql.values.use.global" (dict "key" "key-of-global" "context" $) }} +Params: + - key - String - Required. Field to be evaluated within global, e.g: "existingSecret" +*/}} +{{- define "common.postgresql.values.use.global" -}} + {{- if .context.Values.global -}} + {{- if .context.Values.global.postgresql -}} + {{- index .context.Values.global.postgresql .key | quote -}} + {{- end -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for existingSecret. + +Usage: +{{ include "common.postgresql.values.existingSecret" (dict "context" $) }} +*/}} +{{- define "common.postgresql.values.existingSecret" -}} + {{- $globalValue := include "common.postgresql.values.use.global" (dict "key" "existingSecret" "context" .context) -}} + + {{- if .subchart -}} + {{- default (.context.Values.postgresql.existingSecret | quote) $globalValue -}} + {{- else -}} + {{- default (.context.Values.existingSecret | quote) $globalValue -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled postgresql. 
+ +Usage: +{{ include "common.postgresql.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.postgresql.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.postgresql.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key postgressPassword. + +Usage: +{{ include "common.postgresql.values.key.postgressPassword" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.postgresql.values.key.postgressPassword" -}} + {{- $globalValue := include "common.postgresql.values.use.global" (dict "key" "postgresqlUsername" "context" .context) -}} + + {{- if not $globalValue -}} + {{- if .subchart -}} + postgresql.postgresqlPassword + {{- else -}} + postgresqlPassword + {{- end -}} + {{- else -}} + global.postgresql.postgresqlPassword + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled.replication. + +Usage: +{{ include "common.postgresql.values.enabled.replication" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. Default: false +*/}} +{{- define "common.postgresql.values.enabled.replication" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.postgresql.replication.enabled -}} + {{- else -}} + {{- printf "%v" .context.Values.replication.enabled -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for the key replication.password. + +Usage: +{{ include "common.postgresql.values.key.replicationPassword" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether postgresql is used as subchart or not. 
Default: false +*/}} +{{- define "common.postgresql.values.key.replicationPassword" -}} + {{- if .subchart -}} + postgresql.replication.password + {{- else -}} + replication.password + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl new file mode 100644 index 000000000000..5d72959b9eee --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_redis.tpl @@ -0,0 +1,76 @@ + +{{/* vim: set filetype=mustache: */}} +{{/* +Validate Redis™ required passwords are not empty. + +Usage: +{{ include "common.validations.values.redis.passwords" (dict "secret" "secretName" "subchart" false "context" $) }} +Params: + - secret - String - Required. Name of the secret where redis values are stored, e.g: "redis-passwords-secret" + - subchart - Boolean - Optional. Whether redis is used as subchart or not. Default: false +*/}} +{{- define "common.validations.values.redis.passwords" -}} + {{- $enabled := include "common.redis.values.enabled" . -}} + {{- $valueKeyPrefix := include "common.redis.values.keys.prefix" . -}} + {{- $standarizedVersion := include "common.redis.values.standarized.version" . 
}} + + {{- $existingSecret := ternary (printf "%s%s" $valueKeyPrefix "auth.existingSecret") (printf "%s%s" $valueKeyPrefix "existingSecret") (eq $standarizedVersion "true") }} + {{- $existingSecretValue := include "common.utils.getValueFromKey" (dict "key" $existingSecret "context" .context) }} + + {{- $valueKeyRedisPassword := ternary (printf "%s%s" $valueKeyPrefix "auth.password") (printf "%s%s" $valueKeyPrefix "password") (eq $standarizedVersion "true") }} + {{- $valueKeyRedisUseAuth := ternary (printf "%s%s" $valueKeyPrefix "auth.enabled") (printf "%s%s" $valueKeyPrefix "usePassword") (eq $standarizedVersion "true") }} + + {{- if and (or (not $existingSecret) (eq $existingSecret "\"\"")) (eq $enabled "true") -}} + {{- $requiredPasswords := list -}} + + {{- $useAuth := include "common.utils.getValueFromKey" (dict "key" $valueKeyRedisUseAuth "context" .context) -}} + {{- if eq $useAuth "true" -}} + {{- $requiredRedisPassword := dict "valueKey" $valueKeyRedisPassword "secret" .secret "field" "redis-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRedisPassword -}} + {{- end -}} + + {{- include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" .context) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right value for enabled redis. + +Usage: +{{ include "common.redis.values.enabled" (dict "context" $) }} +*/}} +{{- define "common.redis.values.enabled" -}} + {{- if .subchart -}} + {{- printf "%v" .context.Values.redis.enabled -}} + {{- else -}} + {{- printf "%v" (not .context.Values.enabled) -}} + {{- end -}} +{{- end -}} + +{{/* +Auxiliary function to get the right prefix path for the values + +Usage: +{{ include "common.redis.values.key.prefix" (dict "subchart" "true" "context" $) }} +Params: + - subchart - Boolean - Optional. Whether redis is used as subchart or not. 
Default: false +*/}} +{{- define "common.redis.values.keys.prefix" -}} + {{- if .subchart -}}redis.{{- else -}}{{- end -}} +{{- end -}} + +{{/* +Checks whether the redis chart's includes the standarizations (version >= 14) + +Usage: +{{ include "common.redis.values.standarized.version" (dict "context" $) }} +*/}} +{{- define "common.redis.values.standarized.version" -}} + + {{- $standarizedAuth := printf "%s%s" (include "common.redis.values.keys.prefix" .) "auth" -}} + {{- $standarizedAuthValues := include "common.utils.getValueFromKey" (dict "key" $standarizedAuth "context" .context) }} + + {{- if $standarizedAuthValues -}} + {{- true -}} + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl new file mode 100644 index 000000000000..9a814cf40dcb --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/templates/validations/_validations.tpl @@ -0,0 +1,46 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Validate values must not be empty. + +Usage: +{{- $validateValueConf00 := (dict "valueKey" "path.to.value" "secret" "secretName" "field" "password-00") -}} +{{- $validateValueConf01 := (dict "valueKey" "path.to.value" "secret" "secretName" "field" "password-01") -}} +{{ include "common.validations.values.empty" (dict "required" (list $validateValueConf00 $validateValueConf01) "context" $) }} + +Validate value params: + - valueKey - String - Required. The path to the validating value in the values.yaml, e.g: "mysql.password" + - secret - String - Optional. Name of the secret where the validating value is generated/stored, e.g: "mysql-passwords-secret" + - field - String - Optional. 
Name of the field in the secret data, e.g: "mysql-password" +*/}} +{{- define "common.validations.values.multiple.empty" -}} + {{- range .required -}} + {{- include "common.validations.values.single.empty" (dict "valueKey" .valueKey "secret" .secret "field" .field "context" $.context) -}} + {{- end -}} +{{- end -}} + +{{/* +Validate a value must not be empty. + +Usage: +{{ include "common.validations.value.empty" (dict "valueKey" "mariadb.password" "secret" "secretName" "field" "my-password" "subchart" "subchart" "context" $) }} + +Validate value params: + - valueKey - String - Required. The path to the validating value in the values.yaml, e.g: "mysql.password" + - secret - String - Optional. Name of the secret where the validating value is generated/stored, e.g: "mysql-passwords-secret" + - field - String - Optional. Name of the field in the secret data, e.g: "mysql-password" + - subchart - String - Optional - Name of the subchart that the validated password is part of. +*/}} +{{- define "common.validations.values.single.empty" -}} + {{- $value := include "common.utils.getValueFromKey" (dict "key" .valueKey "context" .context) }} + {{- $subchart := ternary "" (printf "%s." .subchart) (empty .subchart) }} + + {{- if not $value -}} + {{- $varname := "my-value" -}} + {{- $getCurrentValue := "" -}} + {{- if and .secret .field -}} + {{- $varname = include "common.utils.fieldToEnvVar" . -}} + {{- $getCurrentValue = printf " To get the current value:\n\n %s\n" (include "common.utils.secret.getvalue" .) 
-}} + {{- end -}} + {{- printf "\n '%s' must not be empty, please add '--set %s%s=$%s' to the command.%s" .valueKey $subchart .valueKey $varname $getCurrentValue -}} + {{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml b/pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml new file mode 100644 index 000000000000..f2df68e5e6af --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/charts/common/values.yaml @@ -0,0 +1,5 @@ +## bitnami/common +## It is required by CI/CD tools and processes. +## @skip exampleValue +## +exampleValue: common-chart diff --git a/pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml b/pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml new file mode 100644 index 000000000000..d3370c931113 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/ci/values-production-with-rbac.yaml @@ -0,0 +1,30 @@ +# Test values file for generating all of the yaml and check that +# the rendering is correct + +architecture: replication +auth: + usePasswordFiles: true + +primary: + extraEnvVars: + - name: TEST + value: "3" + podDisruptionBudget: + create: true + +secondary: + replicaCount: 2 + extraEnvVars: + - name: TEST + value: "2" + podDisruptionBudget: + create: true + +serviceAccount: + create: true + name: mysql-service-account +rbac: + create: true + +metrics: + enabled: true diff --git a/pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt b/pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt new file mode 100644 index 000000000000..1b8b6d5ea7d2 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/NOTES.txt @@ -0,0 +1,102 @@ +CHART NAME: {{ .Chart.Name }} +CHART VERSION: {{ .Chart.Version }} +APP VERSION: {{ .Chart.AppVersion }} + +** Please be patient while the chart is being deployed ** + +{{- if .Values.diagnosticMode.enabled }} +The chart has been deployed in diagnostic mode. 
All probes have been disabled and the command has been overwritten with: + + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 4 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 4 }} + +Get the list of pods by executing: + + kubectl get pods --namespace {{ .Release.Namespace }} -l app.kubernetes.io/instance={{ .Release.Name }} + +Access the pod you want to debug by executing + + kubectl exec --namespace {{ .Release.Namespace }} -ti -- bash + +In order to replicate the container startup scripts execute this command: + + /opt/bitnami/scripts/mysql/entrypoint.sh /opt/bitnami/scripts/mysql/run.sh + +{{- else }} + +Tip: + + Watch the deployment status using the command: kubectl get pods -w --namespace {{ .Release.Namespace }} + +Services: + + echo Primary: {{ include "mysql.primary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }}:{{ .Values.primary.service.port }} +{{- if eq .Values.architecture "replication" }} + echo Secondary: {{ include "mysql.secondary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }}:{{ .Values.secondary.service.port }} +{{- end }} + +Execute the following to get the administrator credentials: + + echo Username: root + MYSQL_ROOT_PASSWORD=$(kubectl get secret --namespace {{ .Release.Namespace }} {{ template "mysql.secretName" . }} -o jsonpath="{.data.mysql-root-password}" | base64 --decode) + +To connect to your database: + + 1. Run a pod that you can use as a client: + + kubectl run {{ include "common.names.fullname" . }}-client --rm --tty -i --restart='Never' --image {{ template "mysql.image" . }} --namespace {{ .Release.Namespace }} --command -- bash + + 2. To connect to primary service (read/write): + + mysql -h {{ include "mysql.primary.fullname" . 
}}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }} -uroot -p"$MYSQL_ROOT_PASSWORD" + +{{- if eq .Values.architecture "replication" }} + + 3. To connect to secondary service (read-only): + + mysql -h {{ include "mysql.secondary.fullname" . }}.{{ .Release.Namespace }}.svc.{{ .Values.clusterDomain }} -uroot -p"$MYSQL_ROOT_PASSWORD" +{{- end }} + +{{ if and (.Values.networkPolicy.enabled) (not .Values.networkPolicy.allowExternal) }} +Note: Since NetworkPolicy is enabled, only pods with label {{ template "common.names.fullname" . }}-client=true" will be able to connect to MySQL. +{{- end }} + +{{- if .Values.metrics.enabled }} + +To access the MySQL Prometheus metrics from outside the cluster execute the following commands: + + kubectl port-forward --namespace {{ .Release.Namespace }} svc/{{ printf "%s-metrics" (include "common.names.fullname" .) }} {{ .Values.metrics.service.port }}:{{ .Values.metrics.service.port }} & + curl http://127.0.0.1:{{ .Values.metrics.service.port }}/metrics + +{{- end }} + +To upgrade this helm chart: + + 1. Obtain the password as described on the 'Administrator credentials' section and set the 'root.password' parameter as shown below: + + ROOT_PASSWORD=$(kubectl get secret --namespace {{ .Release.Namespace }} {{ include "common.names.fullname" . }} -o jsonpath="{.data.mysql-root-password}" | base64 --decode) + helm upgrade --namespace {{ .Release.Namespace }} {{ .Release.Name }} bitnami/mysql --set auth.rootPassword=$ROOT_PASSWORD + +{{ include "mysql.validateValues" . }} +{{ include "mysql.checkRollingTags" . }} +{{- if and (not .Values.auth.existingSecret) (not .Values.auth.customPasswordFiles) -}} + {{- $secretName := include "mysql.secretName" . 
-}} + {{- $requiredPasswords := list -}} + + {{- $requiredRootPassword := dict "valueKey" "auth.rootPassword" "secret" $secretName "field" "mysql-root-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredRootPassword -}} + + {{- if not (empty .Values.auth.username) -}} + {{- $requiredPassword := dict "valueKey" "auth.password" "secret" $secretName "field" "mysql-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredPassword -}} + {{- end -}} + + {{- if (eq .Values.architecture "replication") -}} + {{- $requiredReplicationPassword := dict "valueKey" "auth.replicationPassword" "secret" $secretName "field" "mysql-replication-password" -}} + {{- $requiredPasswords = append $requiredPasswords $requiredReplicationPassword -}} + {{- end -}} + + {{- $mysqlPasswordValidationErrors := include "common.validations.values.multiple.empty" (dict "required" $requiredPasswords "context" $) -}} + {{- include "common.errors.upgrade.passwords.empty" (dict "validationErrors" $mysqlPasswordValidationErrors "context" $) -}} +{{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl new file mode 100644 index 000000000000..6c2bcff81398 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/_helpers.tpl @@ -0,0 +1,192 @@ +{{/* vim: set filetype=mustache: */}} + +{{- define "mysql.primary.fullname" -}} +{{- if eq .Values.architecture "replication" }} +{{- printf "%s-%s" (include "common.names.fullname" .) "primary" | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- include "common.names.fullname" . -}} +{{- end -}} +{{- end -}} + +{{- define "mysql.secondary.fullname" -}} +{{- printf "%s-%s" (include "common.names.fullname" .) 
"secondary" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Return the proper MySQL image name +*/}} +{{- define "mysql.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper metrics image name +*/}} +{{- define "mysql.metrics.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.metrics.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper image name (for the init container volume-permissions image) +*/}} +{{- define "mysql.volumePermissions.image" -}} +{{ include "common.images.image" (dict "imageRoot" .Values.volumePermissions.image "global" .Values.global) }} +{{- end -}} + +{{/* +Return the proper Docker Image Registry Secret Names +*/}} +{{- define "mysql.imagePullSecrets" -}} +{{ include "common.images.pullSecrets" (dict "images" (list .Values.image .Values.metrics.image .Values.volumePermissions.image) "global" .Values.global) }} +{{- end -}} + +{{ template "mysql.initdbScriptsCM" . }} +{{/* +Get the initialization scripts ConfigMap name. +*/}} +{{- define "mysql.initdbScriptsCM" -}} +{{- if .Values.initdbScriptsConfigMap -}} + {{- printf "%s" .Values.initdbScriptsConfigMap -}} +{{- else -}} + {{- printf "%s-init-scripts" (include "mysql.primary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* + Returns the proper service account name depending if an explicit service account name is set + in the values file. If the name is not set it will default to either mysql.fullname if serviceAccount.create + is true or default otherwise. +*/}} +{{- define "mysql.serviceAccountName" -}} + {{- if .Values.serviceAccount.create -}} + {{ default (include "common.names.fullname" .) 
.Values.serviceAccount.name }} + {{- else -}} + {{ default "default" .Values.serviceAccount.name }} + {{- end -}} +{{- end -}} + +{{/* +Return the configmap with the MySQL Primary configuration +*/}} +{{- define "mysql.primary.configmapName" -}} +{{- if .Values.primary.existingConfigmap -}} + {{- printf "%s" (tpl .Values.primary.existingConfigmap $) -}} +{{- else -}} + {{- printf "%s" (include "mysql.primary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if a configmap object should be created for MySQL Secondary +*/}} +{{- define "mysql.primary.createConfigmap" -}} +{{- if and .Values.primary.configuration (not .Values.primary.existingConfigmap) }} + {{- true -}} +{{- else -}} +{{- end -}} +{{- end -}} + +{{/* +Return the configmap with the MySQL Primary configuration +*/}} +{{- define "mysql.secondary.configmapName" -}} +{{- if .Values.secondary.existingConfigmap -}} + {{- printf "%s" (tpl .Values.secondary.existingConfigmap $) -}} +{{- else -}} + {{- printf "%s" (include "mysql.secondary.fullname" .) -}} +{{- end -}} +{{- end -}} + +{{/* +Return true if a configmap object should be created for MySQL Secondary +*/}} +{{- define "mysql.secondary.createConfigmap" -}} +{{- if and (eq .Values.architecture "replication") .Values.secondary.configuration (not .Values.secondary.existingConfigmap) }} + {{- true -}} +{{- else -}} +{{- end -}} +{{- end -}} + +{{/* +Return the secret with MySQL credentials +*/}} +{{- define "mysql.secretName" -}} + {{- if .Values.auth.existingSecret -}} + {{- printf "%s" .Values.auth.existingSecret -}} + {{- else -}} + {{- printf "%s" (include "common.names.fullname" .) 
-}} + {{- end -}} +{{- end -}} + +{{/* +Return true if a secret object should be created for MySQL +*/}} +{{- define "mysql.createSecret" -}} +{{- if and (not .Values.auth.existingSecret) (not .Values.auth.customPasswordFiles) }} + {{- true -}} +{{- end -}} +{{- end -}} + +{{/* +Returns the available value for certain key in an existing secret (if it exists), +otherwise it generates a random value. +*/}} +{{- define "getValueFromSecret" }} + {{- $len := (default 16 .Length) | int -}} + {{- $obj := (lookup "v1" "Secret" .Namespace .Name).data -}} + {{- if $obj }} + {{- index $obj .Key | b64dec -}} + {{- else -}} + {{- randAlphaNum $len -}} + {{- end -}} +{{- end }} + +{{- define "mysql.root.password" -}} + {{- if not (empty .Values.auth.rootPassword) }} + {{- .Values.auth.rootPassword }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) "Length" 10 "Key" "mysql-root-password") }} + {{- else }} + {{- required "A MySQL Root Password is required!" .Values.auth.rootPassword }} + {{- end }} +{{- end -}} + +{{- define "mysql.password" -}} + {{- if and (not (empty .Values.auth.username)) (not (empty .Values.auth.password)) }} + {{- .Values.auth.password }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) "Length" 10 "Key" "mysql-password") }} + {{- else }} + {{- required "A MySQL Database Password is required!" .Values.auth.password }} + {{- end }} +{{- end -}} + +{{- define "mysql.replication.password" -}} + {{- if not (empty .Values.auth.replicationPassword) }} + {{- .Values.auth.replicationPassword }} + {{- else if (not .Values.auth.forcePassword) }} + {{- include "getValueFromSecret" (dict "Namespace" .Release.Namespace "Name" (include "common.names.fullname" .) 
"Length" 10 "Key" "mysql-replication-password") }} + {{- else }} + {{- required "A MySQL Replication Password is required!" .Values.auth.replicationPassword }} + {{- end }} +{{- end -}} + +{{/* Check if there are rolling tags in the images */}} +{{- define "mysql.checkRollingTags" -}} +{{- include "common.warnings.rollingTag" .Values.image }} +{{- include "common.warnings.rollingTag" .Values.metrics.image }} +{{- include "common.warnings.rollingTag" .Values.volumePermissions.image }} +{{- end -}} + +{{/* +Compile all warnings into a single message, and call fail. +*/}} +{{- define "mysql.validateValues" -}} +{{- $messages := list -}} +{{- $messages := without $messages "" -}} +{{- $message := join "\n" $messages -}} + +{{- if $message -}} +{{- printf "\nVALUES VALIDATION:\n%s" $message | fail -}} +{{- end -}} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml b/pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml new file mode 100644 index 000000000000..9ac65f9e16f4 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/extra-list.yaml @@ -0,0 +1,4 @@ +{{- range .Values.extraDeploy }} +--- +{{ include "common.tplvalues.render" (dict "value" . "context" $) }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml new file mode 100644 index 000000000000..fb0d9d761dc6 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/metrics-svc.yaml @@ -0,0 +1,29 @@ +{{- if .Values.metrics.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: {{ printf "%s-metrics" (include "common.names.fullname" .) }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + app.kubernetes.io/component: metrics + {{- if or .Values.metrics.service.annotations .Values.commonAnnotations }} + annotations: + {{- if .Values.metrics.service.annotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.metrics.service.annotations "context" $) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- end }} +spec: + type: {{ .Values.metrics.service.type }} + ports: + - port: {{ .Values.metrics.service.port }} + targetPort: metrics + protocol: TCP + name: metrics + selector: {{- include "common.labels.matchLabels" $ | nindent 4 }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml b/pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml new file mode 100644 index 000000000000..a0d1d01d4079 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/networkpolicy.yaml @@ -0,0 +1,38 @@ +{{- if .Values.networkPolicy.enabled }} +kind: NetworkPolicy +apiVersion: {{ template "common.capabilities.networkPolicy.apiVersion" . }} +metadata: + name: {{ template "common.names.fullname" . }} + labels: + {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + namespace: {{ .Release.Namespace }} +spec: + podSelector: + matchLabels: + {{- include "common.labels.matchLabels" . | nindent 6 }} + ingress: + # Allow inbound connections + - ports: + - port: {{ .Values.primary.service.port }} + {{- if not .Values.networkPolicy.allowExternal }} + from: + - podSelector: + matchLabels: + {{ template "common.names.fullname" . 
}}-client: "true" + {{- if .Values.networkPolicy.explicitNamespacesSelector }} + namespaceSelector: +{{ toYaml .Values.networkPolicy.explicitNamespacesSelector | indent 12 }} + {{- end }} + - podSelector: + matchLabels: + {{- include "common.labels.matchLabels" . | nindent 14 }} + {{- end }} + {{- if .Values.metrics.enabled }} + # Allow prometheus scrapes + - ports: + - port: 9104 + {{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml new file mode 100644 index 000000000000..540b7b9072e9 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/configmap.yaml @@ -0,0 +1,18 @@ +{{- if (include "mysql.primary.createConfigmap" .) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: + my.cnf: |- + {{ .Values.primary.configuration | nindent 4 }} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml new file mode 100644 index 000000000000..83cbaea74883 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/initialization-configmap.yaml @@ -0,0 +1,14 @@ +{{- if and .Values.initdbScripts (not .Values.initdbScriptsConfigMap) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ printf "%s-init-scripts" (include "mysql.primary.fullname" .) 
}} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: +{{- include "common.tplvalues.render" (dict "value" .Values.initdbScripts "context" .) | nindent 2 }} +{{ end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml new file mode 100644 index 000000000000..106ad5207e5a --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/pdb.yaml @@ -0,0 +1,25 @@ +{{- if .Values.primary.pdb.enabled }} +apiVersion: {{ include "common.capabilities.policy.apiVersion" . }} +kind: PodDisruptionBudget +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + {{- if .Values.primary.pdb.minAvailable }} + minAvailable: {{ .Values.primary.pdb.minAvailable }} + {{- end }} + {{- if .Values.primary.pdb.maxUnavailable }} + maxUnavailable: {{ .Values.primary.pdb.maxUnavailable }} + {{- end }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . 
| nindent 6 }} + app.kubernetes.io/component: primary +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml new file mode 100644 index 000000000000..6f9c99ea66d9 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/statefulset.yaml @@ -0,0 +1,368 @@ +apiVersion: {{ include "common.capabilities.statefulset.apiVersion" . }} +kind: StatefulSet +metadata: + name: {{ include "mysql.primary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.primary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.podLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + replicas: 1 + selector: + matchLabels: {{ include "common.labels.matchLabels" . | nindent 6 }} + app.kubernetes.io/component: primary + serviceName: {{ include "mysql.primary.fullname" . }} + updateStrategy: + type: {{ .Values.primary.updateStrategy }} + {{- if (eq "Recreate" .Values.primary.updateStrategy) }} + rollingUpdate: null + {{- else if .Values.primary.rollingUpdatePartition }} + rollingUpdate: + partition: {{ .Values.primary.rollingUpdatePartition }} + {{- end }} + template: + metadata: + annotations: + {{- if (include "mysql.primary.createConfigmap" .) }} + checksum/configuration: {{ include (print $.Template.BasePath "/primary/configmap.yaml") . 
| sha256sum }} + {{- end }} + {{- if .Values.primary.podAnnotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.podAnnotations "context" $) | nindent 8 }} + {{- end }} + labels: {{- include "common.labels.standard" . | nindent 8 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 8 }} + {{- end }} + {{- if .Values.primary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.podLabels "context" $ ) | nindent 8 }} + {{- end }} + spec: + {{- include "mysql.imagePullSecrets" . | nindent 6 }} + {{- if .Values.primary.hostAliases }} + hostAliases: {{- include "common.tplvalues.render" (dict "value" .Values.primary.hostAliases "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.schedulerName }} + schedulerName: {{ .Values.schedulerName | quote }} + {{- end }} + serviceAccountName: {{ template "mysql.serviceAccountName" . 
}} + {{- if .Values.primary.affinity }} + affinity: {{- include "common.tplvalues.render" (dict "value" .Values.primary.affinity "context" $) | nindent 8 }} + {{- else }} + affinity: + podAffinity: {{- include "common.affinities.pods" (dict "type" .Values.primary.podAffinityPreset "component" "primary" "context" $) | nindent 10 }} + podAntiAffinity: {{- include "common.affinities.pods" (dict "type" .Values.primary.podAntiAffinityPreset "component" "primary" "context" $) | nindent 10 }} + nodeAffinity: {{- include "common.affinities.nodes" (dict "type" .Values.primary.nodeAffinityPreset.type "key" .Values.primary.nodeAffinityPreset.key "values" .Values.primary.nodeAffinityPreset.values) | nindent 10 }} + {{- end }} + {{- if .Values.primary.nodeSelector }} + nodeSelector: {{- include "common.tplvalues.render" (dict "value" .Values.primary.nodeSelector "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.primary.tolerations }} + tolerations: {{- include "common.tplvalues.render" (dict "value" .Values.primary.tolerations "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.priorityClassName }} + priorityClassName: {{ .Values.priorityClassName | quote }} + {{- end }} + {{- if .Values.primary.podSecurityContext.enabled }} + securityContext: {{- omit .Values.primary.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + {{- if or .Values.primary.initContainers (and .Values.primary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.primary.persistence.enabled) }} + initContainers: + {{- if .Values.primary.initContainers }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.initContainers "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.primary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.primary.persistence.enabled }} + - name: volume-permissions + image: {{ include "mysql.volumePermissions.image" . 
}} + imagePullPolicy: {{ .Values.volumePermissions.image.pullPolicy | quote }} + command: + - /bin/bash + - -ec + - | + chown -R {{ .Values.primary.containerSecurityContext.runAsUser }}:{{ .Values.primary.podSecurityContext.fsGroup }} /bitnami/mysql + securityContext: + runAsUser: 0 + {{- if .Values.volumePermissions.resources }} + resources: {{- toYaml .Values.volumePermissions.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- end }} + {{- end }} + containers: + - name: mysql + image: {{ include "mysql.image" . }} + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + {{- if .Values.primary.containerSecurityContext.enabled }} + securityContext: {{- omit .Values.primary.containerSecurityContext "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + {{- else if .Values.primary.command }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.primary.command "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else if .Values.primary.args }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.primary.args "context" $) | nindent 12 }} + {{- end }} + env: + - name: BITNAMI_DEBUG + value: {{ ternary "true" "false" (or .Values.image.debug .Values.diagnosticMode.enabled) | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . 
}} + key: mysql-root-password + {{- end }} + {{- if not (empty .Values.auth.username) }} + - name: MYSQL_USER + value: {{ .Values.auth.username | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-password" .Values.auth.customPasswordFiles.user }} + {{- else }} + - name: MYSQL_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-password + {{- end }} + {{- end }} + - name: MYSQL_DATABASE + value: {{ .Values.auth.database | quote }} + {{- if eq .Values.architecture "replication" }} + - name: MYSQL_REPLICATION_MODE + value: "master" + - name: MYSQL_REPLICATION_USER + value: {{ .Values.auth.replicationUser | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_REPLICATION_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-replication-password" .Values.auth.customPasswordFiles.replicator }} + {{- else }} + - name: MYSQL_REPLICATION_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . 
}} + key: mysql-replication-password + {{- end }} + {{- end }} + {{- if .Values.primary.extraFlags }} + - name: MYSQL_EXTRA_FLAGS + value: "{{ .Values.primary.extraFlags }}" + {{- end }} + {{- if .Values.primary.extraEnvVars }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraEnvVars "context" $) | nindent 12 }} + {{- end }} + {{- if or .Values.primary.extraEnvVarsCM .Values.primary.extraEnvVarsSecret }} + envFrom: + {{- if .Values.primary.extraEnvVarsCM }} + - configMapRef: + name: {{ .Values.primary.extraEnvVarsCM }} + {{- end }} + {{- if .Values.primary.extraEnvVarsSecret }} + - secretRef: + name: {{ .Values.primary.extraEnvVarsSecret }} + {{- end }} + {{- end }} + ports: + - name: mysql + containerPort: 3306 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.primary.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.primary.livenessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customLivenessProbe }} + livenessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customLivenessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.primary.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.primary.readinessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customReadinessProbe }} + readinessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customReadinessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if 
.Values.primary.startupProbe.enabled }} + startupProbe: {{- omit .Values.primary.startupProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.primary.customStartupProbe }} + startupProbe: {{- include "common.tplvalues.render" (dict "value" .Values.primary.customStartupProbe "context" $) | nindent 12 }} + {{- end }} + {{- end }} + {{- if .Values.primary.resources }} + resources: {{ toYaml .Values.primary.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- if or .Values.initdbScriptsConfigMap .Values.initdbScripts }} + - name: custom-init-scripts + mountPath: /docker-entrypoint-initdb.d + {{- end }} + {{- if or .Values.primary.configuration .Values.primary.existingConfigmap }} + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + mountPath: /opt/bitnami/mysql/secrets/ + {{- end }} + {{- if .Values.primary.extraVolumeMounts }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraVolumeMounts "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.metrics.enabled }} + - name: metrics + image: {{ include "mysql.metrics.image" . }} + imagePullPolicy: {{ .Values.metrics.image.pullPolicy | quote }} + env: + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysqld-exporter/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "mysql.secretName" . 
}} + key: mysql-root-password + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else }} + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + DATA_SOURCE_NAME="root:${password_aux}@(localhost:3306)/" /bin/mysqld_exporter {{- range .Values.metrics.extraArgs.primary }} {{ . }} {{- end }} + {{- end }} + ports: + - name: metrics + containerPort: 9104 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.metrics.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.metrics.livenessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- if .Values.metrics.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.metrics.readinessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- end }} + {{- if .Values.metrics.resources }} + resources: {{- toYaml .Values.metrics.resources | nindent 12 }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + volumeMounts: + - name: mysql-credentials + mountPath: /opt/bitnami/mysqld-exporter/secrets/ + {{- end }} + {{- end }} + {{- if .Values.primary.sidecars }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.sidecars "context" $) | nindent 8 }} + {{- end }} + volumes: + {{- if or .Values.primary.configuration .Values.primary.existingConfigmap }} + - name: config + configMap: + name: {{ include "mysql.primary.configmapName" . 
}} + {{- end }} + {{- if or .Values.initdbScriptsConfigMap .Values.initdbScripts }} + - name: custom-init-scripts + configMap: + name: {{ include "mysql.initdbScriptsCM" . }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + secret: + secretName: {{ include "mysql.secretName" . }} + items: + - key: mysql-root-password + path: mysql-root-password + - key: mysql-password + path: mysql-password + {{- if eq .Values.architecture "replication" }} + - key: mysql-replication-password + path: mysql-replication-password + {{- end }} + {{- end }} + {{- if .Values.primary.extraVolumes }} + {{- include "common.tplvalues.render" (dict "value" .Values.primary.extraVolumes "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.primary.persistence.enabled .Values.primary.persistence.existingClaim }} + - name: data + persistentVolumeClaim: + claimName: {{ tpl .Values.primary.persistence.existingClaim . }} + {{- else if not .Values.primary.persistence.enabled }} + - name: data + emptyDir: {} + {{- else if and .Values.primary.persistence.enabled (not .Values.primary.persistence.existingClaim) }} + volumeClaimTemplates: + - metadata: + name: data + labels: {{ include "common.labels.matchLabels" . | nindent 10 }} + app.kubernetes.io/component: primary + {{- if .Values.primary.persistence.annotations }} + annotations: + {{- toYaml .Values.primary.persistence.annotations | nindent 10 }} + {{- end }} + spec: + accessModes: + {{- range .Values.primary.persistence.accessModes }} + - {{ . 
| quote }} + {{- end }} + resources: + requests: + storage: {{ .Values.primary.persistence.size | quote }} + {{ include "common.storage.class" (dict "persistence" .Values.primary.persistence "global" .Values.global) }} + {{- if .Values.primary.persistence.selector }} + selector: {{- include "common.tplvalues.render" (dict "value" .Values.primary.persistence.selector "context" $) | nindent 10 }} + {{- end -}} + {{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml new file mode 100644 index 000000000000..49e6e5798783 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc-headless.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.primary.fullname" . }}-headless + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: {{ .Values.primary.service.port }} + targetPort: mysql + selector: {{ include "common.labels.matchLabels" . | nindent 4 }} + app.kubernetes.io/component: primary diff --git a/pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml new file mode 100644 index 000000000000..b46e6faa8149 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/primary/svc.yaml @@ -0,0 +1,41 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.primary.fullname" . 
}} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: primary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.primary.service.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.primary.service.annotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.primary.service.type }} + {{- if and (eq .Values.primary.service.type "ClusterIP") .Values.primary.service.clusterIP }} + clusterIP: {{ .Values.primary.service.clusterIP }} + {{- end }} + {{- if and .Values.primary.service.loadBalancerIP (eq .Values.primary.service.type "LoadBalancer") }} + loadBalancerIP: {{ .Values.primary.service.loadBalancerIP }} + externalTrafficPolicy: {{ .Values.primary.service.externalTrafficPolicy | quote }} + {{- end }} + {{- if and (eq .Values.primary.service.type "LoadBalancer") .Values.primary.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: {{- toYaml .Values.primary.service.loadBalancerSourceRanges | nindent 4 }} + {{- end }} + ports: + - name: mysql + port: {{ .Values.primary.service.port }} + protocol: TCP + targetPort: mysql + {{- if (and (or (eq .Values.primary.service.type "NodePort") (eq .Values.primary.service.type "LoadBalancer")) .Values.primary.service.nodePort) }} + nodePort: {{ .Values.primary.service.nodePort }} + {{- else if eq .Values.primary.service.type "ClusterIP" }} + nodePort: null + {{- end }} + selector: {{ include "common.labels.matchLabels" . 
| nindent 4 }} + app.kubernetes.io/component: primary diff --git a/pkg/iac/scanners/helm/test/mysql/templates/role.yaml b/pkg/iac/scanners/helm/test/mysql/templates/role.yaml new file mode 100644 index 000000000000..4cbdd5c9ff20 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/role.yaml @@ -0,0 +1,21 @@ +{{- if and .Values.serviceAccount.create .Values.rbac.create }} +apiVersion: {{ include "common.capabilities.rbac.apiVersion" . }} +kind: Role +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +rules: + - apiGroups: + - "" + resources: + - endpoints + verbs: + - get +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml b/pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml new file mode 100644 index 000000000000..90ede32f5fc7 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/rolebinding.yaml @@ -0,0 +1,21 @@ +{{- if and .Values.serviceAccount.create .Values.rbac.create }} +kind: RoleBinding +apiVersion: {{ include "common.capabilities.rbac.apiVersion" . }} +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +subjects: + - kind: ServiceAccount + name: {{ include "mysql.serviceAccountName" . }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: {{ include "common.names.fullname" . -}} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml new file mode 100644 index 000000000000..682e3e19ba96 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secondary/configmap.yaml @@ -0,0 +1,18 @@ +{{- if (include "mysql.secondary.createConfigmap" .) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +data: + my.cnf: |- + {{ .Values.secondary.configuration | nindent 4 }} +{{- end -}} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml new file mode 100644 index 000000000000..49c7e167c0a2 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secondary/pdb.yaml @@ -0,0 +1,25 @@ +{{- if and (eq .Values.architecture "replication") .Values.secondary.pdb.enabled }} +apiVersion: {{ include "common.capabilities.policy.apiVersion" . }} +kind: PodDisruptionBudget +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + {{- if .Values.secondary.pdb.minAvailable }} + minAvailable: {{ .Values.secondary.pdb.minAvailable }} + {{- end }} + {{- if .Values.secondary.pdb.maxUnavailable }} + maxUnavailable: {{ .Values.secondary.pdb.maxUnavailable }} + {{- end }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . 
| nindent 6 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml new file mode 100644 index 000000000000..ef196ebf6df0 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secondary/statefulset.yaml @@ -0,0 +1,338 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: {{ include "common.capabilities.statefulset.apiVersion" . }} +kind: StatefulSet +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.secondary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.podLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + replicas: {{ .Values.secondary.replicaCount }} + selector: + matchLabels: {{ include "common.labels.matchLabels" . | nindent 6 }} + app.kubernetes.io/component: secondary + serviceName: {{ include "mysql.secondary.fullname" . }} + updateStrategy: + type: {{ .Values.secondary.updateStrategy }} + {{- if (eq "Recreate" .Values.secondary.updateStrategy) }} + rollingUpdate: null + {{- else if .Values.secondary.rollingUpdatePartition }} + rollingUpdate: + partition: {{ .Values.secondary.rollingUpdatePartition }} + {{- end }} + template: + metadata: + annotations: + {{- if (include "mysql.secondary.createConfigmap" .) }} + checksum/configuration: {{ include (print $.Template.BasePath "/secondary/configmap.yaml") . 
| sha256sum }} + {{- end }} + {{- if .Values.secondary.podAnnotations }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.podAnnotations "context" $) | nindent 8 }} + {{- end }} + labels: {{- include "common.labels.standard" . | nindent 8 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 8 }} + {{- end }} + {{- if .Values.secondary.podLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.podLabels "context" $ ) | nindent 8 }} + {{- end }} + spec: + {{- include "mysql.imagePullSecrets" . | nindent 6 }} + {{- if .Values.secondary.hostAliases }} + hostAliases: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.hostAliases "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.schedulerName }} + schedulerName: {{ .Values.schedulerName | quote }} + {{- end }} + serviceAccountName: {{ include "mysql.serviceAccountName" . 
}} + {{- if .Values.secondary.affinity }} + affinity: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.affinity "context" $) | nindent 8 }} + {{- else }} + affinity: + podAffinity: {{- include "common.affinities.pods" (dict "type" .Values.secondary.podAffinityPreset "component" "secondary" "context" $) | nindent 10 }} + podAntiAffinity: {{- include "common.affinities.pods" (dict "type" .Values.secondary.podAntiAffinityPreset "component" "secondary" "context" $) | nindent 10 }} + nodeAffinity: {{- include "common.affinities.nodes" (dict "type" .Values.secondary.nodeAffinityPreset.type "key" .Values.secondary.nodeAffinityPreset.key "values" .Values.secondary.nodeAffinityPreset.values) | nindent 10 }} + {{- end }} + {{- if .Values.secondary.nodeSelector }} + nodeSelector: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.nodeSelector "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.secondary.tolerations }} + tolerations: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.tolerations "context" $) | nindent 8 }} + {{- end }} + {{- if .Values.priorityClassName }} + priorityClassName: {{ .Values.priorityClassName | quote }} + {{- end }} + {{- if .Values.secondary.podSecurityContext.enabled }} + securityContext: {{- omit .Values.secondary.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + {{- if or .Values.secondary.initContainers (and .Values.secondary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.secondary.persistence.enabled) }} + initContainers: + {{- if .Values.secondary.initContainers }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.initContainers "context" $) | nindent 8 }} + {{- end }} + {{- if and .Values.secondary.podSecurityContext.enabled .Values.volumePermissions.enabled .Values.secondary.persistence.enabled }} + - name: volume-permissions + image: {{ include "mysql.volumePermissions.image" . 
}} + imagePullPolicy: {{ .Values.volumePermissions.image.pullPolicy | quote }} + command: + - /bin/bash + - -ec + - | + chown -R {{ .Values.secondary.containerSecurityContext.runAsUser }}:{{ .Values.secondary.podSecurityContext.fsGroup }} /bitnami/mysql + securityContext: + runAsUser: 0 + {{- if .Values.volumePermissions.resources }} + resources: {{- toYaml .Values.volumePermissions.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- end }} + {{- end }} + containers: + - name: mysql + image: {{ include "mysql.image" . }} + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + {{- if .Values.secondary.containerSecurityContext.enabled }} + securityContext: {{- omit .Values.secondary.containerSecurityContext "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + {{- else if .Values.secondary.command }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.command "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else if .Values.secondary.args }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.args "context" $) | nindent 12 }} + {{- end }} + env: + - name: BITNAMI_DEBUG + value: {{ ternary "true" "false" (or .Values.image.debug .Values.diagnosticMode.enabled) | quote }} + - name: MYSQL_REPLICATION_MODE + value: "slave" + - name: MYSQL_MASTER_HOST + value: {{ include "mysql.primary.fullname" . 
}} + - name: MYSQL_MASTER_PORT_NUMBER + value: {{ .Values.primary.service.port | quote }} + - name: MYSQL_MASTER_ROOT_USER + value: "root" + - name: MYSQL_REPLICATION_USER + value: {{ .Values.auth.replicationUser | quote }} + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_MASTER_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + - name: MYSQL_REPLICATION_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysql/secrets/mysql-replication-password" .Values.auth.customPasswordFiles.replicator }} + {{- else }} + - name: MYSQL_MASTER_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-root-password + - name: MYSQL_REPLICATION_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-replication-password + {{- end }} + {{- if .Values.secondary.extraFlags }} + - name: MYSQL_EXTRA_FLAGS + value: "{{ .Values.secondary.extraFlags }}" + {{- end }} + {{- if .Values.secondary.extraEnvVars }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraEnvVars "context" $) | nindent 12 }} + {{- end }} + {{- if or .Values.secondary.extraEnvVarsCM .Values.secondary.extraEnvVarsSecret }} + envFrom: + {{- if .Values.secondary.extraEnvVarsCM }} + - configMapRef: + name: {{ .Values.secondary.extraEnvVarsCM }} + {{- end }} + {{- if .Values.secondary.extraEnvVarsSecret }} + - secretRef: + name: {{ .Values.secondary.extraEnvVarsSecret }} + {{- end }} + {{- end }} + ports: + - name: mysql + containerPort: 3306 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.secondary.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.secondary.livenessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") 
+ fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customLivenessProbe }} + livenessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customLivenessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.secondary.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.secondary.readinessProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customReadinessProbe }} + readinessProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customReadinessProbe "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.secondary.startupProbe.enabled }} + startupProbe: {{- omit .Values.secondary.startupProbe "enabled" | toYaml | nindent 12 }} + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_MASTER_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_MASTER_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_MASTER_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + {{- else if .Values.secondary.customStartupProbe }} + startupProbe: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.customStartupProbe "context" $) | nindent 12 }} + {{- end }} + {{- end }} + {{- if .Values.secondary.resources }} + resources: {{ toYaml .Values.secondary.resources | nindent 12 }} + {{- end }} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + {{- if or .Values.secondary.configuration .Values.secondary.existingConfigmap }} + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + mountPath: 
/opt/bitnami/mysql/secrets/ + {{- end }} + {{- if .Values.secondary.extraVolumeMounts }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraVolumeMounts "context" $) | nindent 12 }} + {{- end }} + {{- if .Values.metrics.enabled }} + - name: metrics + image: {{ include "mysql.metrics.image" . }} + imagePullPolicy: {{ .Values.metrics.image.pullPolicy | quote }} + env: + {{- if .Values.auth.usePasswordFiles }} + - name: MYSQL_ROOT_PASSWORD_FILE + value: {{ default "/opt/bitnami/mysqld-exporter/secrets/mysql-root-password" .Values.auth.customPasswordFiles.root }} + {{- else }} + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ template "mysql.secretName" . }} + key: mysql-root-password + {{- end }} + {{- if .Values.diagnosticMode.enabled }} + command: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.command "context" $) | nindent 12 }} + args: {{- include "common.tplvalues.render" (dict "value" .Values.diagnosticMode.args "context" $) | nindent 12 }} + {{- else }} + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + DATA_SOURCE_NAME="root:${password_aux}@(localhost:3306)/" /bin/mysqld_exporter {{- range .Values.metrics.extraArgs.secondary }} {{ . 
}} {{- end }} + {{- end }} + ports: + - name: metrics + containerPort: 9104 + {{- if not .Values.diagnosticMode.enabled }} + {{- if .Values.metrics.livenessProbe.enabled }} + livenessProbe: {{- omit .Values.metrics.livenessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- if .Values.metrics.readinessProbe.enabled }} + readinessProbe: {{- omit .Values.metrics.readinessProbe "enabled" | toYaml | nindent 12 }} + httpGet: + path: /metrics + port: metrics + {{- end }} + {{- end }} + {{- if .Values.metrics.resources }} + resources: {{- toYaml .Values.metrics.resources | nindent 12 }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + volumeMounts: + - name: mysql-credentials + mountPath: /opt/bitnami/mysqld-exporter/secrets/ + {{- end }} + {{- end }} + {{- if .Values.secondary.sidecars }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.sidecars "context" $) | nindent 8 }} + {{- end }} + volumes: + {{- if or .Values.secondary.configuration .Values.secondary.existingConfigmap }} + - name: config + configMap: + name: {{ include "mysql.secondary.configmapName" . }} + {{- end }} + {{- if and .Values.auth.usePasswordFiles (not .Values.auth.customPasswordFiles) }} + - name: mysql-credentials + secret: + secretName: {{ template "mysql.secretName" . }} + items: + - key: mysql-root-password + path: mysql-root-password + - key: mysql-replication-password + path: mysql-replication-password + {{- end }} + {{- if .Values.secondary.extraVolumes }} + {{- include "common.tplvalues.render" (dict "value" .Values.secondary.extraVolumes "context" $) | nindent 8 }} + {{- end }} + {{- if not .Values.secondary.persistence.enabled }} + - name: data + emptyDir: {} + {{- else }} + volumeClaimTemplates: + - metadata: + name: data + labels: {{ include "common.labels.matchLabels" . 
| nindent 10 }} + app.kubernetes.io/component: secondary + {{- if .Values.secondary.persistence.annotations }} + annotations: + {{- toYaml .Values.secondary.persistence.annotations | nindent 10 }} + {{- end }} + spec: + accessModes: + {{- range .Values.secondary.persistence.accessModes }} + - {{ . | quote }} + {{- end }} + resources: + requests: + storage: {{ .Values.secondary.persistence.size | quote }} + {{ include "common.storage.class" (dict "persistence" .Values.secondary.persistence "global" .Values.global) }} + {{- if .Values.secondary.persistence.selector }} + selector: {{- include "common.tplvalues.render" (dict "value" .Values.secondary.persistence.selector "context" $) | nindent 10 }} + {{- end -}} + {{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml new file mode 100644 index 000000000000..703d8e747b75 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc-headless.yaml @@ -0,0 +1,26 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.secondary.fullname" . }}-headless + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: {{ .Values.secondary.service.port }} + targetPort: mysql + selector: {{ include "common.labels.matchLabels" . 
| nindent 4 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml new file mode 100644 index 000000000000..74a4c6ef5fb8 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secondary/svc.yaml @@ -0,0 +1,43 @@ +{{- if eq .Values.architecture "replication" }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "mysql.secondary.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + app.kubernetes.io/component: secondary + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.secondary.service.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.secondary.service.annotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.secondary.service.type }} + {{- if and (eq .Values.secondary.service.type "ClusterIP") .Values.secondary.service.clusterIP }} + clusterIP: {{ .Values.secondary.service.clusterIP }} + {{- end }} + {{- if and .Values.secondary.service.loadBalancerIP (eq .Values.secondary.service.type "LoadBalancer") }} + loadBalancerIP: {{ .Values.secondary.service.loadBalancerIP }} + externalTrafficPolicy: {{ .Values.secondary.service.externalTrafficPolicy | quote }} + {{- end }} + {{- if and (eq .Values.secondary.service.type "LoadBalancer") .Values.secondary.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: {{- toYaml .Values.secondary.service.loadBalancerSourceRanges | nindent 4 }} + {{- end }} + ports: + - name: mysql + port: {{ .Values.secondary.service.port }} + protocol: TCP + targetPort: mysql + {{- 
if (and (or (eq .Values.secondary.service.type "NodePort") (eq .Values.secondary.service.type "LoadBalancer")) .Values.secondary.service.nodePort) }} + nodePort: {{ .Values.secondary.service.nodePort }} + {{- else if eq .Values.secondary.service.type "ClusterIP" }} + nodePort: null + {{- end }} + selector: {{ include "common.labels.matchLabels" . | nindent 4 }} + app.kubernetes.io/component: secondary +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml b/pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml new file mode 100644 index 000000000000..9412fc35a5bc --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/secrets.yaml @@ -0,0 +1,21 @@ +{{- if eq (include "mysql.createSecret" .) "true" }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "common.names.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + mysql-root-password: {{ include "mysql.root.password" . | b64enc | quote }} + mysql-password: {{ include "mysql.password" . | b64enc | quote }} + {{- if eq .Values.architecture "replication" }} + mysql-replication-password: {{ include "mysql.replication.password" . 
| b64enc | quote }} + {{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml new file mode 100644 index 000000000000..59eb10409d91 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/serviceaccount.yaml @@ -0,0 +1,22 @@ +{{- if .Values.serviceAccount.create }} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "mysql.serviceAccountName" . }} + namespace: {{ .Release.Namespace }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + annotations: + {{- if .Values.serviceAccount.annotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.serviceAccount.annotations "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +{{- if (not .Values.auth.customPasswordFiles) }} +secrets: + - name: {{ template "mysql.secretName" . }} +{{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml b/pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml new file mode 100644 index 000000000000..f082dd5409d6 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/templates/servicemonitor.yaml @@ -0,0 +1,42 @@ +{{- if and .Values.metrics.enabled .Values.metrics.serviceMonitor.enabled }} +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + name: {{ include "common.names.fullname" . }} + {{- if .Values.metrics.serviceMonitor.namespace }} + namespace: {{ .Values.metrics.serviceMonitor.namespace }} + {{- else }} + namespace: {{ .Release.Namespace }} + {{- end }} + labels: {{- include "common.labels.standard" . 
| nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.additionalLabels }} + {{- include "common.tplvalues.render" (dict "value" .Values.metrics.serviceMonitor.additionalLabels "context" $) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +spec: + endpoints: + - port: metrics + {{- if .Values.metrics.serviceMonitor.interval }} + interval: {{ .Values.metrics.serviceMonitor.interval }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.scrapeTimeout }} + scrapeTimeout: {{ .Values.metrics.serviceMonitor.scrapeTimeout }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.honorLabels }} + honorLabels: {{ .Values.metrics.serviceMonitor.honorLabels }} + {{- end }} + {{- if .Values.metrics.serviceMonitor.relabellings }} + metricRelabelings: {{- toYaml .Values.metrics.serviceMonitor.relabellings | nindent 6 }} + {{- end }} + namespaceSelector: + matchNames: + - {{ .Release.Namespace }} + selector: + matchLabels: {{- include "common.labels.matchLabels" . 
| nindent 6 }} + app.kubernetes.io/component: metrics +{{- end }} diff --git a/pkg/iac/scanners/helm/test/mysql/values.schema.json b/pkg/iac/scanners/helm/test/mysql/values.schema.json new file mode 100644 index 000000000000..8021a4603600 --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/values.schema.json @@ -0,0 +1,178 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "architecture": { + "type": "string", + "title": "MySQL architecture", + "form": true, + "description": "Allowed values: `standalone` or `replication`", + "enum": ["standalone", "replication"] + }, + "auth": { + "type": "object", + "title": "Authentication configuration", + "form": true, + "required": ["database", "username", "password"], + "properties": { + "rootPassword": { + "type": "string", + "title": "MySQL root password", + "description": "Defaults to a random 10-character alphanumeric string if not set" + }, + "database": { + "type": "string", + "title": "MySQL custom database name" + }, + "username": { + "type": "string", + "title": "MySQL custom username" + }, + "password": { + "type": "string", + "title": "MySQL custom password" + }, + "replicationUser": { + "type": "string", + "title": "MySQL replication username" + }, + "replicationPassword": { + "type": "string", + "title": "MySQL replication password" + } + } + }, + "primary": { + "type": "object", + "title": "Primary database configuration", + "form": true, + "properties": { + "podSecurityContext": { + "type": "object", + "title": "MySQL primary Pod security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "fsGroup": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "primary/podSecurityContext/enabled" + } + } + } + }, + "containerSecurityContext": { + "type": "object", + "title": "MySQL primary container security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "runAsUser": 
{ + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "primary/containerSecurityContext/enabled" + } + } + } + }, + "persistence": { + "type": "object", + "title": "Enable persistence using Persistent Volume Claims", + "properties": { + "enabled": { + "type": "boolean", + "default": true, + "title": "If true, use a Persistent Volume Claim, If false, use emptyDir" + }, + "size": { + "type": "string", + "title": "Persistent Volume Size", + "form": true, + "render": "slider", + "sliderMin": 1, + "sliderUnit": "Gi", + "hidden": { + "value": false, + "path": "primary/persistence/enabled" + } + } + } + } + } + }, + "secondary": { + "type": "object", + "title": "Secondary database configuration", + "form": true, + "properties": { + "podSecurityContext": { + "type": "object", + "title": "MySQL secondary Pod security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "fsGroup": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "secondary/podSecurityContext/enabled" + } + } + } + }, + "containerSecurityContext": { + "type": "object", + "title": "MySQL secondary container security context", + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "runAsUser": { + "type": "integer", + "default": 1001, + "hidden": { + "value": false, + "path": "secondary/containerSecurityContext/enabled" + } + } + } + }, + "persistence": { + "type": "object", + "title": "Enable persistence using Persistent Volume Claims", + "properties": { + "enabled": { + "type": "boolean", + "default": true, + "title": "If true, use a Persistent Volume Claim, If false, use emptyDir" + }, + "size": { + "type": "string", + "title": "Persistent Volume Size", + "form": true, + "render": "slider", + "sliderMin": 1, + "sliderUnit": "Gi", + "hidden": { + "value": false, + "path": "secondary/persistence/enabled" + } + } + } + } + } + } + } +} \ No newline at end of file diff --git 
a/pkg/iac/scanners/helm/test/mysql/values.yaml b/pkg/iac/scanners/helm/test/mysql/values.yaml new file mode 100644 index 000000000000..3900e865955c --- /dev/null +++ b/pkg/iac/scanners/helm/test/mysql/values.yaml @@ -0,0 +1,1020 @@ +## @section Global parameters +## Global Docker image parameters +## Please, note that this will override the image parameters, including dependencies, configured to use the global value +## Current available global Docker image parameters: imageRegistry, imagePullSecrets and storageClass + +## @param global.imageRegistry Global Docker image registry +## @param global.imagePullSecrets [array] Global Docker registry secret names as an array +## @param global.storageClass Global StorageClass for Persistent Volume(s) +## +global: + imageRegistry: "" + ## E.g. + ## imagePullSecrets: + ## - myRegistryKeySecretName + ## + imagePullSecrets: [] + storageClass: "" + +## @section Common parameters + +## @param nameOverride String to partially override common.names.fullname template (will maintain the release name) +## +nameOverride: "" +## @param fullnameOverride String to fully override common.names.fullname template +## +fullnameOverride: "" +## @param clusterDomain Cluster domain +## +clusterDomain: cluster.local +## @param commonAnnotations [object] Common annotations to add to all MySQL resources (sub-charts are not considered). Evaluated as a template +## +commonAnnotations: {} +## @param commonLabels [object] Common labels to add to all MySQL resources (sub-charts are not considered). Evaluated as a template +## +commonLabels: {} +## @param extraDeploy [array] Array with extra yaml to deploy with the chart. Evaluated as a template +## +extraDeploy: [] +## @param schedulerName Use an alternate scheduler, e.g. "stork". 
+## ref: https://kubernetes.io/docs/tasks/administer-cluster/configure-multiple-schedulers/ +## +schedulerName: "" + +## Enable diagnostic mode in the deployment +## +diagnosticMode: + ## @param diagnosticMode.enabled Enable diagnostic mode (all probes will be disabled and the command will be overridden) + ## + enabled: false + ## @param diagnosticMode.command Command to override all containers in the deployment + ## + command: + - sleep + ## @param diagnosticMode.args Args to override all containers in the deployment + ## + args: + - infinity + +## @section MySQL common parameters + +## Bitnami MySQL image +## ref: https://hub.docker.com/r/bitnami/mysql/tags/ +## @param image.registry MySQL image registry +## @param image.repository MySQL image repository +## @param image.tag MySQL image tag (immutable tags are recommended) +## @param image.pullPolicy MySQL image pull policy +## @param image.pullSecrets [array] Specify docker-registry secret names as an array +## @param image.debug Specify if debug logs should be enabled +## +image: + registry: docker.io + repository: bitnami/mysql + tag: 8.0.28-debian-10-r23 + ## Specify a imagePullPolicy + ## Defaults to 'Always' if image tag is 'latest', else set to 'IfNotPresent' + ## ref: https://kubernetes.io/docs/user-guide/images/#pre-pulling-images + ## + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets (secrets must be manually created in the namespace) + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## Example: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## Set to true if you would like to see extra information on logs + ## It turns BASH and/or NAMI debugging in the image + ## + debug: false +## @param architecture MySQL architecture (`standalone` or `replication`) +## +architecture: standalone +## MySQL Authentication parameters +## +auth: + ## @param auth.rootPassword Password for the `root` user. 
Ignored if existing secret is provided + ## ref: https://github.com/bitnami/bitnami-docker-mysql#setting-the-root-password-on-first-run + ## + rootPassword: "" + ## @param auth.database Name for a custom database to create + ## ref: https://github.com/bitnami/bitnami-docker-mysql/blob/master/README.md#creating-a-database-on-first-run + ## + database: my_database + ## @param auth.username Name for a custom user to create + ## ref: https://github.com/bitnami/bitnami-docker-mysql/blob/master/README.md#creating-a-database-user-on-first-run + ## + username: "" + ## @param auth.password Password for the new user. Ignored if existing secret is provided + ## + password: "" + ## @param auth.replicationUser MySQL replication user + ## ref: https://github.com/bitnami/bitnami-docker-mysql#setting-up-a-replication-cluster + ## + replicationUser: replicator + ## @param auth.replicationPassword MySQL replication user password. Ignored if existing secret is provided + ## + replicationPassword: "" + ## @param auth.existingSecret Use existing secret for password details. The secret has to contain the keys `mysql-root-password`, `mysql-replication-password` and `mysql-password` + ## NOTE: When it's set the auth.rootPassword, auth.password, auth.replicationPassword are ignored. + ## + existingSecret: "" + ## @param auth.forcePassword Force users to specify required passwords + ## + forcePassword: false + ## @param auth.usePasswordFiles Mount credentials as files instead of using an environment variable + ## + usePasswordFiles: false + ## @param auth.customPasswordFiles [object] Use custom password files when `auth.usePasswordFiles` is set to `true`. 
Define path for keys `root` and `user`, also define `replicator` if `architecture` is set to `replication` + ## Example: + ## customPasswordFiles: + ## root: /vault/secrets/mysql-root + ## user: /vault/secrets/mysql-user + ## replicator: /vault/secrets/mysql-replicator + ## + customPasswordFiles: {} +## @param initdbScripts [object] Dictionary of initdb scripts +## Specify dictionary of scripts to be run at first boot +## Example: +## initdbScripts: +## my_init_script.sh: | +## #!/bin/bash +## echo "Do something." +## +initdbScripts: {} +## @param initdbScriptsConfigMap ConfigMap with the initdb scripts (Note: Overrides `initdbScripts`) +## +initdbScriptsConfigMap: "" + +## @section MySQL Primary parameters + +primary: + ## @param primary.command [array] Override default container command on MySQL Primary container(s) (useful when using custom images) + ## + command: [] + ## @param primary.args [array] Override default container args on MySQL Primary container(s) (useful when using custom images) + ## + args: [] + ## @param primary.hostAliases [array] Deployment pod host aliases + ## https://kubernetes.io/docs/concepts/services-networking/add-entries-to-pod-etc-hosts-with-host-aliases/ + ## + hostAliases: [] + ## @param primary.configuration [string] Configure MySQL Primary with a custom my.cnf file + ## ref: https://mysql.com/kb/en/mysql/configuring-mysql-with-mycnf/#example-of-configuration-file + ## + configuration: |- + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + plugin_dir=/opt/bitnami/mysql/lib/plugin + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + 
socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + plugin_dir=/opt/bitnami/mysql/lib/plugin + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + ## @param primary.existingConfigmap Name of existing ConfigMap with MySQL Primary configuration. + ## NOTE: When it's set the 'configuration' parameter is ignored + ## + existingConfigmap: "" + ## @param primary.updateStrategy Update strategy type for the MySQL primary statefulset + ## ref: https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#update-strategies + ## + updateStrategy: RollingUpdate + ## @param primary.rollingUpdatePartition Partition update strategy for MySQL Primary statefulset + ## https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#partitions + ## + rollingUpdatePartition: "" + ## @param primary.podAnnotations [object] Additional pod annotations for MySQL primary pods + ## ref: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/ + ## + podAnnotations: {} + ## @param primary.podAffinityPreset MySQL primary pod affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAffinityPreset: "" + ## @param primary.podAntiAffinityPreset MySQL primary pod anti-affinity preset. Ignored if `primary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAntiAffinityPreset: soft + ## MySQL Primary node affinity preset + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity + ## + nodeAffinityPreset: + ## @param primary.nodeAffinityPreset.type MySQL primary node affinity preset type. Ignored if `primary.affinity` is set. 
Allowed values: `soft` or `hard` + ## + type: "" + ## @param primary.nodeAffinityPreset.key MySQL primary node label key to match Ignored if `primary.affinity` is set. + ## E.g. + ## key: "kubernetes.io/e2e-az-name" + ## + key: "" + ## @param primary.nodeAffinityPreset.values [array] MySQL primary node label values to match. Ignored if `primary.affinity` is set. + ## E.g. + ## values: + ## - e2e-az1 + ## - e2e-az2 + ## + values: [] + ## @param primary.affinity [object] Affinity for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity + ## Note: podAffinityPreset, podAntiAffinityPreset, and nodeAffinityPreset will be ignored when it's set + ## + affinity: {} + ## @param primary.nodeSelector [object] Node labels for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/user-guide/node-selection/ + ## + nodeSelector: {} + ## @param primary.tolerations [array] Tolerations for MySQL primary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + ## + tolerations: [] + ## MySQL primary Pod security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-pod + ## @param primary.podSecurityContext.enabled Enable security context for MySQL primary pods + ## @param primary.podSecurityContext.fsGroup Group ID for the mounted volumes' filesystem + ## + podSecurityContext: + enabled: true + fsGroup: 1001 + ## MySQL primary container security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-container + ## @param primary.containerSecurityContext.enabled MySQL primary container securityContext + ## @param primary.containerSecurityContext.runAsUser User ID for the MySQL primary container + ## + containerSecurityContext: + enabled: true + runAsUser: 1001 + ## MySQL primary container's resource requests and 
limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. + ## @param primary.resources.limits [object] The resources limits for MySQL primary containers + ## @param primary.resources.requests [object] The requested resources for MySQL primary containers + ## + resources: + ## Example: + ## limits: + ## cpu: 250m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 250m + ## memory: 256Mi + requests: {} + ## Configure extra options for liveness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.livenessProbe.enabled Enable livenessProbe + ## @param primary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param primary.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param primary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param primary.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param primary.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for readiness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.readinessProbe.enabled Enable readinessProbe + ## @param primary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param primary.readinessProbe.periodSeconds 
Period seconds for readinessProbe + ## @param primary.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe + ## @param primary.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param primary.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for startupProbe probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param primary.startupProbe.enabled Enable startupProbe + ## @param primary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe + ## @param primary.startupProbe.periodSeconds Period seconds for startupProbe + ## @param primary.startupProbe.timeoutSeconds Timeout seconds for startupProbe + ## @param primary.startupProbe.failureThreshold Failure threshold for startupProbe + ## @param primary.startupProbe.successThreshold Success threshold for startupProbe + ## + startupProbe: + enabled: true + initialDelaySeconds: 15 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 10 + successThreshold: 1 + ## @param primary.customLivenessProbe [object] Override default liveness probe for MySQL primary containers + ## + customLivenessProbe: {} + ## @param primary.customReadinessProbe [object] Override default readiness probe for MySQL primary containers + ## + customReadinessProbe: {} + ## @param primary.customStartupProbe [object] Override default startup probe for MySQL primary containers + ## + customStartupProbe: {} + ## @param primary.extraFlags MySQL primary additional command line flags + ## Can be used to specify command line flags, for example: + ## E.g. 
+ ## extraFlags: "--max-connect-errors=1000 --max_connections=155" + ## + extraFlags: "" + ## @param primary.extraEnvVars [array] Extra environment variables to be set on MySQL primary containers + ## E.g. + ## extraEnvVars: + ## - name: TZ + ## value: "Europe/Paris" + ## + extraEnvVars: [] + ## @param primary.extraEnvVarsCM Name of existing ConfigMap containing extra env vars for MySQL primary containers + ## + extraEnvVarsCM: "" + ## @param primary.extraEnvVarsSecret Name of existing Secret containing extra env vars for MySQL primary containers + ## + extraEnvVarsSecret: "" + ## Enable persistence using Persistent Volume Claims + ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes/ + ## + persistence: + ## @param primary.persistence.enabled Enable persistence on MySQL primary replicas using a `PersistentVolumeClaim`. If false, use emptyDir + ## + enabled: true + ## @param primary.persistence.existingClaim Name of an existing `PersistentVolumeClaim` for MySQL primary replicas + ## NOTE: When it's set the rest of persistence parameters are ignored + ## + existingClaim: "" + ## @param primary.persistence.storageClass MySQL primary persistent volume storage Class + ## If defined, storageClassName: + ## If set to "-", storageClassName: "", which disables dynamic provisioning + ## If undefined (the default) or set to null, no storageClassName spec is + ## set, choosing the default provisioner. 
(gp2 on AWS, standard on + ## GKE, AWS & OpenStack) + ## + storageClass: "" + ## @param primary.persistence.annotations [object] MySQL primary persistent volume claim annotations + ## + annotations: {} + ## @param primary.persistence.accessModes MySQL primary persistent volume access Modes + ## + accessModes: + - ReadWriteOnce + ## @param primary.persistence.size MySQL primary persistent volume size + ## + size: 8Gi + ## @param primary.persistence.selector [object] Selector to match an existing Persistent Volume + ## selector: + ## matchLabels: + ## app: my-app + ## + selector: {} + ## @param primary.extraVolumes [array] Optionally specify extra list of additional volumes to the MySQL Primary pod(s) + ## + extraVolumes: [] + ## @param primary.extraVolumeMounts [array] Optionally specify extra list of additional volumeMounts for the MySQL Primary container(s) + ## + extraVolumeMounts: [] + ## @param primary.initContainers [array] Add additional init containers for the MySQL Primary pod(s) + ## + initContainers: [] + ## @param primary.sidecars [array] Add additional sidecar containers for the MySQL Primary pod(s) + ## + sidecars: [] + ## MySQL Primary Service parameters + ## + service: + ## @param primary.service.type MySQL Primary K8s service type + ## + type: ClusterIP + ## @param primary.service.port MySQL Primary K8s service port + ## + port: 3306 + ## @param primary.service.nodePort MySQL Primary K8s service node port + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + ## + nodePort: "" + ## @param primary.service.clusterIP MySQL Primary K8s service clusterIP IP + ## e.g: + ## clusterIP: None + ## + clusterIP: "" + ## @param primary.service.loadBalancerIP MySQL Primary loadBalancerIP if service type is `LoadBalancer` + ## Set the LoadBalancer service type to internal only + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#internal-load-balancer + ## + loadBalancerIP: "" + ## @param 
primary.service.externalTrafficPolicy Enable client source IP preservation + ## ref https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip + ## + externalTrafficPolicy: Cluster + ## @param primary.service.loadBalancerSourceRanges [array] Addresses that are allowed when MySQL Primary service is LoadBalancer + ## https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/#restrict-access-for-loadbalancer-service + ## E.g. + ## loadBalancerSourceRanges: + ## - 10.10.10.0/24 + ## + loadBalancerSourceRanges: [] + ## @param primary.service.annotations [object] Provide any additional annotations which may be required + ## + annotations: {} + ## MySQL primary Pod Disruption Budget configuration + ## ref: https://kubernetes.io/docs/tasks/run-application/configure-pdb/ + ## + pdb: + ## @param primary.pdb.enabled Enable/disable a Pod Disruption Budget creation for MySQL primary pods + ## + enabled: false + ## @param primary.pdb.minAvailable Minimum number/percentage of MySQL primary pods that should remain scheduled + ## + minAvailable: 1 + ## @param primary.pdb.maxUnavailable Maximum number/percentage of MySQL primary pods that may be made unavailable + ## + maxUnavailable: "" + ## @param primary.podLabels [object] MySQL Primary pod label. 
If labels are same as commonLabels , this will take precedence + ## + podLabels: {} + +## @section MySQL Secondary parameters + +secondary: + ## @param secondary.replicaCount Number of MySQL secondary replicas + ## + replicaCount: 1 + ## @param secondary.hostAliases [array] Deployment pod host aliases + ## https://kubernetes.io/docs/concepts/services-networking/add-entries-to-pod-etc-hosts-with-host-aliases/ + ## + hostAliases: [] + ## @param secondary.command [array] Override default container command on MySQL Secondary container(s) (useful when using custom images) + ## + command: [] + ## @param secondary.args [array] Override default container args on MySQL Secondary container(s) (useful when using custom images) + ## + args: [] + ## @param secondary.configuration [string] Configure MySQL Secondary with a custom my.cnf file + ## ref: https://mysql.com/kb/en/mysql/configuring-mysql-with-mycnf/#example-of-configuration-file + ## + configuration: |- + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + ## @param secondary.existingConfigmap Name of existing ConfigMap with MySQL Secondary configuration. 
+ ## NOTE: When it's set the 'configuration' parameter is ignored + ## + existingConfigmap: "" + ## @param secondary.updateStrategy Update strategy type for the MySQL secondary statefulset + ## ref: https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#update-strategies + ## + updateStrategy: RollingUpdate + ## @param secondary.rollingUpdatePartition Partition update strategy for MySQL Secondary statefulset + ## https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/#partitions + ## + rollingUpdatePartition: "" + ## @param secondary.podAnnotations [object] Additional pod annotations for MySQL secondary pods + ## ref: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/ + ## + podAnnotations: {} + ## @param secondary.podAffinityPreset MySQL secondary pod affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## + podAffinityPreset: "" + ## @param secondary.podAntiAffinityPreset MySQL secondary pod anti-affinity preset. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity + ## Allowed values: soft, hard + ## + podAntiAffinityPreset: soft + ## MySQL Secondary node affinity preset + ## ref: https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity + ## + nodeAffinityPreset: + ## @param secondary.nodeAffinityPreset.type MySQL secondary node affinity preset type. Ignored if `secondary.affinity` is set. Allowed values: `soft` or `hard` + ## + type: "" + ## @param secondary.nodeAffinityPreset.key MySQL secondary node label key to match Ignored if `secondary.affinity` is set. + ## E.g. 
+ ## key: "kubernetes.io/e2e-az-name" + ## + key: "" + ## @param secondary.nodeAffinityPreset.values [array] MySQL secondary node label values to match. Ignored if `secondary.affinity` is set. + ## E.g. + ## values: + ## - e2e-az1 + ## - e2e-az2 + ## + values: [] + ## @param secondary.affinity [object] Affinity for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity + ## Note: podAffinityPreset, podAntiAffinityPreset, and nodeAffinityPreset will be ignored when it's set + ## + affinity: {} + ## @param secondary.nodeSelector [object] Node labels for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/user-guide/node-selection/ + ## + nodeSelector: {} + ## @param secondary.tolerations [array] Tolerations for MySQL secondary pods assignment + ## ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + ## + tolerations: [] + ## MySQL secondary Pod security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-pod + ## @param secondary.podSecurityContext.enabled Enable security context for MySQL secondary pods + ## @param secondary.podSecurityContext.fsGroup Group ID for the mounted volumes' filesystem + ## + podSecurityContext: + enabled: true + fsGroup: 1001 + ## MySQL secondary container security context + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-the-security-context-for-a-container + ## @param secondary.containerSecurityContext.enabled MySQL secondary container securityContext + ## @param secondary.containerSecurityContext.runAsUser User ID for the MySQL secondary container + ## + containerSecurityContext: + enabled: true + runAsUser: 1001 + ## MySQL secondary container's resource requests and limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to 
leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. + ## @param secondary.resources.limits [object] The resources limits for MySQL secondary containers + ## @param secondary.resources.requests [object] The requested resources for MySQL secondary containers + ## + resources: + ## Example: + ## limits: + ## cpu: 250m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 250m + ## memory: 256Mi + requests: {} + ## Configure extra options for liveness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.livenessProbe.enabled Enable livenessProbe + ## @param secondary.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param secondary.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param secondary.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param secondary.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param secondary.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for readiness probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.readinessProbe.enabled Enable readinessProbe + ## @param secondary.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param secondary.readinessProbe.periodSeconds Period seconds for readinessProbe + ## @param secondary.readinessProbe.timeoutSeconds Timeout seconds for 
readinessProbe + ## @param secondary.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param secondary.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + ## Configure extra options for startupProbe probe + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## @param secondary.startupProbe.enabled Enable startupProbe + ## @param secondary.startupProbe.initialDelaySeconds Initial delay seconds for startupProbe + ## @param secondary.startupProbe.periodSeconds Period seconds for startupProbe + ## @param secondary.startupProbe.timeoutSeconds Timeout seconds for startupProbe + ## @param secondary.startupProbe.failureThreshold Failure threshold for startupProbe + ## @param secondary.startupProbe.successThreshold Success threshold for startupProbe + ## + startupProbe: + enabled: true + initialDelaySeconds: 15 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 15 + successThreshold: 1 + ## @param secondary.customLivenessProbe [object] Override default liveness probe for MySQL secondary containers + ## + customLivenessProbe: {} + ## @param secondary.customReadinessProbe [object] Override default readiness probe for MySQL secondary containers + ## + customReadinessProbe: {} + ## @param secondary.customStartupProbe [object] Override default startup probe for MySQL secondary containers + ## + customStartupProbe: {} + ## @param secondary.extraFlags MySQL secondary additional command line flags + ## Can be used to specify command line flags, for example: + ## E.g. + ## extraFlags: "--max-connect-errors=1000 --max_connections=155" + ## + extraFlags: "" + ## @param secondary.extraEnvVars [array] An array to add extra environment variables on MySQL secondary containers + ## E.g. 
+ ## extraEnvVars: + ## - name: TZ + ## value: "Europe/Paris" + ## + extraEnvVars: [] + ## @param secondary.extraEnvVarsCM Name of existing ConfigMap containing extra env vars for MySQL secondary containers + ## + extraEnvVarsCM: "" + ## @param secondary.extraEnvVarsSecret Name of existing Secret containing extra env vars for MySQL secondary containers + ## + extraEnvVarsSecret: "" + ## Enable persistence using Persistent Volume Claims + ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes/ + ## + persistence: + ## @param secondary.persistence.enabled Enable persistence on MySQL secondary replicas using a `PersistentVolumeClaim` + ## + enabled: true + ## @param secondary.persistence.storageClass MySQL secondary persistent volume storage Class + ## If defined, storageClassName: + ## If set to "-", storageClassName: "", which disables dynamic provisioning + ## If undefined (the default) or set to null, no storageClassName spec is + ## set, choosing the default provisioner. (gp2 on AWS, standard on + ## GKE, AWS & OpenStack) + ## + storageClass: "" + ## @param secondary.persistence.annotations [object] MySQL secondary persistent volume claim annotations + ## + annotations: {} + ## @param secondary.persistence.accessModes MySQL secondary persistent volume access Modes + ## + accessModes: + - ReadWriteOnce + ## @param secondary.persistence.size MySQL secondary persistent volume size + ## + size: 8Gi + ## @param secondary.persistence.selector [object] Selector to match an existing Persistent Volume + ## selector: + ## matchLabels: + ## app: my-app + ## + selector: {} + ## @param secondary.extraVolumes [array] Optionally specify extra list of additional volumes to the MySQL secondary pod(s) + ## + extraVolumes: [] + ## @param secondary.extraVolumeMounts [array] Optionally specify extra list of additional volumeMounts for the MySQL secondary container(s) + ## + extraVolumeMounts: [] + ## @param secondary.initContainers [array] Add additional init containers for 
the MySQL secondary pod(s) + ## + initContainers: [] + ## @param secondary.sidecars [array] Add additional sidecar containers for the MySQL secondary pod(s) + ## + sidecars: [] + ## MySQL Secondary Service parameters + ## + service: + ## @param secondary.service.type MySQL secondary Kubernetes service type + ## + type: ClusterIP + ## @param secondary.service.port MySQL secondary Kubernetes service port + ## + port: 3306 + ## @param secondary.service.nodePort MySQL secondary Kubernetes service node port + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + ## + nodePort: "" + ## @param secondary.service.clusterIP MySQL secondary Kubernetes service clusterIP IP + ## e.g: + ## clusterIP: None + ## + clusterIP: "" + ## @param secondary.service.loadBalancerIP MySQL secondary loadBalancerIP if service type is `LoadBalancer` + ## Set the LoadBalancer service type to internal only + ## ref: https://kubernetes.io/docs/concepts/services-networking/service/#internal-load-balancer + ## + loadBalancerIP: "" + ## @param secondary.service.externalTrafficPolicy Enable client source IP preservation + ## ref https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip + ## + externalTrafficPolicy: Cluster + ## @param secondary.service.loadBalancerSourceRanges [array] Addresses that are allowed when MySQL secondary service is LoadBalancer + ## https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/#restrict-access-for-loadbalancer-service + ## E.g. 
+ ## loadBalancerSourceRanges: + ## - 10.10.10.0/24 + ## + loadBalancerSourceRanges: [] + ## @param secondary.service.annotations [object] Provide any additional annotations which may be required + ## + annotations: {} + ## MySQL secondary Pod Disruption Budget configuration + ## ref: https://kubernetes.io/docs/tasks/run-application/configure-pdb/ + ## + pdb: + ## @param secondary.pdb.enabled Enable/disable a Pod Disruption Budget creation for MySQL secondary pods + ## + enabled: false + ## @param secondary.pdb.minAvailable Minimum number/percentage of MySQL secondary pods that should remain scheduled + ## + minAvailable: 1 + ## @param secondary.pdb.maxUnavailable Maximum number/percentage of MySQL secondary pods that may be made unavailable + ## + maxUnavailable: "" + ## @param secondary.podLabels [object] Additional pod labels for MySQL secondary pods + ## + podLabels: {} + +## @section RBAC parameters + +## MySQL pods ServiceAccount +## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/ +## +serviceAccount: + ## @param serviceAccount.create Enable the creation of a ServiceAccount for MySQL pods + ## + create: true + ## @param serviceAccount.name Name of the created ServiceAccount + ## If not set and create is true, a name is generated using the mysql.fullname template + ## + name: "" + ## @param serviceAccount.annotations [object] Annotations for MySQL Service Account + ## + annotations: {} +## Role Based Access +## ref: https://kubernetes.io/docs/admin/authorization/rbac/ +## +rbac: + ## @param rbac.create Whether to create & use RBAC resources or not + ## + create: false + +## @section Network Policy + +## MySQL Nework Policy configuration +## +networkPolicy: + ## @param networkPolicy.enabled Enable creation of NetworkPolicy resources + ## + enabled: false + ## @param networkPolicy.allowExternal The Policy model to apply. 
+ ## When set to false, only pods with the correct + ## client label will have network access to the port MySQL is listening + ## on. When true, MySQL will accept connections from any source + ## (with the correct destination port). + ## + allowExternal: true + ## @param networkPolicy.explicitNamespacesSelector [object] A Kubernetes LabelSelector to explicitly select namespaces from which ingress traffic could be allowed to MySQL + ## If explicitNamespacesSelector is missing or set to {}, only client Pods that are in the networkPolicy's namespace + ## and that match other criteria, the ones that have the good label, can reach the DB. + ## But sometimes, we want the DB to be accessible to clients from other namespaces, in this case, we can use this + ## LabelSelector to select these namespaces, note that the networkPolicy's namespace should also be explicitly added. + ## + ## Example: + ## explicitNamespacesSelector: + ## matchLabels: + ## role: frontend + ## matchExpressions: + ## - {key: role, operator: In, values: [frontend]} + ## + explicitNamespacesSelector: {} + +## @section Volume Permissions parameters + +## Init containers parameters: +## volumePermissions: Change the owner and group of the persistent volume mountpoint to runAsUser:fsGroup values from the securityContext section. 
+## +volumePermissions: + ## @param volumePermissions.enabled Enable init container that changes the owner and group of the persistent volume(s) mountpoint to `runAsUser:fsGroup` + ## + enabled: false + ## @param volumePermissions.image.registry Init container volume-permissions image registry + ## @param volumePermissions.image.repository Init container volume-permissions image repository + ## @param volumePermissions.image.tag Init container volume-permissions image tag (immutable tags are recommended) + ## @param volumePermissions.image.pullPolicy Init container volume-permissions image pull policy + ## @param volumePermissions.image.pullSecrets [array] Specify docker-registry secret names as an array + ## + image: + registry: docker.io + repository: bitnami/bitnami-shell + tag: 10-debian-10-r349 + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets. + ## Secrets must be manually created in the namespace. + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## e.g: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## @param volumePermissions.resources [object] Init container volume-permissions resources + ## + resources: {} + +## @section Metrics parameters + +## Mysqld Prometheus exporter parameters +## +metrics: + ## @param metrics.enabled Start a side-car prometheus exporter + ## + enabled: false + ## @param metrics.image.registry Exporter image registry + ## @param metrics.image.repository Exporter image repository + ## @param metrics.image.tag Exporter image tag (immutable tags are recommended) + ## @param metrics.image.pullPolicy Exporter image pull policy + ## @param metrics.image.pullSecrets [array] Specify docker-registry secret names as an array + ## + image: + registry: docker.io + repository: bitnami/mysqld-exporter + tag: 0.13.0-debian-10-r256 + pullPolicy: IfNotPresent + ## Optionally specify an array of imagePullSecrets. 
+ ## Secrets must be manually created in the namespace. + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ + ## e.g: + ## pullSecrets: + ## - myRegistryKeySecretName + ## + pullSecrets: [] + ## MySQL Prometheus exporter service parameters + ## Mysqld Prometheus exporter liveness and readiness probes + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.service.type Kubernetes service type for MySQL Prometheus Exporter + ## @param metrics.service.port MySQL Prometheus Exporter service port + ## @param metrics.service.annotations [object] Prometheus exporter service annotations + ## + service: + type: ClusterIP + port: 9104 + annotations: + prometheus.io/scrape: "true" + prometheus.io/port: "{{ .Values.metrics.service.port }}" + ## @param metrics.extraArgs.primary [array] Extra args to be passed to mysqld_exporter on Primary pods + ## @param metrics.extraArgs.secondary [array] Extra args to be passed to mysqld_exporter on Secondary pods + ## ref: https://github.com/prometheus/mysqld_exporter/ + ## E.g. 
+ ## - --collect.auto_increment.columns + ## - --collect.binlog_size + ## - --collect.engine_innodb_status + ## - --collect.engine_tokudb_status + ## - --collect.global_status + ## - --collect.global_variables + ## - --collect.info_schema.clientstats + ## - --collect.info_schema.innodb_metrics + ## - --collect.info_schema.innodb_tablespaces + ## - --collect.info_schema.innodb_cmp + ## - --collect.info_schema.innodb_cmpmem + ## - --collect.info_schema.processlist + ## - --collect.info_schema.processlist.min_time + ## - --collect.info_schema.query_response_time + ## - --collect.info_schema.tables + ## - --collect.info_schema.tables.databases + ## - --collect.info_schema.tablestats + ## - --collect.info_schema.userstats + ## - --collect.perf_schema.eventsstatements + ## - --collect.perf_schema.eventsstatements.digest_text_limit + ## - --collect.perf_schema.eventsstatements.limit + ## - --collect.perf_schema.eventsstatements.timelimit + ## - --collect.perf_schema.eventswaits + ## - --collect.perf_schema.file_events + ## - --collect.perf_schema.file_instances + ## - --collect.perf_schema.indexiowaits + ## - --collect.perf_schema.tableiowaits + ## - --collect.perf_schema.tablelocks + ## - --collect.perf_schema.replication_group_member_stats + ## - --collect.slave_status + ## - --collect.slave_hosts + ## - --collect.heartbeat + ## - --collect.heartbeat.database + ## - --collect.heartbeat.table + ## + extraArgs: + primary: [] + secondary: [] + ## Mysqld Prometheus exporter resource requests and limits + ## ref: https://kubernetes.io/docs/user-guide/compute-resources/ + ## We usually recommend not to specify default resources and to leave this as a conscious + ## choice for the user. This also increases chances charts run on environments with little + ## resources, such as Minikube. If you do want to specify resources, uncomment the following + ## lines, adjust them as necessary, and remove the curly braces after 'resources:'. 
+ ## @param metrics.resources.limits [object] The resources limits for MySQL prometheus exporter containers + ## @param metrics.resources.requests [object] The requested resources for MySQL prometheus exporter containers + ## + resources: + ## Example: + ## limits: + ## cpu: 100m + ## memory: 256Mi + limits: {} + ## Examples: + ## requests: + ## cpu: 100m + ## memory: 256Mi + requests: {} + ## Mysqld Prometheus exporter liveness probe + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.livenessProbe.enabled Enable livenessProbe + ## @param metrics.livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## @param metrics.livenessProbe.periodSeconds Period seconds for livenessProbe + ## @param metrics.livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## @param metrics.livenessProbe.failureThreshold Failure threshold for livenessProbe + ## @param metrics.livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 120 + periodSeconds: 10 + timeoutSeconds: 1 + successThreshold: 1 + failureThreshold: 3 + ## Mysqld Prometheus exporter readiness probe + ## ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes + ## @param metrics.readinessProbe.enabled Enable readinessProbe + ## @param metrics.readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## @param metrics.readinessProbe.periodSeconds Period seconds for readinessProbe + ## @param metrics.readinessProbe.timeoutSeconds Timeout seconds for readinessProbe + ## @param metrics.readinessProbe.failureThreshold Failure threshold for readinessProbe + ## @param metrics.readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 1 + successThreshold: 1 + failureThreshold: 3 + ## Prometheus Service 
Monitor + ## ref: https://github.com/coreos/prometheus-operator + ## + serviceMonitor: + ## @param metrics.serviceMonitor.enabled Create ServiceMonitor Resource for scraping metrics using PrometheusOperator + ## + enabled: false + ## @param metrics.serviceMonitor.namespace Specify the namespace in which the serviceMonitor resource will be created + ## + namespace: "" + ## @param metrics.serviceMonitor.interval Specify the interval at which metrics should be scraped + ## + interval: 30s + ## @param metrics.serviceMonitor.scrapeTimeout Specify the timeout after which the scrape is ended + ## e.g: + ## scrapeTimeout: 30s + ## + scrapeTimeout: "" + ## @param metrics.serviceMonitor.relabellings [array] Specify Metric Relabellings to add to the scrape endpoint + ## + relabellings: [] + ## @param metrics.serviceMonitor.honorLabels Specify honorLabels parameter to add the scrape endpoint + ## + honorLabels: false + ## @param metrics.serviceMonitor.additionalLabels [object] Used to pass Labels that are used by the Prometheus installed in your cluster to select Service Monitors to work with + ## ref: https://github.com/coreos/prometheus-operator/blob/master/Documentation/api.md#prometheusspec + ## + additionalLabels: {} diff --git a/pkg/iac/scanners/helm/test/option_test.go b/pkg/iac/scanners/helm/test/option_test.go new file mode 100644 index 000000000000..200411e3606f --- /dev/null +++ b/pkg/iac/scanners/helm/test/option_test.go @@ -0,0 +1,167 @@ +package test + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" +) + +func Test_helm_parser_with_options_with_values_file(t *testing.T) { + + tests := []struct { + testName string + chartName string + valuesFile string + }{ + { + testName: "Parsing directory 'testchart'", + chartName: 
"testchart", + valuesFile: "values/values.yaml", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if test.valuesFile != "" { + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + } + + helmParser := parser.New(chartName, opts...) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.ReplaceAll(string(expectedContent), "\r\n", "\n") + cleanActual := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} + +func Test_helm_parser_with_options_with_set_value(t *testing.T) { + + tests := []struct { + testName string + chartName string + valuesFile string + values string + }{ + { + testName: "Parsing directory 'testchart'", + chartName: "testchart", + values: "securityContext.runAsUser=0", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if test.valuesFile != "" { + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) + } + + if test.values != "" { + opts = append(opts, parser.OptionWithValues(test.values)) + } + + helmParser := parser.New(chartName, opts...) 
+ err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.ReplaceAll(string(expectedContent), "\r\n", "\n") + cleanActual := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} + +func Test_helm_parser_with_options_with_api_versions(t *testing.T) { + + tests := []struct { + testName string + chartName string + apiVersions []string + }{ + { + testName: "Parsing directory 'with-api-version'", + chartName: "with-api-version", + apiVersions: []string{"policy/v1/PodDisruptionBudget"}, + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + if len(test.apiVersions) > 0 { + opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) + } + + helmParser := parser.New(chartName, opts...) 
+ err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 1) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.TrimSpace(strings.ReplaceAll(string(expectedContent), "\r\n", "\n")) + cleanActual := strings.TrimSpace(strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n")) + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} diff --git a/pkg/iac/scanners/helm/test/parser_test.go b/pkg/iac/scanners/helm/test/parser_test.go new file mode 100644 index 000000000000..6d9f5ad0cff3 --- /dev/null +++ b/pkg/iac/scanners/helm/test/parser_test.go @@ -0,0 +1,199 @@ +package test + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_helm_parser(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Parsing directory 'testchart'", + chartName: "testchart", + }, + { + testName: "Parsing directory with tarred dependency", + chartName: "with-tarred-dep", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + helmParser := parser.New(chartName) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 3) + + for _, manifest := range manifests 
{ + expectedPath := filepath.Join("testdata", "expected", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + got := strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n") + assert.Equal(t, strings.ReplaceAll(string(expectedContent), "\r\n", "\n"), got) + } + }) + } +} + +func Test_helm_parser_where_name_non_string(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Scanning chart with integer for name", + chartName: "numberName", + }, + } + + for _, test := range tests { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + helmParser := parser.New(chartName) + err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + } +} + +func Test_tar_is_chart(t *testing.T) { + + tests := []struct { + testName string + archiveFile string + isHelmChart bool + }{ + { + testName: "standard tarball", + archiveFile: "mysql-8.8.26.tar", + isHelmChart: true, + }, + { + testName: "gzip tarball with tar.gz extension", + archiveFile: "mysql-8.8.26.tar.gz", + isHelmChart: true, + }, + { + testName: "broken gzip tarball with tar.gz extension", + archiveFile: "aws-cluster-autoscaler-bad.tar.gz", + isHelmChart: true, + }, + { + testName: "gzip tarball with tgz extension", + archiveFile: "mysql-8.8.26.tgz", + isHelmChart: true, + }, + { + testName: "gzip tarball that has nothing of interest in it", + archiveFile: "nope.tgz", + isHelmChart: false, + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + testPath := filepath.Join("testdata", test.archiveFile) + file, err := os.Open(testPath) + defer func() { _ = file.Close() }() + require.NoError(t, err) + + assert.Equal(t, test.isHelmChart, detection.IsHelmChartArchive(test.archiveFile, file)) + + _ = file.Close() + } +} + +func Test_helm_tarball_parser(t *testing.T) { + + tests := []struct { + 
testName string + chartName string + archiveFile string + }{ + { + testName: "standard tarball", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tar", + }, + { + testName: "gzip tarball with tar.gz extension", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tar.gz", + }, + { + testName: "gzip tarball with tgz extension", + chartName: "mysql", + archiveFile: "mysql-8.8.26.tgz", + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + + testPath := filepath.Join("testdata", test.archiveFile) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveFile) + require.NoError(t, copyArchive(testPath, testFileName)) + + testFs := os.DirFS(testTemp) + + helmParser := parser.New(test.archiveFile) + err := helmParser.ParseFS(context.TODO(), testFs, ".") + require.NoError(t, err) + + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 6) + + oneOf := []string{ + "configmap.yaml", + "statefulset.yaml", + "svc-headless.yaml", + "svc.yaml", + "secrets.yaml", + "serviceaccount.yaml", + } + + for _, manifest := range manifests { + filename := filepath.Base(manifest.TemplateFilePath) + assert.Contains(t, oneOf, filename) + + if strings.HasSuffix(manifest.TemplateFilePath, "secrets.yaml") { + continue + } + expectedPath := filepath.Join("testdata", "expected", test.chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + assert.Equal(t, strings.ReplaceAll(string(expectedContent), "\r\n", "\n"), strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n")) + } + } +} diff --git a/pkg/iac/scanners/helm/test/scanner_test.go b/pkg/iac/scanners/helm/test/scanner_test.go new file mode 100644 index 000000000000..5e4178c7120d --- /dev/null +++ b/pkg/iac/scanners/helm/test/scanner_test.go @@ -0,0 +1,265 @@ +package test + +import ( + "context" + "io" + "os" + "path/filepath" + "sort" + "testing" + + 
"github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_helm_scanner_with_archive(t *testing.T) { + + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'mysql-8.8.26.tar'", + chartName: "mysql", + path: filepath.Join("testdata", "mysql-8.8.26.tar"), + archiveName: "mysql-8.8.26.tar", + }, + } + + for _, test := range tests { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + testFs := os.DirFS(testTemp) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 19, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + assert.Len(t, errorCodes, 14) + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", + }, errorCodes) + } +} + +func Test_helm_scanner_with_missing_name_can_recover(t *testing.T) { + + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'aws-cluster-autoscaler-bad.tar.gz'", + chartName: "aws-cluster-autoscaler", + path: filepath.Join("testdata", 
"aws-cluster-autoscaler-bad.tar.gz"), + archiveName: "aws-cluster-autoscaler-bad.tar.gz", + }, + } + + for _, test := range tests { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + testFs := os.DirFS(testTemp) + _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + } +} + +func Test_helm_scanner_with_dir(t *testing.T) { + + tests := []struct { + testName string + chartName string + }{ + { + testName: "Parsing directory testchart'", + chartName: "testchart", + }, + } + + for _, test := range tests { + + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testFs := os.DirFS(filepath.Join("testdata", test.chartName)) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 17, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", + "AVD-KSV-0117", + }, errorCodes) + } +} + +func Test_helm_scanner_with_custom_policies(t *testing.T) { + regoRule := ` +package user.kubernetes.ID001 + + +__rego_metadata__ := { + "id": "ID001", + "avd_id": "AVD-USR-ID001", + "title": "Services not 
allowed", + "severity": "LOW", + "description": "Services are not allowed because of some reasons.", +} + +__rego_input__ := { + "selector": [ + {"type": "kubernetes"}, + ], +} + +deny[res] { + input.kind == "Service" + msg := sprintf("Found service '%s' but services are not allowed", [input.metadata.name]) + res := result.new(msg, input) +} +` + tests := []struct { + testName string + chartName string + path string + archiveName string + }{ + { + testName: "Parsing tarball 'mysql-8.8.26.tar'", + chartName: "mysql", + path: filepath.Join("testdata", "mysql-8.8.26.tar"), + archiveName: "mysql-8.8.26.tar", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + t.Logf("Running test: %s", test.testName) + + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithPolicyNamespaces("user")) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + policyDirName := filepath.Join(testTemp, "rules") + require.NoError(t, os.Mkdir(policyDirName, 0o700)) + require.NoError(t, os.WriteFile(filepath.Join(policyDirName, "rule.rego"), []byte(regoRule), 0o600)) + + testFs := os.DirFS(testTemp) + + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 21, len(failed)) + + visited := make(map[string]bool) + var errorCodes []string + for _, result := range failed { + id := result.Flatten().RuleID + if _, exists := visited[id]; !exists { + visited[id] = true + errorCodes = append(errorCodes, id) + } + } + assert.Len(t, errorCodes, 15) + + sort.Strings(errorCodes) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + 
"AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", "AVD-USR-ID001", + }, errorCodes) + }) + } +} + +func copyArchive(src, dst string) error { + in, err := os.Open(src) + if err != nil { + return err + } + defer func() { _ = in.Close() }() + + out, err := os.Create(dst) + if err != nil { + return err + } + defer func() { _ = out.Close() }() + + if _, err := io.Copy(out, in); err != nil { + return err + } + return nil +} + +func Test_helm_chart_with_templated_name(t *testing.T) { + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + testFs := os.DirFS(filepath.Join("testdata", "templated-name")) + _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) +} diff --git a/pkg/iac/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz b/pkg/iac/scanners/helm/test/testdata/aws-cluster-autoscaler-bad.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..a66f228c9851f1f7d92cc62cedc935c99558808e GIT binary patch literal 4054 zcmV;{4=L~;iwFP!000001MM4YbKAHvpZP0LIoFF_lag%9&Zy(g_-yy;X&xFom&}cu zi69D+P?H3Y0OjbC^tX2x07*~}%enZHxmNffi6C|XEcT5Ry_J3# zE}6EyAxkqBQpJW{qOzPphYT~GDm~!rymxtidh+W0j9ej~NOCK(8RtCW@5^R@C2(^ zBp6G^Jb@*Sg-RpuTOaS*wnF}=G|HKLAO)~S{@a~)yO96ZVdn_tfB*1kBmZBcIl}s* zmenq~ehUJ#1tMc*O^(~TgOHfMzv90_U)jNvqQe7Dx@2Q5e!27+$dC`cJf>rY5}2`6 zNUlUS>yn$NG8l(hz{L_FgRzQ^K^wJ#!vH5r^C;?xh=%}qF?u0XFJqD=%JbNJmC=)I z42%Bo$@8|h$O}+JB%%WrNiq@{NyQL0Ao)0n5dubBvQedoDh+^Z8H!Lga|x>%=OExs z)M~zD5erfMH38bgt}ERmGBYX6aFQfKQ5<6y0$YXE(o>NqN|OfGYgG}xM$$z*=1OZO zBx9K7Fs{{#3$n6U(`moedd_uU#(n{Lyf1{ODS0MtlQS048IFvhlVwvL>cgOOD+Yw- z94>lY(r9>yb&EXXQV}sC7d=9UL)e4_B~KN(o$zo%)Pxa>l5pssQVxWVn1THaGFv3F zxdrD5<7njbRBANvQS@E(3~NGHBuCqs zY?cD+o)$X?3f=G&IY^Tkt=TgHguH^YMkXdB8cAl~T+IL|?untadjDGs@^5S_^grw^ zjVM~i2bkkC{QpA#@3js#{{Po#YtP>b&X>3X>+C=NuAToLgarE=`~NlCod18r6L4g& z%$a*JQ*=lb?Rp;d1CB}fA5+gZXp=QjcOy9~ 
zY8H>#wMz{`=5$&;GOSd@8XGwWR;sC|1%{fc!sedCz0TqugaZ|238N2BB#QC`&ZnBi zUq;*|6A@x^05$!jyMihw007;E*Kqh+cgyxCV6(C2DrRM87TD^h8Lr-`Zx<`%>Ur8-1s(_5a>YSd@a|4^;Yl#s96= z!9x7MxAFhJO8fAkxkIKr20mwsjCjN_WH^qIEa3#$f2XOVqvnox{yqi!sq?$ ztsT3-v*Ua+WFt^fg)87!y)=(h&G{=u6&_wrS(bru1yKb(kc8xLU;pIsRX!T=_oU%h z$WUq`7273Ru=Z z%WSpTC*X*{3ek+2v^`){0=CX2c`}%38l1hj!m)VciEh*O5^_{-XR(XN8ao)Nd()*_y__UH5eM~9pG zkFV3V9=LhR4Llt4r z2fA6NG&IqatG9J8xT9nYb{In=;zy(brt5e`fW2!Y_ytUN=KJQ`3i-c0KRJ7T9>l|k z;IX^qzrELK9nHyq=iq2>BmZBcZLO-M@^*F}P>MfWfOmF0@A_3Wk~kw_HVCobKDoQ3 z%e7{oS=F^oAe!maVx)c%f9uo+mV!bm8l!4Dd#F{Jg zhd&}t?G>W&-b@&{o_(jgUt|KLnaNwM;_<2gb_RjJPvuxEACtB$>v&q`xhms9Y`Bk$ zL}g-_>&h3; z)kjlItleO&9`)!ebhhlNlgWdlS;+dxa_78PeCr4>B-zmnX2Ap()itslzj11Pp{e z58z;2Nq>Q5`Viq{bf!g^D+!}OYAiE$NgtV~zf1a-SNeKf-*QqPr)D7ADa~LUP(tDw ztrAG7lqP7>f>51s-}fQv>{n2@WF1&39=12F+=znPJreM*yNn;px*E$53PHHIRy{fLtge2NOEQ zTu0wtAfVKaa}8Ran$AuD2QItX=<94qo}gaqpBk0{>HRyXIQE``m&wQzB(wkwnPoIh z0TFYB3qxf%$k>#t*|rt3)7tIOa&nkV>1q8A=8n6UJMLVHtCM!O-RdsNFj;mD($qQ#CQESG|p1RU*<{gPB61RG)(rMdV>&Z_?>=qEkY^L-zl%1>|6+FifZskJ@~ zSn%YC&7i#iQU&B^us}N~dOL*;a_wrc9Cu+C0So-(aUL(3M&d2smrYX`s|pYdw2QVmNgfZ-LXS-R55wDfw7t2*p@^pFD2u|$pKU#PIYa%b z7Xi_s!o}QDjy{>X#bB=B6Esa_xj`RvWuH80w~(cBPy*Y4Z?SG~^?r+@a3o~HD&Vrh zO`m01Q?4O3b?dow{CeErq|u9u?SH9d|b13Q#F0p9?nL|{5+ zdtpthO3QG9sH&Ymy*Rtn+F0D8VGlz)>$ zv@T3hUG(2`f+B*!0MZ)dJ}DDxI1yTAFp5_x3(-k40$3Uia2dWI!U5fADv088rHCnh zK6(Dkgc4MiE*PBK07e#bU%Rd<_`u5o$kG)Ci{+u(MZXs)AoLXxu8Sr!6cOLVUDX2T z10-_8X2;t4UGJ7Z-#VTjY@_eS6m^Gg_aUQ?e|gq1VmbwCOvN!+hFhlp@KfdwPM(t< zo}$zRW!h1&*_7fiojj16-7)DSRudJxK?m(!l{jekfQGh+O3(U1udHE<$QoVJ*lV@- z{C3N4A2oK(%PY`UIt+%_*X7;)huSTE<9sxNgF^%-D6IEi42{j(7 zSwcXbiwuPn(8MNRa*fHeGUtSUJ;IWkfX4J!kW5S&I@2}vm6y_gPBQ;Z0yYjNDvq{( zLYaI6>dGO`JHrZcJNV;Xp{~gP2dE^#CIUIXe2%sM|892{{{MNfssH#YZHx4%LLW-& zS##p#b^^}fAm`Cg8(oucg~-cZ>{*G0AHXFM8AoCOQH}~HFnSlmMG(Cy)8$Z3KWJ<~ zkg%~4%qMBaM*Mvde*S4YAXo;bbCs!C5&^dq>@d&HMGH&8W@hGSGZ_WbI0QLuA5}JJJ?F8~Rh0g)Kfln2~@BhHi z*EHiIm*nE?941UN@oz8@UH~eBHqAKb`r~SWP203h+q6yFv`yQzP203h`+sQv1;JB} 
I$^cLR0CfPoKmY&$ literal 0 HcmV?d00001 diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml new file mode 100644 index 000000000000..9ee00d2c2c0c --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/configmap.yaml @@ -0,0 +1,42 @@ +# Source: mysql/templates/primary/configmap.yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary +data: + my.cnf: |- + + [mysqld] + default_authentication_plugin=mysql_native_password + skip-name-resolve + explicit_defaults_for_timestamp + basedir=/opt/bitnami/mysql + plugin_dir=/opt/bitnami/mysql/lib/plugin + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + datadir=/bitnami/mysql/data + tmpdir=/opt/bitnami/mysql/tmp + max_allowed_packet=16M + bind-address=0.0.0.0 + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid + log-error=/opt/bitnami/mysql/logs/mysqld.log + character-set-server=UTF8 + collation-server=utf8_general_ci + + [client] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + default-character-set=UTF8 + plugin_dir=/opt/bitnami/mysql/lib/plugin + + [manager] + port=3306 + socket=/opt/bitnami/mysql/tmp/mysql.sock + pid-file=/opt/bitnami/mysql/tmp/mysqld.pid \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml new file mode 100644 index 000000000000..a7f5f59d831b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/statefulset.yaml @@ -0,0 +1,147 @@ +# Source: mysql/templates/primary/statefulset.yaml +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: mysql 
+ namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + serviceName: mysql + updateStrategy: + type: RollingUpdate + template: + metadata: + annotations: + checksum/configuration: 6adfba795651cd736dfa943a87e0853ce417b9fb842b57535e3b1b4e762a33fd + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + spec: + + serviceAccountName: mysql + affinity: + podAffinity: + + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchLabels: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + namespaces: + - "" + topologyKey: kubernetes.io/hostname + weight: 1 + nodeAffinity: + + securityContext: + fsGroup: 1001 + containers: + - name: mysql + image: docker.io/bitnami/mysql:8.0.28-debian-10-r23 + imagePullPolicy: "IfNotPresent" + securityContext: + runAsUser: 1001 + env: + - name: BITNAMI_DEBUG + value: "false" + - name: MYSQL_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: mysql + key: mysql-root-password + - name: MYSQL_DATABASE + value: "my_database" + ports: + - name: mysql + containerPort: 3306 + livenessProbe: + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + readinessProbe: + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 
+ successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + startupProbe: + failureThreshold: 10 + initialDelaySeconds: 15 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + exec: + command: + - /bin/bash + - -ec + - | + password_aux="${MYSQL_ROOT_PASSWORD:-}" + if [[ -f "${MYSQL_ROOT_PASSWORD_FILE:-}" ]]; then + password_aux=$(cat "$MYSQL_ROOT_PASSWORD_FILE") + fi + mysqladmin status -uroot -p"${password_aux}" + resources: + limits: {} + requests: {} + volumeMounts: + - name: data + mountPath: /bitnami/mysql + - name: config + mountPath: /opt/bitnami/mysql/conf/my.cnf + subPath: my.cnf + volumes: + - name: config + configMap: + name: mysql + volumeClaimTemplates: + - metadata: + name: data + labels: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary + spec: + accessModes: + - "ReadWriteOnce" + resources: + requests: + storage: "8Gi" \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml new file mode 100644 index 000000000000..9fe0f11c87ae --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc-headless.yaml @@ -0,0 +1,25 @@ +# Source: mysql/templates/primary/svc-headless.yaml +apiVersion: v1 +kind: Service +metadata: + name: mysql-headless + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + annotations: +spec: + type: ClusterIP + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: mysql + port: 3306 + targetPort: 
mysql + selector: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml new file mode 100644 index 000000000000..2bbdab8fe468 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/primary/svc.yaml @@ -0,0 +1,25 @@ +# Source: mysql/templates/primary/svc.yaml +apiVersion: v1 +kind: Service +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/component: primary + annotations: +spec: + type: ClusterIP + ports: + - name: mysql + port: 3306 + protocol: TCP + targetPort: mysql + nodePort: null + selector: + app.kubernetes.io/name: mysql + app.kubernetes.io/instance: mysql + app.kubernetes.io/component: primary \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml new file mode 100644 index 000000000000..ffa6909e2f04 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/secrets.yaml @@ -0,0 +1,15 @@ +# Source: mysql/templates/secrets.yaml +apiVersion: v1 +kind: Secret +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm +type: Opaque +data: + mysql-root-password: "aGZYYW1vN3V5NA==" + mysql-password: "eHR6YU9MR1VhbA==" \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml new file mode 100644 index 
000000000000..760b8bf731a5 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/mysql/templates/serviceaccount.yaml @@ -0,0 +1,14 @@ +# Source: mysql/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: mysql + namespace: + labels: + app.kubernetes.io/name: mysql + helm.sh/chart: mysql-8.8.26 + app.kubernetes.io/instance: mysql + app.kubernetes.io/managed-by: Helm + annotations: +secrets: + - name: mysql \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml new file mode 100644 index 000000000000..c41133c72716 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/deployment.yaml @@ -0,0 +1,46 @@ +# Source: testchart/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + template: + metadata: + labels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + spec: + serviceAccountName: testchart + securityContext: + {} + containers: + - name: testchart + securityContext: + runAsUser: 0 + image: "nginx:1.16.0" + imagePullPolicy: IfNotPresent + ports: + - name: http + containerPort: 80 + protocol: TCP + livenessProbe: + httpGet: + path: / + port: http + readinessProbe: + httpGet: + path: / + port: http + resources: + {} \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml new file mode 
100644 index 000000000000..6c6699f3d5dd --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/service.yaml @@ -0,0 +1,21 @@ +# Source: testchart/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..6fe44a89bb3b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/options/testchart/templates/serviceaccount.yaml @@ -0,0 +1,11 @@ +# Source: testchart/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml b/pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml new file mode 100644 index 000000000000..7c7ef5fd74d7 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/options/with-api-version/templates/pdb.yaml @@ -0,0 +1,17 @@ +# Source: with-api-version/templates/pdb.yaml +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: with-api-version + labels: + helm.sh/chart: with-api-version-0.1.0 + app.kubernetes.io/name: with-api-version + 
app.kubernetes.io/instance: with-api-version + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + selector: + matchLabels: + app.kubernetes.io/name: with-api-version + app.kubernetes.io/instance: with-api-version + maxUnavailable: 0 diff --git a/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml new file mode 100644 index 000000000000..8ace433f0c03 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/deployment.yaml @@ -0,0 +1,46 @@ +# Source: testchart/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + template: + metadata: + labels: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + spec: + serviceAccountName: testchart + securityContext: + {} + containers: + - name: testchart + securityContext: + {} + image: "nginx:1.16.0" + imagePullPolicy: IfNotPresent + ports: + - name: http + containerPort: 80 + protocol: TCP + livenessProbe: + httpGet: + path: / + port: http + readinessProbe: + httpGet: + path: / + port: http + resources: + {} \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml new file mode 100644 index 000000000000..6c6699f3d5dd --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/service.yaml @@ -0,0 +1,21 @@ +# Source: testchart/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: testchart + labels: + 
helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + type: ClusterIP + ports: + - port: 80 + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..6fe44a89bb3b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/testchart/templates/serviceaccount.yaml @@ -0,0 +1,11 @@ +# Source: testchart/templates/serviceaccount.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: testchart + labels: + helm.sh/chart: testchart-0.1.0 + app.kubernetes.io/name: testchart + app.kubernetes.io/instance: testchart + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml new file mode 100644 index 000000000000..ed57d12a6e2b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/deployment.yaml @@ -0,0 +1,78 @@ +# Source: with-tarred-dep/templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: with-tarred-dep + app.kubernetes.io/instance: with-tarred-dep + template: + metadata: + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: 
with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm + spec: + containers: + - name: metadata-service + env: + - name: METADATASERVICE_UPSTREAM_API_URL + value: '' + - name: METADATASERVICE_OIDC_AUDIENCE + value: "" + - name: METADATASERVICE_OIDC_ISSUER + value: "" + - name: METADATASERVICE_OIDC_JWKSURI + value: "" + - name: METADATASERVICE_OIDC_CLAIMS_ROLES + value: "" + - name: METADATASERVICE_OIDC_CLAIMS_USERNAME + value: "" + - name: METADATASERVICE_DB_URI + valueFrom: + secretKeyRef: + name: with-tarred-dep-dbconn + key: uri + image: "ghcr.io/metal-toolbox/hollow-metadataservice:v0.0.1" + imagePullPolicy: Always + volumeMounts: + - name: dbcerts + mountPath: "/dbcerts" + readOnly: true + ports: + - name: http + containerPort: 8000 + protocol: TCP + livenessProbe: + httpGet: + path: /healthz/liveness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + readinessProbe: + httpGet: + path: /healthz/readiness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + resources: + limits: + cpu: 4 + memory: 4Gi + requests: + cpu: 4 + memory: 4Gi + volumes: + - name: dbcerts + secret: + secretName: with-tarred-dep-crdb-ca + defaultMode: 0400 \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml new file mode 100644 index 000000000000..b48564477997 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/ingress.yaml @@ -0,0 +1,26 @@ +# Source: with-tarred-dep/templates/ingress.yaml +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + rules: + - host: metadata-service.mydomain + http: + paths: + - path: 
/($|metadata|userdata|2009-04-04) + pathType: Prefix + backend: + service: + name: with-tarred-dep + port: + name: http +# tls: [] +# hosts: +# - hollow-metadataservice.mydomain +# secretName: hollow-metadataservice-example-tls \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml new file mode 100644 index 000000000000..7d86aeb5b02b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/with-tarred-dep/templates/service.yaml @@ -0,0 +1,24 @@ +# Source: with-tarred-dep/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: with-tarred-dep + labels: + app.kubernetes.io/name: with-tarred-dep + helm.sh/chart: with-tarred-dep-0.1.1 + app.kubernetes.io/instance: with-tarred-dep + app.kubernetes.io/managed-by: Helm +spec: + ports: + - name: http + port: 80 + protocol: TCP + targetPort: 8000 + - name: https + port: 443 + protocol: TCP + targetPort: 8000 + selector: + app.kubernetes.io/name: with-tarred-dep + app.kubernetes.io/instance: with-tarred-dep + type: ClusterIP \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar b/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tar new file mode 100644 index 0000000000000000000000000000000000000000..53cb6802de42a09509727e267c7a68df02ba3bb5 GIT binary patch literal 284672 zcmeFadv_bhu`imx`6*`8KHHM706s*?4$t;pp(WYTiY0N2vUAVz&Ju>ekempwU;s&s z?fcp9ud2HGG1G$?5FkZo6NxEcrXSVS)z#JY>d|HKkKxXfi*z>cUZ$hrAFkoEi_hbO zgFn!}_>}+Md-!Pg4|@muyN^+5e}C@}yLon)Mjvcsf58I2}mv^mSBlOmsUs~7Y6v^d<^=@Y>f-P3%I z!g&{Ecg%NGn5X9qOeZnVI<~F%3dvsgWA`I^F8Z^4O5h(RFD_61>xU#Sk~BF>i}?d| zJj~P6VfG*?`sq-Q8#&- z75Vu%8&J&)G&9VSQC<*UI)GuGql*X0MV7w5>-ZPfFTNe{T zk7D9HpI*16GAJzLja;l3&uV%mFu|1B;+W31TqmzsqJF z1zBtGZ^Gr4C6uqbUw8K(Z+)uub=&)8j2Zqv1X8a9USf?)`2X(1hX)n@zyIit|6j*k zl-t}i@3!l1ue*o;Zw>PEte77r#YMXR=<#6+QaJ4o4h9GP!Qkt?hyBN24IV#yc>3tk 
z{=?IQ6i*Mn`ugnjkB`pw`f2v~@uR^XPj^q#NBx6Grw6;){?>U0R)bAyaF}fG@9yt+ zcK17bkKgR?AMX9}@X=S@KYq2h`{?U~gGc{&`)Z_UG}yx}JKC_T?g zC60#>s32rPkj#?#MLNeHNMRrH;V_wiAI$OrJ*adO-5SS$@8l ziL^y>8r44+_?TD;v?q(%EE~^NAb~(;O4I;*C=k5P&huhEyF~Xin&ZWAc#`#Jked>c z4hu-Cc>H7tS!#>A`4c0=wB6-8+C&sPGt+)eUBADd)7dP&{9CCRZ6LwK04x@o02_gq zaD{5iKG53Qi8qksY%)uZNqB}N2bBFZ87@ZIz2ZKa+7hgh_#X@o9CjPefPrV-^R9d= zV~i@HK$3Kl(WPSLFWIFa^d;zs3*b+sX7cmj@Yn~OJLIt`MLyv(gimkY&`ElHrSx?& zpAi{BHpgtu^K>}8bi^mehb~npx@VYg`j$YdpF-fzl6y?5D#;YEIDQlJ)c|EyTEKWOeoC9$JW0N^8-vWys%lCdac}DA% zc5J3#i**K&lgmWc@DVL# zaJ~6O!mkrjrx18xZB&lm5F2A;Eyau1{2?t9H*-b*A{#7*+3Y1z@QoHNX zef=QGux68OT3+wAyVSxgJ9B5ocZ*Y00h2Gf`D6#DsbXiI7C17dgHb*vCNb+6>e;c( zxHDP|=lOJ)b!2zrp9Kmi4Uzw;D;@aQ`)d=xO-6m%H(m0y`| z4xvjJLJPudE)Ec_CTfh94tFro_AomD#vO(uW2bh}xM_n1PQ0`jX4!OEpXuzpNRH?a z2+VG>ZF{5I+EtF7c1$OErZQ!`qDvvDpuX?~0xD!q@2YR1J(L~?^hGKSIis^?qolw9 zt(`OF=-$ekL)Y$mMw7#`hCXEQWGB|rs#fwL0T}wYQUkpPMd|mwd^B1>(*;5$pqPct zG8#CkvBkeOb1s9n=y5XT=g}&%A%csU9sgvS_4Bh!@NQOQ=qTH4`7-=L zcDguEw3+-23yK~LCm2L=F6ya&X@JO9)rX} zOYzd+)od8_q&u5{l5wUWeQRRBkX%kXQjcEuNF_b%CeN{9(%A!R5uMN~rH!)>tCJb9q%Fq#^A^#Mgd|xf-c#=L z>Qiq?b}_{?d+n;2%1z^s_lf{Hv=1gT>D`}dudrQ_amelPS6ZDRJlaJZgSyg%GqY^3 zk96_V!qRB~|AXCSSag#&U>}A%TtG=c>rA!=W-PGq6iJWOeBn~>J+O}*1GJ2w$Fj1i zr+xMWgD+pbd3Kom6&x_h=U+hFpj`k!RM^}Dd8F0N&lu9_5OfXj7j_{w_OW_u+tC@duj)z zE5Fq-CIZg~nx!7kOu4HbZ6-$0>d_Y7BO@nH1Ek=?DVC^Bs7LlDI@|Qn$PAQ&G1I}8 z3)>BJGDDMR=*`=LgHF7eH>4B_CXq2(0fk^VOm^OrD7ho#ZVa4E){IY_od~{~V)ROj zDI}A+!mfbMR1COKT|wul!RZNGq&vtC&B-ZHL9&QN9=->CAJW@|b~$?dR+{P-7l+BO@&+$I`!^Vs$2&0bU)X0^ z|6-DCKZV!^6E&2$p2!SOqVcN$tNQgkQCy#AGBBp1dKxQ7&*_4m$dMZ<3h_s2&l#^mZ z)1r`F!_Ew|&1TT1Cig&O(BUP#?1TXrIY`bOR=r@FWA%>Da1=5=;I{ya%WViIRts3! 
z{!en)R)8*`!a0IJMFIY&aES(t`5I0n2^9fn;4-q%)Gi-6(g1D!3@ZR@dGFFG!}=_>@xLQVRm%c?T^oxwaNek>DziA?7TbO$;QV0LOVn^;;$eB7+s>JozF5k znRO;-bf9yPK?)yQ-Fff^i@1Z%ljLXaZ$MgfIt?+wz9mlYP9|@s!vzjd-*CydP!Z)|Evoo2_ zOa3X;g!KG2USemMpYBKrZfiQ3&A)m0aQCsmL^_dqq~V?UXewCk7AV7oNsi}EjBAL& zIz8t1s8Rz1-f)Rg`tdCe>5~uH0J!0<_a49CSKyF?4u}!BDZbf-w-f%yFQ)l`XJ{#q zgD#43ah!HLFoaEF*xI3^us}V4rv98Okd8R*lNJ^mH2#~x&hgEUZ@&9l-cE+%sibcf z^RuttifMfKwx4ftsh|799Hb!hu?~iXv{=b+VRFE{EQ&!v?Ky%3D~4>$}BjjAM0j@fX4R{laZeB!C&I7s%nWw)Sq}s>r>{kQY#Xo+xk&9Hlv;_HBt{GVAeP@4iWsZp0twXzwB^$*q`XA-oVY6vMC!eRmo6+07C0_$t%C8Fcem;z8?L8Chlj?z7) z>(ms|lS4N-66TDv6M3l|CcR>E23;8@y9YKoFh_~jC@|J4+K)3x9z;_+P=8?|vY;`B z0cz(ncr}9g0_cvy)4}-%|IKszL@B#6^yRZY&zlkDck?fZEetQj0o1IRQ(208&;Tm3Qz6#)U!H z<4FYfG89uzN7EAdvEdk&q_4=1BBR->KHB%kro4=uV5;JYC3vgiQ8MH`JscVM4{4x? zgV9+G-JrqNt0NFjs*F$}sbPPKoZ>4i@BwXKN0oIB%@MRna1tr6D{G23@{pXMRlUeV z!XE{ub{C?CPvE=Lhu?$SRA1t0V*EwoyXea3BW04YGE$!q#Hmx-N)%Qflv;S_6G%{e z6m_TSH{P=iL2CuLaI(p9f6f!@;IQz!N7!(caXJb|QO$P{qJpi;O{gxz1JU=n;J%PZArpoRk+Mt48v%K22epY-xn=Gb@ z{C)n^O5h_>8AApAo*nOC0B7|qhn6V8RDmU>6%OO#@N$@tj+UR8UJjGJ-CbfdSnlD( zV45W;<91As1xH{lO_F_EH&snzGgG@-P-bmhGs; zbitTHY8VrZ^bbh8#wm4}k8)^>TXA-eH9^ZbU9dZVao3a8o&1%oB;uK1on6>-4_l&I zO;iP{0FnDCdjY~T@;y1^;_ym7sf3WCpHCJBJm`?2&a;G1(2$LaN|Vt5F2C@FAT1(v z95`0;zla566*m-_p0R`+xL}7soYCCq!Yp$^@?i>6qwk!1=B-lNI?VaWpQfU z{+dLo<(nB0$(o|9qisXg3Y8`G0)HKHRNVQ@6_O`Hj-`p!`o~nMypaVjTT~~@3QU(z z5%Wmi+LJM4$mjA5%49sGt%IKuZLHe*G@IqT_i_txJjbuK8YJEGWHDDr-^kCknmmK~ z98N-SEc zwD7_r-N*ziSvq0mHth$qNKqT*#j|>oAqgb0<+FODWeTcUK&v-gCcz}Wgdz7_*|qOH)h&bhQ)8b77`W^z8g|NMR=NqEnUr9>QF;xuH=l$%G!Pc zk2!ce;oQilO+CU@i$CBAH4YR+xpX4wYNVnA0s7`n2S$FBS5cs%G-rI85IBUnz~F6>F6--e-#$5}WM6QkoaxV6+XZ(OWW{Coh&6#SO4H zI~#cr!=+)iII${b4KPn$7&8SXa0xh&{V5u@Vy8FHHwTY2=wLFDX4qIHe3pGoY1|Oa z!hl+YWTyGZ5OUM8(bLn|rso}_Ahc>&`>N038%*UTBzZ;3+p_nnN^Zu-vA|lR(WFW5 z*bYbngn2k{3x#Hd!{DGkfu||rX6Khrb8@C6aQL69lCnCP9xz?&=RgRvb4t##8t_m9 zOoWT;I1D_=%NqY=oa(UX&Jquf1crPpA}DU4zi~du!N?3ij0-~)O|#%fHJ8@7?Hs_JBT0)91fiKz3M!#wMxqPj0Rn~vg5Vg6HEJtdqA&~e@|W^ 
zffM$m{yTE5wB+dHr{$b^z7I|#7xL%o-1V7!y{bMC-ea>}9@@ z&YJDiEX%I*$_#nHoLgLr;y zUX?{DZJazoOC;zwPOtsO={Hn^>YH0#)!HuEyN{Q*O-0}rU^{@WX1RtC^jGgBZaUui zjK<%gw!Nl#2ny1Y9YCQs$qt?$FF}nf78gjV9fu~tV0SU*(Vdp%jCeT(u>WC$_lLvc{HW7^FP&2$Jyfy5?$J_}U zk6))_1it$m7qTzeTsb`x3A)DNUEFL!_GRdhXvwD*1!9Q@ttj+GFo`2fO$b2YWHBJ~ z7lksRk|?yd+m-+C?(83! zR`{RV+HzK2wq!j?&&iI^2hrdl#$5XQonKaj@ezz|6ys&W6n&Vy!cII1h0kHLe$PHe&KPFPPdFm?B~P!LQ(FwQ z&&7z87#zP^S=Vf9b-j2p%S(SxF>h`psy)@ny^5;bO8PvrPV4sB)$&jub+UKULGTbFc6HIOE5XLJ9 z_qp*w+&OZwbWX~G0!sB>Bw~5uy(jzHW+;Y2M3zS3-HvbL0e%*mE)6Oc;%7@p$l|gf zKU=h`p&b88VSa9OJpbh&H*?akr377#p?)LW03T5M(%zz>xfwscch`=0sYJ&n@ z^AWgRSfDK?VDq`KJ}}UhuEK22tm?19hxqkF16_xaQJA1VJpsvi*Vga{WJ;TL(^V9LJ9Uxfk-rHQ1^O7;AJU-e=aB%;h93<#)N91Acjn+r%C7xEp1Z^$e zjaNpY=jB1Yt{Nz4d+&Pjcvcw^4;8d^t_HDXl=$Lwh{)M{wO~O9&9TG)+`e!@3nGa) zCsz&_w9PQyd5cpcoP8DR;S&c9TG*U^2bQ5*5Y-9#6L+{8RND6^HB(mRfwqDj-xG8J z+smpQ*qFYW;K2w=xeNTUFqD86b-Gn=+gMNA71WfaXwOJ5cL9X|!U2SKzM=It@tmuM z5ZWdj9hw8CvEtYQULP7k=Om3c9H_V~jL^1I?g&f3LT^n-cY%bS!5JLF1Po#~^3r^} z=jzwKY9OJV6p)RCIkL}s$y^skq1CX)umSSN*+ej59e^;{T{WDruE`d09l;n|;h|-0 z$LbB2<0EP!qN)u8?BHSDHWQh_yP!f7o?A2ZHl8E8h{Ct+nutuo)dCA0(T8Ak8y8Yo zz8%;tmeN(4ZMm4P(saw^6loe537`I+78Dfbw=%%ck*g13d0X<|)exp%WK%(gHNZ8l zBp;SG;I>#rt2Wtk1+Cg_%hgk97Fh6~t+aAB6Kp6Ei)Hxh9Y7+{fKCXm_)zFZ3oR~U z;G+f|d-}7UD3LClr&mQNm(s7-4>`0{9g)vtoixI9;`0`(Qmf#|5Wp2sjxv+h8tj#uADEQ@o>flbi2Y4 z?RZGaRM5(sxc4eXdg`aw6q1N(vnH*o&dfPH#$ocK!>Wc71^zBT&j$PvyO!|48w${i z&ztWqVYisFlGjq0oxe4L^lIR6A$BduXJdhSabUbqJzKzqxLPE%4c4=51glu1?40yt z&1Y&R{p8_#4szz`8xPpC?UQ-O3r6#Mb0J|B(akG%Jwba8J}YI$p{xnpv-SDp^hAv> zK^Tc2$Uxc(Z|h=t*9Gp`)=Jo-TCVLS_|hmlC)I>;gN?y^br5L7u5V!>C)Ua2WYqvZ z+f`SCxF{yA!W;f#Z`}<-ul&tyc`RSG9}0PE(@!}xj($hO`l?-!+4{I;H&hlAej6hE zssph>cx@2o3U4&nuL_kPc4Wi-Y%yGv8AJpAPYU?6NU*C4JoJBN$X^u|K3%L4^k*Bk z9O*9XPogKy_P~eS%4V)?J}QyI9&nw3e{Raiq2@M({#l5+n+g82?WrS8^{f{Q|FhK? 
z3_munO#q;+&7E#21kg4rAO+ERZGr%8eGjPh!vJloHM<0h{mB&p`aU=+VCSAtRAc}ej^4a+Tcxc+I#ea?~1 ziSAxkfa#&rHN`O#<$6b6yn}NUHkHff-^iv2R}c%$3q#=+sgWt4 zOexzRmMgf|3^$Qe9KN_sFL7EN_;}8jHpWJ2=TC?QK8VCb7+C3+ zFAliC{k3v=(1h0=F2IE_pQjW&jyQazLzg|534$J-Pc#S}S;LSS4t95BE~7V3zcm#2 z?E+ko?y9AmDUsVOGnxOn43QCGf@vy01$V&%8HJ$;0mK4}77*yqq3nI|A#RU4CayqF zWiXYwG7|*AP|j}*e1j@t+yrz$B0i21lW2giM*zS&LzsIQZ@de!LG_-a9DZrZ>RhUE zrko24mFR=;J1}{y3V6Sc&DAL;Z`QMqlqrQW!Ey3^snAJ&;phyi2XcUlHAPOkLpY~A zu(z`Ptj+!HH%1ixEP=0*1whnwlv0stkXCI;W_VuGH@Vk*-?1dw^6Ddf3T~J{jt9d2 zyS2u1ZH;EwrHa&*j-t^@Xw{ox;SA&rBmYC+s5gV-5ESc&D4Qz zDZ483FG}zMzL(BsDW!p80iWlB*a|s7Nu9$4UaM%Nn@`Mh#rk2QxAB^PJHm5aCJROD#o`gAaPth&Qf;-Bj!?5`Y#HhT1RWQxm zw*!&=wW;=(j4tcIL=nq3O<2@v)pBm%DtTB5(b;pFY0y?ehfEUo0gLc~F#IB;Bu*6; z{^GLu2jmkdNU&#LWQ&5f2nbqW&5oNB=frEOABw}~q2-e75rWeWDfBzY^G8<7a=Qxa z60WArDz`BU0vn+~#?LL$bA!6J%T<;Bu}_yO>zgIjiL;dIt;}pffZTd2H%xD0oTOS` z*N(MW2(oxR>;_x}_X+W9H|jes!U$QDi^$O`wq}ho#b3M3 z)0QlA>mx^^nDwL;cQ7MhH}PDYpv$QoNaM1DKe%G)EbCwPhq%EP#Uy$x=EbdaZbl%X z+1`OnN5*D6r_Jz?per@HptV^dT62PpRE4a_Iv^}WHke_ajAU2_&r_E;j_tLm%fq(l zRbT+|$8;teOU%qjP#h+I+}k~1s_~kTI*G?$vE}a+{aMO4%+d*5ZfPwSktVl)`6cQ8 z#FsaB%Ohh9{q|d!TVD&Y$^2+`UUbdn(AE|V9R+S6;=ECE%9|pzSuP9^j`7x_1nN3X zqG}a$Wjwo%3Tp&bhPPT*V3)Y^iqn{>77%3aWjQ^^m3E8ME*ubdrgpp9DFH2c_B_YC z4lXL_BWDT@7hPz(CU5x~J-WsYEe#i=u~|EQxzl_+oSeTkjvpW{|7A8lM{;pA1B>nH zTX<#8_5Qv{srlsHVo9m<;RM+kYa21IaWz0rB69@Ne0(-}OKBNKY1bUk0Z>Ch1G=FV zS%yCBexs%=i|nFAX;r>Iq9mOq%thKoIb3SDyeKA{bX%mwYF&_$7``Rl#_}47!<${c zg)}zBsX1#=LGZA=Q7O|Mr1%15NW)}E>E=nJXm}#FNOL$Po+%Mzh)$aqR4^kNU$Jy| zkmH0re@o}_w|uu?b*RgFlc35K>(Df{?(0KpZ}ap?)(492hkYw}SPlsn%*Q|$w#5ca z zb2&v$(JJQbD@)nS$}kqV0RGrbLbgM*C0R4E-8p=}qT1t*o}I8c$i=L*%VbP!RnpYKGEHJCaXgjgMosO+AYpA^>C%V2YIax!)8z?Ew zQwT`5q;IdZa!755AW2(O$RE3+r;O%Z(p=+;viuW&`TAF+Ex5kxUmF`>(BAHm4KJ zLEBp_H+dY<)6wy2H@qszh1E%WyosRfc`P{PToXrK@MxQfwN2ut6$?<18tf5 zYSu{vVt^e!l|9EQacy>-2#*6T-@rlv4Pw-+!agxtoENw}3sx#Yd9X0NGk&u6s_+4#+@8P^~;Al#_M z=3T%-`#Aq&F|S5J-r<+SWKH3T>Ws-|>7K&YE=~;eI2?}^HYBG!Og6?+N2iElIt8ufu;m`ZOem(SE7YK9x&PhqhGom5SSFBmRmA 
z&kHrCHNb(UP5xd3j1{wgH0noEL?-5?y=?_&HF)J*GH%>LgV_fd`kcB?S1vt z!5{Vx_IDqn(Ek43A9nW+b{{?ZL$bR`AnMw}dP`>jsqTH-=Q1`oxVH5^ak*cxu(v;# ziQmRXN-5M2cXo&bI))r3vvb&bNIC!6_5&(T#v>{_IsH5A5%LV&0%F5F&kDSuMo})E z^)KM#*PkzDR9>EX_35Ig@j2J$^7DK?`Y`E{Zgh|i;bYh%&&HmoitM@WGu#e@>OXJ0;CiS zt;n=6NV5r?YqYqV8Ufc#Z)tkMp~w_bGl}h1d*za)IeD6$@$DeIKPNNH2Rw4&`vZ5j zS=y&9FC9+d+J=1yjvvSqL!^-)+RZq+t9>y^4ECiHW1aA3Vbvi>vXew{%JONvGmGl0 zEake}S@#8-z!#%drScT|hv zZKJmLMjPX2LPg4t)P8J6}cy4aZrc37YQ4y>w5PU^;qbZ=zwo-h!7x60<16%L}Us~gK zscB2Wh)UrC9B6?%BI7_mfJ?A27g+ZYX@R)4`Q_7m<~D0;*;{7xbDO+hN;u``3VcM! zk@uGOSD-neCd>R|J0i7FL-c_N3FQrKu-owepl_Ah^87_ktgFh^=hgZf z<$F5fezq2YN4H;aCGn5)CZ&$Z9fe@s8Lz5ay9L{U-EG2~AV!PM-`6&5!FGvp?l$7} zMKX(4x{f$1I<3X296R2=+lueDV%nlV?N;pQ-J{2~NIcw(@$0v(-$?Z+`d>`~Q0%;X z_2$`0cm8p{LA@lYMwjV-A3c0n(f>kSe5e22;2hL7_2kb-uiqpuk6t{3gbz1;gv`$7 z-IvfPCcpi*C2xOv_WI=ctCxO(pGZc8XFbXs9Umu=5=YaiS7__YFOy?v4j{L~nu?Ql z4m~K@IVma|raDps&dJPqnvvdb0L9Ma5-Qv;zuemT<(CdA0yTnQkf*qg4T1Xo7f4EA zE#H9Tjcw=x0gR_vHg*lc*z77I7#20@dO{O;f;$P3g+OZL1$n8{-JCFIQj56hoW9#O z>4wb@2wq3%G|bcUi^DA}CsT@W1Zc;w;~^0m4*GOUdUrk@8Vid#2_*Lhd4H}diMpX_ z&#W(i_vv=>`TgYA1UV3(hVCe?uhd zS0fZ`NxBe*2>m25q?$AU6U_hV;yeJ2CQBhBcw0g>&y)Y4Rg%2=E_w6kXUXxar~iqn z34G%(wzi%lCCm)F25Gld8EObysp&4EYxhZbLM zCO@=7+t!w`UL%kb1Izjs6OF}TW@6U~??M{n)79KG+esHtcgu6Gq|z10Ni|jnRIe|fj2cJ%>U+#mx%G0eSs$uW=!pt@xpHuC9QZUV#Q?BvK%T& z_Pa@l5mgx*LQ3wD%iRv`Y}tJ~WJY(=xd_I4Ez33OEZCgbnQng`A+>EQ2((_TSgjAs zgSP8OJy@ylf|4;1~4O3hby-m`yA6OBjJi} z{S(XWK;ioVH(6ulwvcMDRqbHzu;j+`vt;{!6&*8(Z6p&ZNsARWLQ_O|*4fnYmv#Vu zzpE*d{5#7RFboY7G9N}^bTi`UK%#dcax71hEmiNjU2qOYTob+Hv2!&1b3&Lpej1w$al z0G-gY*=#c71yp8{hIyb&(Z%;gR&+;;;XI!Xp~fL=E;d71NlMRHEc#VDDoCrssn%E> z;)7W3FJnl$hI!h3q9}Fdz!K2W?((5k08qjLukgOssoZYWa~|oAo;Zq~x7fW&=S(`Q zO##}V|2=rTU(x@5b+CJ<|J@Y+m$mcDFFRi*@AJ_i3ssa$fIK6vr>vo+Nae7vv|^|vw^MmfDE?oN6xztc>Z3&me;g>r{p9gO{+f?Y z7D$BpG1=}2YvlDv>MsjjleBG1>Q=!`=(a&WLPTIU7vBW$K&YKvWoWj&mVfQ zI)k&dk+j*X#~g)3xj-k2EuPToFpQExAa|mZInnXkqH78nF3rD4zJNRh{!jdzA0^w8 
z^2I%t-+a_8VKWM|Ic7*gKlaG2gIdoy2Cy}u4_aG$6oL!Fd2$bjMLywBydr6(#D-m) zAoG6Rc&n-WHZ$^?cGt!MB8!k9GI>5qk%v{27$Q88^@J~hnxbx`Jg#Gx5K)(N9cfb5 zH))r6h=>u~2K{7FKBQ6o70$Unt-;6rRS;6u<6-g#rzfKSfAYc}im(R0H+oqU&uUVc zTU)^sjud#2PI)nOYjIZYK>3hoKBFES5Vl<^sUoD~T5bg|sm3c>S!}h94pl`g30pmU z_RgY5OzAI@w$CX1@7ObdFc1@QI?0184vbc#2SpGi@nu|rSq65E|>y4f}(btKl*nAzy6wBT)eV66zL8DXK=D)q0!rV9Rox(C79ZkP4dVd%`!R z1|s1Hnxx!?VtWZ>z;{IiYD6f9r&}Z`Pq$hjr7OjYOn{lKP^GO!Dwt;}L)j27s|q50 z67s(bYf)TatxNywl9QF_kgLy-XDvawvXz0gBdKK#T$QL?o65|vYCQsNT$$HIRt_%6 z6VOW56INRlj}oBQGEZ5Q1YM!ZTy2sF(aj9|3^O!gX85i4+PW1dl^mJrg79{Qh}PAT zm=^arm*Qqwy$ZjIHPmK@@!4n1LCiZkSo$)!YUWvWcoRZ8VU$;n)qfKhQ91_Y%y&U7emx=_iJgTUhhxCR7u zBl1ayhkrz|s4o|zgh#WC7SSzl(ip9$93iWU)i+kevO29h)lxO5Wgt4BVDSWPY$N=*(Tf^2; zn~LEQu`=57eLHxa*xj1Pz}wIIQxgQi zLp^q+(JA(N(+UB|bSC)yE_8$${oU!!b=TM6BBR=_x~7IeqIxD#bkE7c4&U53rA z)IyC`OrcRZO^#7dkd&6(yWwpFcU&0}ZU1o8!Hpf`bG*WaQa~|?8w-K?J{6=~bP<_D zG%1is0_Ow8-{+>C&}vUJ5WFD)8m>Ry!)+(wX-Ser{*K$9lI{NPDsH{~rHz^uv$OP7?g{{OQpfa+?3|F&Q;%m0x*?wx*(tX&y+RZkCyAHvWz= z>)HP}$uX``DCXVEbhH^0;BxyP!uTIQD#d?1did4;o&9g)lO1W02oC~3wDT%qFaibjY`UQw*SC{qG|ISb69ngr!=Kw91uAk zLbt=!yD3VCc}qHcDdokQ_t4h{dnLeY#%s{myss^rUmuGelr1Ki zfHX5s`85gUluM{W@9OUE3`% zP~QJdG>Kd?x*-XmY5zZXRL=kTcz^G1|8FsS>lZ2U%4jeVu^id-gUoyZEL7hl# z*mFOfrlh1o7$^4ysmHPrL6L({Mza-t zMj`c+4>&BI!oLM79SOfAhi~p$wklJYwxY=1rFMOl1H!$_%#$BHQX(t@Kx5kkMCyp* zzI3$GZ3!z>h&~bQ$P29W!EWugZ%-#6rCy}d_-1ZT(qRvt zN0Bff%ipe90kHIpZI%J=S|y$%vcc`p6huMu&!l`Xza3(Ws)m4vi$zaTLKomrbD{oVcjkpJ&JdbEER|GVl`Ua5SEVW|j4 z)9)RseR&^C*^2Ta3wHxWpHUsVDn`R68pPwt;AvjW7JOOHw~GNR5qHOhnjDeKREOsL zvAWHvV)MME#|712E+J6l3Ocf`@GK4ZPXddWPuG4SIdW1T*7~}v!}Q~iW9NM@T?WtU zI}M6Oy0vz-ijtNTkL4BDQaek^uvX_>4+B?XBX^a4rx>W)|6wheXVA?c!gXyqVEO)k z@bDp=eZu?yzIyoZZvVd#igMdDv#v6q33hg$CS1K>dp|K`b8@$>*QS2n__l5gc+QK< z?sk*G2KAKHR{!;-4-+u}PqRJj%b#|SwpW9bg)m&L4J!4;6w24^gtGl+=a*s!tvDO$ z^oE5+Qn3_9akSg7DWDKdVdWb~rFdN%(P82keq=BXlQB|tSk1bItd*Mz>Kr3gF5kPX z-~?~gu#5DMQYtEi1yw2ydIAtUq>znlhyhB&zH0&tRL*lwdv^G-3^bgQ8aoO#M<0$2 
zDHbDRb~CE)V2_H?r{`fhxGMQ&_x&~Y{8ec1dkDv)vKd@!Yi$utag%&*Ct$R)W8=h*48(ExOcnBILs^$YwH2(0>{yuT@!%-(npV zt-xy({5fvjv-36Dp;j}11UWs9auLyph7{a$LcxKJCV~HYl zg@38DuSFOBhjbi%0YT+tgd`kg>a4cr?qdav#^ujh+FDGs7b>E2P3#$_T**jSB=>w% zu$HMpV#lRGS7U?KexN06WA1)YZN;7x5(scUIF2*dU_31DmYUg+nq{x0J9(+~8aD0W zUAncpWSMaT`lmpwkC2Xu(2|+zAj|J4FX$F8@ro2!S;FGwVI^r1y{hxeMFSatny@08 zTp$sww3;@$l)KT~H&GE+s7(^DfjvhI^stlQilp%lU%*7a_0vf@S)Y%W?#ZvgZ#r+$ z-2k1}B{o1A;wVR#tcCJQMxNQDf^NkDQvJ&kEL~R@zq#og6$se*I34A3kCO*anICws zlEn6TnCyBlN{gY2R|p%E1v0*P?3=J*9}CwhISnss#&MYzj>3saZ2E$-9D45F1cFi_ zxr2ElqE{m-PAA&KI&9IngT+;OU}w+kR=Z?E_WG=)_0=hxYc$evY1))oz&jq_t%P)=5(0v2K5(@f&WHCr`Ab-aTu`XQ z7@to%jFb-XLJ_4^$f>Pxb%!Pl;u}(Y34m%TGmKoZ_4d2xKRl})lIC;U4sIu8p1K2` zgcOB^>}{nyURJDS@FI7(@EtT|FqyFjhjkh967XY?QRl_gj;H=i(- z(v(*CiofflFTE&mQ>3NoyWCN}mZ}I_SEDKgI)=ngN>Dob0I(&r6o^W(^@AKmN8cWu z)GzvgTu5Z2YZe>GrS;_xx@73z(zQfpUE`cdB(A8m#w;P2s`jFzfy)(MM2b9n0^v<4T zC2tA*!ybEU#Y~kS79JO+!)Z6wbdytR8RqY^F@oB0f9Pp&!8`#XpP+~+Q=%%|pxkQb zZd^5uTb#?{sh&u^!3#un>M(j+Rf);dvRaIus*LeAU3~mz`}1ES=Y+$~Zz02>deHox zB|rb1ba1_Cv=;AcR5khA-~Kmc(i~UDAngPYe15NwiRebNY z-gNF=IQ)y>X)rn_-kKAv9ATwp z<0d7NIm|GaXC<-3H+T*Bro0X^JkBnsNI*U3iU`9S z1eKzDcx(h#UI^~5(E;dNy(Hv`#`o6_h6@Csga*6GbRL>G*%=iYVaoPUs#h#d#mpCZ zyL^DY4P8~3$0nh^sJp9IUqn>Sp1*;}oO&P^#Vrnq_I@3CuYdg+tBzRCRK-p(+~otR zP6)iTfAUDZJRwZ0n>dmOowVc}(Q`RN?bOx=A*u*=U-D z1|U3Yl|~gOw#wmOj`xu>)0MS)gANjd;jEm^{$;H!_DIC557|zy(a2Jo);3xKJ#v3S z!}a=x(b7OywvSd!af%gh5z_*{x=)UO0&lN)*o4v{=y_#@Lq*v@MwPcGuYP>}h5)lKF^HwWgeE+pQ5# zmpk4HK0O3VDV#0F|Vk5ENWF z8}Tx!pAm0N!P!(m1`yUH7QFTusU66om`_G^$JP}`74-H*uRNEHV%cHhI#!qtaM>yn z*)yCmCyx?qbrFUwq}3b_L>>XkxI689gs8A=F9I+L5!R6J(HWHsnpAP7R3>b#rjS`8 zS90MffzfDVuM9A9D_m&|H4C7$X_!G&m$5xs2ZnAgz9y$)QA|$MMV&WlJ-0Fd`$Vto z0ABh9xKB3jJ4M!|S+q!ds9?>&-$z!fhkBA#F`NLjic}Wz= z005Jqo#)LM(R{%$+Q68uw&i=Gh(hR713Lnt=a)}&x{87>Z1Fp3i^@P-6otkgj!_%Q z#=@A=U~IFu(L- zW1lrrgQXCH7z(}GzRuSh2+y4F4(W#hV~l+SG8E)l9`I7C6&{SDy}Qc(kcg=Gs1)_> zssQANPIx?t?w8GqvL6HX&l#>Y_Z~eg=YRj|;laUO{@~^=Bb4!@V-O>)$B5i<8x0TpCT%ugWY>Kd!WwQkmuQRvAK=f`aJobC_9 
zm7((eFnNi~>+oc{IK|zv7cVDsa@@E)8Vq0$MmCPagzieA0?sW^r9Ng+ctsyksnNz= z$~Fi>evJuF-lFG3 zF%;0;jO3S4-5f(O0|_-34rio(0a5BBYSdF==%b}XsfC0bbwHYia)q_+8qg-a)Rut? z;uZ^?W%& z)3mP-u_5>ZWJGZpB#`E+u7Zi7my01@gYX(|f-t&J7A&imz}}R+X)vZc#ck3aL_zb9 z^%WgMRNWF$+AdRc_#HGzVcrflLrP0l=XY>JLH_-0vY1-!eka+M05j_>u4V^zoMHE9 zNm}(jo1J>q=r$3r4@>2DU+ORr{QuG3SEc;_j~;z>r~j>+vW-4B zEWWj*ti+1jr4e4V-RD2k zIWix1-+f(l^9lNJbZk^tEs2iIjUM(lx;Ag3u`>UMv(to(ESIHR^6S9T#Qz`eR_uTK z`;YGIf9v49ZC%6v%lAK9GBA?9_z}V@5$lT&+)j%3?1t9cutAX5#cN3I)5-EntvD73 z%z_M}bQ`=A`~4lIz1UBFP$bOP-vVvXPMKJdNkzSKOX@IXx_9?Lw_WPda`pUwb2orC z@c;e2y~ic_|MBkK{a?}P+W14m*2;E`bk@Is^KyT_m}SYf=gR9@G~7bHb+?5&&_cHq zBins)ZxjpP-PS_aEjYi#>S3k*)}nqWrJdU1x=UnQz5LUE5*QAeD(Viz$EoF=MlSyx z1<#gs9269zL(sK#XklXVJcpx4m?j6~$Ih(LQabX0**JT;Yw(&eP{04f+ipWQfG*$v z5BB!H+AHn6=V2P8@%XFOLMHp-Ggx}qj@n8jr;8+3+^wYwUqrLDCpBHSi@=2TKP8i^Txt)6$0 zLxp+$gf6_0{q|FD;^d;hy405yg?pp6CcH6JOm*T6936aET*VDD%j6%jUAR4K4&TrUGfB39<`QU-FRwjd=2zwZtC3l&7%(8wuhFu4ss}IW!+aOwq+xCk2?6g24gpS+JZNK z|ETR+7{0sIbQ5rrH|9~>OI)e23#cWj#VMm4lzx--w~#I^AH9ckX^r)6A`P29sYLG5 zko4)M_>bRai!2C@3`lL!UH_&~%Rs${n;L-EHRlOo(vFuH^gHk5cDCBg!20L6&tSEJ4+N_f+qC=@wTxnz8jzW1kHvqNc z6WT}3t`Y@!1Gv8(xo*8oI1VG6H8(){`oNZ2UkBNh9M-y<-ljWC6(*A-+q*4p$why~ zFnynecMgT^yf`{}^X&E8KfgM8Oe=;YA;5zRPN)7k4+uim_^zqpVy3opaVYY)Hm&3H!-&U5bh`&{q+hX$oQ z%8lQD=;upxhx*$4LMgwZFNzWwghb{ebK}rPk{`AKdd=G!V-pZtK-Jsv2tD9kxJvI6 z-lN+*Keul%k%03jX)*?E;4C)+l>&h*9xO`xK0aEx}h zPR8Mkw;Y8v`q0apAPdWJdA2tMYq65+{#x#xbOQ}i*{cF7obaHC98iLrCu^^u*}jW5*EE%BX8aYa_ySeRNn42^7@AEt1PdQQoq--OyGaA-kp^WXuqku1@b>> zfxtJ{5>Y6F_%yMej?~eaY+ON~>n@Fwnaj%bstI(LN4p3{<^sZs#I#IXR^ipeBCH*sQga?AcXvbXm~Agu4VuWl`mPOY(v39KUR+Tb5^rqP zlirP7k6EEkLYAi{azoMU9Rtl*Ak9~9E~-{uQJ6Qt_+E7JOQVB3r|Qg+p4TBQtLBr! 
zT0&|>mhk8`LX$O*yveYPr)zI<^k##RwYIs(wp_Dcd9J4s0ab8nvq#$FVbtWa)|pAg zTWQ3@tUPFaN2J54R9y9z$m^g@9osVc1KhCsxJU=0XnAI%4K)@n+2)3pUjqI~u2uA$ zBI>ST1Y76(fQ=l!%+kBc0`d-j8@ssBky6n5$_t4X@>$|v>gSsFZ}W->&`LVN-FPrP z)m-bx>_Zmq@8Iq|iv_dX4Wg>r79UAo%%`u3@*ew(h?*=Vm$$uk>zeg;wQIP6ucrUo zaR*-`!)7sp=SNsu5^bsqVp9-++J9UFinFx&ejvxe2^sGYvq8g{1)~ ztr~6-tb!A*6J}PZLr9y6D~+=q4ucE-m|XnP`ftV0s#e{jGhA04;~e9HmJIpkj64>M z)wA=dR*oeDqr9Tm4YNc=bh$zgx}p07tFy=pYx||S{I7JQU|sQWZrq)M)$)|WgiM#7 z{Y4)&AV7;bBrt|e2(P!nRy&n3xs~sl&_=zCb|p)OO&mzp-LfaQNk``81cpCIl@zd< zOsCy9Sy)K$L}ZZ*%8WmfJ8+ra$?u>+Am&X?(maUr=G8R) z$0FNu7rPYy7Rei{is%Hjqu}^zlJ9?uP;y7q;fsSVPTiI1<+h zU!LNcE~nu!MC`XQ2t31ZixGMF^S(AM(`$QQ<^P;*G41y$H}Ph+fhPX{sH*>YxOb=j zzqUE|8#aqWSB5!8lptv&+&^gXGgG@M^-pQYw~71vd_87%2(j1o zitCYq2X|E?#FFM4c7@WRq@v{!B>US8Pq`^~Rd}ndt^%f^gZA!tI0unzOfDFDNT#qL z_^S-*hU_}eCF;j$S-h|QBK(BkYxqCHfb;oeb|d`%;lV-q{*T8G4<6s?|8IgU%p&kT z1Jx5&J+^dj(C3Cryf7r9#Y&rmp&+_Px<8p^6TYotr))AdjWlc>vd-&H4Q2=pVA|a! zy4n0h*H9JRnjx0z@+!8$G4>_PhTYf5w3`-L_oW&$q(5)vuaQAMFr|9Vwzp{ytML9B zq^5&GPIvI8!z-E5qnT0LcI9moesc5xy=BF?(pPoU;jPTui-{1bR|DM0kxcFC0Dd~j zZBok)l8ciD*Wkx!b=xf*w1S&K(xFNilm=Ix2*EF3I$GA%4pu_MAXGDlMSqq~vp4xD zn=CvdYbVoq0o{s zp$73mlg^6Z-4hqgsAY#Xxj0iBtDu=Dst0R_!;xy(rPXS?%7SHq+$s~y&cFi$`%dTm z3n>@sxjkT)YPxlOcq6)rP52Y`A@X1MA{&nKbI@{j4X`wv|92nZu)lY(zx(*{!NLCi z-XC`N_8;B(|6RjGM4J38IZo$fOD*6^o(KsfA1<`eoDV0b2`@dAevlBqnCI^^j)m}^L&QV_GGM1S-v0vDH1# z=R5pYz~Abg{&R*0_^S^t&hh)=Tz?kt$2+c}Q;cshP0sRRR&0IQEj~=~_cVQnzw;5k z|9>d@Q;J;viz0dc^cgyYb@T7I8+WUl53+Pe%Hh#g1i!cak*xpM&yJqHc-9>at_9qs z>woXj<40c|l-56PgnW2+{=1gRC^h;Y|JLbr$o$)(jr%Zpae4AzKO~*be{O;5<%e1S zgXHuw`8J=UT)wp>ufPn_ zY=Xr9giHsgI5Q>1WdUtUH+h;BSgK$RsCR)zhFLPoivnx|SvYWLKnD+!i!6PA*&%-i z@-`SO`pi}^k|EyHcQCB2pI^PFYwfZR$>dCi^S65xt9rPz^WnpXuIMBoMBLxndNWH0 z*(ja8!=-(~<|b$J4=L`y9_GcI7$HU*!9^FPGYp4BZ{~Y*+YsdKF+qZnnnXX`U?^B?~A)7M*DyizgT|< z*F)d~ao^Tm6f|cvO+Wt`D01ZqVcp{5z7huZyD|348A#786&V+XmtsrJXc?FtkNXF? 
z7Opkwou2Rr?>m;YZ!rW81#ZmmuA^N7I}ft z&thw<6* z27=4(4RtaZ>zp%&L8Xw741E(N2xeJ$k@YL|PHZ&kxdqTWy!i$8<|4z&!}_F43<^U- zhMUvW19}gChHMlygY1k`&IA$9!sLF&6`2?wWHS)$XSfN|{J2jpLd<@NRYH0a+DNfsPI+qLWx}Vw zLePaGH2D?nLN17ZV4;t{sC}w6xZNI|7t&#uGdsP`&N0ce%O1h;s}ArfGxLN?0K-xw zgj9b0n)J4}d#DL`gLb{^8he#?ZHr>i5x;ZH5w=t8%d_+nHF`h)t<-LT;|EwfLXV3A zukf+((#b?$F&8`5Yoz9hB<;Fa+?N^(uT1C@Im)!0(ADRY!MxHRr36=b8BG8cwUfW; z7uCLG`ib$9v^T?{tmnW+@sl|-8f;8J^gPE>#WB;ASgwP%-)cZJ%>OC&>U`XDjs2P!cT|X+ z^vb>2{5ODRz~~MqknOQz32U@}O~#(h*nkJnWZ8Hscal0tWTk&FUvfIJ315S+Wk~gI*z)nQ@Ly*-E0?acgxEIWyi4C~v2N=KnX8M(Zd!UJ?(SK_}6NTh;LB){N zfX3+@L>}`Y!!kHx*2V%07fLSE5jL%f$_G}IVMNvAMDVceFF<4V>B=zH3jab`54c|w z+%m$Tz~^4x;za_p1q{=H7k-IvFf;(| zEQMLIXJLzhW~4d-jg|q)512yB&Il|}w%(d(SeAE+u#KtEj5H+4`8O|sC~RS~b}P0X)mTWA(5y-J__E9hHv8#r+XqoFNQ2RohMU_3EmG=p3o ztd{uX<5>y-hGaBKqA$_`Gq;h!&eL;!YMTlNixO~VAB`5!DuQ$4r)>H&T!}k^>STZ& z-&XyEUohCOySv@}uRDY6G*8E!z1_}i7mVG_>=Z)b@dUaMIUq5x;b5r*jQdpN6Aq=h zYYg`J*~`hCZ_ggjds?G3SP3F9=PQ(fgbcN6es&2llofosuucB3$k|O>{)GQxuqF9N zfbj;4DPNQc*$o!1JzRh^Wn^ZXEQV0WW>D^mfbj{(R_PjpEr~7OV0};fUQkuZ*e{XX z>p_n*9;8F)?0N~*fjv)?+q(s;7K8O6v?Jn~n6|H^bRY*S)$3;mB7j9KzdAYE!*1-eZgQ$va)pS2CJ4EJ5VZMR8pkPO%qA$ z?@T<92_>*dnz}uIG1$@N+vu8%-{2Yjt!OylJI*w2TIU4&#Z>klm=TN<1tKY zx0!}p4c22aKVt4=%*vp=mpoFmc#A)nSp|4v2Ex!fGS=6yi|A}Z=CCRgYk85uDjnQP z9@R|7%1uSC=*h&1zCm9pqa}W&qs8uv6ZDWRX;4P6-#;wTe55s}YgPrm%PtGz8;GC7}3 z#%xf14<8>sG2HZ^n+GeqXlcAc#%6<6Dl%hb53XJ`|8$T17z$Wuu|xGJq_;=*Tj)=W z#}=&vp=*&)NW;`477v<+dWzJATsYt}5L|k`Bo}nzaU9|_WsRz!n;8@-aR#R+Y_oEQ z0GZjRIhz>M8MNeQm`o}OJt3L5ZWHAO#M6GTPxKDY9vUFkp1IOfyyHkg zQIi7LP>8&n7nU*D8gbFr#~sgbj>jCfw0Q;SG(T=$MHg4E5y`HiCBwX~QhWnt<2{Oo zLGLEF3gsB8Eyk73X!WamfGC;Sf<{aNo&}uYDy;pi1>l%aTr_Mh4(r8xRNe(&ni4s%eaW5(N0;6H_>8x!ctfq9#`#V_ zDk|#M za6Td0$7$aXsLi5585F@fBmv;c9{~P(GNecn=~=KDV0`AkKPIy~IiW~F?U%G7=0zd{ zbweQNLws)n@)j7!TC19z6=L%*>{~W?4a!$C_`#wqPN%Hk32}}R>q;m9ykv(aPy&n- z;BW;cGZ-sbosQK(ug5&428P{Kf|?0lWAosk*R#CV6jHgan;Z>?lMgsOkl{Ro%WSWJ zyR7K!;E*`znNHhce4Z}_J_E0zJL 
zI_$FR0nO`mV|Z_Y@d)6M_yf4HkA+0cfU{czoet>mz#$*L>T(ISFfJPYTn%ucKM(dL zfJ<2mds~z@z};rcS2FeBth5TQqQvaDpSMzB#exs~xg1t2wXgC37;oY#H0>J6R|hxP|Kz06f?9nuc^lNI)}#;+C_w?LEupDo5m zg{ZPwOEmKl9`beD#u}i8Ra;=(dggV9`6!=R5C)~b_6#dWLa?F zVS=n@aCYy7gU2rgoZo}6ZJld_9T;!o<3mJbjrH#s!CLt_vDBnHa7g*8(g7UV(cV;Ab%4Knd~Y z0_*Q$GBhWWUz6|TInKuB#Wj)%feyeQt^|0o;Onv~fS<_on*_WGyosxX9mm;9l&gdlHwCe2hZZM1*Giv&9ftjPqiiM4 zRl=I3KGyt6fcGkGECW2sR_eIQ^??q+o488Yc{j>d;Bpq_Dq-CUg4TG@0^`v1&E|{g zGEoIw)i~z+h1US9xheo};wnBYah|eVR`KgM1Jt$zVZ6##0vh)_j`I`(JR+<372Cqo zx*P%Cz*YSI<2;1`kI5>2&DIdLEVwF-M|lbX9+y?F5A;;yC} z*2W+Fb5Ny5#9XM-Fci!!TudHVQ;7v+TAMhtHdQm(rHP9O1C_MnT zMZ<$_YPOcbXhn?UjEwxiRp29lOHqx~+bq;TUJJmXkSGv(kBs|u>L6;y2w7g?DqR@& zF3rx6COrrW()L`U-}$<3xS2hNy`Lf+ACNbqz31DXYyjRQWO=Z3)lx4VxP~0Y$;a;_ z#>sgj;&4THx*d{hvf#h$uz9|Kp(^Bz&45cyc$T8->R?uD0Pvr;X*>Tb zY&_X3EIXM*#fm@AEP&^?)PeN*~l(sgS5a z(OP0b7H*jyTkc_!XIvRzt=NK8c+(JW71D>cfqx;Za4HqVSVPBb+^A)@p}aTbmg-3X z#-2rEbZ$7u>iswwn3N!1`_->RDkk9Cdl_Kd1Dt)IW6)m(aA}NrV-mI0 ze_|umd*1lmS9Y}VkxG?x*94i=fCo!t9bm5Q__r2-Q`U(75&?@Et^jT-T_5ai0H;)V zV1*SY6%Q54Rg_Tw@-;b&b3Jh@^U7}KD*!jCMxGz{Y?A?v8vyv4W>>_P<3nE(6@wOK?l* z#y%?m?%#u1hQ17VSeAk?1ByquM9;rKvbXwpRsbCQoWcx;Z)W5()DNP+tvucz=9I_K zq|yY6*>?|F+FNJ(YB>e~ZWgCM>L`u-SoAM0rHafBVQz!RGd4qHgQFxo*hbldkYiOu zmQk*w_y_8J%(n!ZiUIJZol|MNPNbD)poOO3TSzEKwduS*y-ILt&-)i}hr6T@Z5vj# z0scH}P>y z;SZGbKPDAw%Vl$<@H2(eNlG1QH+ca3W0`UeqjYF;BwWh`v?3lb&XX`w zr>a9yr3v(ZwP97Iq14RdDvtqvoz2^JM%z{y{96!QMZ*O6bpcime0!uQ55i~TY) z+_AOXLBvi7Bvy)uqkcE^X@&h-@LL30#fGyX*wIWY>|>+FwJNZGuM=(+8OIHYY!3YO zg040{0${(H@2dylGJj^vd4j^5Ba?Vbt!O`hsK%>#S*S!%KMuzgp91` z^$q~wKU@*~xVLkiv-7os-;90h0=$IvSk4m`-T;9tYXaDBmU%*m;;d&G=LzdC183ED z0Kb{@gx$wkPn7e76<39=MR{7fHGvh5^Mp{ugDc{kC#=~5o)wD__`OP<65|Yk=tfyj z9p?$_R(e|V0l;tOJRu0TC3uTQ-U|DEq(}9fB?z>f$1l7lZkwwE{ASMMLloybgopZ| zbH9EwNUM|}>{nS&iT3^O<9vs}ho2=v(&=brgXR5-t3cJdEP>z1d3-41e22h~$vl3| z)-bJ5aCO>`@*M*IrbD}El&cd@T;xGRHyz-m;P>cD@F~&$ro+7KhUl~rS3LFt`^|^N zjR$*e4E`0sKiO2s*T#Tn+Hb}_J5#p|{Bp2)ZehkO&$rfZF!YPTZ#K;__>JLTM|#U8 zJktTXicP< 
z<$y2;p7rf9iPq~v!U9HK`vc%Ni)=plP0>{jP+Q9d&1^Wn>I&HRd`o*iQmTg0D)^g@ zvTDgVEg&r-t(IYb)3H_zeqFSc1%zZ|8w}YLZ)IyGww5EF4M$ut_;oQ?7LXA9TN8Cf zq>H87a@W27*ei=KGD5bC4+8itw=Dtx`XjL1AnZO|Ka^JBUqc+01F|)yeLkn(TY-N) z(O8=pK|AnQh{p4!|5mrH2>90#xkCkZL2-@acsFo5nHtQ+|u<4qNFHM;_>l$)wnG;JEm2-W9W*JTMT~luB+hJ3Cn_q zn~vTx*l(73C=|-`eW7}lk1EKo62s+Y1Y7VbCv_s8x;QS|Xm?|gT((K^3|lvri;1V% zIK<#@BAzP-{u&WoEjbUSRHCnX6`HXRDlXVfK83dCI$uCMQQDWtuGillJz3@Arz;0< zx%ew$ddn_S0=X}HUH{HiRG$Y9l0WMADIluLVU-%-s0*1BLD+yxN*94t7=ATB2$PmA z&}lRuhI#viyTN6oF69lSYmn{-<>q5ki$2U=7waQnub?UC`R~DM29cP-S9@_}!F|k5 z(|(8{SLN5_uNN7H;NPJ7@0>4)smJ<)(e1#vbj7czeCyXOA-9*={KI7S4uP+9J@ytT z;Uz^dw)@?eVjxH(Sqlt+nQYo{wgr@B-Ooi=@A^Q z{s^RJwlgNZ5L6C9e`e&jUf4J+j95qps1fA*A*FIjr_?4Is z=5r$9Vw%F4)A^EPUHXR--W&8S0j+f@Z4Il3d~|n=!2Ss-W{#frvalN4lk%Tv#M-7|u9r@}zYDk|jj0 z2F4=Q%6(nwH$Wdu`tNXgdp_v}Eijv@2Yosza+nwh_*FpHYC%_5`vK5T^Erfve8>FT zDK4_%uvbSgf^}qI0L0&q(MvlVmL8odsmXPqEPVC25>Pka|+sI;ChpKDBfrXhk=<>ZNl4xdD*<#SxD#bTec3HndUXUPY zf@0fNW#m{Oe(jTm&hQrxGQuG-!KT>+(wthKZb*j&N|p9)9mh`Y#4iv*AU_ ziJ-C@U8S{8wDW;G)op#HL8v;q63y-n&jOxpgN{a8p0F|4C0S_y9ZIQqJs!DSK5mC7tYTPd}RljDtK?DJHHR2%1Y0T%$pm2b+Yx#jeL|hc5tAINNaMgsC$XN;39NyS< zh6YfauJ^0a8T@j|AfRcv6;~U&LDY;P6ad4SxJ{9@*b6WdJsD5s?~<8_2br-Oxg7?i9imXHUzxQ&_(N zsuf=oIkj5YArSHSw`lZRSaC)0+7+jTeU(8(*;70Q?)pbTgXqt~B!* zzoHjXVo4Fei)1;&jAnzFtTyoxp=2*m(kW0x?P(r8UL4Pom_gHDc2pJ;%Z;Y3_0*3S0NWg{Xp0@>q8c&~?NU;yVRf|XgSGZ$^S5Ig4fF@7g^ z;7Q()RO!VcX1Hu>wOv51wLdtc^TpF0a2LR-V};#A*QHgO!=O9FrGPS$%n@`ux2r-jSahMhxA zp(18pcBo9_5Ey=ph%HLZBE6$ot9dZ8)57%!Vf7{vIPPAf!J7n8BJ;6Sj(m4Ne8jCr z@LNfDgw!4a&?mlIxG9cH>wIdRU^Vw^wY|N){Z@WlyPc)Y;NwC7RG0f>Ekpc@gSEz^ zW0crMpcQC>ZEQc**eJ1vU++tUb_?NX!09padEWb%jJ;%8VP80Qy2*aP7en#pfsW~TaaNwfdN568h|X~prexXo~l1QK$1pOJ`N1Z}f06AG?&wwKOs4-o&4?B%Q7XB!zW z4a4Z*nn=z-ybB-ND5pL+@3il;M|@{)93n1x7kI~MXcmV|nK?||1KGu|FsfXP#z~-P z{|H`({C)-{=a^w+P!;YXftY2sokI}MRp|c}d)CZ$)4qXuN$+W=#`$%c#G?E3ZXi2I zZq}GcAjz{kuiqkxB&BOzE0a%#9V7v8y;$sh&JE2fmbO3c_C^g*eyfdZCzvyWMT1gD zgF!Fn$$@a417N$^gJkMUgWmalY{uU 
z{0v+pZyHSrSBs9%mE*z3#%QoIMi$5rjJn@$SaIwI^F$g+OU!Ls@EV&q=pTV_k^~as zH&32;u6 zp5!_VMg-B3mL$h!gL#vVyN6)EKyu}rz^*5naic1xbFEg^?|d{8mL7;6T`-ooaKTb( zcE&TRaN}FbmE3a@+JR)dNBu4?3~FIeoi*qb*m;0U208ESY}`ZcR%^f8!w*=V6oJRg zxR2kHZj)JSEXqa%`$IfofHkRjO$pp51?Y=Vi#!Q4vRsw4V{dPZP`RSqL?^kQTVPBEo(J8y#!wuju|Z zWVD8Xbmgw?<<=O--%ofDSEE#wToo)#Tn$|Q;9c&MB0Ak01n0);)zaUjEk{( zvQIgziE(hD7iN#V)V{9xzR7CQezXfVpd7G4l*!qmz>@X=YXVr*gdAYLAd}&4wSefn z6Y_IOUg7MZ0a*iGNU1JxiygbnQV?bv*h@@@gomhrjK_t|py=$l1*M7H z1nZ~96AiRfQh_1>OJRhNyxd2TkO((Zl@v~MU%I1))Q0&r$O7HOQ{31_aJtBCo7qkm zW&m-Mrjr?QK1b2Ghk~(2kjdd(bfqg1QH0%rE>FFw=A&)UY7i&UhV*M3l^U;D0>WU^ zNAcI!A*FO9+w6U8o#*n5n3%I${_(E)W9SpVhk3omgNQi|1JOlwzJNGnV5 z9UBp)D#SVx)d^0ww^jBMoDl7%v4p)-o@)GGQr&65y$g#N+2zGixJJ4~&bW{@#ri=9 zGIGT#*6^}ByE(m~DdhReUBCHTrUy3I8>sC@#@O+P_}#^ts#EQApj`LBpNly{VL?_0 zKZzLwFh(nHUVc01!8Vs`)fEkUpjKEl?!QpG2ZNrDhtR}2fQIo3T>QoZaBx0Aqz!OI zuUzj)e+owyksB9jvxA@r{tEhN-a*G7(_eB7tD3JJjVT;C=;XT*z_U2zVe99WQm^Ij zJTl-27@?1q0XOZ4QaDPsi$gVhk+4T>CmCCZ zwy?Sq64qto+36$dLUEZDcr{U}70FZC7$Yio-Yt65-KVlX3z=KJc5CpF%V@+I&G3so9IDv3-(O`!TFSOC+ z2x7d4=J6gs;MAl^pU~BYaDLEQKInDeL?QqWMm--P4HFnyWOYEHxersfS%h{Gj*tKG zV;y@6?vnpl-{2RFAXCQIAM44weysocYh9WA4>S~BrYGhMmEgMZgIBUhkXsJ|fr=G? 
z{}W+KNs_Uwg>8aZE4*;AO9MqtD!}fV+(l8;pP54ZSe>$_SIJPKg-IiYppYrI=BPuP zkyEfMXT|%?`2d`KK#?B>-KuE2+KkU;p)A$z?Rbg2U+~_+Gle_z80l zu3XG7oq|vOU4roiyKyFE1`dx8QP!-$hJd{ea%?xKjjr7C**8Fk_&>R-iFVy4ZB9^B z@K8ksfeJBqN-w42w!~rdesl~H;CY|>i@uWnmGng?_DMv)n9IcAL8kw6(2q;5uoz{t za&XrUtXEjFxWm<5s`|Ds7G8+Ey0yT?fvSXxzFg2AHxIMjG*r;*qG;jx1FT}=Avlvf z=Xy#HqfSXp9ZY?MhVZa5TZOFi$mNJaB74nZ4G~ z0S5){eN>aho!p=ixS3(jaH8(?2WvC0kHE!^5i%#0F=>=SVb@8c6ix=>)0rBtc=zVj ztKIjnH+Ob^c=hJ#-&JbxVkpW$+67{VaU;k3`KUXB=`FJar#>blVfPcd5qjR!G55HNJsFZA zDk9}aGS{@F4P;Gy9Iz&60#MZ$V*m}n3T!Z?!ow=*(Cm2F84QnFeK^J7%febL00c!qm=F}80QkMu@ZcCqgm@E^elZv{PzHa*NB0rNxI`8< zuy}gnTRm)-`H_;r;Y-4zL>!B#Hwew&R3eN4Au^q-3ta`3pxQq!q6jx;S=@8Bf8yr zwXU70KZKFg&fXC(;}k9=OpU1^SVGC%uqmrlfSRLR?mp-ZcXKn^p&L&=B5oazTyVdX zJ*yKCD`S*EEd}$?(SB(`NjKI84zUHLy7Tjxw|&Fyf$ezGI{5IhHG~$9`M_}QO1uFv z6OE@FjX{2}4m)iT`OYK3%=U!KV29jnQt!2gO3eIL177B-ddo={lFk^Q*!ALj7rhWf zd)6J`+@JN)l!%xtdOg*pM0|-qO4$%dmOZvIR4Mr|K6T6AW3xIgD4lKv23yrtDjd#d zYY`0zGkPrbogT>;pPa@GZ+Fg`^0NfSfI%x@G$*b83V^ZF>8k^xjaQn-qmv%+oh;=I z&N3>;fBkj~+Ai1!;77(UM5k;_Ldl%k6i|Sz%aR$()z0>es}-t=hMR(Wt?o&}J%pt~ zDIlp6E6iyL(#YXApgC*K6o&`}1d(7*mb*78E`8foHeN_lnnazuj>qqw{nz%+?)J;S zyaxm?DqstJB=S{OYUmm^Xxc_7GR=_I%qxK4(A)*xY;gDLP1RT)cJ><;*zaD!@J49i z1jQn=wFmdJ_0{#&`>K_mt-DW~s@(}pE4bl#7(XIs!i~uH79-_nT}5w8FTv!GtGJx>OI)pVXC*c%mXPTZ_!t5(xveh7qZC0~ka) z7{J*S$vp&F4?(aJ$Az~P9RWbn$=L(ZAG8-3M&?e;L3;CAnxL$T?r(U>rJU5ILoW(GK8lTjhsMX$;uY$vq*=w_%60J?8Sds?yn#Q=B^ zocY*#92%vrwlEup<^eyO8r4aR*JQge7vpcT{jeosm>g!*_r^S^{kLFFL8eV8$-L;E-J|1C_J;#}{4=R;Ju{=QRP@!w>gChz-R)N|@$K7e^X1>N z@3vn)y`Od9jH9di6VMV=fC(B^XTEj-|=7k zDgV3r&4X`#xAt&-^${Aaudn@Xb?uw=hikvfRxdH;!mq-u+8Sa?3%@X>p{^}1Zc4M? 
z+RrfXp~;%cSfi0kZ5smSwJA5>BDo-klYck{6Ez)vY}utI{2t?~yU%uZ<%dk?zme4+ z)bRtHhEKcsaC}N@`N_C_)PZ}3FTxK5ao}UX5u`_n`Oqe}#n{zGwvJzx8h3rEanmY} z9K0~E%mUIXt}MU1VVR{h(&NM{H9L&K2j(2GMb1SxqkmED5RRk?xgyG!WEjqUeW3;< zjT!X6w)$wjME_r}t*zhC|3bjMWTO~mle7i;kB(5CYy+3urD98w~n&8U|pQRjuRuxuRpgyI^4*+&RcBhvPfjy*H`%x6w>HwpiM zvry?#{ZotrXc(%sH*3f6A8%wca_B{UofVa+b-@l|7d90$%s=*7CL0c+!OxLfs%clp)Fmty)iQO>Pq#VgF`gpYk8AB z)U!M7E;5Mf(4Oj9{o7Zs@VCkyJnF#{l+~YXzQK=AHvf11?%k|`naFhh@sEa%ICliT z!r(1(5j*XtV_K*?SXO(?W4_x*I4tWwKa(VODmqZI8WQp437&%vzw-~y{~R2n)EON` zXcn@3*%~X@$gu7vK4chD-{9ZOhv0E|D%8o(x=im{n{!nNj=`bS)W^l1I1Y!vvD%-< z%vmUmli?YZN3dr5`D2GZNN8=?4}Cyblr|D(Uwhg{F0_yA;PdXIj=;IQJX(AkiECa5 zeV2&^3h>51(_YTly(tv9$i{^|BejFVlpa3|CDw&*6+dxuuJd!&l!F*HrlBjBxgghQ zCLMO*+2;|t*sJ2pGQH{>g*dFl8}~g#5wzc7fbvQO!E}U7kzL@ zvF4Z)NTPNoDj_SUcK^}XwnB|8x`ATkCE=v8uBPz!0A7RQ zU2t1?_L;(yt*tt5jriqV9>nDGq^A%e@^s}wCx1>t=<9rOx{2xgT{2DRoJ zJ(k~m=`AOP7BXxG@TQ!9N6{IhpFA3IJjmmDnZ@PU)lp+Xs)g;jR%(o5kKj*mA|b7- zrpDz3Y(tZ|%2bz-(j{r`Pbm!$5(^O=)TB$2+XT+5{JH?CVJ?diT4~5hB3qKg79lVn zle`#WD%J6?jGC@T)u0Hw5=|qKg$0%1Qi_Ijx|p7E9Y$X=#vJ?q08xLh7;s?^z#RMk zn{U=j{=fBA1b}Yr|74p!+s6yo|IA21%p6hG!XYpX6HGPG3z$TCZ}B4~T4U|5)gJW* z`^Ymljid6RC{fl&+OXI~p%u9twIAVnr03+jYq-s=6ytg1$Hr*-SUy0q1~W*a4epw; zm?s#8BVK+4nfE-?gu4SB69+MdUBsO5*#mcSh69`kDKr)v-IiI>RmI{30v1{_kte^|i(-cz;>jo{k4iwO6=Gk=|Kdhg>;Z<4L@=IG%vjVO(yT zG`l3M?699Ld@(55JFn#|-r~Ue=F+f&x^LAW7K4{f6mGEQhd^48ahTl=)87Xy_yy*kW_Ig|@RD0yLFAgw*TcBBI1$*&ZTlKjmDA(g&youIO% zFTkm;97v#HI;9r`khh)!92alaEcn4-c$FR{+%2?PpozQeCMCvoAS{+qG4Q`#no_FR z=>oV=^;(h5nD%f3wL~wN4>eZ~7!cv-f^j=XMJnDiydYIcp6nEoszA8Q%X=LVX_SLu z_rE2@ILh$_5O3D&MFhMID7W4`U6d^y4BH7qiBtGo1dF|jD$`P+oAPFyvLjupucruz zQ!z+#^)iBT#zN1lSl&Mp-5xX_{IOZv%urZ}hr2xA*6Q3AT7N`wCHkPlu}2;)z;Q!A zTg$$1kU$Os6s;R}V^)onzCz7UOFPqY1EEZ5R{saeS7ceJN@v9$bXu5hGB#$@;vt&O zCy7en5h?(*a5ngm(Mw8^f`ygL0p=JI0ky=J^b8RsHVf&MzUs6sP7nM$7KV{{wn&cG zw=xvtz)|2sRu3UWZQz->Mn z?BGNMAYmZ|B9)*|nJ!aoTijhR$N!@>d@k(y(Rs}A|2$YjvVS4}W9^#~dvvVyRIU%9CtfuS=EoItKUN^8L8VLs$r3au?Qo#(dse*L5cdcp 
zP+gp{MQX(}7LHn7WfZIIJ&28ROOj~v{()YnDP0C%vxLDAT0+c*)w`ObLDOfo3a><6 zYchuqj8oyuVYkymIs;zO^Og;?jZTS)8LoY<8Nv`>c!q0Xh7-Ut;`P(ozA6Nb8LdoV zl>#1abWOgQSnsNY7t^+@Yj=wQXR2Cn{D7wqsD%$C`JzFTNPEbU)nW{eC!6(XXcO7v z$o_%Ih^;Anu+wr_TevuLk#Ji4kVmk=qZRe=`QxDf6;Iv|QG`Gspr@uSG>PH1G(e6| zalF^?TCs4hKANuc%>yZ!vbq7w(94M|KjAj?jA6UDEvvG27+cCu%0-n@FFrJ;o$;U_#}X*D@ed@x3q6rkYyhT0C4 zk%~VQL6nw$m|mrtyHjtcgFZI@UExPeLnF`0-!GZQB+LPYMR1r;?uHn4(pn^_VT^O_ zI3N?MXjlQCxvMM9;rn`4Eku+g@*-S{?+pF^xYjO{H32B33>`v|pyj{B=eB=|))}bjoR(l8fT@3p=b@;i}IlIF3)rh`^vpyimSD3!vxkM_9X51b4~=1xkFpj6E~A_anJw|lqtTBsH2&Vgt_H_C2uI{dbMwG!ln~5+Y=z@ zFv7zq&;bQpjCofHb6o)cJ2Z1)GD9`zCr|{{VOT;uZJN(O@(Iega#S~4WkC*JF`xNJ zt`J3Pq*z75ll^mn1iVF&>e4N;Ep_|qt-!Lta}!JmXlvuAh)_)+6FCq#N~QZ{EDF$9 z;V8dCFs4Se&C1TdxWVPv6VZwQyB0FL_T~JBi1j*DvTGekl3{Cdxw?vDlLYn^03OO%U9C zz3m|>!~~=3chYV!oMZV#j1riml|rHF2mh!&wQu%}w@Sok_RScmi=rBQeJwo-)<7-bYlI@&bt7($@}78{XzP!Hv1H-5;UqCV%R)n>(nc(;u@0-(4@G zV}mw%&f1F|;}lOH1;(s~YP>&8O71|-1t2Kq!3x#gi9?tg7Gh1UK46Ht9i3s>)%lbr zSL)~7WxUG-rWdqwg&}iwf|ol^)kn*lL$M>OXbgT+6i*0=Lf#*iA{i_39XVJef;>9h zu~@9`4$gMnjt#SV4BLkEt-F)0s&WK0F0KP4JFiDKlOL!X*jH|3;koRrtvT31)|g5n zH@)zJAQN6-T_ViWq4$Ri0KQe78W1_{z4SWo*`iMv#9$p>g1?PBpL zIE9D{nMb%_X#vLIG1k}C3zou}%|qGA*L>2L*`10cjXy1&6%=vTvQp)$g%|)12t%-0 z69tY>#o?F2Rbi>_mtyXK(Gl)x$#da0mxAZEQf26ENw`kF?u!C^uLQio0r9z@8xk`P zI>Qml`}E=88Y1Lj6-$$1Ns^eb`+P_4UF@`Ld_k^XWCeSsK(pc`f<44|0}S{yOiqb# z=Da8@)*4Dj3wHy_ZD1${Dx4Cvul1U<4sVBu-Gl(@R4K~7U+6*@?n1G*XzL|CVXw{X zDR?fNPsbrO4`CjyRgJ3HKsG>Db==AkpUs-d9ANb%U?(8#8XLXbV2DWPVz0?v2yAXl zjl^_?ITP2Sm|fE#bM${Z8+$4Bf1duo#$s8W!G`{!qPn6{V)+vKe6<-t3-^x2?FSA8w$dNarsLtqa7R|32o`=7`EAAJ32 zBL2IElAt&Bf3JA2tGZi)|D*O16O*XX5-r@DA*^<~!$2{Je@8PY9)-l2=#f;32m7SXpj3EejX#24i+D>piWO^vCY~KNT?#nL-N|y#RwBt|C&g-1vn9E4 zP%f{zW1xD9%IRneb3`uF#Z@HROlGz#w1^ILE1{Gr{D6do!k+1PUAO^}nI&+Sj zW;@M{;H>f{8}d`!YuFBMruf5vE50>T8!UuOJQnOd-4@`$ndM+FY_pk5Y%?(j%rk&P zdnsxeK0@)=r6*X%Lf1=PazAw3h57`A_>7#IQpsMRtm#OoUP|7G^iPS4(@kk^ao&u} zOAyoIc7pPRIw(B0v%EaMj^Cw?B26^5%Z<~(-^{##8FTWjB(WrehFMQw>+DtwVbE-{ 
z3E(MCh-;XbOa2&JT{$lnUpHWygHJq62)KSw>kQs=E+~C_$O91 zJN~z}{_w$rvi*M(|GOq^SVI2`;Xg$RWAZcn^^QEs0F708KuuG!A}37Uj>R@)sZxH9 zl&uON*y+i6_-M3R8Q?2*)ySTG!qEb#F*^Q=<;gd)S8`iKtEblyaW*vFsU>2v(eE6uJ^pv)hxVCOArdRH$LCVh!F zZZJS%^SdM0KXX)_gaH4pIH4H`t+;WIp};KrBXiWFqdB?KD^z`^wl=FS{w-g>j*MqTHRp*q~r(*OrJMEeQR&;=P5$uPJacydL!QP216YNnaS| zCTVXT3=EOS7`f8-xz33>Yo=eS8p`KXj{5SZG)*TOMJ_0}FrF)+s(6<94vvD(%pdSA zMZ1tYPz1+fTu0t#NmQb_1``Uo1I0dp_|I-w3#)_r^%!~dH^dozikOwfSK%2JImI zhFvgjc(djBdaK7P!EvjtI9+gO+5*ae_m{)<&G2dd)O%WZxK3{4#c>M1-;;?DyjB*Vq?namaupz^@sxUIs}eEr^8{;G1LCl`^#HQtxzWkqMpaZ3OTgP#JHOV|u(jJ8z=kAfA6a z9Gno71K$Ap(*s*V_k?JpiZ*Rn;UzN%mKGcuGe}1UEA@fXHOw4cx92IUPDr9$Z!q93 z({+|#t1FX0uRiI5(?g0bGfRwi$la#gcWrBeq`94Rj9-J;vP_Hgae7YyMLjE{2_I6pXCYm81Z>zN84t(m`@EGb+`u4!4FrXX z+>wgH)Le91r#N+`sYZ@W#E|4@fMDOm67Z`_%?Lg(W*}g!`!s-A{xOwnz7kVOWaj0m zJY!XcDk3>EX@NU2s%tV#u1Yl@8OHeqU|6gRfR)Q}rir(8BC(4yx6nh<&E?A;1U#9T zOC>!EGYwSMH0hI220;yBL72%(?!e540J=lrwb?s8Ze?pY)gfPB;@0#bF2X}Sg*!;} znr!up)eb=XC>_C}2gOl(%@4ZWNX^EBr@g=;J#REP1sBBgE^6nX=xY7PAAcZQbDgCA zpA1=HDOh0kb$l?pbLR@b&*f~PK_d2AAh`S(VLhacSCO76wIuav>u_)ah!fMB(IsqA z^eCjA1VG}%Qy_4!)0V0$drBt6yr>o}C|#Yi?qHnz1*+f%#?D+<7IPq9L{rw5LE30P zkP-U9_2Vz5oZ7(9gRVqb@zpw{O#od^MEV9t>8MD=t-MJHq>Fi;{~y)}jI_%Y0GRFn zUtL>!^l;Muf0O@lEfIhv{Qt`BGEv!}aZyO^D0*a)G7g?Np}EtVCH|oLO3w4srp$!$ zEC(?0Jz($VI#_8lX%LT1<3by-;b}PorJ+PRi-utjWR7GfY3GpIGC_UlyoYWZJsur= zH0&wr&Ocru2WFt^ht(E(Ld&(k(E*fyYY z{|FOK=OH5}GIrj(D%8ucGg+NT)1cp`OS=}tz8Pd82+ZOC@uc!4ascP>|J6sUrTo8j z+!y}nCjW1t4ZLKdrT9Nw(?}(2sp}4M0Ovl8a81m3JEcqtO?%pqYGO z%D}Gip}5_!&j2Iz!?Hg~UzPE82haLvZ(BofDp8y|pLnL1r$|BvzyA98_8s00b1>?` z3ovrfG{j(64=zug*t{$JyV36`$5RZnD3071WIyGDKK10s^?!)_x5=(#YT#ff(F}J- zEAKpdm^B*h&H=7JWvQd`^{-FHT^zdY^v}S^aX6jJ*8G*0p2wZR!7E{*4ocsUlkK0rO<%9DO`MgC(+jFF`;XqdspDX>lUw%}<{!BLK14ji zMW*iH+?@kLzNI{^3}u9XDID^XVb9t7IMFc|&iUV#U89S-xPz#~83NhTPRN>#%qWo` zrI69+)&5U>5Q8GJ1}&P(X11GQqzH5fd`25?$9~d1c3T3>0#~v=%r~Ugg zItSy`ton-J9B<(+lf8Q@54%HV^rH zrA_gqu*pEuYUwdS9wzZH3(As2yLG{k)_Vuk6LfH|fh-8#1=U^C{VsnF2t%C3kmO25 
z3|UlNxM}w<27x*HKN3rE{qfP|62Rx!|7&Zj6aL>v->lx~|JQ+POX&YUC@y5J{lR#I z{1&FL4F|m*|3!7sS!#bkBWu6gLe^E4u`gv8S*evwZJNeW*&ErsuxW}Y7}$3xG~_&5 z-BX=0WUOk6K&xcJHcQo8d0r23(QhHEp=J(${IUJV+rQ+;t@W=TZPc|k>8#HuwvN`SsVk_cK#AFu2t|hjJ@7L>SbRG?*59x_ zYGB5BBWW2_3Wv1oB-eqbH)DJr2+ZdH&_OLJBVT!cfDjJwW7t|Y8aRjlf4#a^zW;aq z!TL@7@AGVUsn;d=|64_dP6iWO5dpl)DkHhYuZM`qSmi#BnHj+ea(UiRM6uA}wEPE} z_w)%_0%c6%Rd22PgNnuem^YVZaC#1GEcr)E$2=~YpBon|2AM99!L&}>b6~@)gX1G_ zm@5nB0m=m09Px27GZh)LmOTycv!`nBZD`W+R2jJ_a49y;gefMf z)8H=KGGU>RI^04XY1>i{?fvg@l~;-vZa~W1mV}r4upmS!^hW5bhIgVxo}57Qg(akjW^1TOPQ6x7h4G( zhQ#0?$`DP;NC^-zZXP=r%$fj`f~*P&Degv zP!;1^*0*5!3H1cU`9fBokMVhIz6y{=Ia0P(DhxG$-xQe^?i(+uZzV&^6^i+NSM8j4x$|3cF)RWjPit%=H;F+*XgEXsUHejF0RyvEc?68IX%*DZP4HfIuS0JN*=*B zVJ9|f0%-4t=B(sf6#HPo5dJ*({V8*Bf|>h1Py2LH8xD5Pl&;{oF1MKM)y%5jnn_&V zrKx6dq2YAX*l+|6@xdq%`PEp*g&7c0f`zPs-V{;FUKmZBnHf>mPJpvdgceXa zVCsyf5Y=?-n733z3`oEN59U4b&N$h>ujxxbYrB_BAqaA%HpTLFiQIC2*3rovRch&#cR z6JNwEhk~~;%_nFp)q6cHR6VVi1t1@Ej0y*ick2s^-4*P8ZeZHYb-Qm;Wo`*rr>_N{ znY$Vh^Wn+O$^66%IB#BjK5|P0iE9iOqqC%=B)BrM&1E`GVVJ&mMU>=@InwVEI~&#P z&*+l-e7hQ_9|^9Kb2@zrcq$xm>F*UgaxIp>4zi5@2l*+3tM~sP`)hq7{*Ob!H~C-J zfd&`y{}2(LKKTO@rT%>NP9VcFHwAcVSXF5Ysk^RxT?qg$($%UqP~{2IX$RH(V!ohs zs4LI`tCaFd@J3#?h(pNUH?^oje=E6ATt%sJ2o?JZDpnzjfFsOs;}pAy47%7yDn#c` zB$b_|)yT48R~PK}qBDoy0(Y)Vx=zf1_I`qWUhHFGhY+hs@x)KbDaFcA{CV#9KGD+q zPJDCUH@k61a_&o=n8m8k=JvFSeSQ`*MuOU!L{H1kHFHDB&&jK0({vesNji%vmkDX* z^LfA22ay2cN-{;txH{c2kzlTK*(#5O(%kbKSnTg*`My zC`Aq0t*+0IG*vLEuE5SI(OX=@oUQivNKmHL6&!2n?-g#Ei@2au+hPe4NMDNv@#ol@ zD|Cq#F{`+QIXgqXSaxZK-Ab*w*iT{Ss@_j0kWy{M&R&P-<+m}$68^u_K|bPT)Iax@ zOaYsH{s-m%9+dOH*1x{V|GFOhxQPGnwIagK|9XDq-oFIL-0t||4!}4kJql2D1Sqz- zoCh%Wa}|1Mh5s)O5DvWnz>$kC-~vp#FfU;7Ttvxb=6X_v->ssj%5y;OEqfg}Q0liT zo8*(dQX%kPbAPoo&B-y6oQN6TCGQtp(V)T6%=eWhS{27L8~@IiIZotR95G#_60%dR z=8BD&ZaPN@1=?_oo03knpvhb@3nFsJ>bY&Gmuw`RBJ=UZ-b;5e?q2ZGq-VO)A<9m> z8Ng0nbF`f8wh7ymyy?if&<+e^@=?WZOitC#%PqYl$^62~l*=gmzO+-LIE#XtWyZlU z)YR(Y^qSZE?vUbn)2;k@iD>fSxdvFlsH{kEa;IN^6&(3#1)9Pcb 
zQVEMk+Hen@&$f*_d2zRI-)8NnSW8TN>dDgCD~OVWfpI(NHb*c7a5CxU9JA#czw^z!e-0oCIMB_#X0H@SxcI{^GXW)=hXTRW%plhEox= zJn1U8wlQP^w=z?L54?oco${V0>ne`r?ykF8VL5S?6UN}wF_!JP!dkqDAHYORmMn2O zwqYFvo^f$hag{4VZ|Y8^4y{?6WpTXN(cq-!@y9(#HftSouFHAz`W|E=m zNOG3aSRB-~lAtXXP&9pMvZe6=lOL(qSD9;oA#kN~#8lR#pD)2lQe|~XJg!uCKco9M zg5>jHapig<4qukLB`S2d@+tOSKqKyL%IO@dT_Kv&)fLGa4+W&7otk35B&v`>B;7b^ z{L?eN$~DDJtwfoK#wkdGTcs+XkPI`?u41#V!3DU4{}&Z+x)&-0INSfb`sl&xdinhC z!<+hl*Rc5*LL^Rh);-xk*`pCEzVIhe$&%ymrK7%y@%*fN+J67cq)7Q+M z5iE^DPNh8j4i%NAR%a@70>Oa`Nu_OcrBOWM@){&NA?opF2Sw>&H#T1M+Ut1Z_0AiUBU*%cY zD|TC&uSTFa(;Cngf+0y#_HsE{)qO;vRH}Wtmo+(h7`WYvpc3UWdHFvTYGqS;hhNmblOqvxCH)J_D3MOgiabb zQ!i>Scgn11I`Er!Gtq6FKN6^^g2DmbQu&a=ljVLFksamPR*8+cI8geO;Zt54E@_?4 z^sdKF`d1xg3Hv`4YfIe-Hpl*d2nRsP{$KxQ_2&HNwV>QZ^1oDoWpojUKc2DWlhe_8 zsy%?Y7`}9bt85MWhpVep+>K#MC$Jyk^!JNjm~ zt8)Q%AqwR}BcubE9wKTgu5Zw;kP=|OLkcv{aetxb1_+l39?QB6W`$}RxC9s%!yRGF zSEtN(YV~GR)2<#jxh{RBSXx^K4!V8Oy&?#aF131mZDwn;NyCfPDy%IGQJBORy*U0OB+~ z3ujWpdh5PEziezKwa=%BQZ?temb%wUaJrt$016k(B?aKqkELk%4bkAdoKWp&BAQZ} z`w5l+GUa*w%>eaL(7ycOMq6rAZV1r1rE-8%1-gJ3D4j9?w)AT3<6H@64U zl(xo0+_S3}{&C&mwr_1>E-U98h5HoUwGl>|Oq*1w2|kst#pK&lrV(o=X1lS_`hm_= z<&?9Qx_I*u%^R2pF-jIlyKFyR1UyJgGL8|os=5eQY z(mm?qj*Baq#vJ{B_3Lu|m$k1StUtJ^|8gb3pVP^$>~(8Ihre9rtD8>0N&?7+kj5Ju_W1{tkLU z2mQ{GcwxvAd94Q+zT7_fKkhcOSAAT6HR#KmoKnV1RB^pnr&()0-Fd&m(!1zlORkQ{ z-fr#S;)`KkYaVq+EAqEazt-IU?;(D`-~Pk#5xyTE`PclczY;sz$NI*n68g+*_nP^~ zQ~bT(`hdSjC;0q-(G++7b_e4;+kW~C1LD@LpHQN<)DDoHuUc(0d-qpJgRJoL-|rlcMyL74$_gb#Ge2Hw4-WE`(crX;e|p-(d-28Q zE4SF@Zbt9-(Qq(6B}>uUU$WldsC$6a^wGx-8IDav--9#Z7WmEcKR)u!8K;jFWgGY$cRMo&BD9iuwNASh;N8g1)@mPcdEf>B+CS_by=a}*PCBC& zO-@?rI$_)hST+OsAa5Rydp&-#c(%WFaxOaFMpnme^aj}vgJG{-*GT|O+x=0v2(VTY zCM^p@^4Z`FOp13#8_Y-0IC&c~*9HDcCwlkbI6G)z13I{8P;VV<<38Np;1uloW2T_8 zD+mP}CiefX3kk@9K4irZyXSaiucreD$A9-7E=k8lLV4_s*QWCq*$zb+0KPXk=cR@c zU)ApAD@+N%;zSWuu0w>&KZo4Nn<}Q{se<7c#T@pDW1|jQv>{}m$(_;~dFRvQY=(VY z;+SWz11z&QV_XD%(kV`nmo7HDgB7UnPriHo29oQk%rfIQzdxuo9t|M;Fxc+%o$R19 
z9CZ&_db^X~C*B{C3>Y!Ssb-=kzmi1G2jd~HSMnmgk2f8X$30Xq~mXJ$YvC5+h~9 zkrqm_L%^M4O?ck!9v)&+*chncbo}U4Z)KZ@xEQ)S0*TgYzi?I*<3K_{64t`dHGBE@ z7l1B57TRCqG;%P-lpPp5j_=63%Sw~@~K^GofQ#_T`yo=DMrmY)@&i*$R+q z_Mta4^TFY04`&93(6J4BYbXDTo400Ps_2 zmi-ZyuaF65{O!F2kW*m7nJ7ronFvfi-%LRFP>&D&od_D6VpWZ%fTH3i0w(7oBI)!vd2_nk+R>M6gF4Xkgfi(nJ@XXWBzD zi}@dynkQs9>Tu+xIdYIi=jlDnpIE8IQu7?bw62)s67ziXWOEB1g&bPZ0kq@NWM+I_ zYMuwfcJ(wDhUeCsr_0PPPJF3(o(`ZE;-Z>hZR~kRi|#w56|}YSu!|&b?aX_a^Y2pg zgc|*EF#Pa(a=MGo(|TBLj$Fc0^K?fRHRmF~_jy7;MzMJwT_wX8>qyjWXY}CR51J4D zI0twYkE`b?BLCTNIH1lxPj`;Jes(Dzhq~OSDsAC%J@V~k^o=S#V2Y?W@oDBNPlY)Gldh z!tDSp8P`^CA7*W+`<_RQ7o@Q7nD^%lQhmIw<93i00&!-c{^>p-?4qwNC z;JSeetku3tQJE|~I0>e%e(nBr$gCq#$zkIi=ywtlMlz3wEqr2D7hk?tHOV!243-3$ zZ9dNY>KB>14V*)$Ha?F<6fy6D45Qe|lHab6+blV;>CNY{ts*12K5i7p{Ssrw(J%C= zK#*oiG6)J*u{XIDzhr;u^gBaw9ynjx*0(Zy3km}H5;zgMpxCE>9b9bl^|r@jCi|WB zOJGg781t-q(0M5lk^13aAlYH^+iQfN@om2~tQ#}tknenI&KBt@g?j~^6rK3$>!%xk zAMW^-_0_e1TUq^PW$jNZFs~ZP+j*ngZ>%-e9>L%5M>z8H{;2gSxWHkW7P*`O)$I#} z*pGK{sy0IR7kKo$eT2~Mu#_1XlsIgNV05PkFMEi!!{vVIKLF~4Yw6ORe%=nMFx7!- z8@<&-B?CsxC_*ZM3WTJ$Bs9EI;QosMmGP+;8oGOfoK-~0_H1V>fEw%ozSXdZACxVS znVqm9oiOZ`BjW<7n?}F%?i{#HEyU#Xn!X;$KlyQrT4t1%H1vsoUd$}_Zo0tp);><1 z%~J&~DEzDB_dEf4LGe`eT1t$F5@MZAJMyDqBPg(mL);r=`a&6M{9Z*t6+=lPk%ITa z%3}3UV&+l8%-pvN?wPK*V&`UUTov>emh@FGr|=}N%$v$F75<2&(o&-MkO|RSA!Z@Zov(^BB$nNWC_L-!hF!XYXQz z$+Y)$Wm|MU=%P-M9I%WsQaza9ikWP)sr*v-LaJCY!mWt*PxSXJ0%$ajB^1TFpK zJbX+oXmO!D`vkWJlr$wP_))w=ooS+VM`ubp*$aKH0v{45_O!Fa=Fv%Hcs3hk=`1b< zK26`i-vnTNPu)yk1V-tSN}p5k1#fUuRY7w3fPDr&Z<9fs09A)w555&4g!6-%cPu~ZU2JD*xzLWq<+xk!P)s$@} zgv0wfTMlzci^}3JBXofbbA-se=l=Sl?Y#Zf{98a~-IC@3t#UE~#pEpY%G*rIXL29$JZf3lD3ud!!T&~w5J2j#-T;=2aEa5YHhQzY+=k< zxZ%~{)-t`Mc!>H0XPRlHp$0`ohC-MMEsubbID1HD*g{Ao#0(-rN~>yA6r?GH3-4Q} zaM@Ae(h>6G!5D5lodQjpIL!T!-E~NJ3LK~~g6w4x9^v^O5)>gdX=fSMen0BY_c1KN z_AbEnuME*=qor*BXGFOx>pI1~ii44RvKvo$q6)t9vo3TD$zjLgz^>$5Q?to5kls?2 zVR3Df^3BwBQ8q-~jtAN~_M(3#m!UdDIATdU5+|#K5gFTL;&;e-cb!Dgf0-4?1j90> 
zC@#J_eXOTm8~$Ue)l4n}d747A-wp>S-%-t{iVF9;^MXVC4ib|#kXwU&p6{{^>7pG} zP8@MS<+%m^gk`9n>MEyxOQ$)nugYa$_FTksh{`?gysL_bI0`pPmcSXyIg6GiM1%b( zQvxs1wssabq?RP3#vV}&`~DTn()bP*_RpK>M&=c@oS6KF#gBP1 z90!r$NQX?&j66p7TF9yjita+G&td;OeCUoEPsQuVYPYmdRK<_QVn}<7foeWpeSsG9 z_K#X&dp~Am$`<~jry5gqnwFO-c?vJs%hM-29m3vSkgJeST zn{gp6sL7rhIlL37D61>xb4q|y^3Q#K1tG?ku+ zmkOrO#E<6eUve>k`x(U-BIj}!-oMv+*BfuyB(u5n7EbP8oaYzb!Z?xZ*~K~gr$scA ztrHfpEd9Mni{xKw2Pbz_Nz&0B%?gNA80 zsG9Mpxdw07ez%4E-qptN(SurzCo=Td;=E1;*oUz86bw9JVdouD(&Df+?!jva`|hjF z-pAHCD#yXY7Un^ieZ?Xy`RFT@A!VeQW#-s?`|#ypglZo-A_8COe0DUuBTlF_9JbD} z7SsaK%CECG(95DCz>pI|IO#t4B06EJm&__XAaci#R1^Xh`CU{Z0kp;mxc3KxUZ>T^ zR0@F(0K1XP7FiPS!3Y2!a+V&~~5i3uRZOg8&s zCbtkB%s!f>uz|u{j+QCL@mHvnR`I8ZFSkf-8(HP<-^!Bh$BDJL6BtOBu11O1-9cTJ z74FO3`JdPCYeolv+c02#Bin_!hWb)u)zi?~QAdnmU|qT$*b2b)7LGjz zz-C1Npgkdwane9$&N>UjN?9w)f?_UFL|Zc2d8;H!cz&oPoM7wCO;%Ve^1Xy>)esm! z1uPc*V%G&WVZVp^Ulb`FFYAN%G;gyiGnEf5eY|24D$F#5Gn047g0nEyTa5CA$=-tM z;%evy8oh>5fJ5!=?>gs!{0U2ucLWK15w#jf8cqtH`$Mfl0HzNv#9D|#i{du}3_?XK ztO2eyb28=wfR@FkkoE%}*FxbCpc5U5{1?3=bWIR}v*leW2_x)VRS7&m{0HEJNRL~> zicDTQ4UErE#jWU&QsKJ>t)>X{~ z23p(=bQ2RqbX%Ze?LT^m+0Y!Y4h2jN(qAip#O=Qgs}n>#x{ zyn6H0+VBzltm6(NZ?QFhR!{;d-gFL&PiB;Q)0Z?~6JF5Ssg}N{x*uuMf!yZBNyI&bcpnHv=TKE-4Chqj~K6w^v1yHvWR@Um_ zbP&uBbm7}(#S|IukoW=gUvO4UchO4A9xNBcT3vlyuir2Jw1%IAD!(fJzWUW<45O-) zbLp+ex1H4Auc8YuTC!L|9C8PB1gDj#<`q5q2NDYh5d+6SH;-@MVF9Uw5lZ7i6V|=Y z8j`)5qnIwFff@+*h}e)d2H8*fpwH<&uK%M+o)>&rz{6kbWN-*EJ$i_q+8vge#l=OO zLJ1IS7Xrju3J7#@6@WmWR}P4^YCxE7lYK9hAs_&SdvN>Knyby#TJ7Dde)e*3);U3u z$87Dv{cL@8ef7SUOgvlnipG$-Ir01i*m;gHMZb#*$L)A-6orP5$NN&95^nsH6~Wy6 z7K=WO^Tyz?0pw{wD`xV@T}J<4=bf~ELM$Vx7efSteFp>46)C4z`f&OvHlg(&5pJ_x z_-iP0X%r{s0v>`z42D2I9;jiru_WUmQmIk016{S-=m?62baM7U10t3PH^~pZv)d#Rid*`UihN`E-i^?7pjoCoPlGoKTYWDe}#*Z$;VrL=4|FTE(@3~IHc<9 z+UvUUi2ML{9}0}qFT^;XZ4@C$oj;jyd(g=_ytOc@Cfc79tKu}`D!5nS^nmlBWNAt> zv^T_IH4RkB8vq7;e9U)133u>QV^i-2IP4=1NLd{`fN(b;yOz5adFXKk?VPQ@KYQ>V zR}SbFuiOeB8(;tpL@7Eb<)Kt!kHC#&^9+!j!x{W{_wIpKaH5c9<3Hqz3tp=l6Uv+B 
z4H6zcLWy3)$b0z;FniED>ohpIb@}^2gF(qgoQUr83LxzLEwYk|=2C7z`|UKi1{tq$ zw%!;G?BjzK%CGD>iWc-~Rc;MV&r!?%c$EDC<#*5jOzE}mg}ZAtYuC=kOmG(B+=QNi zeYYp~v$yn9!NG>&z}X#QMBN_M@BX<)%sv4r67|S%1>=OGJD(0Zr*c%A1yp}*8XN44EC8z4V#v;bm?ZTvKCZ$>MH~vDu;jk_z{KdILqc>c!Z+iveta%`S#Yc zmpjiIm?gww1>gu{oZj#@E?$OwdQ|HE=1b09shj#ym>jei8@j1?8O1jT?1h`U12^$n z?$rO$ixVhuz^-=S=T!at#@TjuzG;oTsTYpCaOiu`NQZ^)#5-WaXflaHXfyVvQR1i6 z>NriO$w9X}M1o74fuBNQ-+{7S@i$aR-Ae+d!^FFeYU@gN-E%%VAED=D82i(dUcI_G z`#(Q0KVdGraFcWkIu3ky5O`ZG8M*=g%MC}&bCVmEMr@XNECSG363ybFhvf+qZU8-v z#Q0fqlce6nVK2PIzYM(*)*M4KfyX9(g3xpThSY~Hw%FKnuWmX)ru_si#i8mT~yB#2Qo{NTkM z39_s~zp1OMj7Gn4cJ^CedY#?XXQJ`{0Fb(lK5F>?_U`Upf&cG3x#$14@vNG6YYe&v z|7(r>OD{iq3#Z~MtNdwWm*Z~K1DH+cB^zjA|_m#j+wtX%)S-R)klmtX%+p6=YQ|J!&TJYfHm zcw9{RE{Fzhz@}~tOaM0gqy=Ba%NMlaGJoP;dbtuO0SHteT7e+xv2^UF@E?%EuKgfj z5%7c9AK`niPzd7|T{UnWVw)oGJSrD9g!x-cyk`5E#cJTTr09mSq-TV)=Ali0}Cal!z(AP`n(&2yQM{vMjqpUu;krlu9ZusZd05}zpP3tw;yIpceR;`fd>1WW>j}Gl%f#TmTIC7Xzt&3Ie~!Hi%TE zBuk_QM*$>i>Vt5kcSdk#aL5KNu;8c0a9}t2wsKtwN&H2Gz_KI$i;d_fRyV#M$l@s2 z!I?&DhTu5Nj(+T2H24*8{)@!8b}YnkxFcGKfK_qIiajB(vyEgn=x~t$h|@zZfXg=l z;9>c0I_xE`SKP6Qf{oD@;5cBve=flJgFEnoB8UNhy>94u?>WG{k7?|_@TNg@qt@^t z{sUYu9rMo&cTTv<@#K;3UWQQu2V^#hU=fHF#<4O1W`r$dgmnMPdO{@BJ@qY@?mmAJjol3m#?FtmHmp- zyVSQJUoX%Xp4<5uoz4e21x6P&JY9{fsS!u*ds@r3-BgyAN0VH6`MOX8dNqpT?+t%4 znWaQY2oy1i*u#ONI5OHSG%^JaeG&!!(Cijpvnjo|R>|-9XH#$JU)(UE$e3r#=Z%iM z!R%5s<6kiTB8XrR$vBz?BP47=#kj<%+5w&Ep(ZY8+Llz zop@)Llc)s3%{FhPLay;zboy;FDSZ8jSd=gk=-o<^gNTIgLI z1Rqyt8v9q!k)o%r5a2$#u)9dUI$cD5n-)YjoOwP?X*~0AMsK1SyC%{L{0|;}Njy*N zhQMc{ctYDe!`&O`l(%sVP0V7DGoBxw{S!HK7ev+9hp&z~MPH&QCD6usjR1(gO7@(s z4mTc-{S??3;7P1dQn2K8F zd8p;WM{@_mH%=S@b%B(N`c|g?K)D=;*2m9%JnF;_I|f#dXCFYR_YtN9Q6LxtEEU+? 
zu7J8n7Sd^`F?|5b3mfe~&mHWzGZpZ3qOfz}$4T0WXQ9Nj4taJc(8s2fW9-5tP5MXJP4Umm`A_0pM)9#FG59JeETvyMZ4M3wVPuh^x2M=L$%6HU;cx?Mg9K z5vvKvfqD_el~5}@*&$cMpBHVGF6;=l4IxQtxl6*BUm}SCW-Q0riqY{rb)M%LDcka6sm#j}pFFLstbhw`J1e zRyop9?D9EY9VLMq2(cp#Ff#O1E8pTTnl3_v+_Pivx0#PKCIL~~zfs*L>PvtS)No^GyZI=SU zIgkm5IuawX2o|O2gTr%;v(&c^qmSe&1g$=9;eEX1gwp`(%;2nw)W)fY?@dGyh+Lo? zfDsS2RJfn5us$w|zP^7No#;EQO)#;HQ3)sr!vX7Fp(xoEa#lAZYgueYYu;KcV?az} z3T17YTmz427T`>+0G^{pgEP93)+XnLPiHDO=0YPCV*K8k^rz}5V3ZRD5N{wz0^esY zFaX>H*Z{|?BM%jGdf)Vh;Vy@--}9!NWPHFr@h|W^_%GnIVHft8v3lnX#}RA4fRz)0 zI5P(Fb=q`vFdDBI^btv@uiPoi{!)4mbL4CTq;1%|q^1kV%xH>>V)oXHibBq`PhyY| z0e$*`b;{qF0-1QJj_&eQrc`(%erCx|(@JGf=u8#aNH*C95ZT2n;LDCP0I@FlxkITJ zrcTU~_?o^$l@b_IF_4M- z@jV>Uqib&jxS_6l`>*&I{czL)Vg%eI&$b=BX(YTFXr7@5pUIA$BbsuoBIsy(qQ?;4!9j-1L#0g&O5zo&4 zn&rlf!!@U=Fw_-|aw=g^!?OchI(x9CS?}KtMpt*9WOVpj!qahaFMzF8 zz0omb483~RQ@{ytO(BIGsz+}#rw*#2} zH%XyweE_`d9Po3jsN+fLP((R7nuf@?4M)s_{E7D{d^Ek{s~vr8astbaf09PBjX}P# zZv%`Iw7TVOzB&xyq_{3>7Bu6h_yP;uvafS)fNxn>>MCByL;SQV#VB<`6?IB?A#C`} z3jkw)C55}!YU;~&(Yev5-7*sANT^f9pw2raD5dbGQQ*Z=1`uR>uDx6DWTTECrHRep zQ1EM@Wn^l?aspdb;tkB)fbww&5mfZi`sv>tjd4s0D?L$RxjIc&qR$Q zULkKjk_W$1%BpYuBKad!hd;CI4;1g6CBjYB+yW|m9&ys72wlU_K<9tO~qM(kJ z-iTffSZ{l~XPcNzKXfOiM^!D;$ndXaQ&l&%BYkL18;@tvqT>PS^wdgtsKzGC>);kZqiH zYa(@xnK7L(ZVEj2%HuY^WVwd+bVEOy3GjyF$RB1$Pkz;SPNnhEzz^}36SauLu^SHg zzi6I^D(=X*EJ^smJMhyq)3*s=wpcO)IRpTB1v>&_0d{e2fiQ~LHEdMo4Unof3;B5Y z(flGpfeIu;-aN>NSjHXwiUu4|V1hk_9@I=+v=~Md#NH&j;ugt30D!_l>=Jq|@JRni z^>y&bu^($?0f*0Owojdt<*7us_*!I0nTc0T`-+5eJ$^0o!@9J<;Ax0Dov@wDV;DmCw~$^#lmxC!AZxMlWUwhhbC9r^SZH&pCC&??g#>%_|XCs-uEkr^KwR7W|=cY_yR z;G(rp6oOcB%-_M%hnL7+>8*~IrmauC*r)A`S~#I!OErkP=V+G77SH+5rJB5O{Qxk2 zJ_f=bN5P1F%U{6CmTwkm7Wfjfi>%o*{-U|O&XYQ>CE4L?c$lg$p*`3{ze-vp!pka zxP&YX(>A5Q{hAgLdZMMXDZzag*O3li;T)xXtFJuY3YxF&+SM~(a}8F`d<`~SHS;yv zaK#iFZ878BT&tzGisd4Tm#&xmagOfcf6%wdOt>wWNtnW1BO%w)L8;lUQ4(W!d2ZVFu zNI$|Ai*FIso?$_jc`9Zg9I$q$GjTs6PlTgf%5UP6XK)(dX7uH~_=24HS?|e{wj8q2 zi2X{3io6W3estqZh4+$pyUp%T8g!w`QB-^ZFq<7F&3+nTR~W_?Koaj)2T$JMRBUlm 
z={`#~rr1xxp_Uy5u0NSyPotVnJ3>L2UB70%mfbyRMe^IS4s(IrgpFe&Emc`R8xS84 z4X00<6*5cRB42E8GEHw@_;E|S&nWI0HE>Eh2kb%3DH_YlX)OW9GMjQ7vfDB{4A@&z z>_taPW-6%LAR8moA+5NN(nn{wW9PKV8O`vimM+kYdz_2Lk1vFI?P$yJk>(zj|E<6J zhgL^e+O|7wTeUX4y&n6+vGj;c*yslBW`^8s9V2&eU6^cd778fX26rW-&0`ZU97R{* zEC|r|ztA0VZsw~IF-G@4!*LW5xoPcXy`!#>ZCp-wkXjFaI@=QNH!&JsW-Wd9{mU)< z%>KgOOug_7;~H)1lU8smub(s5W{y!ecS9myt*Q=$TQHlbg@L3X^zf`F8(x-CVWZcq z4bCudr$7nfr`S)uH_%NJ0`~u%SA<YZ3jhu;V6oA3N49TYC<)U}Ai;Oc!OFxgsnBm1w}c{IJ;(TOj0hOTolb!%^mrtQ_S>BOMGZrF$*gAu+LJ zwp>?@~*}Ad3%@5=C4I%=`(K8tec_Bn*2-IMlL?xUl>$@rYij( zv?9_VhP=%j9MXl*M2kwl_A2g&+2VE{D0fsoxL&w zG-`7;Uo`_(+3|!A4#XHk@1d9F!uZ6!As;W{rdTaPKCl_#yzrBFMq597J{w)~({N_! zGmjcMo`w3<>1yTCAUlUfj=KY9b5-pS zm{bnP;u5$d5#)5m1>IUD+?iWo4(uAozzUdc+MDHF11m>ibq=%+sM0-f&Oxo(L9p0; zrHi1smHdZ037X{5)J#Y-u+oRj!A2m@Mvjn(p@ALGQ~u=-KD3!bPuY{+4jk zD}kfnrdQSd${h7dh&glBt0dxv&Uz)CdV=H6{z(%?OFDdx!m?L zP*>r&SB9+`*FEjXv8?l683bkSdzq2R+zxyi)I}G*436*BuFH~zeEEv~Xx@C4{WNy# zD|V4{?5phN4te+$2a@;ks~pLioc)SWS>63I(EfuR{))Kzk9GO0CYvRk{xU+lcl)b# z`zy}EmvHz^hwuEhDTU`SIu>)G7@Ofcqk05m&YkqclBq`92{Z9pyW2549# zSYM|$pdb1@47DwTSZX&@an;6K)s!Z~2AT<_58%OFK*9sX94 zy+K!}5n%Z_dWvCT*aMo7f;)os*F{gZDE9x&*;YJohcwsthdIOeM$YR`V-L`wrE_L~ zq3Bjbu9-;-j3kWG{B&~mW03i4E(9^Yk!w``+SA_WxHB{=geS588{itB@|rZA7+>oJ zGvL+PFV8j0x*!4Jr4UTW%11Og0XaJ$bu#P^H$odsPEkV3B*KHC9LblpO8YsC;uuiW z5>hkCXxz@4ta-<13nrkDsuFSR*3LG5LWf~5RF+mz=x_iy>Z^A~rImZBVM!*+m>p zXglVJ;Vo?BL1ZHjwHG3%J9NIOauFMdv8?9YObMSzT+hiAZM%xb#)tZ$gIK4oQIRd6 zN`PL|y5w!~(QpX@&=C{HxDW{=!ToBGiF(i$p++%|3fKV26F(tFk*N=V|14%zOx=mv zhbUfJC~}Hx!+(@y!18X%?vHO>s3?kP-3?L875nZMwE#yb(d^h2wsDt{+uW*)2XEz{Ai*YKHc( zW8@0>miJkhD=|UP7L?N)1HR$$F;(J7N*QF`IPmnj2kB8ui2Wd+FS&d!Rodj7a!sh# z;s~VzzyTc6!jVz-zW+Mfe=`z85Fw3o8#&i z2G(z|Srh>#mwG4mE}=h=2+b)H;>H0A25c78=a9i@ZGQjf5p3d zf8$C?g%V&b7q6p~+G?sfIpRtD4uT7s(?NZYQ@=NhjaY?cWfzu8u z^gDPJFfF#*fUC)7bG+=Ogsg&P<*O=2Rj6#`M`m^8EV+7fGaIu-jm6{|F5@J{`d^rT z@Td-xnj=@ECp6uUqku`7;85yd$X4U1Ac~4fg%$1ECcZ4y_)8-9zkcD!< z6DR(s6ZjY2@MaizWO2$7XtOw#bZ%VNm0ZsmX{FVOUg`{{tXU)0W}#~Sa<06cGBtDQ 
zjF`p_#j8u(H6ycFCC>Ps43%5#D6`_urZGBgxH<6+@pt zx2?mcJL|*vGI3-uG&8_H#e?FfJd9MtkGq^>yjLi}up(ud9E%-g+ReqfIxA{NO@hAk z)A4LTp`S8bb#pBVFT2d}uERPVjQax<@3^xxdQV<(XvzmI!A>ov*(f*g!yvkRFAP6& zAHDDr)&?{aMx()d41Y4SG$Bf*(T7<@smmZ5xIx`#WpY?7;fEK|d(^H^Tt^(xp{<&N z0(3Ejjmpw4$`7KA$`2_5U9m!ToCUtD3n%pjo)ToeDfYQxhbqw zuR(>h!94IX4o1=%WWnvKOV>{t8X1-p15xur7W1x<1}G>G`Ekxy+8Oz9LQdb~dHg-O z1kPE1kvfR=#`UgUziv?I>iwRVZcxw<(G1T@*LqnWB*u?^)S!Es(B>rr5aepg3DZuB zHAm_SSh{%c^}d82lEA(4N|_4(O2H9}AIBaX3buII}}@z zx+YdThb}pl%0c&D+J96pZ7EYMk><5XE_svu(!Lf;`C8jyYYDII-u4>3wrd+NdTlp3 zoy_4Jx60$lf^OON>Z@|fwzs~7OSZkir5v*DO)l+@ZEyC=IAiyAMDWo$T;S}UZe?Dj zbvxDq-q@>L91Htfmo%_4Z|jowm+`eOX>vJF>yk#7_p>f(_)B?N@7}^f(N>5wad{Pm zsJyH`^ADrgixTbV-;uYeIX#VNEpKX<)1Qb^YB3yfOO`X^D?63KCb{LJ+6M3VMN`sw zOhNP}14tURFbTOPY1hTal|)he1%K5H2;ePz|)qAZ5Qz-D@7TOl(Ww8Ao9h3rj@_ zhW;*#zAOQ$hC-`57fom8ym_<9w?Zd~+Ce5hDl-}KCm!&r);I^9+Jk^!9l)6;maYSu zSxJt&5T?mi3J*=0cswt+=?;Qu(4DwspZy{l&hV@xqB~A~XEJ(_Rl?(yj=Ve)NVNXb z{qr?GJiaBPxt=R+uPaRheA6$y+3XD_O_oq z`4ih-B@kuL3?mW%D9g5!AKX{+{0=L+{g6+58}E=^v-Qm%x|biA}WKcsTY`C;}3cdaKpvDK)4*fA6OOec);dqq-^tBFv=C zB|!CVSq=~@@77vmw5i*gW!9*>HwsUrJX%U(Fcmfy9r;_WTg)(W@zZuNOfjBl2X zZR67LVvg(3$hcve0#}bvx`+^bQgeyWq27(6Bg5Y^TSxmBRnIalyuBz&Ph@fZOZoKJ zrU;v1s#~LEZcbi^>>#v1vltK@P0$KRrIlcAFde%v1s=*4yl?b&6Nc_>)%NGCn+y|+ z4?(1Bku9+-)O5Zxh3cg{Q!QEhnJzysq94_gwI7YJ(GYMMGRRu08Jkve738P&YZ+@x z|5q+rp|+oE-OBr8MI>{q+_Du|4BT6Wh#XipMCiHTl3eIsvtkK2^i3S{3Li9B22m7v zZfLYg1;jN%TbHIS1i;b6El#WY!Nb)h&}2nF7!9}ZdF+oyUTA3fDL17(AJ*D0ZFzE(vKTBMDINMP=WK(|c` zI!7#me|#aFc+k2CU8Y)&alZs1q<(2 z-(pnW#ME7Ye$U0){QWB*3ARYf{OjA$M_jhQ^NaOY%J(udT8hA}+poGMpKAlotg7zp z7HoQD-+ktL$2Vbx&aZ15w#4B13TWZZZ^Rij+;7GATk#)%E7rK*S~lYr{{Lsx|B3vX zA_zdz#dMcvPWmxjp^mXm|NCTjx1j%p;`jRBTX>HCd3buxULU@C2?YB)bB@qT&!IpR zpFg+w*B@V=o*ln=trz$a$ubJD%;Cw2t;FGUsugNIe#}k+GzjDt^eOq0UypqmSp)0f zHRr%XW;voH+un#he%xyP{(A=%fzpFuJdda$~_z!XPp3JOgN@3ku-09 zmaj(O97?Mak0REfaj3Km9eIPMJ8m-5Pw70*-kR9DhM*2 zgP~W zmm3Y@EVK144kL@Mw1$C{M1T}SLHlk*`%6Vzn(Z?Z=cQP=6gS7G?8*^_;_z^F`);63 
z)LWs*KcHJ4c>C&KaKd?i`sU5~`;)`7v!C9azIgUX|anO8{d0gbZf zu+v!xQZ6zLGSxuNAp?GJn|E?X600)f8)(C>&%5BaH0s=>ix7YSS3H6Lq;L%E`1s2s ze(>zkYY(=QNAQL_=GyQ=UnEKzajTBpIuwUuY{y|HMx@GE`V4NM$z2zBHg8i6nIK@D z@%oo5>&)mnu`_Kyw2)f6HTPnLA?)S{U5s}~!|q^+n?|Ev`RcA6x4y6uo(mF9He`F& zAW@ra120bPWsG1(o7gVT&wA$Lan+0^MV4pOWFfc{58`CdYyCoaM$V$jtk0EUd9oQI zMT(OfwIKoK;{edk7OCJ-Dgdx%X(cWCc-Ws=iXj8|9w!X0JS)c(%P0|qA=f{$T#Qh8 zjbYCltw^AOE_{R~6Q&oe{pX}3AOe}JmW^x*ep%x~dr)75{e{MX{wz#bdh~XlI@|Dn zy$030(%I6O9YFnTdKtSTAUU%83Ez{Ak-^zqSQ*|zjZ z#?>L(PsA?v2p5hcn|!2M0lH+od5y_DU@FzRUg74|g!wPGfpL%tn-afGfy>LtW;g|C zhC&C<(y^l(f;@X@;D>V?ll#HR!G}?JoKE;CEyadoZ}{OfA}h~x_cANkQS3`+PXvjK z9na$rY+6cis|E?}x0nlelXX6God&l3Fq0tI!7~fz*2D{=o;0tKZ6{crPB$ICkL1do zYFNXf+Og_;@21U7gAClZNvQ+tK9_Cna2kmTz`YjnN3>nhnz4 znV6PF92t5`!6+a8bgrQnN4@OGjZw?Suu>ii8;vB23S68Pty(Im!eRo`j5xB2YrvLm z43&s`NKnbLKXS$ZwO>+#BV+vu7I~%0B#ux_OlCpqPXo^(B2)?rW$tDJ*-%aBAm;ny zsg@kUJQY1Axz4U~pttLWRscZT4YfbNv}&)>lh^;ghy4b1&ZsqAaTr3$(8RpR65C7r82T-3$xwbW*e`606n8{A zuwpqGfghS8ZI%qAg3-Hu3M`$Aw4bH!F*6LbWaNJIO_8loXzFV}DW7b!t=DSb(GE7* zrGYK6fq8OJJvZ5;N*WHujhs3&=v0yGMwz5EZmg6RG`7R70w+M3D!9AU1uFY!E&P57 zY9&po)?ViYocz6#R9+mZ{>v8B1_;&-s_Q3<^0l?H?c6B&SS5tS2ZvU*JL{Rn@?vL}k76 zxC3ORA-0CFnyPq;Sgaty3RJI>2!&#rC{Va}Ws30%)F@?V$*y40x)!uaHc_CeUJ2JP zBd_uu#-@5|Kwkn^^~6<%H(u~eXyfu!v?`@_DRI<{l#4rA1}TzHXi!5hraP7o-VF*b zdM;LhlMNUI$SU+Y+1zR=2?O8CL_taBoJnn4q>oE4rSW|mM-z!BT~8f&Au9higC(TCZTwXb?@viGNUwTlhE z_kfd6Y_PvKLUW%wO2y$HqA%*}*@Ta#iWcoU0pnh(umH=n?)rVfW`01WB?|%-{pQs}L)ozGCQ;A!m@hP2H#Um>3R5r`Y$d)&&r+@l~oBCyD{e^T1YBKXy)HRxQ zO;A{1X%}x-YF4=AI_3{E+t!<`Mb^9K|J`WW*C5qO2i?c)2(Pmd-MD3XJOcy2+~z46 zgtmaIn^>7-QMeN?Q%PUDG3Gf-oLH64YgiYlX==I_X`xI@Xq8xwqRAA^yWJpQvQ^yv zBg)tjUc#pd##_Dgc)Zmi7Q}dKr5caC@qxtOL(z_TYhU<+0yM_h%3ijMZD7G)BbdDVR%@ttv{onR>Z~sZ|{_n}|)1CYKzuS0(#H*>ki2`1bSvW52PN!qjy4U&^ zn78Dn#QdcPegCK@zy8H0(LtltRWX2?^}o}5`ZT}(pX}YQ|66%-Iw6Z`tvmIFBi8}D z>a{-j=n*3PIxtU{i$`*dM#-H8ma=kf8Bsk;-Ab6qN^N(0;@=z|#S3M0r|2|QP=@ik 
zB5tEzq@M3Ft5cQHpvDWC#f8fdKk;YL0}4a@MU64Y1o>c4`Eh(RW)Dj09U}Cb!rRaUCI0&+M#SP$=VN$uZbwpYv3zDiTtYEByL?g$dOd4p}S$Y$BpxKt8Z;`jWdl-916B$WvDil5^OA21d z2wt`k+Bmt6xP12XVF`qL2ZyKU^mf8;L_ELjGFGpx1pQs(s))B4_v8 z`fb_^mF-#4XoV0KTm9opAHrimd$dXo{(9cUG@GK?{27K-^e%5W^aW!hS=%g} zqcHTNZJ&DlTA)Z8W?kjcA$$qm($@nP0(=EemV8r2of9`sDdn)eMQOeyOIgQN-Jn6K zayu0Btno3Lth}L|j-UDNQDklM4M}E`uA%xi=6HjYW$dAIE`W2M14BG<(=lt8+}fc( zGtgTyc6XlaL+g2mn%wnnITNwi&dtG1>X=E%3e^|rH7bYzSJEmS9%>pl$ z`3`s~m`T zqmkDbKrCDbq23?|k6S^vKcGKpgVGofffG;Cs5~!PHp05asyAFT-_qZ%hWw3wmghCH z*)$5gIKSa*FINM+-pfj0k0>Er;3kR5D+{0U0%ERWJpH!>p+AR_`K><~KKz@|=8Gl`oJX&pMP6eUofz^)TmqmcM;#le%zt6phO^w0sbt z8t`q9e{1;t$cKaby?1l!T@=4s9nkC<>MV8PyeW1X6A#7FqKh*Xx9r4lJ`&t9IE z&Z4!!_0iX=Nox+OnsG^O0jj#Q1T`-$SzenXeQ|hx`26s!d}a(R%j}6Y6CUl$snB=4_}{c7N?oC{mi1=TZSQ7!Lw*q9|- zhIHo^k_|UYEUFT@o+2rI8@PEfCy%n1|9gIV`2O1;4!=7qA#}@W(`dZCER-+9s~_FC z0Ou-1F{Ay4B3ZTcYf)n9ogBTY{463C=cTbXAzyq4asMl5l9zJ~n7}@hP

ul_p1{67&WBl^1$Ra)POB)3U9&P}eX4cHO)L?tl(#nNzc}Y}7d9P)r zb1m`i?|P@7^FI5fIs@82e=z2_j!c%;B3Sxdp+-Sj%Xsd{uUQ0UnTl0l7Q9~uf5>HU zYEE)3rL1Nl=>ovjMMV`C2_d{SMu(Z<#LxOMaMKJ)q~;nPXeJKXi!bB+U&MD^7yKMFIPRG-;B_&I4rX9Lzqd)*wM}iIUxUc z6`HsLqsAwvi&IhavObpA#mO(E0<;QqrkKkfm z2v}_TE6Ye;6}bMyM*TS@nW~#|=&7Dbu|s^7xdmF%mzM+JWCN6%Q(F@6hMo&K*vUlh zg^#XO`A(Xm(wb7dbU1>YTxFP&&e$p!N8VYS*+35#4;o;)+s);*T;{W+jno#;YX!XL;W06$s|id-24st&5LWW z4aHKORRvn&|J!OyXCVvGq%%YxHN}jy#w@;_*+WQF+eVtUYO@<=QJ>8ta> z%pDHE)>|XbQ#oawjN#nJv#Nhe<3C*uJ7YL5f)JG~8vv><{_m6BeEj#PyHNZ-{?jcy z^JjN!6m&+>PU}9D(>yVf>XNYC#aw1)`m&>7hDO=Pm_KYN|2|-^VIJVkbT$b5Wc)fx z(QxDDa5Tc~F{rLVN10YGv02d^aL#lwMyfN>jJ{#bEq={MY+rhDH+U-e|82$p-F<3_ z|NG=V{`;*wYvTX+Z2u3>_AAZ|%clsfjxkk}BXrIkt713F5^icLnhi}mX06axdmu(& zD=De=M&7=*RaU^0tJGA9B&!FJJ9_R0E}X8gjrxK7pjBv#)1ui(CAhi%sJbU|vz^Bn z3210B+{m|Ltzn*<%Mda52%C)LjPwAfsCh_=DAB)KFQFPYU7Z7PwStl@^Q%pG(iS}h zib03wdL+M&Mjo~U2%dLF%BLh0jcQ5^`Kof@vQSR_Bov2ptZiGrDzs&8w(hLZnV0|K zDDYM$_=%_P{J&c`|L^VW-sgY2mB+Gcn1TL8=u)OW#{+jLUw)o1H3U^Hi5z_jgjXYQ zRwseIPaNM+YP=f}mg8B#b?F^oYd(cZQfl-LYjZoVg3_I;lX*11wY~DhZ15`x z#$qEdv`>QmXnf_S;w3SOk&OwnbGf@5Fivf=bR5MZuchnsFnY>3-^CJJ5TQU|7|?-FkiH4<7BYL4A5C0cT;z);Jat|t7G`} zS)eu2yUzo?_NScxuPy^<4gcTi_4af8e}DTv|JUt2CCc)0tKUt_LcMd@LhrYh|G=%K zBx9&argP*Bt*cmts9fFo@`lz|w%;w#^xalgvW$ek=Ht(Q{8PUFuPy^<_5Q!t>pkt| z&wu;7JNNtlZ9MZj{Cwejq4yj6x^jkUo6btJhT7Ww| zD^^~k^rBh)MG}mb)m|vYs5TF)N;2BCNrSLgmu9q9T_@`=n`pF5J-l41QG-XUA=zkY z`*$_nsJV?a+)8d|JDD@#XvMa2t6R$#NIIHl0{d&8;FgY3UeNF3zqkKk02E zSO?`kM`_leB}b`oH$GpM(#jdV&r^Dzr?h<98c^MWnM&(hzRy)!G7;;_R$A=iKRsWm zcHmn>#?oBtfB2lG3nltX0{6HB}aIXf>MUsK2Rnm$h6oSwgb{r1)K z2H6CyPM3`kBRZq_!{vz!DYEcaoc~ZCdyNaxn^R!$#g8Hnj z6>n$vxgqa!L*D0xyw44JOSvI64Z@1ELt0v1SANJ9otajZBQg)pyjddea8|{+M7=MP zR&llY4Ch%u>quN!()yyQ3w5%Y+t{~ZT0g4!)#TpGyYQ9U=dUvVUVXFc&A?aE|J}&J zX9sRQS@^7dEs}??1n9M8;;ZRzxm?Zp4w+b+ca z+TY&4kNqwImT z)ohv^yBBza41#3?nlp!BZJ*RYeHOuDJ1fc~*l?W9#SUTm8O8K)&Feg^MCC< zeR41V-^!E8|M{4bJdz}Lx1unT!Y{o15P&7`lXopA)i)F3??$j&E(X7uy1&glEwM}C 
zR@HV4!~%|HqzD}i_V>JSXbNP!wW@~d+nD!=%PQ_~twYJmHH794-G&tTKZzu+NdC8y zZJ>_-KPl>ec6;~w|66$qr*T{E#~Qtl%+%LR{Zksv^PcU46`vwBTQY4b~JdG4Yz zN}f9#n>-2GnCGf&Zlr$4IKKIm@P9z74~NQI;s3jPdqw`gw|~$7Z{x9O{qlY~`S*fE zUI`K@N&F=!2=hn}qu7gRwRZDchiGJ}Q$vXra1PAS+(vSn&0KX&i?n9jmcbtX(0f?Z z32{dwN{!$K3&Cv#yJ=S?R!m?2kxbrDz|U#qhnY^-W^CVVo}d+Y;RcyfQod=jbc(eL zc74sjNQSXH_0Ii?7tJ&stMR%CKqc)g2-P?Wqd1?|KzrE)B5NB9!V`PMKQIG{))+5* z%D>^KN)L#tCQI`kC|DIcJS%3jA)YAhG&@!7vN9b`Dbrj!AC6ofcTY}R*TdyMXY2(N z{}SlXTSFf;^56E8UOxZ#&XarlzuS2puoE}Md(i~g0pldVuE$=;1~Wex!J=odDnGcF zuvS_R*!kE`STdW^eIZMLYyyTh%zTML=`Du*BK5C4@`cdegFFKbgx)20q1l+m-i7}W z1jlP1%Kg`-!`_6!4TF`4H4!8>^4|4k?G_x}e) ze{^F%nkDS`#Y^aL8b`kZduloU$aA~A9K8A(c?Dnpr!Nm*yn5+OMr-M#a{c$7>_2(B zms|h6r@h_#^?w`BfB#pf)4@%#BOLp0&i>C2tkd~N3#gv|5bJ-$1~=?E_Zn=q_$Ocn zF1v7(l$O1Z2f-~6$=rbdg{Ml`-EWY!%;3BfO@Xg)7Y@NDP$nn;oB;>{EV)TiZ{n~Q zUII%MdW3poXe98M2=(3>#<0wxgDp1p+^d@o+B=}N!Du!lwgMxO%6MQ{t#@y(P@3_s zS#-gN^UDVMsvdN^*VosMxao=}o2}M)?2f#N8-D;9gR{BlBE5Ej9R}RB4@Yakg#)E= zI4F7(-%C|PAWwt{36%Fk^f3o0E<8LFGi*1y4tYzd*#HV8Zs>an#~@+u0t-#pDDo0= zw(=)aV3uxpLy3d{^)&h@MzJM|6AycK43C)H(|Q2h{D;54IBm81{XY5SKV&EdVD7?* z!I6mx7g2KVO#oxVu?t+^5wGbyFaDNQ{Sa0YYzdQ_PRs>6W`?MNu3FghahR$(1JonB zsdYApqBH>vPLZTB^|HydD;a4U#YXThxF?45*P7c=LP^7Yt!xN4$MsjWlsM{nA&C)v=pE=dk!-U?~6q2uPzT zHt|k^nPG#SWXtJv1_V6uOb~Ioq2Rp9HlBZfa=M8N7NKPL3)k$W2hh&a2sVG{?EHr_ zHuU1uzrZO$st%&dOIidtBBdvy@!kM;$S-1`my_5-KQBM=p@CMbql4RXdVl*XyrS{$ zIy=rb{yO;)*}|2NTmxoM$SiUl&hP{t+|a5HFJjUKzVR)` zEKMa}zntEYjk12W0Qv{FzrcVYC7z7Kkq8IvM zq_JN%9!%AbO*9cAdO=p1P&3(A`01vS{$;B`7o%Cs@lQkI-LzW&V2>Z4`_spd57-aT z7{k$up(bJ-_6Zf(6=x=IL%(doX7W>a1Z|`qeb#43TVTbB5_PiT3cV7!0NNzF0>~s) z=y;XqO8A;2(a@*Wpa_Q354OR(Npvz2RIhM|KOeB~$XifDZL9T(QGO`v^9eUn*f6Iw zHBFvRjC29)6SUhYdsxM{>KfE;pZh%W@JpxOB}`*{)91a50lpw+&Vx7sMtBiBy#00? 
z>K5A7(lz!A?Wz{}RSj2Jz#PG3z`h*2XcXFi_e-u_;rvNCx%$k2_e}dlTG2rDBDo!8`Cv^V#T>O!gpuMS}iTO!l@u%1qpC+6|l=fmcl=Y2~oTbEQU}K`> zy|7f_m?=s!>)k?jskd?MBfE8d3Tso~^qYCC@%c5e)|buZ!P7y*GR?tSWJ0Z=3FiMI z!Xftc;zv?$qE8F-I1gw742L2g^#y2oJ$PS%#+QwLKb`U&75{ISfjtTpZXGY_PnHz`i50VC%K?50{GN-orrp-53zTAd$ZLznz$*%4xZr9>{l^}Pk+xF_7GN?IC@~ED%E>|iX z38hu7Sb3KoTxKM;!il@Dzt;yljS@UWYs1YCR+hMJ@aKDANXButFW+Z_P2J0Wv6?*ec6!^Lc)PC;HVaPM z*Q)7*#S(1FUD@vP14BB#cpasb+d549>b*Hwb!Jo5&ZgebzqnyS!B>BFy8};A{t@r9 z!BP&re!iLAt4P?cae_i&CrxVfLN|OS$tRAaY?k+Rv)VSEL>wB@`Cyqdqux*fCv9j>t_x z1QL`4j+YLo#~j`Mv=?P%@m>v9-rV%fY9wCaKwZXdX|pRHkO_rR`5k*cp<3{LaX{H% zajHV|N(U?H;7-nO{me>8xm?kg)llD%9|Tgg94+j=a8eBte(fO&;3XQa>L^nM ze(-J*uH_nGMq(isSOsDIHSQCsb37mDyn6Cx?;po_ArCIB;%TzyF|Ih(|oW; zG7@y3aW%5|nQ3KnBi?|+6nftZ1zVCw-t7y^vQiC7tQ!0bH4@wA?Tk<`kB2lEaYMqaL&Z}uV7ttAWn)m zX_h0j0k~2(FA6xNI$0^y%>c*D9*2RC77eBFe-bt|v@=bk5yMKrf2mf@pci`@0B)Fs zlz=XI7ar$L#1bb{ha{fmi?*xf0k`lS1Mot0ehct|2}PQCo8!U-I??T8)&ux#I>I0v zDfHr9-W1A;X5LaMmWW~$pG=+5gb}BJ8j}PfMdRMQ@B;vzMr3Zod+rwjkM-{-cs#zWv3=U;|PU66v|06dKXFRo(Z)XaHS>b3x{BNgcM%p1;Ppo1gs_M4fbWX%Y(lth$d z!b5mHyAKDPbOrMPevy0^M>9>@`o!o3JAR?$6G}{l42`FtdBimsifHwHz;LA#lyCXlcOfxJm}NOgmEqT>?(uy^%JAb-~^MaBHr_e!dyORm^ONYkV5e zxCuCJkG>nc@B;VdjKY%Q1bkx0{5_h|yEh_WDaZGE#sUC7=@;LY13vX)pZKEySvGS* z?{5L(4DjA|0rCpKQywWH2RYEVh@fy=kR#v)xT^raaQ$Ewd*@?Vf8!_+#QuqW%ijYl z6d%?MIsmvk7vRZ^(q$C@KjZIj6Yx5W+cS{%bIk~@lFKljQo>c% z3HiJO&m0RA$(`q7O`L5H1_y#H*bPox_kcx1VwF2B-^Yu$EG~%!zTs$gmo4UUWTBHv`;~o4Z&+3R!v%;{sg>^DY^k!Dao-{r$%*U3cGM z#}|wY-L@#Ejm-0%1H4wvtOG9Z^{E3J#?i)4BE1AI3$E3W8b5PlyckSCHj`7{XMqH` zeq1KDW<1!y^3R-ryCrwy^?(l%*~U%z(ty`qiQ99K&j~oM$$0wm6+Fle+qj+o6#J?7 z2GfQEcPu9r|KFBP6!ZBvIF3S2b8f{pN$1H$1+BrGkWczG`ei6WV zW4TKstBu@(faCf2NbPBMoRguA4jWd|cS1XO4Y9i*;257_SRr6p0T-3l2YVC1wRCf3 zz<CXjuE^T0h}U#tpo7g-R=DxKvuwI8b2HGRe+Q2 z%F3Z{u;mvLhFT!60z6AJ3jg~OfSXA}`?+RJz;g*k{R@-g{e<=P1dSauSP*b6_2XWVb%`D}!78p%l&e=ln}^vUYQQ~-S9hlf|LPZ45m(8J<{O@1<) zuy8gRcySj*${`Rto_S;pR}(@aDB&X?dlalnh7W2q3k`IK1i1V0ZRnPR4uHEK=LcMn 
z&BPsHR7%x;L%snAl#OZ4(!6J4;O5LXKFWLMskoX7=|xmAf@gH4_VO= zz3+zQV<@9>-B0LDCY@czQwY2I65)JS<1h530`%B>6*0V2@?xPQIl^;7zZ4IzX)R<6 zK-FJB$I=}8r3*R(|86?r2=L3ct_=Ludu?S4ZaELAM&Pg5cWYI!H^=@Wj!+qt>vDyv z+!HeJSLqIApnge*sD|LL%qc2kMk{fPBJj%`qcq03GS{fH6~o^M`zv&i%J9DuBlQZ= z*#y6-o0I`xwWE~A+ScGImBW5RXQ>?cweC{Ojp0+d!&KJrJT6li_zj(=mV&<#_UCb& z%7I_wI#mUGlV?%zOWmie;8!|OEi;5>Lc^j?R5?DgYB#EeoM#zVs`ZYb5%|kERIMHS zdg57?Th)s9ELM5vT?zgjaIO;Im$~~G0&`;*t3;P5*X(4~5c|tGTG@uPQt<1szY=#V z%Q$XHWLuTf)#^vk2>iS0dL_WGr~SL>fF-~$bC)Os|E{=U3GmB|ZOg#FE6!LF`~vjV z;NKOOECGI*14SA5cf~PFfM1XOyXl@Kz^~^#chgBrfM3sf?xw4j0KcB|+)alq34Q_k z8u0Ik+m-;op7Y#I=Pdz#J@)UW3zq=Dp7Y#IM=k+=nKM@j_;<#gOM+j^dG4lDmju56 zeI@vJ#+}Ueo@XrDM%3Quycw%n^{ydIf z8Eor!{c3!#+r<6rP-aRI+3CCrcQAFszT`|aQqkUu6WBrnnH&4-aRpm+5}MsCF6|Iz zvF)jG32Op;Qw8}P-c_0of9?KN3t+#_$EphaReM=Ay;YyX)2a&mRr^|T&Qs!TRYE*9 z{#KR5vto}c0>8}Xss#Ky<8>v#Z@BBO-}lP6xohTQ(|p^yEe|Zq7EnHvM&Pf(4{HHT zFAV;Ays=g>f@a{);ggjCrDd64n{(Eua=)x1_)R>sD#2felT(>@RuTLr{#ljauiR0q zkqJCSNA??zhTXQnbyXCzl zz;7Thui1xdaWkb)<$hcy_$%?`%E7s;FIPh);mZ8EY~U~9(be>@*vP9ZXM$a4Q@E0+ zjP|)_*Xi@aqj}u97Ixms{IBYKuGpd;R#`!)r_5ROA?}i=tE8gwx^y{%codtn9mNy3-tAQLw8^9{CethvdO3Cla z@R+X!o!=0W{BpgIMXdRyl~04X<<%YNN9> z_CCbH2GK0eViC}F##;n1|JK{yvw~$EMD4{u zVOx;kuhM#^Fo+UyBsT~$4LFQ16GueyvQ(0R+$j_ekJ~ ztb=H_CbB^&Rdb=*yn{GfkxcYH49>m@Cqt#V+a}DAy zdDHUK^-j?{qizs)$){GR2<&618pK`ltS!65tR2K%@~@>qSp3{-2XU9YZ0DGXyX$K^ z2Y0%=9=DvF8ZGR*>vuZ`ce=aYw~McdJLH2q2Y0%=p15<&ggDx~ib7PulyRpce%*d% z6lSMF5ab?y!`Pjoy|`#ucz9&TsW(%M{}jz}YHh4M%MC)^VfKy2q@n=trBo5-hvJSC zUz-gnNtXvP9%R-Nb3Asto%%ueVTx9O=H;_F2w8-=sWes-xEF>}n6L|{WjV7oCRl8l z33$aIc<+K4^;Ki~AOr(r?ggG1w5gz~Xf8jg#2#5lVQ5ZWij{-l_XMyQ zvQpB4PIwuJp};u!rRS1^m`m>@2QiMqD9+o4WrNT@XBQr77%Lvb!V5by)YL{o;k5|Y zs2^Zr5XvZZh93u<)->IYA10|Apz$CufE!rl($pCn61EW1dP#Io(@J(j6y>)1c-m^= zq1hcoSDto*#3t@kQiX8i6aJJV?*jG%g65r}r~YMQoTk&{pxeEKUS|Ug@Z8N`(+b@u zRoHacx4^q5QH&e{So4J2(@X=;O;EzT;DbaMmhDO?j^W)5{X=BZROrUQ$XhWm4iG2WRwOUF(eMd=BU2~6YY8P^a`#m_A&Fmh8Dn)IHz)9HKpgIZzC 
zKo<6`PqK9Ig{~6KU!;#c_R)Qf4B%vEG&Hk+7+hwU8Qd^mnD3(Ynkh(d;~KHP4qYEG z{tx#0&H2j%hHh*;?*4F$_gy|x7Dg)fMqAh-{6^cjX_O>BT~tKEii1E#8zz59v-l{H zE_VRL2qpvM`eciNkTYiFzKNDm6K-uMN04lZDgGx+hQrSFwiBauy-~sWM=GHD7M>q~ zV8}MRU_g?vjgudfO=Dg2u_E?kufrRH!EPaJXdhS{+F<4fX$RJtJDR{*h9W4#c@fYN z0YLr*Q^vV~+rxm&z_37Yvjvm!uftbA@Sxn>Dw(L=7ltq*5~}nX($@aa4M~g1Ezi+R z-Y3HvqFK2zXejfOdFMiO?A*AMz(%S6V2>Z4`_spd57@t=837}4Bc1KvX-)jHq5asz z8*vr2oX;R)TL8jejJy8iPwOY?P z0=S4Nr_*JjcP+@8T0m5y=2+9IB9&6HD_kPnV3HzxqHVk?2TRt!41qjh!j;7qt&Cyp z6Wr`-7C=FF;0N%+OStE!!{_lW0h2+^nVRSk*k6POkW!S>7q|q9mMH*~@uV%~=BPKE z8wB51v<*YMmCZI3po=byL~lpCO)k{dFQ!!^4oGhLj>wU2>}a%@3;Ssl3cjW-aa zly*18Tpm#f4s=B3bi?P8)-dcEd@p+^$&o%TED(@cD6Jle3NB*tv3?@x_XHYsOv%BA<2% z4Nf(F&Kk+Cr?Nfi%6BaQsyYUwucK7#C|Tm1NCrfy9;FaOgfT7-Fyxj~C;|7KWZd7v ze}040B*IO9I+y?M^NNX=jSAH*0t;$U0cQgi2n-d`&B@-6vtAtb6YXTwkI|C%(HqXN zDcmmvI?*9Au|s3jjl$r@Xn=N8aZ^c+2v!aM8+bz(iOhr`nN8`w88u+i-zCK>E+RY< zKjaw{uiAn0kh3PdxZ$=NvoPc%-V%E?Nk6;^EP(rf=!7#lF(`~3FNZ{7l>yBQG8x2= z3xtk~O2`9zQLcbjNOsTxX3&wVPH~Gf;XX|ukcnRA3l3ZE1ugEed&Rkl$JJU!z#Fww z!KeY3w5t3t+aW3UK*BlA7Bz=L$q0^LQMPU}FK!OKzF^LLz{ia^ItdPw<$(c-J0^-1 zNY_j(vQo%Q91^fl&S}Zs4)D}Kvi1SBD>y2?ppyXo)IVi!Pn6Bg0q~w{_a>olNF>xO zKC)?isoI1`!G$iz2duZVyT^q$7%7OOF)}cTrz}8E-sL{W7IhbXony!`n`OSx$jgTg zqZ_utZ}IhJz!#85p|}Uor5~!9Ms)Zu~Ed15ky0gVg~^bAN@paYn(7fH#mf$ z{7%OaWaId4a3c=`4n~8r!~oHP^l&l^60*&Z`U`&UfljBh7^E$fa0Rb`>7$tQuye#Q zYE#Hf81*UG18up*!`@b=ufXN~4rKfp>PyCOs>W)UGu#}p(_-CbQq_g~TQ}Eg{&zATZt`oetYN2LO7IZFuNr7=;Gl37(H$cNuG6?E>Xz7 zD(NV7lMhK(o^oLU2rX9^_#7}4b-~pUo2j;6Y0O2&;++sqhQR>Wvnhd%` z1cczRRRi{_=R9G5!GHES>;tMal(KPm%6&9D7 z4m5em!@ZLX%@oq#1FGM1qad;SsHP5x3&L7UqQHaGH6n1BDlc&Tnn0eB+b}X`07qTN z#I}jYXTSg47Cuuyw-4|K^gtlP&!5}cxPESb{@muA?k8LVCO!m`x0K_Ryoew}A}(zH ziZ~@f2g6#3NgzQ26E6CRM8=kF77ahQXIv>OI20WC9whCQs^K^(BEkkZ-{iwIJ_y9 z@eC+HMxzc&qumFxw+<>nj8sj?feyJ{Yj(!T1O%zw5m}LPTN<%UZ7^HKR8V~Bn<Dh)Ev{aLve(_{fpian4>U1CFKZ8=yUGw=yb7%)^42|4+y$tz~-+R8o@^I(z2 zgT5D29clBQa6&sb`9CbF3X%}#{gLMee0lJ($XyO^kHkt9Qp=-+6r_BB#FX9<1GDaJ 
z6`L4ef`QbP>D4=^C+tuM9Y0BCfKNWMx6{iQZ=NqvYX*S8N`;k%M=G&!2*Cwaxvq5K zITK|&=52P@jFy_+n7~iiMA1R^C#;S%DtAFmTP>BQ7k&&p4Q~cv)}slXw7kT^Jh(1Z z=}tAJvf6EM8IT&0iaa{Z6uzVbQ#Br;tpwjxPPh^dI|bUi7DrF1xhFOAEEGZ?SNa2M z6Ivg_i#&|rpbiS^TUka^-x_jd>3zk!(>HI<-=7?wo&EIY^u;fH*Z3s%KpZfg!*C*J z1CZO&St>X{06Q3(9sm;F=NfsRv2;@SvS>hJtYXZDj+f>^91vk)e&cLA+pSiIoq1PC zin=Q7+9US|eqRBo#!w~GsprPL=?nKNqU^7ziHbrVQItqw;{FO>gz}0XM^K!FMkfh- z%=yM+nuW(n0O!xg{1iIyfUYpSB#c#E42ViXZ6Iq&OX0^PD90r7Z0WEA%e?SEN<~ng z_bJZx*kxZF|F82Ul}$DkRkqD2#iFRjl~4|i=}041U4uwii{eW+groULBVAgR#t{yi z>5GV(M7c^P%T_DD00Ke4O>>CvyYX-gLIi0D^eX)7bV$1s17Sf+jfa|eH-PC9nj5Wi z5dUCMw2VbS5uyNh`RlD?nDE}#+^ zSC`l5K9Egx_=`=1h2#UNRwkL#4^z_l1F`kZji-;-I`0C15K{m>?5YHeG@AND9eN|$@+FVN!v%4fes*l)+H1)=#qpaT z;Kc<4(&og!MER6oaN)V0bnnEMVL4p+(JYZ|ICLXMgWl0)DcZhB_&AVD7F%@L0LGH} zxXXLkEpY=$Y6ZM(6-rATS|NhcHKV*seaF2SW|OJ5*?U(Gy{0(_3hDx*fvu(s$mn`u zS5#JW~sts1R#1t_0mDW;ZGX9^YZOKaeXd82oS;#Q?Ed?&>!10v0UYQ6h{iVR7#8h?_Is%0x#>_Sjn0aS1;ed0k`$;2M}yRenQX~yu2hPw!n`8 zlCe>*BbgC%i3>p8f&tEtqJ265>?&nGO#LpO`o?S+B#!_Ec@m_vJ6^H3OWIYN!=psI zCI=DmtzaG~{kt*kX$2vfIItCV)egeQO z^Z=z?oT3yQ`&*>57k5zC*Vm3qP&y#9f>=bQlI{=3M=xKWz3l8D&AsK6q}DeG=fYHg z9J1VN6qnu0STI+#$VJ&)V$y z;o0%oR_mwZ^MAg1d(M73JUu;peSZA%jJ-K!M{i!gI6gmq^BR7A%MM@vi+z9m`o$LW zd?|S=knAlE~bYtq7c>*Ec{Enz9q%F9?b&SL5uWvLKRaO zj`OEG@Q5ozzssfnPG&CCLDK0^s5ZPnTB46_c=q@9=wEp9|876sefp=~-p=+u6x!M8 z{b{@RbZ4*kC$_z^QC2)N%sm32EZa_ga9_y-n+8a-%A5`P2ok!``;x$q`eDu;$monN z&c@#G199fo1Hqe-y2*SAx11PGqQF7)qvrwBPLVB?*}1q`B=`?ld$--ftH@CnBbA3pdOP>)|27^i)&Y?f5Y=S_OM)F6QfSMa`a&!@0C^aGPSwwz(}VxD zMp+D`19k|T9M`gF;yf`HjQoatLa5J!=n}TlftOyRVoylKLS=-<-E+gp8HFA$f09fw zxDisj(;7#(W|mQ;wEQ7LE}PJWbJPxFnCzqr;6{nEHUJj&!I_4atq{#Jh)R#I(XaCe@h<4)zbGo z{^)L6Ur29U*sdv)pqTHZn{qOI?nm5+}4xRw-*Z*xizyIES%&z>&0htBh`44yh zXLzTM>YfAkxcm8YtA)i{ryiMexumXPx`hR|3nZIh24zuI8_Y%?YjcWnG&15g-Pl5# zZH)Y3%Gw}3wORYyH*es-eD8rzloqS~{P47m|NX!1%}v$;NC=$YfA2_h;F=z=%9Lyi zfswtK;R*GO=aYcHjxWPVy?)7Wxa3_luY`98(x#PCroH5fA}KCQYgZV-0fTWn@CSn~ zGZ)cFxWl7Bt-Tg^t=b#La8xmAk{xjzH(J7eunYEy{We3Jh1^%H4ySPW`r1PsBuH&7 
z`Jq#s$T8l>mP}-$dBg_RJCD2vtRnZL=F^BR$T;ZdXd@uc@n6wgR?OZK6Q|}3S3ArM zUoC5yK|XnWO#TJ)eq$6QQT8cDPG6gG4M%Ix3s5brx60{do>qAcqg8?L3fvP5)U4CG zIfJz3^MMAJCO<^=eH40Os^)@pQLm6_x!*g0`y&_9^Ix!bn8MpOd)RoMG!W#lA1WZ& z!L!UP7CO#8sImnhv??^FO;!YSan_t+9T&A+c!8evAX|--tjhzM+>QKP@VhQ&VxU)G zaT8W(=%Y16%6i-8G8F`kI*ARCm|)H`q;2Ml;;nR;Ua+=1o#t$cRiy*=XXskPe4#sy zzvubPLs+|8>>;=A0{j;zB@fX<&ftjIiF#IcROmQM9-;G1>-gEvL~m z3Zl!K?~&b_2pLC7<~7oMA6wmOGQ3LG4Z7C1SWz-l&w{Rl8_yV`=e}=gViPX z(aRtjxPh^_@*nj!^nw+oI$AMW&)RRfK{`h|oZn})9odTPptNOuQHx$y(?g@9iHOh< zN3a8u8LPUulg>aRbw@p*J8GxdoK1x*nQG6Nxx||Q1stKVSqu&QchA}Bb++5Zoxhw8 zoK4jr?cS`-yX;=|&=arMHqlyEw!Jc32_HH!@Mc)b`Q0@|*3+eNl|_SShOf*e)%oxZ zNd>D!-7t>AU!y@oq{-5^Nm~OH*P5Y97F`{BG8@W<7}6>u^rY%^sTLGrQSekPgQ%&xQhSEqHzuH73Ryp!A8+Dc!$+%8VJJ)-4&>IbR- zwB>AbM*#Eefz1dEZHbV~!t9KP8_1$OwSWKoxy_g|2Xtgu1_TR(b7g&#b7bsYdLKz4 zMt)tMJ3Nn!vL&Kh4`YDrQbP@8&zm}SwCd|4FeOtMIzQ?n?hw@pfU_GLOzyl z?x7D<`1qV0fnskGT~S<4PdK)?sT)L>j}nG1Hj`Osn~leh8;T#?sVdQ6HE3DGO#AnZ zt9RR-zx}fL_}%~e`IpC^KR*PTHqgU9!R()&?`+TKPbG{t0Af1nZ_eq8uoFrq5FXa~w>8%F`c-nt;{%>iLgL0AAqEMDoYt)`_2 zi9Rf)hquf~5e1v(6&LPwr_ydvxvi4hu!uRA?+soF28W)_yr#o`Kmei(tu$^vGUz^W zaokC_kfri?*PQg=@9QXhgbssobS+sw_A8)7|8DhH*58Rsj->K(rIgp#p)}L&=TSx1 zaKA>dPvNy)5exdw1zK1Z>ldzmj+3{-#ZuQlJ|q|gPNC2C&!KcIK4ULWPv4yK(9pCV zMU-k8NGT>PWa~G#$Tcrar(NFZGa)G%F98gef3i@h-W7DjBZSAE9*bm)psas{&(MO+=sWgKY2}PJSH< zQ`%AUBFc-INfsF5pL3PO?Ws|$NRXcxX7ssR_gVMPd{2%1#{*igSN_}Eds6WK+}+u| zm;Y|#nM3~5j{H2%mx!+_EC~wYhoBw$2V@Gyr49Od3al=(nvwDkw9HGhHov`-B1PG5 zZ1$k!G#PIlWhUGK9o47Gysj6P(c-PeRhW5I0VDpKrSOe*P}4Q0_KQq$H_2O&5Q#FZ zID}^#XL9SXJ@*L4B!MK!edt2q-b~ZDr zHt(pLkhlSrCS+>wkthi~#A%+^bP5!9$vR2tIFeLl>ERjb`f%Qiw~mz%&0j5}Ymp|Q zQv_;yOMn1oHgr2Ot(i@lk33oqa*!GwQ!mmxob0-Da2Q~$j{ZG7L~<{EaneR{Snl2x zL|#}KD<}2bp|FO$E%4M6x%es%cF2?E-^N7C66MAvVoI&$R83-Pp9IGf`y6U&MM+uq}y(n%XLZJvvsQ4;=iEo%y zBKszO<@J*%nl@5vQz;@H+FdC|xt1G2l$2xFYhypEcJ~;wi_t3zRJMVJWHSuGw<*Ku zh-wb{S<7I@DR8Ppg^^xce+9!N-?!U$`O0%6;z!s5OKPm^IW8`z|Zn( zO)hsOPGEA!(hWO5IqF=bUH!sVTzxldvq8zs|FyFVfVkoPW;AZIR@f|-DmN|Q9cVMp 
zw*j3LvhO@kiT0glHh&Hh-wsrK4oibPMM}0c;q@;pwc`Y+m!`5^=tKGeSOG9BSp}+O zgFbqz=oL1zZx=!2ru%#O@3K20)nbRAki;xIW%a4kJOi-2XUKuD{O zXyYWG)&^dEo~!6p-coW_HFlHly&GmU-OeLIq_N>wAu+P~M%7etjbE+MX5x?Z)X4wB zPj9XI|6Z>s|L<l zjq)n=UDPS|C$k`OF-P~Lw3p|sWz-qmm^EbPc-RoZ_qHyV^cQ(v7r<%Jv(^mX$*w_Efo=;WVRyK{%f1H-?tYZ zl&cVTF~fWfzs=l$r^p7rO@nn>KY6@$DxYE>2ziz}neuFDJ9&0$#J z@%jwQk|qqcr%ea*eHsz@mFc*Kdp}cf5#Gr-9Dx_mR^uEC77mh17Ltzonm|itF6*_n;=GLiTHQvq*V4ez%)_A}35L@*Lxt0jCj7VQ4EtVgpIW=kt zpEAOfF=DVAp^DDR3A;RAl9%~WM7ogFlP}!(; z_4vQ3$qK!8axG z7_3rWsGSM_2AkT^EQ2hS7zwUHzSe-9PHxnnRTq4@raJ7U@uI7I58h0QKGUSp!WLGzp2$n4=B=gLcP$7qntqnwaKu(utT*Rrp(7kU z^K_r>u4GA5xOvR(R(SnT-(w}rj(txnZvLf3bwD!=$l==s&phAx;N3K0I}~AT39iE< zl)-;Vu)Ak1x5HY{(bwt{CXF(rH?ZUwRJDGA(`+g##Rkw|kb2V>u4Ubrfd>mK$E;zR zwG~^$9$&$6T^fpOmUjpX`V)@4k5#oEDxk}a=W3UmY@&X(DJMQk01kr&#*MT^erf@x z1iulCFL)o+VRqntdZUxV&PAnWFPUFe54{vpmSa9nMud-;vUn=j-0BskZakp z7QbeEnjMi89Y=ZhaRGB$uJ&Dcyh0^;pfB8st>ZLY*@^kRvuQIBW79^7BnhzzIrPTl1~SISh!@dj?|Ff_kQ}qQY<_q|(&rL|dNucmZ~bliR06 zp9~d1XlzR`+8RU_BGtVv@}Q8@??Mp|ilgcj=jKO?yF3p=U)FJM{UN+y8?))OSn89A9q_(gwr;6> zGW7id4=$^Kp=eI|zJD|t z$r#SGfoENMcBWGpI3fy=SOBl0NxK27Atq0Cf-N`6ycVK`n`O7o%rT$mZM1%LM%lu|Q^ctirPlJ-aSWE6_9;K(4Fos3SfQjHL9@8h4D3`pv2E z63uIKgi~mtnv4xo?D1K3rAiek@vUMl>9gSdMrvTA$YxHhB=9jnvkl^e@De>=?7-+A;hy!z3N zWf~{%Bl_ftyk5M4&!3+?+&~|SVHyx}`7QgS<+W*hKD98Khw$!D;3<|?l;)P*VkHP^~2 zG@R*ia5Um#N1H*Xo!!2R6(}xY+FE7WhSE2KUn$RCPT{VKiP=;=6>jadcJw)x=bI@v z9Y)=qcg+nm!qkt&rD$})9{o9aBs6z;7b^4BJ*>W?$O6I6&?IIs5>v?;X_u z89C$(Mx5epQl(B?EIM{K4%0&)v(4@UIwDwTjPMThn!>=<>IrG!t!UbO>E$ z!AUnoOu2mY)=&Ii^jp}gLClPQ{(1D*ho6$MyYpoKpv}Y0{d9Qx`uO#C2kbN(J;0cP z$Pc^c28!fb;MmWf5AY3^`TTiviwR*PRI$%_Kqr2E9Na+nI80Ptkmr+o75SqgwnfI1 zDWzSZ2;?Bo`_Ke8npM9DTid>KV{e?jd;zoLNjv+)4E)g<0cT4- z`;`_~Hp^5;8K!1ENv9b%$?9Sl!6uJALL%-z{gEtOe0z!L7n* zPng3sYKWK3wJ;jIO}zLd_R!&_Ni*mg!Yk!klGby&U0rr6%IueOeVQg-BvxSTDpD!r z?fxkOVcv&??pQbFM}ZI?l+5%LC;|K&1LIh^C3&^gJQjk5;g*^QT8Ot@2Uw;7D}J-v z`ehd|^*mq#)a_3T`I2P27IQ+~KF?~NSMJn)IQ!@az6)a7#Vq77h``Ac*cg_-_s>#L;t!m=M 
z%FTu{-`q>?W}o!tlOfw_FblepuzW`3z$$QWXd999m6$!-bluD1U0R3p`JM~s`c@z% z2qi0su#k6t_34ACne+TAvrVV@7pV189#&H3-?{?3!!`LFl% z>HYcdR-Q%9f3J`bUOZp<=}!{aojLo-ftH^9X!R_2>XUt}=gen8>0Bp1Szk1V3BwmY z^yS*JANkyPIQCJKI*YB>X?cLO;`x>U*7Boct{vQHu|mnQ5jD~!N5@$E)HZsxw1eBP zG&9lKn2Fc5_3MwX)%DTa2V+zNiIfeeSfbeK9X%4~XQ}j9Y!oa%8Veff;1sX;bZkIi zj)s8L31tzd)B?fTPROQVq@H2NfI+t?Uy~9-V!DXrm8v1~_V)Vlc>)QY`?R&S(5^s2 z$)YN!K1=xqM3~iD1Db3wRb%WwMUm5F2V9vhXNAH*6Sx;p=E!C(?Jc6nY1qt))i`nx zbIkjKDx9)bmQ>)#X79NAMrWtr2j!ec=7x% zv3L3R)hmGnmE9dBkUrXSO%MlrNnMcM)tVGSMfA?65EAuQq7X9MDp3ekt8vUGO4K;D zN~PSO%9ToYO(BHwnb6}WSAQg4-4ce0`bGiq?*zixM>yy?$9v zX7lJvbSP@hqHEB8aUFyX!ktu$30C(Xpcdm5^hLHzud1DTWgJpgZpb-U)~}Px?PzyO zCkMhx1C?CHmgUXZRcwe#fO{_A$0Mf86s zoDk0b&krlt{%JIJXB2=LOy%motjuy6K&`L!=>O~po=XL&HL;`{kfkN74p5z1ReAl8 zTi+@Q1oCZ?>E>$s&}x$AvF+o@tPD`v~p2gTy$M`z30SdoiS5eidh zBrv2+wF`VDgTPaBDoKh%5Qf(~}oJm(`n6v)H^jh^h=zN-v`u4Pqjw6b& z){GmL5@^1Y`U?~LK~ILT+Q`qT28&%!wP_Bg;xJg+$Gi=%SRJMfaZZX^Oet2;(2{yF zeX_m|)mT9!oO23F#L6{lXI6VWXgs2|aF49$TXoxA&p}95E-uXqx@Gzrn-)aIY*z@3 zTeYf^VJT@Bb|HI@Yv3fhu(}v4F~!}ln2N3ASsi!N5j`iT%ePw7)Qb6?-rIC>mS;WN ze#PWcI;I+uDItkzZkg-!P*v{BsG;Z3|H6U7Unme@jsNfd?)FaJ|99^`{@?99twj{T zr$`BZNin_J>J30SncW%fFM(68_@&Rw>3n$)>rwX>kl9Qz%d0ih$(Gt?8jWBeU1v&V za}80@K{xjQ?TvoK#5{H-oza+9#kt~j+B^lexgW6Z-tD+$_V;GNy_Kw##*>YU~KV>{>{ErqWKcbIVa%g=wB%vRu1uvf$E= z=eEbwd;j`X*L8|0&S0zdYWt=zpZdw(#*$|<-sqiFI`JKk=Mj)cF+Tdnm?oW!*f`gC z%37HeQoik%PcyHh&sjC$_&FyPr4w-9{?}hCW*$X@1t6f+Pc?QtV+^) zZ>zPQXO8o~zOBur1*kdy@9pny=k33qZr}TV-paGc@n0N9Vl)=l!C&-9?i-XcSt9KaJtZ zqLctd8=s~|#t}~G{BhyTvnb8Alh;@-nxZRPab>=nT%eW%(==4=wyt%;ymzwHrVO~I zy(!*yUls1T^=y^69K=>;6oKI!&EnV#GnHc7EFx4^l5-gO6bn7gl@?R0p9dp5!Lak} zusq~NIB6gJ9R3_iYs5b%KZllcPDwgX$5c#Z7xm`(pEplQ{R(VNCFpTSBlf6+j-Ct; zThAV1alr);3c#I%XRwSkvf0<(Ndy)!aW_siHu>Kfd%?uN41uStq>mc;fBQ*3{!8!4 z?#}ML{C^wI19sx3$f+oqFQ0VWc0xA5EV0PNr|$3r?DL7!dH~xYrmC7vG3_n<0fZ8e z%?q2j>2M6Cw^;1K+0nle+h+D2)(bQcdY9ZvdV?z_gf%MruT6)&34Y%*B(29zay^Cr4%`p$-*f_h|9?;vn42HX5_bIJC3HBAqhGyY+H(An=XQBH 
zc-3;Q63n^V{fcxFw)KDd^6$cHZ>bLxv*&;q~Bn@c{VE4}X7g+G_RteUz+QBMo`@B%39nU1J8ez8)J5qTLFNEkj!cvu+_{(q0k?&u`)pZja>GR< z)`r;#BKA`h!&!&}0cbi7QMJ<=rr8ROMaM+dj( z^#1l&ctzvgb#|QXzw$bG$n_8*ND_G42b6Qo^G6N>p3s?JkVsN+uUtQ%Z{p-9ma?D` zvVIvxQ;$xDBmd&UBdHz4@C?)k?C=7PXs{V0pq~&b+{S?jL&O05sXw0pb;&UH>6pkG zKcCpkk5is*^EjN&()tRmPo3(g?)$2$0VJ$1mb|Ml<%A$NI7xJo_F-^5Q#Y(xe8KRO z!08P?|N1s;L9@Df^|x={w1u7}%d`Fb@U)Hp{lD$aKC6NRR?WzO1Yg`XBvV*s93=&u zz$Xbv7yky@vH_+bfySpGS%#^CC({u1O*$!qgrlT@ll4JDi|NRnx&uFGxST8tWuf)e z{IRXX`o*^VGFn9k`eY9^JUhtb#eUw;4_C}Saz_vxk<)9eSY9k|@Xc}<_{bln?-Aa5 z3qu2~2h?++0wVEJ*hv5)r_c8^CNsg8!a0ni@YiVIWa3BP1}AJMaWsv6Qs=6owB5)S z7MS-r;LJAhJg80K)LK|0oNyFKJ&?d)Dsq4}g;Q@~eQ*M8p@Ys6^DbMM51I4A8N<=q z0^}TUo<2W30{J}wsc7iU1MZybXk-VoF`P%IFBU-Nyl_q<5DRZ?pf-gQb|{e6W-;te zqOoIZ4#5L}j??{xkpw(Q&EHY`km4YSm!PmH`<5wS-w8V-*v-Yl6eQ0?Ef5fJVe(57-1Z~Bj1Nzam83l1 zw98eSc=44#^rQt?6t&g6lZY(625#II%ow|TCqa{J5&H%i11gwGe6F!UHU1!oF1tJ1 zy}xz0pLToSbm1t{(P}3hKkW26z5TAn)^Ebp{isyna5QB$q{{Qf32>70o2lBXaDXRh zi;_qO4spPkih|PqO5L@?l3n_-9Xam!Q-xG9(MJIYqqwv8%aH2l3a%8ne-NbN{nRUd zb%(Z(UV=(@Ub=`>bq>H<9dY9a!WJwu;bDX81h2gk|1uQi3il zHliysB;x2@SXiu3x=?|td0R?gv&>x8#4ROL)J+|?GvFLk!GC(JXQ4N z>cbLmU{j9)L;c#xGmRRs{88+2Q9|B|;Kv*G+YHX*fbJ1$1qqCz)~{J-1hybZUX}(m z4Q9za!bkZj8S(BHm)W{ubkPN$beV9?HW8mwF@RYGb25no*Kov&$#x;Eo4x?}{vt zmD7(ztwbNM@330*+3iifrbq6>X%qv!tmCq?`#Q68=2dx*@?^~3DWz_?AEm5?I$;4*gs<(8@3L0K}6W&wuQ!H#)U z71_;}I#gVsIt54$4nd;w&^UDTflUJe<7q@t?OSwyYjEAL#yf_U5qbj!{~AedWlig} zXK{?|6Uv^WM-}tJD<9;93HNu5eX_SE0i9o@yIf?L)98hhtj$HbKi_QRhLLlzR7zI5 zMvS?Ye4=dLrA)$IzUQF>V;CiZiyWPJOPzG&OY>qbKa8AnFBehMj2B$Md!^%m^)^8N zE-@iKl%sIr=+8FKFg_UZCit&;gfHtb0D2t64}PNCMJGTsF$lfc5D;SWvv7sa^hgX9 zcnL2n&VwW@lNcVbFbX@ev_6cYPz^78=HN(_whd2ktsgU2?F9^(zr2yt9p2k-`&q!i z`^!rXU%LN0^yrMGCW1mgn{>H?t73wzljsb&tWhi7Ch z=Z`~%IjNDK-efj$TbgqfH(|TXZqmomwcIwft<+&>c$oQ7j|2qA85tde#<|IA=FbdE z(^vS@=1&$WkL85Mf3etGU|)wp1LNZsd6dVL4E0pplX>QEHNlYj%}{kGpSt9tk;?yKBvN)R7#E)v`&z57cKll7;SvFTPg~j>P(!zb& zDbzAN+{!H0%wH~OdSBvizWFnPBIoN~~fF*W~*_(If 
zj$|t<;+@6$Q*eIaIaF}m);!kjd?Lm7k~^QCUVe?+9|!hJZ+{$$+ui=?K>w2)hj;^7 zdzx=>a$o9jU3(mDaP(WwyF?r=n z(4XJ800Tap0S0{_C!a8w49DKYO*AhSY$}Uc5(^V43@?H)Uk|WB00(}KvgtaIc2%;t z7L$Z0oa^|(6w=BZ6bi+$O{Mc#% z^6Hx9?La}jnV!?zcY2+XH}KuC)7$RE`@5|cdSr07#kxib*%tsPZ~jD8RDse-E&Av% zxOQ)nN2p;_a3Ibek%L0|Xp4tDM~g@-ethvdN>4x*f`#;m!z0X%GJxDTc5mR9pL#rq zWi_NS24ir;5(#w38$lhQ0$)0aY-EXI4POeU)dFnIG;|MmJoy4@_*kUpB(W>vZh`;7 z!!JnPz!qWK@k5~I3AfVb0f7>Il6a(fr0v6yj>#@w{#^@JdC+>G$Wo`c2S&snlU^Pm z{!3>60n?_R=`#VJ_|OGJBJR!urrkPbD@$Y%sKt{Ez0b=t?atECk$`w?wZzRTU7FU> z=67Qds3~4&24Kp5jDp$3Gp}E_&@oTYUc1xYVtO~PDO6FrEyBNmxQB2iY;=PN9rEBp zXioC4}j!r#y^ivG$;0;Yc;{Ut0vok0& zM-Sh8=ff&=({Ww9Cn8AfAe+dM(UOWURYlH{boit{Fs6g!^#Rimn?nGUM+^|q59q!^ z>sHaMX-kxmEk&lP8KoY?Rhl44i!yz%lN-_c2k+*UJH?=12l=+ddlmnSw<)9myuP4I zF`H#7e)PjKAUOQ|3a1j;JQSA#^#Rj?eE?ilN|jhh=4JWbyHTh_T+)CMuo-fypM)1I z1>^;oLf}Fe3z60$Ntk*>D?e0MX0Y-i7$63lO0a=zH>5?0ibVVuT95hdFmSfSCl~e` z-Ytm`W6y{0Ic#7=CONwgGEUwVxT5zUzSzT;39Kw0yNn5=Z@xnTUYl3tQ}|};{syZ+ z_MXiKG?U%R3&H;r8|?1^?^GGTF_5l4=+QLDM(j9_h*Il?R|hRANy>O~Cx>TeKfO79 z(aMC(D|vl*puJ=Q(Uh&|!u(Q!F5O;J#ThQM6)a8a^-u&J)*qnY${s&xfsLDXD6cV9 zRxIch@dwz|qFxhPb1TYR{B^5|8LC{dz+S&Oe|hGlA32|bu_v&VE|Xtid;4}X3~cun z$T83`$v|yZsim_G(AZfQMU$syW`&b$g{{xA7ndXWp3cbXe6&qn+T>z z%otDFApZg_h@jyVX*oAl&)V&+95&$<(fp%a(T1U%^Bp{tOZ`1Ikqhr!9-_LX)*b9v z5rD5X^X%aUIv5VqfGW$7?_|Z0OkF^tbbZLEfpsGGYZ8SB-m~`a4)Q!lqr=Z_WN7P+m`kzZ z=(vF4n!{lf1K`rkKMTot;kl?8NN~n^xJEwIT`o^iE4V6!08EA=sw*)9G<@*_boaY- z?!AHL3=D5scIm2?X*;_G|lRid+Ck8@j7~H_|3ZSP-J2bpqiF>UfoLL^Zm4haYVARvt ztB!grqunA2K;vRi+&pQM&cGM}6=(`WeiD|llP^~aA$I$Euq18=2B>BdELv8tm}gBqnacKILa&DS1C8#M0jXn1W|#QC4B8a(l;M zItCFi^@p^eNZcJqlL)WDaptex8$<0XW3g&+U6I6&kWfc>aZY4Hk!G?GGqgy2MMj*s zj)mGmNYOH4Os*_9`_hc-$s>Uif(%nD*3A~R#|=8eM}QM#Uv7vtn+e(vH|~tYoN zI!%Uf?u-3Mg#uK5$*6$QYmXu?!R{r)lB*FQ!FkNU$VR~}ug$fl4~?%G1UspM+eC)g zPh#%^)9-OM^)9-&@JRtl#efQDc*>z!t6n=-JMXV{-%tEYKE>S!>dy`^fDW2gB%5ND zh))s;-0(n$Qd+oj#?2OsDHPnAc}^oHk?{L2vj z{5$SmJ94$OTB6*D&Ct%6M_!m>_Ac4N7L&2ncARatfd$&)Q+xC8Eu@zTEJY-}8CkLT z3189P$Duc+6^~&{1D_wExvYTr0CZemUpp>A 
z=|u5mH{hj`?hnUDFJGU%?CjvS{Wc6xGo&_1V9X*mG6gVpukoV9y^KB1)uHd}*iR|l z0EUUZc40}3P(SDoW~qT72^EY~FM!NAWbMNsxmC`d{q(r4cx zkew?{KB;B>A&e-zoS|~=5@Z#AVKDXL3D6#KK1`ty_!D3H$rpy>d_|cD?w|YT{`u0+ N{|~VGlH>qb1OQG3a54Y@ literal 0 HcmV?d00001 diff --git a/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tgz b/pkg/iac/scanners/helm/test/testdata/mysql-8.8.26.tgz new file mode 100644 index 0000000000000000000000000000000000000000..ff8bd1ab402e355babfe8592158b633c4761061d GIT binary patch literal 40449 zcmV)EK)}BriwFn`iz{LP18sS8acnI(E;ueSHZF8waschUYkS*9k}x`-`73&9&sg$^ zpeV_5Mzfx?vg~B^B)0WRagww1>gu{oZj#@E?$OwdQ|HE=1b09shj#ym>jei8@j1?8O1jT?1h`U12^$n z?$rO$ixVhuz^-=S=T!at#@TjuzG;oTsTYpCaOiu`NQZ^)#5-WaXflaHXfyVvQR1i6 z>NriO$w9X}M1o74fuBNQ-+{7S@i$aR-Ae+d!^FFeYU@gN-E%%VAED=D82i(dUcI_G z`#(Q0KVdGraFcWkIu3ky5O`ZG8M*=g%MC}&bCVmEMr@XNECSG363ybFhvf+qZU8-v z#Q0fqlce6nVK2PIzYM(*)*M4KfyX9(g3xpThSY~Hw%FKnuWmX)ru_si#i8mT~yB#2Qo{NTkM z39_s~zp1OMj7Gn4cJ^CedY#?XXQJ`{0Fb(lK5F>?_U`Upf&cG3x#$14@vNG6YYe&v z|7(r>OD{iq3#Z~MtNdwWm*Z~K1DH+cB^zjA|_m#j+wtX%)S-R)klmtX%+p6=YQ|J!&TJYfHm zcw9{RE{Fzhz@}~tOaM0gqy=Ba%NMlaGJoP;dbtuO0SHteT7e+xv2^UF@E?%EuKgfj z5%7c9AK`niPzd7|T{UnWVw)oGJSrD9g!x-cyk`5E#cJTTr09mSq-TV)=Ali0}Cal!z(AP`n(&2yQM{vMjqpUu;krlu9ZusZd05}zpP3tw;yIpceR;`fd>1WW>j}Gl%f#TmTIC7Xzt&3Ie~!Hi%TE zBuk_QM*$>i>Vt5kcSdk#aL5KNu;8c0a9}t2wsKtwN&H2Gz_KI$i;d_fRyV#M$l@s2 z!I?&DhTu5Nj(+T2H24*8{)@!8b}YnkxFcGKfK_qIiajB(vyEgn=x~t$h|@zZfXg=l z;9>c0I_xE`SKP6Qf{oD@;5cBve=flJgFEnoB8UNhy>94u?>WG{k7?|_@TNg@qt@^t z{sUYu9rMo&cTTv<@#K;3UWQQu2V^#hU=fHF#<4O1W`r$dgmnMPdO{@BJ@qY@?mmAJjol3m#?FtmHmp- zyVSQJUoX%Xp4<5uoz4e21x6P&JY9{fsS!u*ds@r3-BgyAN0VH6`MOX8dNqpT?+t%4 znWaQY2oy1i*u#ONI5OHSG%^JaeG&!!(Cijpvnjo|R>|-9XH#$JU)(UE$e3r#=Z%iM z!R%5s<6kiTB8XrR$vBz?BP47=#kj<%+5w&Ep(ZY8+Llz zop@)Llc)s3%{FhPLay;zboy;FDSZ8jSd=gk=-o<^gNTIgLI z1Rqyt8v9q!k)o%r5a2$#u)9dUI$cD5n-)YjoOwP?X*~0AMsK1SyC%{L{0|;}Njy*N zhQMc{ctYDe!`&O`l(%sVP0V7DGoBxw{S!HK7ev+9hp&z~MPH&QCD6usjR1(gO7@(s z4mTc-{S??3;7P1dQn2K8F 
zd8p;WM{@_mH%=S@b%B(N`c|g?K)D=;*2m9%JnF;_I|f#dXCFYR_YtN9Q6LxtEEU+? zu7J8n7Sd^`F?|5b3mfe~&mHWzGZpZ3qOfz}$4T0WXQ9Nj4taJc(8s2fW9-5tP5MXJP4Umm`A_0pM)9#FG59JeETvyMZ4M3wVPuh^x2M=L$%6HU;cx?Mg9K z5vvKvfqD_el~5}@*&$cMpBHVGF6;=l4IxQtxl6*BUm}SCW-Q0riqY{rb)M%LDcka6sm#j}pFFLstbhw`J1e zRyop9?D9EY9VLMq2(cp#Ff#O1E8pTTnl3_v+_Pivx0#PKCIL~~zfs*L>PvtS)No^GyZI=SU zIgkm5IuawX2o|O2gTr%;v(&c^qmSe&1g$=9;eEX1gwp`(%;2nw)W)fY?@dGyh+Lo? zfDsS2RJfn5us$w|zP^7No#;EQO)#;HQ3)sr!vX7Fp(xoEa#lAZYgueYYu;KcV?az} z3T17YTmz427T`>+0G^{pgEP93)+XnLPiHDO=0YPCV*K8k^rz}5V3ZRD5N{wz0^esY zFaX>H*Z{|?BM%jGdf)Vh;Vy@--}9!NWPHFr@h|W^_%GnIVHft8v3lnX#}RA4fRz)0 zI5P(Fb=q`vFdDBI^btv@uiPoi{!)4mbL4CTq;1%|q^1kV%xH>>V)oXHibBq`PhyY| z0e$*`b;{qF0-1QJj_&eQrc`(%erCx|(@JGf=u8#aNH*C95ZT2n;LDCP0I@FlxkITJ zrcTU~_?o^$l@b_IF_4M- z@jV>Uqib&jxS_6l`>*&I{czL)Vg%eI&$b=BX(YTFXr7@5pUIA$BbsuoBIsy(qQ?;4!9j-1L#0g&O5zo&4 zn&rlf!!@U=Fw_-|aw=g^!?OchI(x9CS?}KtMpt*9WOVpj!qahaFMzF8 zz0omb483~RQ@{ytO(BIGsz+}#rw*#2} zH%XyweE_`d9Po3jsN+fLP((R7nuf@?4M)s_{E7D{d^Ek{s~vr8astbaf09PBjX}P# zZv%`Iw7TVOzB&xyq_{3>7Bu6h_yP;uvafS)fNxn>>MCByL;SQV#VB<`6?IB?A#C`} z3jkw)C55}!YU;~&(Yev5-7*sANT^f9pw2raD5dbGQQ*Z=1`uR>uDx6DWTTECrHRep zQ1EM@Wn^l?aspdb;tkB)fbww&5mfZi`sv>tjd4s0D?L$RxjIc&qR$Q zULkKjk_W$1%BpYuBKad!hd;CI4;1g6CBjYB+yW|m9&ys72wlU_K<9tO~qM(kJ z-iTffSZ{l~XPcNzKXfOiM^!D;$ndXaQ&l&%BYkL18;@tvqT>PS^wdgtsKzGC>);kZqiH zYa(@xnK7L(ZVEj2%HuY^WVwd+bVEOy3GjyF$RB1$Pkz;SPNnhEzz^}36SauLu^SHg zzi6I^D(=X*EJ^smJMhyq)3*s=wpcO)IRpTB1v>&_0d{e2fiQ~LHEdMo4Unof3;B5Y z(flGpfeIu;-aN>NSjHXwiUu4|V1hk_9@I=+v=~Md#NH&j;ugt30D!_l>=Jq|@JRni z^>y&bu^($?0f*0Owojdt<*7us_*!I0nTc0T`-+5eJ$^0o!@9J<;Ax0Dov@wDV;DmCw~$^#lmxC!AZxMlWUwhhbC9r^SZH&pCC&??g#>%_|XCs-uEkr^KwR7W|=cY_yR z;G(rp6oOcB%-_M%hnL7+>8*~IrmauC*r)A`S~#I!OErkP=V+G77SH+5rJB5O{Qxk2 zJ_f=bN5P1F%U{6CmTwkm7Wfjfi>%o*{-U|O&XYQ>CE4L?c$lg$p*`3{ze-vp!pka zxP&YX(>A5Q{hAgLdZMMXDZzag*O3li;T)xXtFJuY3YxF&+SM~(a}8F`d<`~SHS;yv zaK#iFZ878BT&tzGisd4Tm#&xmagOfcf6%wdOt>wWNtnW1BO%w)L8;lUQ4(W!d2ZVFu zNI$|Ai*FIso?$_jc`9Zg9I$q$GjTs6PlTgf%5UP6XK)(dX7uH~_=24HS?|e{wj8q2 
zi2X{3io6W3estqZh4+$pyUp%T8g!w`QB-^ZFq<7F&3+nTR~W_?Koaj)2T$JMRBUlm z={`#~rr1xxp_Uy5u0NSyPotVnJ3>L2UB70%mfbyRMe^IS4s(IrgpFe&Emc`R8xS84 z4X00<6*5cRB42E8GEHw@_;E|S&nWI0HE>Eh2kb%3DH_YlX)OW9GMjQ7vfDB{4A@&z z>_taPW-6%LAR8moA+5NN(nn{wW9PKV8O`vimM+kYdz_2Lk1vFI?P$yJk>(zj|E<6J zhgL^e+O|7wTeUX4y&n6+vGj;c*yslBW`^8s9V2&eU6^cd778fX26rW-&0`ZU97R{* zEC|r|ztA0VZsw~IF-G@4!*LW5xoPcXy`!#>ZCp-wkXjFaI@=QNH!&JsW-Wd9{mU)< z%>KgOOug_7;~H)1lU8smub(s5W{y!ecS9myt*Q=$TQHlbg@L3X^zf`F8(x-CVWZcq z4bCudr$7nfr`S)uH_%NJ0`~u%SA<YZ3jhu;V6oA3N49TYC<)U}Ai;Oc!OFxgsnBm1w}c{IJ;(TOj0hOTolb!%^mrtQ_S>BOMGZrF$*gAu+LJ zwp>?@~*}Ad3%@5=C4I%=`(K8tec_Bn*2-IMlL?xUl>$@rYij( zv?9_VhP=%j9MXl*M2kwl_A2g&+2VE{D0fsoxL&w zG-`7;Uo`_(+3|!A4#XHk@1d9F!uZ6!As;W{rdTaPKCl_#yzrBFMq597J{w)~({N_! zGmjcMo`w3<>1yTCAUlUfj=KY9b5-pS zm{bnP;u5$d5#)5m1>IUD+?iWo4(uAozzUdc+MDHF11m>ibq=%+sM0-f&Oxo(L9p0; zrHi1smHdZ037X{5)J#Y-u+oRj!A2m@Mvjn(p@ALGQ~u=-KD3!bPuY{+4jk zD}kfnrdQSd${h7dh&glBt0dxv&Uz)CdV=H6{z(%?OFDdx!m?L zP*>r&SB9+`*FEjXv8?l683bkSdzq2R+zxyi)I}G*436*BuFH~zeEEv~Xx@C4{WNy# zD|V4{?5phN4te+$2a@;ks~pLioc)SWS>63I(EfuR{))Kzk9GO0CYvRk{xU+lcl)b# z`zy}EmvHz^hwuEhDTU`SIu>)G7@Ofcqk05m&YkqclBq`92{Z9pyW2549# zSYM|$pdb1@47DwTSZX&@an;6K)s!Z~2AT<_58%OFK*9sX94 zy+K!}5n%Z_dWvCT*aMo7f;)os*F{gZDE9x&*;YJohcwsthdIOeM$YR`V-L`wrE_L~ zq3Bjbu9-;-j3kWG{B&~mW03i4E(9^Yk!w``+SA_WxHB{=geS588{itB@|rZA7+>oJ zGvL+PFV8j0x*!4Jr4UTW%11Og0XaJ$bu#P^H$odsPEkV3B*KHC9LblpO8YsC;uuiW z5>hkCXxz@4ta-<13nrkDsuFSR*3LG5LWf~5RF+mz=x_iy>Z^A~rImZBVM!*+m>p zXglVJ;Vo?BL1ZHjwHG3%J9NIOauFMdv8?9YObMSzT+hiAZM%xb#)tZ$gIK4oQIRd6 zN`PL|y5w!~(QpX@&=C{HxDW{=!ToBGiF(i$p++%|3fKV26F(tFk*N=V|14%zOx=mv zhbUfJC~}Hx!+(@y!18X%?vHO>s3?kP-3?L875nZMwE#yb(d^h2wsDt{+uW*)2XEz{Ai*YKHc( zW8@0>miJkhD=|UP7L?N)1HR$$F;(J7N*QF`IPmnj2kB8ui2Wd+FS&d!Rodj7a!sh# z;s~VzzyTc6!jVz-zW+Mfe=`z85Fw3o8#&i z2G(z|Srh>#mwG4mE}=h=2+b)H;>H0A25c78=a9i@ZGQjf5p3d zf8$C?g%V&b7q6p~+G?sfIpRtD4uT7s(?NZYQ@=NhjaY?cWfzu8u z^gDPJFfF#*fUC)7bG+=Ogsg&P<*O=2Rj6#`M`m^8EV+7fGaIu-jm6{|F5@J{`d^rT z@Td-xnj=@ECp6uUqku`7;85yd$X4U1Ac~4fg%$1ECcZ4y_)8-9zkcD!< 
z6DR(s6ZjY2@MaizWO2$7XtOw#bZ%VNm0ZsmX{FVOUg`{{tXU)0W}#~Sa<06cGBtDQ zjF`p_#j8u(H6ycFCC>Ps43%5#D6`_urZGBgxH<6+@pt zx2?mcJL|*vGI3-uG&8_H#e?FfJd9MtkGq^>yjLi}up(ud9E%-g+ReqfIxA{NO@hAk z)A4LTp`S8bb#pBVFT2d}uERPVjQax<@3^xxdQV<(XvzmI!A>ov*(f*g!yvkRFAP6& zAHDDr)&?{aMx()d41Y4SG$Bf*(T7<@smmZ5xIx`#WpY?7;fEK|d(^H^Tt^(xp{<&N z0(3Ejjmpw4$`7KA$`2_5U9m!ToCUtD3n%pjo)ToeDfYQxhbqw zuR(>h!94IX4o1=%WWnvKOV>{t8X1-p15xur7W1x<1}G>G`Ekxy+8Oz9LQdb~dHg-O z1kPE1kvfR=#`UgUziv?I>iwRVZcxw<(G1T@*LqnWB*u?^)S!Es(B>rr5aepg3DZuB zHAm_SSh{%c^}d82lEA(4N|_4(O2H9}AIBaX3buII}}@z zx+YdThb}pl%0c&D+J96pZ7EYMk><5XE_svu(!Lf;`C8jyYYDII-u4>3wrd+NdTlp3 zoy_4Jx60$lf^OON>Z@|fwzs~7OSZkir5v*DO)l+@ZEyC=IAiyAMDWo$T;S}UZe?Dj zbvxDq-q@>L91Htfmo%_4Z|jowm+`eOX>vJF>yk#7_p>f(_)B?N@7}^f(N>5wad{Pm zsJyH`^ADrgixTbV-;uYeIX#VNEpKX<)1Qb^YB3yfOO`X^D?63KCb{LJ+6M3VMN`sw zOhNP}14tURFbTOPY1hTal|)he1%K5H2;ePz|)qAZ5Qz-D@7TOl(Ww8Ao9h3rj@_ zhW;*#zAOQ$hC-`57fom8ym_<9w?Zd~+Ce5hDl-}KCm!&r);I^9+Jk^!9l)6;maYSu zSxJt&5T?mi3J*=0cswt+=?;Qu(4DwspZy{l&hV@xqB~A~XEJ(_Rl?(yj=Ve)NVNXb z{qr?GJiaBPxt=R+uPaRheA6$y+3XD_O_oq z`4ih-B@kuL3?mW%D9g5!AKX{+{0=L+{g6+58}E=^v-Qm%x|biA}WKcsTY`C;}3cdaKpvDK)4*fA6OOec);dqq-^tBFv=C zB|!CVSq=~@@77vmw5i*gW!9*>HwsUrJX%U(Fcmfy9r;_WTg)(W@zZuNOfjBl2X zZR67LVvg(3$hcve0#}bvx`+^bQgeyWq27(6Bg5Y^TSxmBRnIalyuBz&Ph@fZOZoKJ zrU;v1s#~LEZcbi^>>#v1vltK@P0$KRrIlcAFde%v1s=*4yl?b&6Nc_>)%NGCn+y|+ z4?(1Bku9+-)O5Zxh3cg{Q!QEhnJzysq94_gwI7YJ(GYMMGRRu08Jkve738P&YZ+@x z|5q+rp|+oE-OBr8MI>{q+_Du|4BT6Wh#XipMCiHTl3eIsvtkK2^i3S{3Li9B22m7v zZfLYg1;jN%TbHIS1i;b6El#WY!Nb)h&}2nF7!9}ZdF+oyUTA3fDL17(AJ*D0ZFzE(vKTBMDINMP=WK(|c` zI!7#me|#aFc+k2CU8Y)&alZs1q<(2 z-(pnW#ME7Ye$U0){QWB*3ARYf{OjA$M_jhQ^NaOY%J(udT8hA}+poGMpKAlotg7zp z7HoQD-+ktL$2Vbx&aZ15w#4B13TWZZZ^Rij+;7GATk#)%E7rK*S~lYr{{Lsx|B3vX zA_zdz#dMcvPWmxjp^mXm|NCTjx1j%p;`jRBTX>HCd3buxULU@C2?YB)bB@qT&!IpR zpFg+w*B@V=o*ln=trz$a$ubJD%;Cw2t;FGUsugNIe#}k+GzjDt^eOq0UypqmSp)0f zHRr%XW;voH+un#he%xyP{(A=%fzpFuJdda$~_z!XPp3JOgN@3ku-09 zmaj(O97?Mak0REfaj3Km9eIPMJ8m-5Pw70*-kR9DhM*2 zgP~W 
zmm3Y@EVK144kL@Mw1$C{M1T}SLHlk*`%6Vzn(Z?Z=cQP=6gS7G?8*^_;_z^F`);63 z)LWs*KcHJ4c>C&KaKd?i`sU5~`;)`7v!C9azIgUX|anO8{d0gbZf zu+v!xQZ6zLGSxuNAp?GJn|E?X600)f8)(C>&%5BaH0s=>ix7YSS3H6Lq;L%E`1s2s ze(>zkYY(=QNAQL_=GyQ=UnEKzajTBpIuwUuY{y|HMx@GE`V4NM$z2zBHg8i6nIK@D z@%oo5>&)mnu`_Kyw2)f6HTPnLA?)S{U5s}~!|q^+n?|Ev`RcA6x4y6uo(mF9He`F& zAW@ra120bPWsG1(o7gVT&wA$Lan+0^MV4pOWFfc{58`CdYyCoaM$V$jtk0EUd9oQI zMT(OfwIKoK;{edk7OCJ-Dgdx%X(cWCc-Ws=iXj8|9w!X0JS)c(%P0|qA=f{$T#Qh8 zjbYCltw^AOE_{R~6Q&oe{pX}3AOe}JmW^x*ep%x~dr)75{e{MX{wz#bdh~XlI@|Dn zy$030(%I6O9YFnTdKtSTAUU%83Ez{Ak-^zqSQ*|zjZ z#?>L(PsA?v2p5hcn|!2M0lH+od5y_DU@FzRUg74|g!wPGfpL%tn-afGfy>LtW;g|C zhC&C<(y^l(f;@X@;D>V?ll#HR!G}?JoKE;CEyadoZ}{OfA}h~x_cANkQS3`+PXvjK z9na$rY+6cis|E?}x0nlelXX6God&l3Fq0tI!7~fz*2D{=o;0tKZ6{crPB$ICkL1do zYFNXf+Og_;@21U7gAClZNvQ+tK9_Cna2kmTz`YjnN3>nhnz4 znV6PF92t5`!6+a8bgrQnN4@OGjZw?Suu>ii8;vB23S68Pty(Im!eRo`j5xB2YrvLm z43&s`NKnbLKXS$ZwO>+#BV+vu7I~%0B#ux_OlCpqPXo^(B2)?rW$tDJ*-%aBAm;ny zsg@kUJQY1Axz4U~pttLWRscZT4YfbNv}&)>lh^;ghy4b1&ZsqAaTr3$(8RpR65C7r82T-3$xwbW*e`606n8{A zuwpqGfghS8ZI%qAg3-Hu3M`$Aw4bH!F*6LbWaNJIO_8loXzFV}DW7b!t=DSb(GE7* zrGYK6fq8OJJvZ5;N*WHujhs3&=v0yGMwz5EZmg6RG`7R70w+M3D!9AU1uFY!E&P57 zY9&po)?ViYocz6#R9+mZ{>v8B1_;&-s_Q3<^0l?H?c6B&SS5tS2ZvU*JL{Rn@?vL}k76 zxC3ORA-0CFnyPq;Sgaty3RJI>2!&#rC{Va}Ws30%)F@?V$*y40x)!uaHc_CeUJ2JP zBd_uu#-@5|Kwkn^^~6<%H(u~eXyfu!v?`@_DRI<{l#4rA1}TzHXi!5hraP7o-VF*b zdM;LhlMNUI$SU+Y+1zR=2?O8CL_taBoJnn4q>oE4rSW|mM-z!BT~8f&Au9higC(TCZTwXb?@viGNUwTlhE z_kfd6Y_PvKLUW%wO2y$HqA%*}*@Ta#iWcoU0pnh(umH=n?)rVfW`01WB?|%-{pQs}L)ozGCQ;A!m@hP2H#Um>3R5r`Y$d)&&r+@l~oBCyD{e^T1YBKXy)HRxQ zO;A{1X%}x-YF4=AI_3{E+t!<`Mb^9K|J`WW*C5qO2i?c)2(Pmd-MD3XJOcy2+~z46 zgtmaIn^>7-QMeN?Q%PUDG3Gf-oLH64YgiYlX==I_X`xI@Xq8xwqRAA^yWJpQvQ^yv zBg)tjUc#pd##_Dgc)Zmi7Q}dKr5caC@qxtOL(z_TYhU<+0yM_h%3ijMZD7G)BbdDVR%@ttv{onR>Z~sZ|{_n}|)1CYKzuS0(#H*>ki2`1bSvW52PN!qjy4U&^ zn78Dn#QdcPegCK@zy8H0(LtltRWX2?^}o}5`ZT}(pX}YQ|66%-Iw6Z`tvmIFBi8}D z>a{-j=n*3PIxtU{i$`*dM#-H8ma=kf8Bsk;-Ab6qN^N(0;@=z|#S3M0r|2|QP=@ik 
zB5tEzq@M3Ft5cQHpvDWC#f8fdKk;YL0}4a@MU64Y1o>c4`Eh(RW)Dj09U}Cb!rRaUCI0&+M#SP$=VN$uZbwpYv3zDiTtYEByL?g$dOd4p}S$Y$BpxKt8Z;`jWdl-916B$WvDil5^OA21d z2wt`k+Bmt6xP12XVF`qL2ZyKU^mf8;L_ELjGFGpx1pQs(s))B4_v8 z`fb_^mF-#4XoV0KTm9opAHrimd$dXo{(9cUG@GK?{27K-^e%5W^aW!hS=%g} zqcHTNZJ&DlTA)Z8W?kjcA$$qm($@nP0(=EemV8r2of9`sDdn)eMQOeyOIgQN-Jn6K zayu0Btno3Lth}L|j-UDNQDklM4M}E`uA%xi=6HjYW$dAIE`W2M14BG<(=lt8+}fc( zGtgTyc6XlaL+g2mn%wnnITNwi&dtG1>X=E%3e^|rH7bYzSJEmS9%>pl$ z`3`s~m`T zqmkDbKrCDbq23?|k6S^vKcGKpgVGofffG;Cs5~!PHp05asyAFT-_qZ%hWw3wmghCH z*)$5gIKSa*FINM+-pfj0k0>Er;3kR5D+{0U0%ERWJpH!>p+AR_`K><~KKz@|=8Gl`oJX&pMP6eUofz^)TmqmcM;#le%zt6phO^w0sbt z8t`q9e{1;t$cKaby?1l!T@=4s9nkC<>MV8PyeW1X6A#7FqKh*Xx9r4lJ`&t9IE z&Z4!!_0iX=Nox+OnsG^O0jj#Q1T`-$SzenXeQ|hx`26s!d}a(R%j}6Y6CUl$snB=4_}{c7N?oC{mi1=TZSQ7!Lw*q9|- zhIHo^k_|UYEUFT@o+2rI8@PEfCy%n1|9gIV`2O1;4!=7qA#}@W(`dZCER-+9s~_FC z0Ou-1F{Ay4B3ZTcYf)n9ogBTY{463C=cTbXAzyq4asMl5l9zJ~n7}@hP

ul_p1{67&WBl^1$Ra)POB)3U9&P}eX4cHO)L?tl(#nNzc}Y}7d9P)r zb1m`i?|P@7^FI5fIs@82e=z2_j!c%;B3Sxdp+-Sj%Xsd{uUQ0UnTl0l7Q9~uf5>HU zYEE)3rL1Nl=>ovjMMV`C2_d{SMu(Z<#LxOMaMKJ)q~;nPXeJKXi!bB+U&MD^7yKMFIPRG-;B_&I4rX9Lzqd)*wM}iIUxUc z6`HsLqsAwvi&IhavObpA#mO(E0<;QqrkKkfm z2v}_TE6Ye;6}bMyM*TS@nW~#|=&7Dbu|s^7xdmF%mzM+JWCN6%Q(F@6hMo&K*vUlh zg^#XO`A(Xm(wb7dbU1>YTxFP&&e$p!N8VYS*+35#4;o;)+s);*T;{W+jno#;YX!XL;W06$s|id-24st&5LWW z4aHKORRvn&|J!OyXCVvGq%%YxHN}jy#w@;_*+WQF+eVtUYO@<=QJ>8ta> z%pDHE)>|XbQ#oawjN#nJv#Nhe<3C*uJ7YL5f)JG~8vv><{_m6BeEj#PyHNZ-{?jcy z^JjN!6m&+>PU}9D(>yVf>XNYC#aw1)`m&>7hDO=Pm_KYN|2|-^VIJVkbT$b5Wc)fx z(QxDDa5Tc~F{rLVN10YGv02d^aL#lwMyfN>jJ{#bEq={MY+rhDH+U-e|82$p-F<3_ z|NG=V{`;*wYvTX+Z2u3>_AAZ|%clsfjxkk}BXrIkt713F5^icLnhi}mX06axdmu(& zD=De=M&7=*RaU^0tJGA9B&!FJJ9_R0E}X8gjrxK7pjBv#)1ui(CAhi%sJbU|vz^Bn z3210B+{m|Ltzn*<%Mda52%C)LjPwAfsCh_=DAB)KFQFPYU7Z7PwStl@^Q%pG(iS}h zib03wdL+M&Mjo~U2%dLF%BLh0jcQ5^`Kof@vQSR_Bov2ptZiGrDzs&8w(hLZnV0|K zDDYM$_=%_P{J&c`|L^VW-sgY2mB+Gcn1TL8=u)OW#{+jLUw)o1H3U^Hi5z_jgjXYQ zRwseIPaNM+YP=f}mg8B#b?F^oYd(cZQfl-LYjZoVg3_I;lX*11wY~DhZ15`x z#$qEdv`>QmXnf_S;w3SOk&OwnbGf@5Fivf=bR5MZuchnsFnY>3-^CJJ5TQU|7|?-FkiH4<7BYL4A5C0cT;z);Jat|t7G`} zS)eu2yUzo?_NScxuPy^<4gcTi_4af8e}DTv|JUt2CCc)0tKUt_LcMd@LhrYh|G=%K zBx9&argP*Bt*cmts9fFo@`lz|w%;w#^xalgvW$ek=Ht(Q{8PUFuPy^<_5Q!t>pkt| z&wu;7JNNtlZ9MZj{Cwejq4yj6x^jkUo6btJhT7Ww| zD^^~k^rBh)MG}mb)m|vYs5TF)N;2BCNrSLgmu9q9T_@`=n`pF5J-l41QG-XUA=zkY z`*$_nsJV?a+)8d|JDD@#XvMa2t6R$#NIIHl0{d&8;FgY3UeNF3zqkKk02E zSO?`kM`_leB}b`oH$GpM(#jdV&r^Dzr?h<98c^MWnM&(hzRy)!G7;;_R$A=iKRsWm zcHmn>#?oBtfB2lG3nltX0{6HB}aIXf>MUsK2Rnm$h6oSwgb{r1)K z2H6CyPM3`kBRZq_!{vz!DYEcaoc~ZCdyNaxn^R!$#g8Hnj z6>n$vxgqa!L*D0xyw44JOSvI64Z@1ELt0v1SANJ9otajZBQg)pyjddea8|{+M7=MP zR&llY4Ch%u>quN!()yyQ3w5%Y+t{~ZT0g4!)#TpGyYQ9U=dUvVUVXFc&A?aE|J}&J zX9sRQS@^7dEs}??1n9M8;;ZRzxm?Zp4w+b+ca z+TY&4kNqwImT z)ohv^yBBza41#3?nlp!BZJ*RYeHOuDJ1fc~*l?W9#SUTm8O8K)&Feg^MCC< zeR41V-^!E8|M{4bJdz}Lx1unT!Y{o15P&7`lXopA)i)F3??$j&E(X7uy1&glEwM}C 
zR@HV4!~%|HqzD}i_V>JSXbNP!wW@~d+nD!=%PQ_~twYJmHH794-G&tTKZzu+NdC8y zZJ>_-KPl>ec6;~w|66$qr*T{E#~Qtl%+%LR{Zksv^PcU46`vwBTQY4b~JdG4Yz zN}f9#n>-2GnCGf&Zlr$4IKKIm@P9z74~NQI;s3jPdqw`gw|~$7Z{x9O{qlY~`S*fE zUI`K@N&F=!2=hn}qu7gRwRZDchiGJ}Q$vXra1PAS+(vSn&0KX&i?n9jmcbtX(0f?Z z32{dwN{!$K3&Cv#yJ=S?R!m?2kxbrDz|U#qhnY^-W^CVVo}d+Y;RcyfQod=jbc(eL zc74sjNQSXH_0Ii?7tJ&stMR%CKqc)g2-P?Wqd1?|KzrE)B5NB9!V`PMKQIG{))+5* z%D>^KN)L#tCQI`kC|DIcJS%3jA)YAhG&@!7vN9b`Dbrj!AC6ofcTY}R*TdyMXY2(N z{}SlXTSFf;^56E8UOxZ#&XarlzuS2puoE}Md(i~g0pldVuE$=;1~Wex!J=odDnGcF zuvS_R*!kE`STdW^eIZMLYyyTh%zTML=`Du*BK5C4@`cdegFFKbgx)20q1l+m-i7}W z1jlP1%Kg`-!`_6!4TF`4H4!8>^4|4k?G_x}e) ze{^F%nkDS`#Y^aL8b`kZduloU$aA~A9K8A(c?Dnpr!Nm*yn5+OMr-M#a{c$7>_2(B zms|h6r@h_#^?w`BfB#pf)4@%#BOLp0&i>C2tkd~N3#gv|5bJ-$1~=?E_Zn=q_$Ocn zF1v7(l$O1Z2f-~6$=rbdg{Ml`-EWY!%;3BfO@Xg)7Y@NDP$nn;oB;>{EV)TiZ{n~Q zUII%MdW3poXe98M2=(3>#<0wxgDp1p+^d@o+B=}N!Du!lwgMxO%6MQ{t#@y(P@3_s zS#-gN^UDVMsvdN^*VosMxao=}o2}M)?2f#N8-D;9gR{BlBE5Ej9R}RB4@Yakg#)E= zI4F7(-%C|PAWwt{36%Fk^f3o0E<8LFGi*1y4tYzd*#HV8Zs>an#~@+u0t-#pDDo0= zw(=)aV3uxpLy3d{^)&h@MzJM|6AycK43C)H(|Q2h{D;54IBm81{XY5SKV&EdVD7?* z!I6mx7g2KVO#oxVu?t+^5wGbyFaDNQ{Sa0YYzdQ_PRs>6W`?MNu3FghahR$(1JonB zsdYApqBH>vPLZTB^|HydD;a4U#YXThxF?45*P7c=LP^7Yt!xN4$MsjWlsM{nA&C)v=pE=dk!-U?~6q2uPzT zHt|k^nPG#SWXtJv1_V6uOb~Ioq2Rp9HlBZfa=M8N7NKPL3)k$W2hh&a2sVG{?EHr_ zHuU1uzrZO$st%&dOIidtBBdvy@!kM;$S-1`my_5-KQBM=p@CMbql4RXdVl*XyrS{$ zIy=rb{yO;)*}|2NTmxoM$SiUl&hP{t+|a5HFJjUKzVR)` zEKMa}zntEYjk12W0Qv{FzrcVYC7z7Kkq8IvM zq_JN%9!%AbO*9cAdO=p1P&3(A`01vS{$;B`7o%Cs@lQkI-LzW&V2>Z4`_spd57-aT z7{k$up(bJ-_6Zf(6=x=IL%(doX7W>a1Z|`qeb#43TVTbB5_PiT3cV7!0NNzF0>~s) z=y;XqO8A;2(a@*Wpa_Q354OR(Npvz2RIhM|KOeB~$XifDZL9T(QGO`v^9eUn*f6Iw zHBFvRjC29)6SUhYdsxM{>KfE;pZh%W@JpxOB}`*{)91a50lpw+&Vx7sMtBiBy#00? 
z>K5A7(lz!A?Wz{}RSj2Jz#PG3z`h*2XcXFi_e-u_;rvNCx%$k2_e}dlTG2rDBDo!8`Cv^V#T>O!gpuMS}iTO!l@u%1qpC+6|l=fmcl=Y2~oTbEQU}K`> zy|7f_m?=s!>)k?jskd?MBfE8d3Tso~^qYCC@%c5e)|buZ!P7y*GR?tSWJ0Z=3FiMI z!Xftc;zv?$qE8F-I1gw742L2g^#y2oJ$PS%#+QwLKb`U&75{ISfjtTpZXGY_PnHz`i50VC%K?50{GN-orrp-53zTAd$ZLznz$*%4xZr9>{l^}Pk+xF_7GN?IC@~ED%E>|iX z38hu7Sb3KoTxKM;!il@Dzt;yljS@UWYs1YCR+hMJ@aKDANXButFW+Z_P2J0Wv6?*ec6!^Lc)PC;HVaPM z*Q)7*#S(1FUD@vP14BB#cpasb+d549>b*Hwb!Jo5&ZgebzqnyS!B>BFy8};A{t@r9 z!BP&re!iLAt4P?cae_i&CrxVfLN|OS$tRAaY?k+Rv)VSEL>wB@`Cyqdqux*fCv9j>t_x z1QL`4j+YLo#~j`Mv=?P%@m>v9-rV%fY9wCaKwZXdX|pRHkO_rR`5k*cp<3{LaX{H% zajHV|N(U?H;7-nO{me>8xm?kg)llD%9|Tgg94+j=a8eBte(fO&;3XQa>L^nM ze(-J*uH_nGMq(isSOsDIHSQCsb37mDyn6Cx?;po_ArCIB;%TzyF|Ih(|oW; zG7@y3aW%5|nQ3KnBi?|+6nftZ1zVCw-t7y^vQiC7tQ!0bH4@wA?Tk<`kB2lEaYMqaL&Z}uV7ttAWn)m zX_h0j0k~2(FA6xNI$0^y%>c*D9*2RC77eBFe-bt|v@=bk5yMKrf2mf@pci`@0B)Fs zlz=XI7ar$L#1bb{ha{fmi?*xf0k`lS1Mot0ehct|2}PQCo8!U-I??T8)&ux#I>I0v zDfHr9-W1A;X5LaMmWW~$pG=+5gb}BJ8j}PfMdRMQ@B;vzMr3Zod+rwjkM-{-cs#zWv3=U;|PU66v|06dKXFRo(Z)XaHS>b3x{BNgcM%p1;Ppo1gs_M4fbWX%Y(lth$d z!b5mHyAKDPbOrMPevy0^M>9>@`o!o3JAR?$6G}{l42`FtdBimsifHwHz;LA#lyCXlcOfxJm}NOgmEqT>?(uy^%JAb-~^MaBHr_e!dyORm^ONYkV5e zxCuCJkG>nc@B;VdjKY%Q1bkx0{5_h|yEh_WDaZGE#sUC7=@;LY13vX)pZKEySvGS* z?{5L(4DjA|0rCpKQywWH2RYEVh@fy=kR#v)xT^raaQ$Ewd*@?Vf8!_+#QuqW%ijYl z6d%?MIsmvk7vRZ^(q$C@KjZIj6Yx5W+cS{%bIk~@lFKljQo>c% z3HiJO&m0RA$(`q7O`L5H1_y#H*bPox_kcx1VwF2B-^Yu$EG~%!zTs$gmo4UUWTBHv`;~o4Z&+3R!v%;{sg>^DY^k!Dao-{r$%*U3cGM z#}|wY-L@#Ejm-0%1H4wvtOG9Z^{E3J#?i)4BE1AI3$E3W8b5PlyckSCHj`7{XMqH` zeq1KDW<1!y^3R-ryCrwy^?(l%*~U%z(ty`qiQ99K&j~oM$$0wm6+Fle+qj+o6#J?7 z2GfQEcPu9r|KFBP6!ZBvIF3S2b8f{pN$1H$1+BrGkWczG`ei6WV zW4TKstBu@(faCf2NbPBMoRguA4jWd|cS1XO4Y9i*;257_SRr6p0T-3l2YVC1wRCf3 zz<CXjuE^T0h}U#tpo7g-R=DxKvuwI8b2HGRe+Q2 z%F3Z{u;mvLhFT!60z6AJ3jg~OfSXA}`?+RJz;g*k{R@-g{e<=P1dSauSP*b6_2XWVb%`D}!78p%l&e=ln}^vUYQQ~-S9hlf|LPZ45m(8J<{O@1<) zuy8gRcySj*${`Rto_S;pR}(@aDB&X?dlalnh7W2q3k`IK1i1V0ZRnPR4uHEK=LcMn 
z&BPsHR7%x;L%snAl#OZ4(!6J4;O5LXKFWLMskoX7=|xmAf@gH4_VO= zz3+zQV<@9>-B0LDCY@czQwY2I65)JS<1h530`%B>6*0V2@?xPQIl^;7zZ4IzX)R<6 zK-FJB$I=}8r3*R(|86?r2=L3ct_=Ludu?S4ZaELAM&Pg5cWYI!H^=@Wj!+qt>vDyv z+!HeJSLqIApnge*sD|LL%qc2kMk{fPBJj%`qcq03GS{fH6~o^M`zv&i%J9DuBlQZ= z*#y6-o0I`xwWE~A+ScGImBW5RXQ>?cweC{Ojp0+d!&KJrJT6li_zj(=mV&<#_UCb& z%7I_wI#mUGlV?%zOWmie;8!|OEi;5>Lc^j?R5?DgYB#EeoM#zVs`ZYb5%|kERIMHS zdg57?Th)s9ELM5vT?zgjaIO;Im$~~G0&`;*t3;P5*X(4~5c|tGTG@uPQt<1szY=#V z%Q$XHWLuTf)#^vk2>iS0dL_WGr~SL>fF-~$bC)Os|E{=U3GmB|ZOg#FE6!LF`~vjV z;NKOOECGI*14SA5cf~PFfM1XOyXl@Kz^~^#chgBrfM3sf?xw4j0KcB|+)alq34Q_k z8u0Ik+m-;op7Y#I=Pdz#J@)UW3zq=Dp7Y#IM=k+=nKM@j_;<#gOM+j^dG4lDmju56 zeI@vJ#+}Ueo@XrDM%3Quycw%n^{ydIf z8Eor!{c3!#+r<6rP-aRI+3CCrcQAFszT`|aQqkUu6WBrnnH&4-aRpm+5}MsCF6|Iz zvF)jG32Op;Qw8}P-c_0of9?KN3t+#_$EphaReM=Ay;YyX)2a&mRr^|T&Qs!TRYE*9 z{#KR5vto}c0>8}Xss#Ky<8>v#Z@BBO-}lP6xohTQ(|p^yEe|Zq7EnHvM&Pf(4{HHT zFAV;Ays=g>f@a{);ggjCrDd64n{(Eua=)x1_)R>sD#2felT(>@RuTLr{#ljauiR0q zkqJCSNA??zhTXQnbyXCzl zz;7Thui1xdaWkb)<$hcy_$%?`%E7s;FIPh);mZ8EY~U~9(be>@*vP9ZXM$a4Q@E0+ zjP|)_*Xi@aqj}u97Ixms{IBYKuGpd;R#`!)r_5ROA?}i=tE8gwx^y{%codtn9mNy3-tAQLw8^9{CethvdO3Cla z@R+X!o!=0W{BpgIMXdRyl~04X<<%YNN9> z_CCbH2GK0eViC}F##;n1|JK{yvw~$EMD4{u zVOx;kuhM#^Fo+UyBsT~$4LFQ16GueyvQ(0R+$j_ekJ~ ztb=H_CbB^&Rdb=*yn{GfkxcYH49>m@Cqt#V+a}DAy zdDHUK^-j?{qizs)$){GR2<&618pK`ltS!65tR2K%@~@>qSp3{-2XU9YZ0DGXyX$K^ z2Y0%=9=DvF8ZGR*>vuZ`ce=aYw~McdJLH2q2Y0%=p15<&ggDx~ib7PulyRpce%*d% z6lSMF5ab?y!`Pjoy|`#ucz9&TsW(%M{}jz}YHh4M%MC)^VfKy2q@n=trBo5-hvJSC zUz-gnNtXvP9%R-Nb3Asto%%ueVTx9O=H;_F2w8-=sWes-xEF>}n6L|{WjV7oCRl8l z33$aIc<+K4^;Ki~AOr(r?ggG1w5gz~Xf8jg#2#5lVQ5ZWij{-l_XMyQ zvQpB4PIwuJp};u!rRS1^m`m>@2QiMqD9+o4WrNT@XBQr77%Lvb!V5by)YL{o;k5|Y zs2^Zr5XvZZh93u<)->IYA10|Apz$CufE!rl($pCn61EW1dP#Io(@J(j6y>)1c-m^= zq1hcoSDto*#3t@kQiX8i6aJJV?*jG%g65r}r~YMQoTk&{pxeEKUS|Ug@Z8N`(+b@u zRoHacx4^q5QH&e{So4J2(@X=;O;EzT;DbaMmhDO?j^W)5{X=BZROrUQ$XhWm4iG2WRwOUF(eMd=BU2~6YY8P^a`#m_A&Fmh8Dn)IHz)9HKpgIZzC 
zKo<6`PqK9Ig{~6KU!;#c_R)Qf4B%vEG&Hk+7+hwU8Qd^mnD3(Ynkh(d;~KHP4qYEG z{tx#0&H2j%hHh*;?*4F$_gy|x7Dg)fMqAh-{6^cjX_O>BT~tKEii1E#8zz59v-l{H zE_VRL2qpvM`eciNkTYiFzKNDm6K-uMN04lZDgGx+hQrSFwiBauy-~sWM=GHD7M>q~ zV8}MRU_g?vjgudfO=Dg2u_E?kufrRH!EPaJXdhS{+F<4fX$RJtJDR{*h9W4#c@fYN z0YLr*Q^vV~+rxm&z_37Yvjvm!uftbA@Sxn>Dw(L=7ltq*5~}nX($@aa4M~g1Ezi+R z-Y3HvqFK2zXejfOdFMiO?A*AMz(%S6V2>Z4`_spd57@t=837}4Bc1KvX-)jHq5asz z8*vr2oX;R)TL8jejJy8iPwOY?P z0=S4Nr_*JjcP+@8T0m5y=2+9IB9&6HD_kPnV3HzxqHVk?2TRt!41qjh!j;7qt&Cyp z6Wr`-7C=FF;0N%+OStE!!{_lW0h2+^nVRSk*k6POkW!S>7q|q9mMH*~@uV%~=BPKE z8wB51v<*YMmCZI3po=byL~lpCO)k{dFQ!!^4oGhLj>wU2>}a%@3;Ssl3cjW-aa zly*18Tpm#f4s=B3bi?P8)-dcEd@p+^$&o%TED(@cD6Jle3NB*tv3?@x_XHYsOv%BA<2% z4Nf(F&Kk+Cr?Nfi%6BaQsyYUwucK7#C|Tm1NCrfy9;FaOgfT7-Fyxj~C;|7KWZd7v ze}040B*IO9I+y?M^NNX=jSAH*0t;$U0cQgi2n-d`&B@-6vtAtb6YXTwkI|C%(HqXN zDcmmvI?*9Au|s3jjl$r@Xn=N8aZ^c+2v!aM8+bz(iOhr`nN8`w88u+i-zCK>E+RY< zKjaw{uiAn0kh3PdxZ$=NvoPc%-V%E?Nk6;^EP(rf=!7#lF(`~3FNZ{7l>yBQG8x2= z3xtk~O2`9zQLcbjNOsTxX3&wVPH~Gf;XX|ukcnRA3l3ZE1ugEed&Rkl$JJU!z#Fww z!KeY3w5t3t+aW3UK*BlA7Bz=L$q0^LQMPU}FK!OKzF^LLz{ia^ItdPw<$(c-J0^-1 zNY_j(vQo%Q91^fl&S}Zs4)D}Kvi1SBD>y2?ppyXo)IVi!Pn6Bg0q~w{_a>olNF>xO zKC)?isoI1`!G$iz2duZVyT^q$7%7OOF)}cTrz}8E-sL{W7IhbXony!`n`OSx$jgTg zqZ_utZ}IhJz!#85p|}Uor5~!9Ms)Zu~Ed15ky0gVg~^bAN@paYn(7fH#mf$ z{7%OaWaId4a3c=`4n~8r!~oHP^l&l^60*&Z`U`&UfljBh7^E$fa0Rb`>7$tQuye#Q zYE#Hf81*UG18up*!`@b=ufXN~4rKfp>PyCOs>W)UGu#}p(_-CbQq_g~TQ}Eg{&zATZt`oetYN2LO7IZFuNr7=;Gl37(H$cNuG6?E>Xz7 zD(NV7lMhK(o^oLU2rX9^_#7}4b-~pUo2j;6Y0O2&;++sqhQR>Wvnhd%` z1cczRRRi{_=R9G5!GHES>;tMal(KPm%6&9D7 z4m5em!@ZLX%@oq#1FGM1qad;SsHP5x3&L7UqQHaGH6n1BDlc&Tnn0eB+b}X`07qTN z#I}jYXTSg47Cuuyw-4|K^gtlP&!5}cxPESb{@muA?k8LVCO!m`x0K_Ryoew}A}(zH ziZ~@f2g6#3NgzQ26E6CRM8=kF77ahQXIv>OI20WC9whCQs^K^(BEkkZ-{iwIJ_y9 z@eC+HMxzc&qumFxw+<>nj8sj?feyJ{Yj(!T1O%zw5m}LPTN<%UZ7^HKR8V~Bn<Dh)Ev{aLve(_{fpian4>U1CFKZ8=yUGw=yb7%)^42|4+y$tz~-+R8o@^I(z2 zgT5D29clBQa6&sb`9CbF3X%}#{gLMee0lJ($XyO^kHkt9Qp=-+6r_BB#FX9<1GDaJ 
z6`L4ef`QbP>D4=^C+tuM9Y0BCfKNWMx6{iQZ=NqvYX*S8N`;k%M=G&!2*Cwaxvq5K zITK|&=52P@jFy_+n7~iiMA1R^C#;S%DtAFmTP>BQ7k&&p4Q~cv)}slXw7kT^Jh(1Z z=}tAJvf6EM8IT&0iaa{Z6uzVbQ#Br;tpwjxPPh^dI|bUi7DrF1xhFOAEEGZ?SNa2M z6Ivg_i#&|rpbiS^TUka^-x_jd>3zk!(>HI<-=7?wo&EIY^u;fH*Z3s%KpZfg!*C*J z1CZO&St>X{06Q3(9sm;F=NfsRv2;@SvS>hJtYXZDj+f>^91vk)e&cLA+pSiIoq1PC zin=Q7+9US|eqRBo#!w~GsprPL=?nKNqU^7ziHbrVQItqw;{FO>gz}0XM^K!FMkfh- z%=yM+nuW(n0O!xg{1iIyfUYpSB#c#E42ViXZ6Iq&OX0^PD90r7Z0WEA%e?SEN<~ng z_bJZx*kxZF|F82Ul}$DkRkqD2#iFRjl~4|i=}041U4uwii{eW+groULBVAgR#t{yi z>5GV(M7c^P%T_DD00Ke4O>>CvyYX-gLIi0D^eX)7bV$1s17Sf+jfa|eH-PC9nj5Wi z5dUCMw2VbS5uyNh`RlD?nDE}#+^ zSC`l5K9Egx_=`=1h2#UNRwkL#4^z_l1F`kZji-;-I`0C15K{m>?5YHeG@AND9eN|$@+FVN!v%4fes*l)+H1)=#qpaT z;Kc<4(&og!MER6oaN)V0bnnEMVL4p+(JYZ|ICLXMgWl0)DcZhB_&AVD7F%@L0LGH} zxXXLkEpY=$Y6ZM(6-rATS|NhcHKV*seaF2SW|OJ5*?U(Gy{0(_3hDx*fvu(s$mn`u zS5#JW~sts1R#1t_0mDW;ZGX9^YZOKaeXd82oS;#Q?Ed?&>!10v0UYQ6h{iVR7#8h?_Is%0x#>_Sjn0aS1;ed0k`$;2M}yRenQX~yu2hPw!n`8 zlCe>*BbgC%i3>p8f&tEtqJ265>?&nGO#LpO`o?S+B#!_Ec@m_vJ6^H3OWIYN!=psI zCI=DmtzaG~{kt*kX$2vfIItCV)egeQO z^Z=z?oT3yQ`&*>57k5zC*Vm3qP&y#9f>=bQlI{=3M=xKWz3l8D&AsK6q}DeG=fYHg z9J1VN6qnu0STI+#$VJ&)V$y z;o0%oR_mwZ^MAg1d(M73JUu;peSZA%jJ-K!M{i!gI6gmq^BR7A%MM@vi+z9m`o$LW zd?|S=knAlE~bYtq7c>*Ec{Enz9q%F9?b&SL5uWvLKRaO zj`OEG@Q5ozzssfnPG&CCLDK0^s5ZPnTB46_c=q@9=wEp9|876sefp=~-p=+u6x!M8 z{b{@RbZ4*kC$_z^QC2)N%sm32EZa_ga9_y-n+8a-%A5`P2ok!``;x$q`eDu;$monN z&c@#G199fo1Hqe-y2*SAx11PGqQF7)qvrwBPLVB?*}1q`B=`?ld$--ftH@CnBbA3pdOP>)|27^i)&Y?f5Y=S_OM)F6QfSMa`a&!@0C^aGPSwwz(}VxD zMp+D`19k|T9M`gF;yf`HjQoatLa5J!=n}TlftOyRVoylKLS=-<-E+gp8HFA$f09fw zxDisj(;7#(W|mQ;wEQ7LE}PJWbJPxFnCzqr;6{nEHUJj&!I_4atq{#Jh)R#I(XaCe@h<4)zbGo z{^)L6Ur29U*sdv)pqTHZn{qOI?nm5+}4xRw-*Z*xizyIES%&z>&0htBh`44yh zXLzTM>YfAkxcm8YtA)i{ryiMexumXPx`hR|3nZIh24zuI8_Y%?YjcWnG&15g-Pl5# zZH)Y3%Gw}3wORYyH*es-eD8rzloqS~{P47m|NX!1%}v$;NC=$YfA2_h;F=z=%9Lyi zfswtK;R*GO=aYcHjxWPVy?)7Wxa3_luY`98(x#PCroH5fA}KCQYgZV-0fTWn@CSn~ zGZ)cFxWl7Bt-Tg^t=b#La8xmAk{xjzH(J7eunYEy{We3Jh1^%H4ySPW`r1PsBuH&7 
z`Jq#s$T8l>mP}-$dBg_RJCD2vtRnZL=F^BR$T;ZdXd@uc@n6wgR?OZK6Q|}3S3ArM zUoC5yK|XnWO#TJ)eq$6QQT8cDPG6gG4M%Ix3s5brx60{do>qAcqg8?L3fvP5)U4CG zIfJz3^MMAJCO<^=eH40Os^)@pQLm6_x!*g0`y&_9^Ix!bn8MpOd)RoMG!W#lA1WZ& z!L!UP7CO#8sImnhv??^FO;!YSan_t+9T&A+c!8evAX|--tjhzM+>QKP@VhQ&VxU)G zaT8W(=%Y16%6i-8G8F`kI*ARCm|)H`q;2Ml;;nR;Ua+=1o#t$cRiy*=XXskPe4#sy zzvubPLs+|8>>;=A0{j;zB@fX<&ftjIiF#IcROmQM9-;G1>-gEvL~m z3Zl!K?~&b_2pLC7<~7oMA6wmOGQ3LG4Z7C1SWz-l&w{Rl8_yV`=e}=gViPX z(aRtjxPh^_@*nj!^nw+oI$AMW&)RRfK{`h|oZn})9odTPptNOuQHx$y(?g@9iHOh< zN3a8u8LPUulg>aRbw@p*J8GxdoK1x*nQG6Nxx||Q1stKVSqu&QchA}Bb++5Zoxhw8 zoK4jr?cS`-yX;=|&=arMHqlyEw!Jc32_HH!@Mc)b`Q0@|*3+eNl|_SShOf*e)%oxZ zNd>D!-7t>AU!y@oq{-5^Nm~OH*P5Y97F`{BG8@W<7}6>u^rY%^sTLGrQSekPgQ%&xQhSEqHzuH73Ryp!A8+Dc!$+%8VJJ)-4&>IbR- zwB>AbM*#Eefz1dEZHbV~!t9KP8_1$OwSWKoxy_g|2Xtgu1_TR(b7g&#b7bsYdLKz4 zMt)tMJ3Nn!vL&Kh4`YDrQbP@8&zm}SwCd|4FeOtMIzQ?n?hw@pfU_GLOzyl z?x7D<`1qV0fnskGT~S<4PdK)?sT)L>j}nG1Hj`Osn~leh8;T#?sVdQ6HE3DGO#AnZ zt9RR-zx}fL_}%~e`IpC^KR*PTHqgU9!R()&?`+TKPbG{t0Af1nZ_eq8uoFrq5FXa~w>8%F`c-nt;{%>iLgL0AAqEMDoYt)`_2 zi9Rf)hquf~5e1v(6&LPwr_ydvxvi4hu!uRA?+soF28W)_yr#o`Kmei(tu$^vGUz^W zaokC_kfri?*PQg=@9QXhgbssobS+sw_A8)7|8DhH*58Rsj->K(rIgp#p)}L&=TSx1 zaKA>dPvNy)5exdw1zK1Z>ldzmj+3{-#ZuQlJ|q|gPNC2C&!KcIK4ULWPv4yK(9pCV zMU-k8NGT>PWa~G#$Tcrar(NFZGa)G%F98gef3i@h-W7DjBZSAE9*bm)psas{&(MO+=sWgKY2}PJSH< zQ`%AUBFc-INfsF5pL3PO?Ws|$NRXcxX7ssR_gVMPd{2%1#{*igSN_}Eds6WK+}+u| zm;Y|#nM3~5j{H2%mx!+_EC~wYhoBw$2V@Gyr49Od3al=(nvwDkw9HGhHov`-B1PG5 zZ1$k!G#PIlWhUGK9o47Gysj6P(c-PeRhW5I0VDpKrSOe*P}4Q0_KQq$H_2O&5Q#FZ zID}^#XL9SXJ@*L4B!MK!edt2q-b~ZDr zHt(pLkhlSrCS+>wkthi~#A%+^bP5!9$vR2tIFeLl>ERjb`f%Qiw~mz%&0j5}Ymp|Q zQv_;yOMn1oHgr2Ot(i@lk33oqa*!GwQ!mmxob0-Da2Q~$j{ZG7L~<{EaneR{Snl2x zL|#}KD<}2bp|FO$E%4M6x%es%cF2?E-^N7C66MAvVoI&$R83-Pp9IGf`y6U&MM+uq}y(n%XLZJvvsQ4;=iEo%y zBKszO<@J*%nl@5vQz;@H+FdC|xt1G2l$2xFYhypEcJ~;wi_t3zRJMVJWHSuGw<*Ku zh-wb{S<7I@DR8Ppg^^xce+9!N-?!U$`O0%6;z!s5OKPm^IW8`z|Zn( zO)hsOPGEA!(hWO5IqF=bUH!sVTzxldvq8zs|FyFVfVkoPW;AZIR@f|-DmN|Q9cVMp 
zw*j3LvhO@kiT0glHh&Hh-wsrK4oibPMM}0c;q@;pwc`Y+m!`5^=tKGeSOG9BSp}+O zgFbqz=oL1zZx=!2ru%#O@3K20)nbRAki;xIW%a4kJOi-2XUKuD{O zXyYWG)&^dEo~!6p-coW_HFlHly&GmU-OeLIq_N>wAu+P~M%7etjbE+MX5x?Z)X4wB zPj9XI|6Z>s|L<l zjq)n=UDPS|C$k`OF-P~Lw3p|sWz-qmm^EbPc-RoZ_qHyV^cQ(v7r<%Jv(^mX$*w_Efo=;WVRyK{%f1H-?tYZ zl&cVTF~fWfzs=l$r^p7rO@nn>KY6@$DxYE>2ziz}neuFDJ9&0$#J z@%jwQk|qqcr%ea*eHsz@mFc*Kdp}cf5#Gr-9Dx_mR^uEC77mh17Ltzonm|itF6*_n;=GLiTHQvq*V4ez%)_A}35L@*Lxt0jCj7VQ4EtVgpIW=kt zpEAOfF=DVAp^DDR3A;RAl9%~WM7ogFlP}!(; z_4vQ3$qK!8axG z7_3rWsGSM_2AkT^EQ2hS7zwUHzSe-9PHxnnRTq4@raJ7U@uI7I58h0QKGUSp!WLGzp2$n4=B=gLcP$7qntqnwaKu(utT*Rrp(7kU z^K_r>u4GA5xOvR(R(SnT-(w}rj(txnZvLf3bwD!=$l==s&phAx;N3K0I}~AT39iE< zl)-;Vu)Ak1x5HY{(bwt{CXF(rH?ZUwRJDGA(`+g##Rkw|kb2V>u4Ubrfd>mK$E;zR zwG~^$9$&$6T^fpOmUjpX`V)@4k5#oEDxk}a=W3UmY@&X(DJMQk01kr&#*MT^erf@x z1iulCFL)o+VRqntdZUxV&PAnWFPUFe54{vpmSa9nMud-;vUn=j-0BskZakp z7QbeEnjMi89Y=ZhaRGB$uJ&Dcyh0^;pfB8st>ZLY*@^kRvuQIBW79^7BnhzzIrPTl1~SISh!@dj?|Ff_kQ}qQY<_q|(&rL|dNucmZ~bliR06 zp9~d1XlzR`+8RU_BGtVv@}Q8@??Mp|ilgcj=jKO?yF3p=U)FJM{UN+y8?))OSn89A9q_(gwr;6> zGW7id4=$^Kp=eI|zJD|t z$r#SGfoENMcBWGpI3fy=SOBl0NxK27Atq0Cf-N`6ycVK`n`O7o%rT$mZM1%LM%lu|Q^ctirPlJ-aSWE6_9;K(4Fos3SfQjHL9@8h4D3`pv2E z63uIKgi~mtnv4xo?D1K3rAiek@vUMl>9gSdMrvTA$YxHhB=9jnvkl^e@De>=?7-+A;hy!z3N zWf~{%Bl_ftyk5M4&!3+?+&~|SVHyx}`7QgS<+W*hKD98Khw$!D;3<|?l;)P*VkHP^~2 zG@R*ia5Um#N1H*Xo!!2R6(}xY+FE7WhSE2KUn$RCPT{VKiP=;=6>jadcJw)x=bI@v z9Y)=qcg+nm!qkt&rD$})9{o9aBs6z;7b^4BJ*>W?$O6I6&?IIs5>v?;X_u z89C$(Mx5epQl(B?EIM{K4%0&)v(4@UIwDwTjPMThn!>=<>IrG!t!UbO>E$ z!AUnoOu2mY)=&Ii^jp}gLClPQ{(1D*ho6$MyYpoKpv}Y0{d9Qx`uO#C2kbN(J;0cP z$Pc^c28!fb;MmWf5AY3^`TTiviwR*PRI$%_Kqr2E9Na+nI80Ptkmr+o75SqgwnfI1 zDWzSZ2;?Bo`_Ke8npM9DTid>KV{e?jd;zoLNjv+)4E)g<0cT4- z`;`_~Hp^5;8K!1ENv9b%$?9Sl!6uJALL%-z{gEtOe0z!L7n* zPng3sYKWK3wJ;jIO}zLd_R!&_Ni*mg!Yk!klGby&U0rr6%IueOeVQg-BvxSTDpD!r z?fxkOVcv&??pQbFM}ZI?l+5%LC;|K&1LIh^C3&^gJQjk5;g*^QT8Ot@2Uw;7D}J-v z`ehd|^*mq#)a_3T`I2P27IQ+~KF?~NSMJn)IQ!@az6)a7#Vq77h``Ac*cg_-_s>#L;t!m=M 
z%FTu{-`q>?W}o!tlOfw_FblepuzW`3z$$QWXd999m6$!-bluD1U0R3p`JM~s`c@z% z2qi0su#k6t_34ACne+TAvrVV@7pV189#&H3-?{?3!!`LFl% z>HYcdR-Q%9f3J`bUOZp<=}!{aojLo-ftH^9X!R_2>XUt}=gen8>0Bp1Szk1V3BwmY z^yS*JANkyPIQCJKI*YB>X?cLO;`x>U*7Boct{vQHu|mnQ5jD~!N5@$E)HZsxw1eBP zG&9lKn2Fc5_3MwX)%DTa2V+zNiIfeeSfbeK9X%4~XQ}j9Y!oa%8Veff;1sX;bZkIi zj)s8L31tzd)B?fTPROQVq@H2NfI+t?Uy~9-V!DXrm8v1~_V)Vlc>)QY`?R&S(5^s2 z$)YN!K1=xqM3~iD1Db3wRb%WwMUm5F2V9vhXNAH*6Sx;p=E!C(?Jc6nY1qt))i`nx zbIkjKDx9)bmQ>)#X79NAMrWtr2j!ec=7x% zv3L3R)hmGnmE9dBkUrXSO%MlrNnMcM)tVGSMfA?65EAuQq7X9MDp3ekt8vUGO4K;D zN~PSO%9ToYO(BHwnb6}WSAQg4-4ce0`bGiq?*zixM>yy?$9v zX7lJvbSP@hqHEB8aUFyX!ktu$30C(Xpcdm5^hLHzud1DTWgJpgZpb-U)~}Px?PzyO zCkMhx1C?CHmgUXZRcwe#fO{_A$0Mf86s zoDk0b&krlt{%JIJXB2=LOy%motjuy6K&`L!=>O~po=XL&HL;`{kfkN74p5z1ReAl8 zTi+@Q1oCZ?>E>$s&}x$AvF+o@tPD`v~p2gTy$M`z30SdoiS5eidh zBrv2+wF`VDgTPaBDoKh%5Qf(~}oJm(`n6v)H^jh^h=zN-v`u4Pqjw6b& z){GmL5@^1Y`U?~LK~ILT+Q`qT28&%!wP_Bg;xJg+$Gi=%SRJMfaZZX^Oet2;(2{yF zeX_m|)mT9!oO23F#L6{lXI6VWXgs2|aF49$TXoxA&p}95E-uXqx@Gzrn-)aIY*z@3 zTeYf^VJT@Bb|HI@Yv3fhu(}v4F~!}ln2N3ASsi!N5j`iT%ePw7)Qb6?-rIC>mS;WN ze#PWcI;I+uDItkzZkg-!P*v{BsG;Z3|H6U7Unme@jsNfd?)FaJ|99^`{@?99twj{T zr$`BZNin_J>J30SncW%fFM(68_@&Rw>3n$)>rwX>kl9Qz%d0ih$(Gt?8jWBeU1v&V za}80@K{xjQ?TvoK#5{H-oza+9#kt~j+B^lexgW6Z-tD+$_V;GNy_Kw##*>YU~KV>{>{ErqWKcbIVa%g=wB%vRu1uvf$E= z=eEbwd;j`X*L8|0&S0zdYWt=zpZdw(#*$|<-sqiFI`JKk=Mj)cF+Tdnm?oW!*f`gC z%37HeQoik%PcyHh&sjC$_&FyPr4w-9{?}hCW*$X@1t6f+Pc?QtV+^) zZ>zPQXO8o~zOBur1*kdy@9pny=k33qZr}TV-paGc@n0N9Vl)=l!C&-9?i-XcSt9KaJtZ zqLctd8=s~|#t}~G{BhyTvnb8Alh;@-nxZRPab>=nT%eW%(==4=wyt%;ymzwHrVO~I zy(!*yUls1T^=y^69K=>;6oKI!&EnV#GnHc7EFx4^l5-gO6bn7gl@?R0p9dp5!Lak} zusq~NIB6gJ9R3_iYs5b%KZllcPDwgX$5c#Z7xm`(pEplQ{R(VNCFpTSBlf6+j-Ct; zThAV1alr);3c#I%XRwSkvf0<(Ndy)!aW_siHu>Kfd%?uN41uStq>mc;fBQ*3{!8!4 z?#}ML{C^wI19sx3$f+oqFQ0VWc0xA5EV0PNr|$3r?DL7!dH~xYrmC7vG3_n<0fZ8e z%?q2j>2M6Cw^;1K+0nle+h+D2)(bQcdY9ZvdV?z_gf%MruT6)&34Y%*B(29zay^Cr4%`p$-*f_h|9?;vn42HX5_bIJC3HBAqhGyY+H(An=XQBH 
zc-3;Q63n^V{fcxFw)KDd^6$cHZ>bLxv*&;q~Bn@c{VE4}X7g+G_RteUz+QBMo`@B%39nU1J8ez8)J5qTLFNEkj!cvu+_{(q0k?&u`)pZja>GR< z)`r;#BKA`h!&!&}0cbi7QMJ<=rr8ROMaM+dj( z^#1l&ctzvgb#|QXzw$bG$n_8*ND_G42b6Qo^G6N>p3s?JkVsN+uUtQ%Z{p-9ma?D` zvVIvxQ;$xDBmd&UBdHz4@C?)k?C=7PXs{V0pq~&b+{S?jL&O05sXw0pb;&UH>6pkG zKcCpkk5is*^EjN&()tRmPo3(g?)$2$0VJ$1mb|Ml<%A$NI7xJo_F-^5Q#Y(xe8KRO z!08P?|N1s;L9@Df^|x={w1u7}%d`Fb@U)Hp{lD$aKC6NRR?WzO1Yg`XBvV*s93=&u zz$Xbv7yky@vH_+bfySpGS%#^CC({u1O*$!qgrlT@ll4JDi|NRnx&uFGxST8tWuf)e z{IRXX`o*^VGFn9k`eY9^JUhtb#eUw;4_C}Saz_vxk<)9eSY9k|@Xc}<_{bln?-Aa5 z3qu2~2h?++0wVEJ*hv5)r_c8^CNsg8!a0ni@YiVIWa3BP1}AJMaWsv6Qs=6owB5)S z7MS-r;LJAhJg80K)LK|0oNyFKJ&?d)Dsq4}g;Q@~eQ*M8p@Ys6^DbMM51I4A8N<=q z0^}TUo<2W30{J}wsc7iU1MZybXk-VoF`P%IFBU-Nyl_q<5DRZ?pf-gQb|{e6W-;te zqOoIZ4#5L}j??{xkpw(Q&EHY`km4YSm!PmH`<5wS-w8V-*v-Yl6eQ0?Ef5fJVe(57-1Z~Bj1Nzam83l1 zw98eSc=44#^rQt?6t&g6lZY(625#II%ow|TCqa{J5&H%i11gwGe6F!UHU1!oF1tJ1 zy}xz0pLToSbm1t{(P}3hKkW26z5TAn)^Ebp{isyna5QB$q{{Qf32>70o2lBXaDXRh zi;_qO4spPkih|PqO5L@?l3n_-9Xam!Q-xG9(MJIYqqwv8%aH2l3a%8ne-NbN{nRUd zb%(Z(UV=(@Ub=`>bq>H<9dY9a!WJwu;bDX81h2gk|1uQi3il zHliysB;x2@SXiu3x=?|td0R?gv&>x8#4ROL)J+|?GvFLk!GC(JXQ4N z>cbLmU{j9)L;c#xGmRRs{88+2Q9|B|;Kv*G+YHX*fbJ1$1qqCz)~{J-1hybZUX}(m z4Q9za!bkZj8S(BHm)W{ubkPN$beV9?HW8mwF@RYGb25no*Kov&$#x;Eo4x?}{vt zmD7(ztwbNM@330*+3iifrbq6>X%qv!tmCq?`#Q68=2dx*@?^~3DWz_?AEm5?I$;4*gs<(8@3L0K}6W&wuQ!H#)U z71_;}I#gVsIt54$4nd;w&^UDTflUJe<7q@t?OSwyYjEAL#yf_U5qbj!{~AedWlig} zXK{?|6Uv^WM-}tJD<9;93HNu5eX_SE0i9o@yIf?L)98hhtj$HbKi_QRhLLlzR7zI5 zMvS?Ye4=dLrA)$IzUQF>V;CiZiyWPJOPzG&OY>qbKa8AnFBehMj2B$Md!^%m^)^8N zE-@iKl%sIr=+8FKFg_UZCit&;gfHtb0D2t64}PNCMJGTsF$lfc5D;SWvv7sa^hgX9 zcnL2n&VwW@lNcVbFbX@ev_6cYPz^78=HN(_whd2ktsgU2?F9^(zr2yt9p2k-`&q!i z`^!rXU%LN0^yrMGCW1mgn{>H?t73wzljsb&tWhi7Ch z=Z`~%IjNDK-efj$TbgqfH(|TXZqmomwcIwft<+&>c$oQ7j|2qA85tde#<|IA=FbdE z(^vS@=1&$WkL85Mf3etGU|)wp1LNZsd6dVL4E0pplX>QEHNlYj%}{kGpSt9tk;?yKBvN)R7#E)v`&z57cKll7;SvFTPg~j>P(!zb& zDbzAN+{!H0%wH~OdSBvizWFnPBIoN~~fF*W~*_(If 
zj$|t<;+@6$Q*eIaIaF}m);!kjd?Lm7k~^QCUVe?+9|!hJZ+{$$+ui=?K>w2)hj;^7 zdzx=>a$o9jU3(mDaP(WwyF?r=n z(4XJ800Tap0S0{_C!a8w49DKYO*AhSY$}Uc5(^V43@?H)Uk|WB00(}KvgtaIc2%;t z7L$Z0oa^|(6w=BZ6bi+$O{Mc#% z^6Hx9?La}jnV!?zcY2+XH}KuC)7$RE`@5|cdSr07#kxib*%tsPZ~jD8RDse-E&Av% zxOQ)nN2p;_a3Ibek%L0|Xp4tDM~g@-ethvdN>4x*f`#;m!z0X%GJxDTc5mR9pL#rq zWi_NS24ir;5(#w38$lhQ0$)0aY-EXI4POeU)dFnIG;|MmJoy4@_*kUpB(W>vZh`;7 z!!JnPz!qWK@k5~I3AfVb0f7>Il6a(fr0v6yj>#@w{#^@JdC+>G$Wo`c2S&snlU^Pm z{!3>60n?_R=`#VJ_|OGJBJR!urrkPbD@$Y%sKt{Ez0b=t?atECk$`w?wZzRTU7FU> z=67Qds3~4&24Kp5jDp$3Gp}E_&@oTYUc1xYVtO~PDO6FrEyBNmxQB2iY;=PN9rEBp zXioC4}j!r#y^ivG$;0;Yc;{Ut0vok0& zM-Sh8=ff&=({Ww9Cn8AfAe+dM(UOWURYlH{boit{Fs6g!^#Rimn?nGUM+^|q59q!^ z>sHaMX-kxmEk&lP8KoY?Rhl44i!yz%lN-_c2k+*UJH?=12l=+ddlmnSw<)9myuP4I zF`H#7e)PjKAUOQ|3a1j;JQSA#^#Rj?eE?ilN|jhh=4JWbyHTh_T+)CMuo-fypM)1I z1>^;oLf}Fe3z60$Ntk*>D?e0MX0Y-i7$63lO0a=zH>5?0ibVVuT95hdFmSfSCl~e` z-Ytm`W6y{0Ic#7=CONwgGEUwVxT5zUzSzT;39Kw0yNn5=Z@xnTUYl3tQ}|};{syZ+ z_MXiKG?U%R3&H;r8|?1^?^GGTF_5l4=+QLDM(j9_h*Il?R|hRANy>O~Cx>TeKfO79 z(aMC(D|vl*puJ=Q(Uh&|!u(Q!F5O;J#ThQM6)a8a^-u&J)*qnY${s&xfsLDXD6cV9 zRxIch@dwz|qFxhPb1TYR{B^5|8LC{dz+S&Oe|hGlA32|bu_v&VE|Xtid;4}X3~cun z$T83`$v|yZsim_G(AZfQMU$syW`&b$g{{xA7ndXWp3cbXe6&qn+T>z z%otDFApZg_h@jyVX*oAl&)V&+95&$<(fp%a(T1U%^Bp{tOZ`1Ikqhr!9-_LX)*b9v z5rD5X^X%aUIv5VqfGW$7?_|Z0OkF^tbbZLEfpsGGYZ8SB-m~`a4)Q!lqr=Z_WN7P+m`kzZ z=(vF4n!{lf1K`rkKMTot;kl?8NN~n^xJEwIT`o^iE4V6!08EA=sw*)9G<@*_boaY- z?!AHL3=D5scIm2?X*;_G|lRid+Ck8@j7~H_|3ZSP-J2bpqiF>UfoLL^Zm4haYVARvt ztB!grqunA2K;vRi+&pQM&cGM}6=(`WeiD|llP^~aA$I$Euq18=2B>BdELv8tm}gBqnacKILa&DS1C8#M0jXn1W|#QC4B8a(l;M zItCFi^@p^eNZcJqlL)WDaptex8$<0XW3g&+U6I6&kWfc>aZY4Hk!G?GGqgy2MMj*s zj)mGmNYOH4Os*_9`_hc-$s>Uif(%nD*3A~R#|=8eM}QM#Uv7vtn+e(vH|~tYoN zI!%Uf?u-3Mg#uK5$*6$QYmXu?!R{r)lB*FQ!FkNU$VR~}ug$fl4~?%G1UspM+eC)g zPh#%^)9-OM^)9-&@JRtl#efQDc*>z!t6n=-JMXV{-%tEYKE>S!>dy`^fDW2gB%5ND zh))s;-0(n$Qd+oj#?2OsDHPnAc}^oHk?{L2vj z{5$SmJ94$OTB6*D&Ct%6M_!m>_Ac4N7L&2ncARatfd$&)Q+xC8Eu@zTEJY-}8CkLT z3189P$Duc+6^~&{1D_wExvYTr0CZemUpp>A 
z=|u5mH{hj`?hnUDFJGU%?CjvS{Wc6xGo&_1V9X*mG6gVpukoV9y^KB1)uHd}*iR|l z0EUUZc40}3P(SDoW~qT72^EY~FM!NAWbMNsxmC`d{q(r4cx zkew?{KB;B>A&e-zoS|~=5@Z#AVKDXL3D6#KK1`ty_!D3H$rpy>d_|cD?w|YT{`u0+ N{|~VGlH>qb1OQG3a54Y@ literal 0 HcmV?d00001 diff --git a/pkg/iac/scanners/helm/test/testdata/nope.tgz b/pkg/iac/scanners/helm/test/testdata/nope.tgz new file mode 100644 index 0000000000000000000000000000000000000000..a47332d93877da0074012f36f0100b4f8c7decb4 GIT binary patch literal 114 zcmb2|=3oE==C|hzg_;Z)SPs~z>K6zpJhT=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} + {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} + {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} + {{- end }} +{{- end }} +{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 +{{- else -}} +apiVersion: extensions/v1beta1 +{{- end }} +kind: Ingress +metadata: + name: {{ $fullName }} + labels: + {{- include "testchart.labels" . | nindent 4 }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} + ingressClassName: {{ .Values.ingress.className }} + {{- end }} + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . 
| quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .Values.ingress.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} + pathType: {{ .pathType }} + {{- end }} + backend: + {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} + {{- else }} + serviceName: {{ $fullName }} + servicePort: {{ $svcPort }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml new file mode 100644 index 000000000000..86baf148215d --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/testchart/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "testchart.fullname" . }} + labels: + {{- include "testchart.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "testchart.selectorLabels" . | nindent 4 }} diff --git a/pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml new file mode 100644 index 000000000000..f728deb2a6bb --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/testchart/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "testchart.serviceAccountName" . }} + labels: + {{- include "testchart.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }} + {{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml new file mode 100644 index 000000000000..a391ef1c462f --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/testchart/templates/tests/test-connection.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Pod +metadata: + name: "{{ include "testchart.fullname" . }}-test-connection" + labels: + {{- include "testchart.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": test +spec: + containers: + - name: wget + image: busybox + command: ['wget'] + args: ['{{ include "testchart.fullname" . }}:{{ .Values.service.port }}'] + restartPolicy: Never diff --git a/pkg/iac/scanners/helm/test/testdata/testchart/values.yaml b/pkg/iac/scanners/helm/test/testdata/testchart/values.yaml new file mode 100644 index 000000000000..4acdf3c931bd --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/testchart/values.yaml @@ -0,0 +1,86 @@ +# Default values for testchart. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +replicaCount: 1 + +image: + repository: nginx + pullPolicy: IfNotPresent + # Overrides the image tag whose default is the chart appVersion. + tag: "" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + # Specifies whether a service account should be created + create: true + # Annotations to add to the service account + annotations: {} + # The name of the service account to use. 
+ # If not set and create is true, a name is generated using the fullname template + name: "" + +podAnnotations: {} + +podSecurityContext: + {} + # fsGroup: 2000 + +securityContext: + {} + # capabilities: + # drop: + # - ALL + # readOnlyRootFilesystem: true + # runAsNonRoot: true + # runAsUser: 1000 + +service: + type: ClusterIP + port: 80 + +ingress: + enabled: false + className: "" + annotations: + {} + # kubernetes.io/ingress.class: nginx + # kubernetes.io/tls-acme: "true" + hosts: + - host: chart-example.local + paths: + - path: / + pathType: ImplementationSpecific + tls: [] + # - secretName: chart-example-tls + # hosts: + # - chart-example.local + +resources: + {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + +nodeSelector: {} + +tolerations: [] + +affinity: {} diff --git a/pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore b/pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore new file mode 100644 index 000000000000..0e8a0eb36f4c --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-api-version/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml new file mode 100644 index 000000000000..22dab35d32f4 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-api-version/Chart.yaml @@ -0,0 +1,24 @@ +apiVersion: v2 +name: with-api-version +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. 
+appVersion: "1.16.0" diff --git a/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl new file mode 100644 index 000000000000..cab726131dc5 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/_helpers.tpl @@ -0,0 +1,62 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "with-api-version.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "with-api-version.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "with-api-version.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "with-api-version.labels" -}} +helm.sh/chart: {{ include "with-api-version.chart" . }} +{{ include "with-api-version.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "with-api-version.selectorLabels" -}} +app.kubernetes.io/name: {{ include "with-api-version.name" . 
}} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "with-api-version.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "with-api-version.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml new file mode 100644 index 000000000000..a0a54cbc232b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-api-version/templates/pdb.yaml @@ -0,0 +1,11 @@ +apiVersion: {{ $.Capabilities.APIVersions.Has "policy/v1/PodDisruptionBudget" | ternary "policy/v1" "policy/v1beta1" }} +kind: PodDisruptionBudget +metadata: + name: {{ include "with-api-version.fullname" . }} + labels: + {{- include "with-api-version.labels" . | nindent 4 }} +spec: + selector: + matchLabels: + {{- include "with-api-version.selectorLabels" . | nindent 6 }} + maxUnavailable: 0 diff --git a/pkg/iac/scanners/helm/test/testdata/with-api-version/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-api-version/values.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore new file mode 100644 index 000000000000..50af03172541 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml new file mode 100644 index 000000000000..bd163a944cae --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/Chart.yaml @@ -0,0 +1,14 @@ +apiVersion: v2 +name: with-tarred-dep +description: Test With Tarred Dependencies +type: application +version: 0.1.1 +appVersion: "1.0" +sources: + - https://github.com/test/with-tarred-dep +dependencies: + - name: common + repository: https://charts.bitnami.com/bitnami + tags: + - bitnami-common + version: 1.16.1 diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE new file mode 100644 index 000000000000..261eeb9e9f8b --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/charts/common-1.16.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..6a2df2e15b934a29b7c000cbd1271dff601e65a9 GIT binary patch literal 14613 zcmV+wIqJqAiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PKBhciT9U=zP|%s7t3eb|#`EJ8@<_o4xBenHit%PJHcja%Q{d zwjmOd(53)30LsxMxxf7$yh!k+hb=#{Yko*%5-1c3RfR&KP>9%c%4qLsg18LlXqx5Br4CsXdvHi2tE~ z?Y63&`$ir_DU(PNM#Vt~AVLz48J{1(F&%LvB+nw5aoj;E`GC0~j2^&jw}a9&|8o!y z_QQTB#vi3r79SEopISY~Zo_e}<9_iyN8%mgxWf zV1K`=|6lAq=>J=Jwzi<62Di50SSwqOf6oZVu~M@~$GxN1AQ?*pAR0r;S%ihaB2?x7 zQ(TY~@Fz4)6Z}E3QfF(cBn1<2L=g&ZBlw>i6z;IXSv6QMMLIogz?U+Oof(twkAo%OMcL4-u z9tvPPAwoj%>hLsB|Nd97vjg3)UppF`&!4-d!KE#zDk;Y!%sGx^PbhR24TJtv-wj!Wm9*BpH8?`P34;`z?*-g3-Iiz~y76ggt=3 zNXw0kpcy>VCo%&3JA|rH*QV{c{<@mh2t9x}5x5N1@XI<-O^p&%OYa4=*BUTov7>)a z{ic$vKOQNIEBY{E)09z6Wj+@8ljP_}#k4=5Ax=a9;Rj=|SHGy*RN_xEfJZBjlZS%q z_M;pLIC!b5qAZaR1P~S<^0GA~yehg+J5WdT>Z-Y*x)N$9c!>h*q`i>FY0>0?9^CEf zSm=r(oR-LIskj4QfD#(3?hg88F|`a$nS^aTP37Dp+D^47q?}MWf&isyc##b;r&wYU z64uKr9l&=Y@F`zw&eQKzJ@e=S&!560EwzgN^+Bm(^vEbUt?X>QMs-wcn4&2bDT;7O zZ#HnbE*R#;3$oT5k}PEj8_&O2+P6%f2@`64g4dkZHYc2=N$zfhvzP^MPdL|~Z@bMx zxH%F0lh2te%ir~-uR->%D0wT(U8Tvo%w137R*|;_61Fknx*bErw6p;`#G5HMXNGtZ zzRU{Q{Q9rH{*O?Kh9udb2Vn8~e=yj6{;ab8-|s(v_OSlHjpqvMe>uS*5g%g--{+bG 
z?CwCTIF{H-X&k$MR7`7Rl+T>wgbh(rlAr2FABJCWL8-1+OpC1dx0@2fBOlN2i`t=s z;u>g$?8~ifWiu_Tv^G(rgVL0CkLqKYY0q}z1d)hB^m@}vbL2CHbyIo z-RaH%YpPGx{4Fu1GXj`ZjrM zs0_5#Nac!djyz@$dy46*!sk_3nMi|Yx6V{3O~pnDB_xt~lqF(iHRad1O;Xve-vyBf z9dj(iiUe|{TZrQ(A6sm}Cl5kiYK0=sSiT#EZEpC_*KW8;oo+-6KTz$q=dRONheyD% z4We05s`wB^Yv`Nn&-bRmOimal|Iz_mYmsnK_05s;==Alf1dMpR2K}zjdMG6Y+pk5s zmYSQR+TqFZ8Rjz*;T36zQsOGCPQv~sNXYS+2+7yyzt*~&Bjeu{YcncQ0w*kn!%Q*} zp#<|4sX9r~8W!o^y5!9LoBj-|D7RaqmBpOUac{P(i!32-twI}FmsVbVI0Q(Z>8V1E zOD1q^Vn3~80EmFSaPN;{=MXN@93+Eif}@L;(wE3YdlD)+i?ayFkWopJyue<#7xv+j z$Vp{It-x3+KZVN)i6)>(jR+NhD4;mj&PkL&@cHZ403Z)?4Ldg4RG~$L-!>9}fa5X# zq#e6JsE`=Nz((2)J8H)PzJ7%vj!-7BflTOFv#09!1vUY)t}LPO?|b2%ntifFDhvQY zK&Zrggd(gdGGZBx1Hha!#mP_JGZyf)OE!5;lG;U`i;NX0&NEo;+F6 zsLs-qaVb1pOY-1RCiuKP`?=r!^FMc<{QQ5v{`1M#ua5%QE|CYmfMjQqD@s^Ty@I3J zXA!sEwXf{I<5TzlYR1}F?AITb$A9e&o((Gg-)ArO_8$DdxACmz|J7>#{e-h@JTaQb zD4HxCa}+B%&V>5c?8Bu<2ptY@V`D&0kOT!%E3hFp<-$(qCxOQJz-&~`P(osD47^bd z^!sq|vbJqf6cRSFCR7b4IMg8X?Iiu}d%cU4A~^{q3msPdegJ{M5yvwB@fJ;S0Ktgh zB-T|-(QdyV6!UWp!@*5qI9LP2L5^V!6)VYRkZ=xO)G_h9m2b5*j7XgXJLOGnWfPpb zxM9~Iq=+LEebt3(YtRKd6?DPX4B-cb02_Iw)ih1$SSZ6m&v(k^c&a8JqN9}!>k$c; zj6RkiC!AfnNm8}zHbT2_l>aK)J3$;%=?b+Qh|5S?Y~NK$r8nMxV}$CB`Z^W-%}GS# zhWgH83{P=_k-*`x_{rc=+JAVYakMl&cnQHT^yKjD?Bl!B*JtqN^z_~70sMnyMjlhn zW+cWiXBmem{u~80qhAE(t#2h$Q#uZ8u z-S?-!k{BY6V@xGMiGV4ZD+Zt#$Hp*|6FhfsFL0`CMo5*BBBbigj3wC=LzEyg6&peO zuNjvU%z@Am<5R<(5u&PnHq=LXVn8xKRui*Gl+M+RF~uAuy$QSQO4gH{Wy1uE31hN5 z!O7GF$#;nm85X^*qVrwDe%;FnNpLqG7hB$rjJQG3$_>_5-zl5&bAz$~b^ic8Nj*J~DCU=GKcA@V+-X=@l1XaU4h*1}6bX{yGp^fqi{#PVOAQq!hWrE2y|dP|6q5p4UB zqaK(YJswrE98@qZHLP24N4NFCjtU+nG&aZoSS=!vR7%ILw3RvRrt$?$UXzTCvwCGw zxP-kM@LA5}_N!aiDV^+omj>FEXbL)utw0|HXp6_9CST?3}$Wv4&@p-(@zPq1@e z#O&6)bB~1euFbWLKMnT34c*w>0${2AKiI9F|Lwnc@c-V*vrhc)-?ZOCVi=<6LP-Rs zYD&@$93qtQl_E)o2{zkuC`tSP;lq<-xBDb~*Kd7}vJ7Zj$+d|vTin(YYQNgQ?6(uf zW%14aRrODvc-}}4X6}d#_3R341c(CuM1-DTZAsU6zqV>8uiZPQG6elfsTkO19B4Kj zV!oq5stMj!3*lJ8B}oz(VvrnBp(HQ{$si^pef(I$98D95vC_f{i`?CbtUnYOS7;I{ 
z@fg=!ov#()d_U*=3C=&eO-LvExjDgTy92Z~IQhEB+kHQHxnw82DF&@^L!5Eg#=n<< zH$^M$)JwI87MTXTkvG$T&kDtNHISw6dSt8XAp1=pI&K39kSbXILQsj?v4Fq~Wl(Fw zo&0G;pcKh&N#y_%D$CGzif^NYA-Vso`rS3BS+(Ba7sX-?Gv(q{-H8b)kX)GoHZnlz z0cA42pQjk~eum;_yTgp*6%>QpjXtqZ0yBp%W7&E&kI}XpB4qxwc7#kVL}r8%ffomB z7~)(l%EET-C_zGKZF4Pp9#^2PN!NPy#Q{j`llR7{n-e#}Tz033qOsnQup3%lvNWwd z-~Vt1f{ZDSJE{U1kx1E4pt`80Q(_i$mN0(x&M6)hX}$c`l%|SVBzl<|x1J?(Zz~_r zt`BDU;40WZ8qjJQz(N*lJr7cgH_qv4uj7l?a|>E^yOc&%O_2B=&!Gf5s1hPdG9fWH zTOnMj`IV~W#XWuX5M9#v)a`$J829%4|ARrz{(rXr{P~0Ze;dyl{(sMw7ZTAJ@z@;r z7Om>Q@{%iVbU^(Oc5dUepJ%pn(F7;cP)tnj!Ua|IcU!04p59=lk+rMbcDggv>gA1U ziMzvjxrPn8ZT9Hd!{3r=&ora<&`&5*9EHN3k@wHM>urBAi^?UI(YADG{<`E{msAh> z^IL!F`rk0c2G##k|Ihy3^Q!)T{t*9nJI@;W|IMdVX((mUQtKemfyO#@bgP>3bxgM? zVvj=Wx>}n!^HY_4H^ZEh7{eE3w$TXo_w-LPJ+({yp)ePD2)bZX?0e;kRijz9BxFjg6G~2qfNe{v*Kf~M zS?vi+akLY5jz=yQ%mB%Q>^<6(!oj8KuEXdR@)q00G*<;{3rWW(?qkj?-O3TfT7i}5 z?k^!9Q5zAkGQWCw=HxI?%2Q{KYw7LU2v)hU1kJR&4zn#@AunY@!Ygl_F2Knb7O&9-60@+>2E-;OPr0d8 zMW-6~S{9+kuh$l(MyQ(>sly~;mwr@jk>|;VIWt3^7@H$)(=3ssTNEnN0_d|s5t|Nm z^r_3AzkE`*d7>H=Z2q-X!?u?wk+wubgZCQRrfsUz2f1uavL7(~SQJEDb%JT4M1>)0 ze#+_;74Ec^@zZhbti?UIVE@(ae{MC z?w~sxv;A8pq}@EuiNB+(3{iA(iFjOWz>|c?d8je?BcUU9y?ar%T>nkWnyR${@lWv- zlr;AU=>9XB%+>62iD80fsv9%RQBtL;P@3J(j&sxd*<~KBnJ~-MereY_pFcis4FE$Sm$P{4J+*%YYi zoeMl)N$uz)lGo5WCJE&Q)^W_igKD`Q)pCL7%|`;?dOjyr$Y~5+b-94+(agD20XTSR zlDwA$w?S^osb*`EQ?5}CYpg?5E&hno)EyM?A}ao|{Tf@fIqmOiJLaE#(PMA}%7SCY!&$fVg-@xr@G2VUK>xh#bc*b|HKhjLoj z>KWQW!@1hsUIQh2V06 zIW`gBj>bfQkW8`rB3Y zCVo$H3I`#4BuN4(i7sG7xRB=TFP50wpg=31u>odKHDdx@9b;7JONF!fVtYEaycq*W zG-lJBa0ZMSPUylb#uAKZ8PvxK)~7ZlQ%gq>^XZV8{kauD7aoiTL#Hwx#*w2{vpzp7 z5Q&Q`?2K{a{7)hqTL1a7Gs@7d9a8S7abrBzM^SC66YVH-x6Ij7(z>CtG%MyEsOiB7 za6S*!xYQ`9?c))k1{PLrv5dx$QH4dmHQ;#9QTMU1H4AQVPQ4sgv$ud`Lo-GG7R~IM zk&3pE5cxr1czl893!9NHL5ksQJ!V8yXW~L6ozKva6306ofZY&I^0V!(t22VCLQ)V< zDa=A#g+tnrr!1=r@z=4RT0nD=MZOB~O$SBN39=P3{IuF{Y-Kbjsx8>q>Aet6wP+-B zPg*MEvOPWoiZ7ic)hTL+&x*ZBab$cuwhNEVhsPz|VGt@BR(&hbTRQ#Mr=5Iae3WB$ 
z@DhT+O`eaOY*t1(odF2Ov2yOTC!j=adXMJx>lVaX4vGovg$3L%+2>-XyCeMT3vkH-bMkpjHh>)s_0!yS??(87Z~n8 z!!+`*5Agf#oinT)p28Fj6C9Tan+w!SUy9mJ{XBAEmnWWhR&%E`bDT3J9r;|^6ivp! zp%Hi(H881v0@W3Rb?amEp&@mMNy*ZTh__1FR0^1OTpH!Md|E5CmRM!* z&m_-#vYoI?%wb#25r;`SK`_|aF@L_zrkIlm20O5wrD?s?7n?`r*Pu;qb$JmHQq#e0 z74R5;IyD>hhe`4Sk=i>I{PN33j}{R8X8>Q6(IzB^=c@uQR$hATWhqgVC<&aHWTR1n z8%3{K>C`%|2=z9b*3er3vN@Q)v}8Mi-v9EvzyGlRcPr2O z`Ct4T1!sm=K%e<()ZdnwB2}O`c#@~*(wpK9zId;|s>xs~O%=;K`$6NVoxUFMu=KH8 z&)ib==GvqHwQByoUF8;EF0Qn2i$v?{dM*2D8~?7ifl5VoDsE*rhx=vBp~qi>UjjeR zf^SzWt7{CIX3Sc9^Dvp*zo)MMi);>?ngN#R|Nefz7XR_=+3th>zl~=t{ZC`eb8n(? z*5}tZ=!&@|a;u6>X4iqz%b`1x-0{71iE%m4iR`NR2-TY1)ye{-0hP?IlTpS+a}#%ew9 zR@ZhWbAYk=T6kw{76!Xkhc1KP1fxnh zz6?HVmv^X2m_z?xzkUti3k;FK`_G`OliXFEG_Nv}#DM7xW{8{ZGZS8dOx*p_k*iR_ z44WH@>Aah71TPmk$+ofBHwySBq#4ecg8LIVG_R#kf&yVcxS z)i(S|q7_<^4y&!iHW-Ix2s;f1vHr90UA97LiOZ11)H~vA>LyDwf=fqk;l@oKDq;Rw zOQvdo3$K;xjorrbMlcMDxEm{bcFQU7D(l%uz8n6g?^LdAFXYASXyz@|;&nVY-G+^t z)P&u0JR{gd;$tcax8V?M1dqQHkFB2)2nBZ;@uS5%GDQmT48u#0* zn%r@@Lw2Uduh@GuRWw9c;F~qOy^e<7u5T$O0~og@tw&UgkPi1rfHK;(rB`9O_D#x4 z+|C7_56XO~qjlEh#8P|JXfNA+y#b$p<9LR-z~8Hv2QR_?X-k#4%r9CUJCGZ29+SydeGeDPwYxwr_;y`%DpyVLSIs1TN?} z0575Uj?F1r&EEaR>;?ao&=`U)|K*qXkB`2H3EF+We-M~-qaP1X-yXmH+X0*wy@wb} z97!C*wlc}R#Nl7R9;i2}%-63wPr;fTG{5`8B=j>|=E)p5R^7zZhR>lHBXOPD_HM9L zUr?q4!!Vr@&Zs#yp@XyAZ=kTW!CbgYD0Bdy&ePJht#nl@U9V)v^IbelA^%ODn*R49 zSbD!kNC-q@j;=KsEYbhZ`-A<8{(rImaQ^Rho;CEpjReAQ)ibzx)xj0 zu8T5jicAml!1SUecW4fYqAuf#tRcKMtG)x%-_|*DRpNSU(8|%5h80Z#!Gf$Et17{> zA%H6ZGG!|e;^9vM^AnCo2#GoHbSgehT^9z+W%VBNWtYF+24v(tm zesl)f>KQV<$GYP_T7sCQl;x8`wa7kEbzZF#6JHS#QnX7Bwy;3amNfwLfTTfwFUzW37S zcVfwySEa0JILlui!aN_ICF{Q_;snLRYq|oLtp8r@?^g3azSw*Au>QN1=PK*J9~D=; zezoz{pX0WBv-WcxZC&~4)6dsi^|>}~X3f_?(zRB6uD%6_GC8^OqOSt0dC7-(G$9g4 zGUJV#YE^{1;#EkQoU|_;D{$0At17fC8&4UNElWr4eF{pgHo#HSuM0ErU}X07()!Dn zuZ!!S;xr+VW|6k;)JxQ%-qR& zM%34mO(W*Nzgsul68{gQV|K&-pF#iG-d@fBv;T1Z<5r$4`F|8wyngj>9FPC9c_)x% zx%=Y;@_XCv2{LG3(--77bwduJI(gS~2-*4@aR`-RwK#+pyK%}OTHH8Gol2EXwL6vW 
zn?vX|l4yuTwSH`0-TnomO)=%bjtGvnpM#;Bxamxyp=ouY7U>%N1O5 zg<4wXjI$+IV0u%&E3oY5DWx~)fwPD__}*4EHa6#b)5h0uBg@7W@V<&qz=Hj@eQ&lC zYmEC8y%86m59x{#UnOL&9Pi~rd5HFUi1wf)6zL_bDpM6P5jGozRC(q1O2vrOu7pzkM>plON}tD z>s``7@40VjWhX|v>2{MYjGGa!YkMw?Tz%v5T+L?mH#eHAy9je_c)#PguKFzY|D;UF zF~`?*0xgOE8thm7KfC?G!}-74d9LFBIWb&u6HXwH;qHwmsKBY+9aNOLo=?bY>t-B7 z%>-V{JLG}5W*V3VNR2L{e07+4g0Ay?Fb&wm~*FlwziiMcKV#DdY|0T3qOlVljGUKSDzj)$v$B>~$Rq3z>7J z5x80R!s4FYCr888%;y+KB*yt2cUU2&W<+~gki~P+oL6VIOyJPna8%04eSzm)HtM=> zi{`b~OY?PRaeME-!`9ruOQ^-#O<@%!-a4r)?m^j{t}J3&?o?_l<+bCnr{NTRL;liQ zHbuAZY$%WVo%x!U&!FphB9_DRCOSi?)@y#(V9;tK%L8VcSIRDt`F3?oJzSc7NIT{3 zELcyswy`eY4ZEtmF0RQjS94k|0CY{yl|NYb+kNGn9W|41%^_$pD{s&lvf1Uq%00Rf z?NJo{Dc=xXu0gb|To=s+d<(ver8SeWe5X@WrFu!)%G$MAsJX=(&?LUGdNoy2Hg~^a zE^h44=Cqqn=ruK673vaD8`gJzYw6WhUgO&K8)lc*J}qIH7M4gWV(#<9qH=fRKVIzr zp$H8csoz$Dh}XN#TGYn{MT^Yz485OblTm%`t$WXzNUqn zarV}Txtd$nR9np>+Zu3J0&;=pB20DJ3Lt&u1mfghIR1dSXmQb%fO%EInW+<)yO7#_ z*iu=Td=A@1@SDC8snU<2DTrccX{~w%{Ci{9oNKVn-QH?fuM!7Vbrv^Afz^WqVoGl5UmLi1jD^Szr33kqE)lCVVWb`NUa zcZI)e+S+|(mz@`XR^hel-3>@J8|CN8f#-#Nc*(kKFp?$KW2+be>B0-g~)ed zG`%(c8scN&C@PFS=M=b+N!YeF1N_HJ7(Clqc_bRmj%)CLBRtR9<Wv4=nrvyt#531U0w_J5u>AiUn!UEs>zpM`m0|BZEO8J zn8BJz-G1Mi$t|r7Hrv`DZ*3Kmug%gk*B&?0rl||{E08P00I4_GyIwXsIo?m~wbqh3 zOGIN$#}*W!o`n+5(KIQYb}SZqE&i0ktZ4P8G`A~Yj49?w;<&f^F-v<3{Id2`5-j6D>Rj*Mh!Z2%iVKD^#!S3x^ zAfs>{p)AWz)n|Fti`ol`Ydc(NOlsFL&qoymzTYVfip9^4JA~=Isqc2rYWv)eW+*=~ zs>H!gWu+oxERQnIF)f^mK~QzvAR6bI!)lC*|HQS_Mm1gOJ$plE9}xG0BwNrZ)-dqC zST0AVh+*x6U+6E?MI(MEexVI39n-kMT#cNiK)BNq=vDRr81Rq36OZd^Ex(`{#W6hY zc9mJAmRm0$sp3`(C@c_-L$wMkSfdzy@0}xHjS}}`U1RP4aDtO58B@mb=G$0e|M#Eo zR`37a-GA}o!T!IEXA4e{RGLaC-ZmVdrX32yj3lwr;wg$Q&=`xbvjy)bM1aWBlyNCQ zOmLFum=~BL8BGWsKLw5xB+1N9o5g!X&(rIMOT&ACAr-CR}vFF_As}uR*^P4*$h_`d{~AGVZB=+@E4b zdj+5&iY~HLuYE-4NhmJU&XaJ6E;>&_IZZoH{$FPcJ|Iq5CgAw>8_@|<&VIv@?1Uu7 zsAtM?_FE^MiHOCx_YHX^to)z8Ieh)&n{XQ6L>r6cf4~1?@S-aJ{k@0$KezI1!7Cyu zni3}sKae3;R?Cr^$U2=v`1$WRnU-Jwb9*9XDh_%*HEo1q(u-LndXl9i61~I}{Z&te 
zqPL~W>}db@n6oTZD#dUeBy3C~7-D&emBO)zn0jHSv$X~9fB4Jm(@y97{9IYqofxN> z#+XI~TkBoTtp^sKp69JA^#BIpr|^@Dx?zGy#^(p%5VwukHp7Tbd-k_#Ot&~hV$yj8 z3Vp~MoLz#y@;1;lv1bahkCCR2hfLST*4FpKJu0Gmi{>LLo{rAu+eCbHcG+ zehVV8=ybZE+T#>UEMO20{`|*I*8tcHcfg^C`uVT45d2G?U9KbiQ;KKg+&vs@j*%7dW?4f^tfQ zl*MYf$42rTzQ8G#8K;0iy#V#i5_N9YjHHQNU*OyiN&f4*cY*ay2fiqg zcFtiTDig%x5-RG&ZKIO1*b${hm6XK}lb-rbC27JeMO~mSy7}Kgyl*yw))c~jmrh@`l7Un2K>>EJUWL;ecNg9yxLtm`btAe z#MQdrQ^#&ccH7J`Q9_iCbVqi`2;2_6*8&is*X;t!olx;fcy6Wwu#5q%ct)Jj-`FrL zX6^GPJW-l*mU5!Kpm|X}D>(>P822^sEatu{s;lDZLAXIYg+vmMvP9r2qO0QRL%2CS z3DxIXt}*To!g|D97f(J!Q3H}|;CcG$@CY~-EaMSg2i-O0C~ih`Wjv2gUtfWk>*ATR zghcZuR9D4Q&15*rIFWM;>?ZJ}SY9%IaZ*QjRXi<(^>ADpPn41~%x5IRSJg|0C&v!L z4UYRHMc3rdzcWt$#i&FHoUj-UGs#4Rl4a(@y86@l)6{CDjI6^`m{1sEc2D#gN-tez z#ay@-?)`B+TDIeQF0@i#MX6cxt z7#F7%%nqK?ea8|mNs<7jBGYFw8jqZe$qZBDy1vBvjRC1FN@TncAsmn3yx21|EPn1x zNwXJ6Y-@7D*(D$f8bR%S(C>w0sS!hiF(AE^4~w-*wSj8Xhx_2geB`eNJ5XNQSZO?- zqVe*j%x(DwuKX;Mk=t@9T>0@im*_Du|20~YYF@#Hbw*i|fG(*$fb|iuyrYYpGA4t3 z(c=P^C}NCG;|=%lEj=z5a*2@ zc+H{<%nc91DIOCc`5f#qhPP;n1#HJSsY5TX`oq{&y#31KQ9j{?^_3pdmD#c+jk-J*O<~!M>A`-%RQMFB(BoQuecNh;?NLX)}uyJp< zKlpR6|DreeQxB!2>(v%rLc4?RV87?7^*NR3Q@*VbVF67xH6x4AaO8OwOWL|qug-GI9yvr-S#3trC8MXJ#{cj`cucsIkGlei3O8Bdhs zKFVIoBaV&1ek0ENxQ(cl)_1ozd*cY){mMovyXd)(pz zudzXQE%N2rg!S_EE@`x=haUNgEdXQGpEr;%KPALCa4b2&Gpu9B$p}!wF^cC9>6s%A z{dIilU|H8@oh(xb0vk0Me<+7vIU>8Z2SNSaZm{<8&*Ca$~4K7>d_c; z!;0#c$d5?t!>mdfB0`JZpVGc?*20SBNMLyu$j|xa=Zis1SeC?KP%w#lTr*GeMa78Y z-!n{QGIy1u(xT)1(Ycy6)oR3Y?N5>{_ZHpkZqddVeFgWz4YJ(EK>NJcaMISy>aQ}A z#DM9Ha7L%*04*o_G=etJO-KYXCc~QHV1{Lz&fKpv$=AD4IkTI)%k|QoniCXbJd9-i13dL2pADAq}K(7u|Bwi>>>Qvc9n5k)~*M&v8>JT z1Sq3jSK9AJ-Kf66^Fg_D+LUhUp4D0(inIS$2=TxBaJ73px8L@2g`*F5Z#mp``}gSa z70Z&89RB9>3Q1;elIcoz*-FWwLVBH4H0O3Uduq3S)w(`U8Bt!(A5(~s(7v{L5#9LI zJ9=n>5ywSL#DrbCY15lZLpW25nGb$ysef*7xpXVKpF6N*{K|Qb&YQ;16=_cn2FDV( zom&uy2~q~EiLKOk3hR)|Jdw^iRvv}#;=lMN!YDVeNfnEr?v=NJwiV! 
zD6d~t^1EMsSnJlA#j{&T-GWr7=Y{O#krYmVjLS@~m4?{F&MSPh3yw$|AtZoNn_+3r zGDCn&V6a%&Rg>ERG-SFVeY9-+N|~fY?o(cK{Pa!An;N~w@zYtM?s)VR4)J7iw=<=c_D z6~-+>LLeG*R2sW=$L+{L*^u6>k6(`G9gks-$8C+{vN?~@F}uFaqM*EfwE@Y4MT2OO zz3ZvZU~#*XpLWH3M4S>jUhaok!RK-%bj^amBs#q~=@_5s)x_2B-soKcQ)w-~)(tt&p%z$IHaDq^Z-q zq-NyeBM-LdE_3N z%Oz^0F@15Tm$kam+ASd$I)G4dH~u6!0zyZOPt8dQlN(IBeLWZ5f4meR@H!(qyU zx7!^0L{N0>bnKqE-tJz;n}3@yVN&-N2uk?Cl5C308|P2qc;t95=mt;0Z|1GeS{4L0 ztyN5f!c#yzR|Jhro-sj~QfwuRgKp62peVvZ{K#Uw4BbOr5~eIJuC8pwyVTGrM)5~Z zBz~s{K#+eeZS0Kv3$M|_pZ-QVofOH*GPox;U-YmT$hZV4Pc^rQogM3#LEkD3hoS2h zc!aGGU<)(_G}2por^#zJ=h<=;Si!bBN!^lO`uUt)@R*AQ^gBD9Tj&>f{v%3LLbci` z!Lsl3RDcGiwE`~i+{XDf1Y@g@gtj4J@Hd-p3yE#A^5v**fsgDKKyFp4Lhh2L@9{h* zrJ|+outF?Tn(CuHiLP^VgTPb}ofcKgqP8&o&=Ly?6?1_=Upu8v$7xdZI1EFZuo~0ZLC0B= zF16&z;n~^8cc-sAg^@XP8@~q~-}W@5)K`~{%3l_c%a63pQ;xSqdA8p0_sHfT_n!+q zU)bV>m*afZ2Hgg9ensBbubO_HEk6OiCTm5a%EG@)nxN!_w*vO|-TOCZq5Nd@RK%!Y z3BJx%BmMsEUN+JnT;aNr74I8add1weFCNijb39TSX~cM>W=RHp(J!3;@C`uBo(W}FH@}_JLfJlLp zky=(Eo*%GsBzyDjhp^Lm^5mnAd9;XMVvd2>T*TPkA5ta4|9 zjyYbO^x7r6RxE&0VmoDm$#(R*H$qhLR18z5sZ^|XjUz}=_uNlcW}@~wUHC~+!Pr4I zyYP7pr`cXuLTiM6?`_%AU5#AVa`_T$RDz#2MotmT*G{llbTA;LvJ7H5c#uTfOzeMwrtJP(U_R=~l7CTam8e?uxW$O%2MQSH4 zBqV1e!(7MK)nF}Wm#L{z)&%K!MBOSP4ory8CKzom~QjFjR+bPhy_42#v9 zG&Z`GBFm>D!W41B@|31|J^&=jwPS{bYThX8OjvEXvf%MvvNJ*_rMBI@|koO2%f0dt3R>ZYFRyLIR5& zD<$?59h<>(eUJ=hdx}|p5_ps)kPvzy3@^BrRg0y3?=q8&6O@P^$ATp@+*Qc!>VNmT zDHF0=AY#V3=fPX0Ck@4Qnh#R~cU2#Bd)?V?S2Fu^ucv3TA4r6$T1g(UbZ#z`{kMpv z^S`LsYu8Cjr(?^-kkQy)hkBTzXo4Mtr{HoL?uLEXRs{n4DcJc-NAb%P%{A*~I_8K7 zBhmO%gi|f?5u2t7G0SE*Sy&KZ=N}dT8%m^wTw&>EH(LsjG**J5$3d@md3hNkjb+IA zxR;nxqW8n`(VMqtZ@Rl`+Wv_qScqbR9L~+$7@1`3=n|Cn7;|j2I-zjMiPX1^2sV0e*5|%rOX literal 0 HcmV?d00001 diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json new file mode 100644 index 000000000000..a78e667b7736 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/renovate.json @@ -0,0 +1,5 
@@ +{ + "extends": [ + "config:base" + ] + } \ No newline at end of file diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml new file mode 100644 index 000000000000..003d08eb745d --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/deployment.yaml @@ -0,0 +1,62 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: {{- include "common.labels.matchLabels" . | nindent 6 }} + template: + metadata: + labels: {{- include "common.labels.standard" . | nindent 8 }} + spec: + containers: + - name: metadata-service + env: + - name: METADATASERVICE_UPSTREAM_API_URL + value: '{{ .Values.upstreamAPI }}' + - name: METADATASERVICE_OIDC_AUDIENCE + value: "{{ .Values.oidc.audience }}" + - name: METADATASERVICE_OIDC_ISSUER + value: "{{ .Values.oidc.issuer }}" + - name: METADATASERVICE_OIDC_JWKSURI + value: "{{ .Values.oidc.jwksuri }}" + - name: METADATASERVICE_OIDC_CLAIMS_ROLES + value: "{{ .Values.oidc.rolesClaim }}" + - name: METADATASERVICE_OIDC_CLAIMS_USERNAME + value: "{{ .Values.oidc.userClaim }}" + - name: METADATASERVICE_DB_URI + valueFrom: + secretKeyRef: + name: {{ template "common.names.fullname" . 
}}-dbconn + key: uri + image: "{{ .Values.metadataservice.image.repository }}:{{ .Values.metadataservice.image.tag }}" + imagePullPolicy: Always + volumeMounts: + - name: dbcerts + mountPath: "/dbcerts" + readOnly: true + ports: + - name: http + containerPort: 8000 + protocol: TCP + livenessProbe: + httpGet: + path: /healthz/liveness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + readinessProbe: + httpGet: + path: /healthz/readiness + port: http + initialDelaySeconds: 5 + timeoutSeconds: 2 + resources: +{{ toYaml .Values.resources | indent 12 }} + volumes: + - name: dbcerts + secret: + secretName: {{ template "common.names.fullname" . }}-crdb-ca + defaultMode: 0400 diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml new file mode 100644 index 000000000000..45cd321ca9a9 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/ingress.yaml @@ -0,0 +1,36 @@ +{{- if .Values.ingress.enabled }} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + {{- if and .Values.ingress.ingressClassName (eq "true" (include "common.ingress.supportsIngressClassname" .)) }} + ingressClassName: {{ .Values.ingress.ingressClassName | quote }} + {{- end }} + rules: + {{- range .Values.ingress.hostnames }} + - host: {{ . 
}} + http: + paths: + - path: / + {{- if $.Values.ingress.publicPaths -}} + ( + {{- range $index,$path := $.Values.ingress.publicPaths }} + {{- if $index }}|{{ end }} + {{- $path }} + {{- end -}} + ) + {{- end }} + pathType: Prefix + backend: + service: + name: {{ template "common.names.fullname" $ }} + port: + name: http + {{- end }} +# tls: [] +# hosts: +# - hollow-metadataservice.mydomain +# secretName: hollow-metadataservice-example-tls +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml new file mode 100644 index 000000000000..18c39c058dcd --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-crdb-ca.yaml @@ -0,0 +1,17 @@ +{{- if .Values.crdbCA }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "common.names.fullname" . }}-crdb-ca + namespace: {{ .Release.Namespace | quote }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + ca.crt: {{ .Values.crdbCA | b64enc | quote }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml new file mode 100644 index 000000000000..06c93061d08c --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/secrets-dbconn.yaml @@ -0,0 +1,17 @@ +{{- if .Values.dbconnURI }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "common.names.fullname" . 
}}-dbconn + namespace: {{ .Release.Namespace | quote }} + labels: {{- include "common.labels.standard" . | nindent 4 }} + {{- if .Values.commonLabels }} + {{- include "common.tplvalues.render" ( dict "value" .Values.commonLabels "context" $ ) | nindent 4 }} + {{- end }} + {{- if .Values.commonAnnotations }} + annotations: {{- include "common.tplvalues.render" ( dict "value" .Values.commonAnnotations "context" $ ) | nindent 4 }} + {{- end }} +type: Opaque +data: + uri: {{ .Values.dbconnURI | b64enc | quote }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml new file mode 100644 index 000000000000..fdb8b82d76f8 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/templates/service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ template "common.names.fullname" . }} + labels: {{- include "common.labels.standard" . | nindent 4 }} +spec: + ports: + - name: http + port: 80 + protocol: TCP + targetPort: 8000 + - name: https + port: 443 + protocol: TCP + targetPort: 8000 + selector:{{ include "common.labels.matchLabels" . 
| nindent 4 }} + type: ClusterIP diff --git a/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml new file mode 100644 index 000000000000..7a86583f54e3 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-tarred-dep/values.yaml @@ -0,0 +1,30 @@ +metadataservice: + image: + repository: ghcr.io/metal-toolbox/hollow-metadataservice + tag: "v0.0.1" + +ingress: + enabled: true + hostnames: + - metadata-service.mydomain + publicPaths: + - $ + - metadata + - userdata + - '2009-04-04' + +oidc: + audience: "" + issuer: "" + jwksuri: "" + rolesClaim: "" + userClaim: "" + +replicaCount: 1 +resources: + limits: + cpu: 4 + memory: 4Gi + requests: + cpu: 4 + memory: 4Gi diff --git a/pkg/iac/scanners/helm/test/values/values.yaml b/pkg/iac/scanners/helm/test/values/values.yaml new file mode 100644 index 000000000000..6f637160ffa9 --- /dev/null +++ b/pkg/iac/scanners/helm/test/values/values.yaml @@ -0,0 +1,3 @@ +--- +securityContext: + runAsUser: 0 \ No newline at end of file diff --git a/pkg/iac/scanners/json/parser/parser.go b/pkg/iac/scanners/json/parser/parser.go new file mode 100644 index 000000000000..7205df351de4 --- /dev/null +++ b/pkg/iac/scanners/json/parser/parser.go @@ -0,0 +1,89 @@ +package parser + +import ( + "context" + "encoding/json" + "io" + "io/fs" + "path/filepath" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "json", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new parser +func New(opts ...options.ParserOption) *Parser { + p := &Parser{} + for _, opt := range opts { + opt(p) + } + return p +} 
+
+func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]interface{}, error) {
+
+	files := make(map[string]interface{})
+	if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error {
+		select {
+		case <-ctx.Done():
+			return ctx.Err()
+		default:
+		}
+		if err != nil {
+			return err
+		}
+		if entry.IsDir() {
+			return nil
+		}
+		if !p.Required(path) {
+			return nil
+		}
+		df, err := p.ParseFile(ctx, target, path)
+		if err != nil {
+			p.debug.Log("Parse error in '%s': %s", path, err)
+			return nil
+		}
+		files[path] = df
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+	return files, nil
+}
+
+// ParseFile parses JSON content from the provided filesystem path.
+func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) {
+	f, err := fs.Open(filepath.ToSlash(path))
+	if err != nil {
+		return nil, err
+	}
+	defer func() { _ = f.Close() }()
+	var target interface{}
+	if err := json.NewDecoder(f).Decode(&target); err != nil {
+		return nil, err
+	}
+	return target, nil
+}
+
+func (p *Parser) Required(path string) bool {
+	if p.skipRequired {
+		return true
+	}
+	return detection.IsType(path, nil, detection.FileTypeJSON)
+}
diff --git a/pkg/iac/scanners/json/parser/parser_test.go b/pkg/iac/scanners/json/parser/parser_test.go
new file mode 100644
index 000000000000..2af3936d6124
--- /dev/null
+++ b/pkg/iac/scanners/json/parser/parser_test.go
@@ -0,0 +1,51 @@
+package parser
+
+import (
+	"context"
+	"testing"
+
+	"github.com/liamg/memoryfs"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func Test_Parser(t *testing.T) {
+	input := `{ "x": { "y": 123, "z": ["a", "b", "c"]}}`
+
+	memfs := memoryfs.New()
+	err := memfs.WriteFile("something.json", []byte(input), 0644)
+	require.NoError(t, err)
+
+	data, err := New().ParseFile(context.TODO(), memfs, "something.json")
+	require.NoError(t, err)
+
+	msi, ok :=
data.(map[string]interface{}) + require.True(t, ok) + + xObj, ok := msi["x"] + require.True(t, ok) + + xMsi, ok := xObj.(map[string]interface{}) + require.True(t, ok) + + yRaw, ok := xMsi["y"] + require.True(t, ok) + + y, ok := yRaw.(float64) + require.True(t, ok) + + assert.Equal(t, 123.0, y) + + zRaw, ok := xMsi["z"] + require.True(t, ok) + + z, ok := zRaw.([]interface{}) + require.True(t, ok) + + require.Len(t, z, 3) + + assert.Equal(t, "a", z[0]) + assert.Equal(t, "b", z[1]) + assert.Equal(t, "c", z[2]) + +} diff --git a/pkg/iac/scanners/json/scanner.go b/pkg/iac/scanners/json/scanner.go new file mode 100644 index 000000000000..a1ad82e86690 --- /dev/null +++ b/pkg/iac/scanners/json/scanner.go @@ -0,0 +1,170 @@ +package json + +import ( + "context" + "io" + "io/fs" + "sync" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/json/parser" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption + sync.Mutex + frameworks []framework.Framework + spec string + loadEmbeddedPolicies bool + loadEmbeddedLibraries bool +} + +func (s *Scanner) SetRegoOnly(bool) { +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + 
s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "json", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) {} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func NewScanner(opts ...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) Name() string { + return "JSON" +} + +func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + + files, err := s.parser.ParseFS(ctx, fs, path) + if err != nil { + return nil, err + } + + if len(files) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, file := range files { + inputs = append(inputs, rego.Input{ + Path: path, + FS: fs, + Contents: file, + }) + } + + results, err := s.scanRego(ctx, fs, inputs...) 
+ if err != nil { + return nil, err + } + return results, nil +} + +func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fs, path) + if err != nil { + return nil, err + } + s.debug.Log("Scanning %s...", path) + return s.scanRego(ctx, fs, rego.Input{ + Path: path, + Contents: parsed, + }) +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceJSON, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) scanRego(ctx context.Context, srcFS fs.FS, inputs ...rego.Input) (scan.Results, error) { + regoScanner, err := s.initRegoScanner(srcFS) + if err != nil { + return nil, err + } + results, err := regoScanner.ScanInput(ctx, inputs...) 
+ if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", srcFS, false) + return results, nil +} diff --git a/pkg/iac/scanners/json/scanner_test.go b/pkg/iac/scanners/json/scanner_test.go new file mode 100644 index 000000000000..18e6dc90d49b --- /dev/null +++ b/pkg/iac/scanners/json/scanner_test.go @@ -0,0 +1,77 @@ +package json + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/data.json": `{ "x": { "y": 123, "z": ["a", "b", "c"]}}`, + "/rules/rule.rego": `package builtin.json.lol + +__rego_metadata__ := { + "id": "ABC123", + "avd_id": "AVD-AB-0123", + "title": "title", + "short_code": "short", + "severity": "CRITICAL", + "type": "JSON Check", + "description": "description", + "recommended_actions": "actions", + "url": "https://example.com", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "json"}], +} + +deny[res] { + input.x.y == 123 + res := { + "msg": "oh no", + "startline": 1, + "endline": 2, + } +} + +`, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-AB-0123", + Aliases: []string{"ABC123"}, + ShortCode: "short", + Summary: "title", + Explanation: "description", + Impact: "", + Resolution: "actions", + Provider: "json", + Service: "general", + Links: []string{"https://example.com"}, + Severity: "CRITICAL", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{ + Terraform: 
(*scan.TerraformCustomCheck)(nil), + }, + RegoPackage: "data.builtin.json.lol", + Frameworks: map[framework.Framework][]string{}, + }, results.GetFailed()[0].Rule()) +} diff --git a/pkg/iac/scanners/kubernetes/parser/manifest.go b/pkg/iac/scanners/kubernetes/parser/manifest.go new file mode 100644 index 000000000000..3f809a6f9145 --- /dev/null +++ b/pkg/iac/scanners/kubernetes/parser/manifest.go @@ -0,0 +1,33 @@ +package parser + +import ( + "fmt" + + "gopkg.in/yaml.v3" +) + +type Manifest struct { + Path string + Content *ManifestNode +} + +func (m *Manifest) UnmarshalYAML(value *yaml.Node) error { + + switch value.Tag { + case "!!map": + node := new(ManifestNode) + node.Path = m.Path + if err := value.Decode(node); err != nil { + return err + } + m.Content = node + default: + return fmt.Errorf("failed to handle tag: %s", value.Tag) + } + + return nil +} + +func (m *Manifest) ToRego() interface{} { + return m.Content.ToRego() +} diff --git a/pkg/iac/scanners/kubernetes/parser/manifest_node.go b/pkg/iac/scanners/kubernetes/parser/manifest_node.go new file mode 100644 index 000000000000..1f82ca1e3680 --- /dev/null +++ b/pkg/iac/scanners/kubernetes/parser/manifest_node.go @@ -0,0 +1,140 @@ +package parser + +import ( + "fmt" + "strconv" + + "gopkg.in/yaml.v3" +) + +type TagType string + +const ( + TagBool TagType = "!!bool" + TagInt TagType = "!!int" + TagFloat TagType = "!!float" + TagStr TagType = "!!str" + TagString TagType = "!!string" + TagSlice TagType = "!!seq" + TagMap TagType = "!!map" +) + +type ManifestNode struct { + StartLine int + EndLine int + Offset int + Value interface{} + Type TagType + Path string +} + +func (r *ManifestNode) ToRego() interface{} { + if r == nil { + return nil + } + switch r.Type { + case TagBool, TagInt, TagString, TagStr: + return r.Value + case TagSlice: + var output []interface{} + for _, node := range r.Value.([]ManifestNode) { + output = append(output, node.ToRego()) + } + return output + case TagMap: + output := 
make(map[string]interface{}) + output["__defsec_metadata"] = map[string]interface{}{ + "startline": r.StartLine, + "endline": r.EndLine, + "filepath": r.Path, + "offset": r.Offset, + } + for key, node := range r.Value.(map[string]ManifestNode) { + output[key] = node.ToRego() + } + return output + } + return nil +} + +func (r *ManifestNode) UnmarshalYAML(node *yaml.Node) error { + + r.StartLine = node.Line + r.EndLine = node.Line + r.Type = TagType(node.Tag) + + switch TagType(node.Tag) { + case TagString, TagStr: + + r.Value = node.Value + case TagInt: + val, err := strconv.Atoi(node.Value) + if err != nil { + return err + } + r.Value = val + case TagFloat: + val, err := strconv.ParseFloat(node.Value, 64) + if err != nil { + return err + } + r.Value = val + case TagBool: + val, err := strconv.ParseBool(node.Value) + if err != nil { + return err + } + r.Value = val + case TagMap: + return r.handleMapTag(node) + case TagSlice: + return r.handleSliceTag(node) + + default: + return fmt.Errorf("node tag is not supported %s", node.Tag) + } + return nil +} + +func (r *ManifestNode) handleSliceTag(node *yaml.Node) error { + var nodes []ManifestNode + max := node.Line + for _, contentNode := range node.Content { + newNode := new(ManifestNode) + newNode.Path = r.Path + if err := contentNode.Decode(newNode); err != nil { + return err + } + if newNode.EndLine > max { + max = newNode.EndLine + } + nodes = append(nodes, *newNode) + } + r.EndLine = max + r.Value = nodes + return nil +} + +func (r *ManifestNode) handleMapTag(node *yaml.Node) error { + output := make(map[string]ManifestNode) + var key string + max := node.Line + for i, contentNode := range node.Content { + if i == 0 || i%2 == 0 { + key = contentNode.Value + } else { + newNode := new(ManifestNode) + newNode.Path = r.Path + if err := contentNode.Decode(newNode); err != nil { + return err + } + output[key] = *newNode + if newNode.EndLine > max { + max = newNode.EndLine + } + } + } + r.EndLine = max + r.Value = output 
+ return nil +} diff --git a/pkg/iac/scanners/kubernetes/parser/parser.go b/pkg/iac/scanners/kubernetes/parser/parser.go new file mode 100644 index 000000000000..aa915adc21ee --- /dev/null +++ b/pkg/iac/scanners/kubernetes/parser/parser.go @@ -0,0 +1,137 @@ +package parser + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "io/fs" + "path/filepath" + "regexp" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/pkg/iac/detection" +) + +var _ options.ConfigurableParser = (*Parser)(nil) + +type Parser struct { + debug debug.Logger + skipRequired bool +} + +func (p *Parser) SetDebugWriter(writer io.Writer) { + p.debug = debug.New(writer, "kubernetes", "parser") +} + +func (p *Parser) SetSkipRequiredCheck(b bool) { + p.skipRequired = b +} + +// New creates a new K8s parser +func New(options ...options.ParserOption) *Parser { + p := &Parser{} + for _, option := range options { + option(p) + } + return p +} + +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]interface{}, error) { + files := make(map[string][]interface{}) + if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + if err != nil { + return err + } + if entry.IsDir() { + return nil + } + if !p.required(target, path) { + return nil + } + parsed, err := p.ParseFile(ctx, target, path) + if err != nil { + p.debug.Log("Parse error in '%s': %s", path, err) + return nil + } + files[path] = parsed + return nil + }); err != nil { + return nil, err + } + return files, nil +} + +// ParseFile parses Kubernetes manifest from the provided filesystem path. 
+func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) {
+	f, err := fs.Open(filepath.ToSlash(path))
+	if err != nil {
+		return nil, err
+	}
+	defer func() { _ = f.Close() }()
+	return p.Parse(f, path)
+}
+
+func (p *Parser) required(fs fs.FS, path string) bool {
+	if p.skipRequired {
+		return true
+	}
+	f, err := fs.Open(filepath.ToSlash(path))
+	if err != nil {
+		return false
+	}
+	defer func() { _ = f.Close() }()
+	if data, err := io.ReadAll(f); err == nil {
+		return detection.IsType(path, bytes.NewReader(data), detection.FileTypeKubernetes)
+	}
+	return false
+}
+
+func (p *Parser) Parse(r io.Reader, path string) ([]interface{}, error) {
+
+	contents, err := io.ReadAll(r)
+	if err != nil {
+		return nil, err
+	}
+
+	if len(contents) == 0 {
+		return nil, nil
+	}
+
+	if trimmed := strings.TrimSpace(string(contents)); len(trimmed) > 0 && trimmed[0] == '{' {
+		var target interface{}
+		if err := json.Unmarshal(contents, &target); err != nil {
+			return nil, err
+		}
+		return []interface{}{target}, nil
+	}
+
+	var results []interface{}
+
+	re := regexp.MustCompile(`(?m:^---\r?\n)`)
+	pos := 0
+	for _, partial := range re.Split(string(contents), -1) {
+		var result Manifest
+		result.Path = path
+		if err := yaml.Unmarshal([]byte(partial), &result); err != nil {
+			return nil, fmt.Errorf("unmarshal yaml: %w", err)
+		}
+		if result.Content != nil {
+			result.Content.Offset = pos
+			results = append(results, result.ToRego())
+		}
+		pos += len(strings.Split(partial, "\n"))
+	}
+
+	return results, nil
+}
diff --git a/pkg/iac/scanners/kubernetes/scanner.go b/pkg/iac/scanners/kubernetes/scanner.go
new file mode 100644
index 000000000000..9bbc03aec74c
--- /dev/null
+++ b/pkg/iac/scanners/kubernetes/scanner.go
@@ -0,0 +1,176 @@
+package kubernetes
+
+import (
+	"context"
+	"io"
+	"io/fs"
+	"path/filepath"
+	"sort"
+	"sync"
+
+	"github.com/aquasecurity/defsec/pkg/debug"
+	"github.com/aquasecurity/defsec/pkg/framework"
+	"github.com/aquasecurity/defsec/pkg/scan"
+	
"github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" + "github.com/liamg/memoryfs" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) + +type Scanner struct { + debug debug.Logger + options []options.ScannerOption + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + parser *parser.Parser + skipRequired bool + sync.Mutex + loadEmbeddedPolicies bool + frameworks []framework.Framework + spec string + loadEmbeddedLibraries bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(bool) {} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.skipRequired = skip +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.debug = debug.New(writer, "kubernetes", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) { +} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +func NewScanner(opts 
...options.ScannerOption) *Scanner { + s := &Scanner{ + options: opts, + } + for _, opt := range opts { + opt(s) + } + s.parser = parser.New(options.ParserWithSkipRequiredCheck(s.skipRequired)) + return s +} + +func (s *Scanner) Name() string { + return "Kubernetes" +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +func (s *Scanner) ScanReader(ctx context.Context, filename string, reader io.Reader) (scan.Results, error) { + memfs := memoryfs.New() + if err := memfs.MkdirAll(filepath.Base(filename), 0o700); err != nil { + return nil, err + } + data, err := io.ReadAll(reader) + if err != nil { + return nil, err + } + if err := memfs.WriteFile(filename, data, 0o644); err != nil { + return nil, err + } + return s.ScanFS(ctx, memfs, ".") +} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { + + k8sFilesets, err := s.parser.ParseFS(ctx, target, dir) + if err != nil { + return nil, err + } + + if len(k8sFilesets) == 0 { + return nil, nil + } + + var inputs []rego.Input + for path, k8sFiles := range k8sFilesets { + for _, content := range k8sFiles { + inputs = append(inputs, rego.Input{ + Path: path, + FS: target, + Contents: content, + }) + } + } + + regoScanner, err := s.initRegoScanner(target) + if err != nil { + return nil, err + } + + s.debug.Log("Scanning %d files...", len(inputs)) + results, err := regoScanner.ScanInput(ctx, inputs...) 
+ if err != nil { + return nil, err + } + results.SetSourceAndFilesystem("", target, false) + + sort.Slice(results, func(i, j int) bool { + return results[i].Rule().AVDID < results[j].Rule().AVDID + }) + return results, nil +} diff --git a/pkg/iac/scanners/kubernetes/scanner_test.go b/pkg/iac/scanners/kubernetes/scanner_test.go new file mode 100644 index 000000000000..db4f713fcd6f --- /dev/null +++ b/pkg/iac/scanners/kubernetes/scanner_test.go @@ -0,0 +1,736 @@ +package kubernetes + +import ( + "context" + "os" + "strings" + "testing" + + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_BasicScan(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/example.yaml": ` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`, + "/rules/lib.k8s.rego": ` + package lib.kubernetes + + default is_gatekeeper = false + + is_gatekeeper { + has_field(input, "review") + has_field(input.review, "object") + } + + object = input { + not is_gatekeeper + } + + object = input.review.object { + is_gatekeeper + } + + format(msg) = gatekeeper_format { + is_gatekeeper + gatekeeper_format = {"msg": msg} + } + + format(msg) = msg { + not is_gatekeeper + } + + name = object.metadata.name + + default namespace = "default" + + namespace = object.metadata.namespace + + #annotations = object.metadata.annotations + + kind = object.kind + + is_pod { + kind = "Pod" + } + + is_cronjob { + kind = "CronJob" + } + + default is_controller = false + + is_controller { + kind = "Deployment" + } + + is_controller { + kind = "StatefulSet" + } + + is_controller { + kind = "DaemonSet" + } + + is_controller { + kind = 
"ReplicaSet" + } + + is_controller { + kind = "ReplicationController" + } + + is_controller { + kind = "Job" + } + + split_image(image) = [image, "latest"] { + not contains(image, ":") + } + + split_image(image) = [image_name, tag] { + [image_name, tag] = split(image, ":") + } + + pod_containers(pod) = all_containers { + keys = {"containers", "initContainers"} + all_containers = [c | keys[k]; c = pod.spec[k][_]] + } + + containers[container] { + pods[pod] + all_containers = pod_containers(pod) + container = all_containers[_] + } + + containers[container] { + all_containers = pod_containers(object) + container = all_containers[_] + } + + pods[pod] { + is_pod + pod = object + } + + pods[pod] { + is_controller + pod = object.spec.template + } + + pods[pod] { + is_cronjob + pod = object.spec.jobTemplate.spec.template + } + + volumes[volume] { + pods[pod] + volume = pod.spec.volumes[_] + } + + dropped_capability(container, cap) { + container.securityContext.capabilities.drop[_] == cap + } + + added_capability(container, cap) { + container.securityContext.capabilities.add[_] == cap + } + + has_field(obj, field) { + obj[field] + } + + no_read_only_filesystem(c) { + not has_field(c, "securityContext") + } + + no_read_only_filesystem(c) { + has_field(c, "securityContext") + not has_field(c.securityContext, "readOnlyRootFilesystem") + } + + privilege_escalation_allowed(c) { + not has_field(c, "securityContext") + } + + privilege_escalation_allowed(c) { + has_field(c, "securityContext") + has_field(c.securityContext, "allowPrivilegeEscalation") + } + + annotations[annotation] { + pods[pod] + annotation = pod.metadata.annotations + } + + host_ipcs[host_ipc] { + pods[pod] + host_ipc = pod.spec.hostIPC + } + + host_networks[host_network] { + pods[pod] + host_network = pod.spec.hostNetwork + } + + host_pids[host_pid] { + pods[pod] + host_pid = pod.spec.hostPID + } + + host_aliases[host_alias] { + pods[pod] + host_alias = pod.spec + } + `, + "/rules/lib.util.rego": ` + package 
lib.utils + + has_key(x, k) { + _ = x[k] + }`, + "/rules/rule.rego": ` +package builtin.kubernetes.KSV011 + +import data.lib.kubernetes +import data.lib.utils + +default failLimitsCPU = false + +__rego_metadata__ := { + "id": "KSV011", + "avd_id": "AVD-KSV-0011", + "title": "CPU not limited", + "short_code": "limit-cpu", + "version": "v1.0.0", + "severity": "LOW", + "type": "Kubernetes Security Check", + "description": "Enforcing CPU limits prevents DoS via resource exhaustion.", + "recommended_actions": "Set a limit value under 'containers[].resources.limits.cpu'.", + "url": "https://cloud.google.com/blog/products/containers-kubernetes/kubernetes-best-practices-resource-requests-and-limits", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "kubernetes"}], +} + +# getLimitsCPUContainers returns all containers which have set resources.limits.cpu +getLimitsCPUContainers[container] { + allContainers := kubernetes.containers[_] + utils.has_key(allContainers.resources.limits, "cpu") + container := allContainers.name +} + +# getNoLimitsCPUContainers returns all containers which have not set +# resources.limits.cpu +getNoLimitsCPUContainers[container] { + container := kubernetes.containers[_].name + not getLimitsCPUContainers[container] +} + +# failLimitsCPU is true if containers[].resources.limits.cpu is not set +# for ANY container +failLimitsCPU { + count(getNoLimitsCPUContainers) > 0 +} + +deny[res] { + failLimitsCPU + + msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should set 'resources.limits.cpu'", [getNoLimitsCPUContainers[_], kubernetes.kind, kubernetes.name])) + + res := { + "msg": msg, + "id": __rego_metadata__.id, + "title": __rego_metadata__.title, + "severity": __rego_metadata__.severity, + "type": __rego_metadata__.type, + "startline": 6, + "endline": 10, + } +} +`, + }) + + scanner := NewScanner(options.ScannerWithPolicyDirs("rules")) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) 
+ + require.Len(t, results.GetFailed(), 1) + + assert.Equal(t, scan.Rule{ + AVDID: "AVD-KSV-0011", + Aliases: []string{"KSV011"}, + ShortCode: "limit-cpu", + Summary: "CPU not limited", + Explanation: "Enforcing CPU limits prevents DoS via resource exhaustion.", + Impact: "", + Resolution: "Set a limit value under 'containers[].resources.limits.cpu'.", + Provider: "kubernetes", + Service: "general", + Links: []string{"https://cloud.google.com/blog/products/containers-kubernetes/kubernetes-best-practices-resource-requests-and-limits"}, + Severity: "LOW", + Terraform: &scan.EngineMetadata{}, + CloudFormation: &scan.EngineMetadata{}, + CustomChecks: scan.CustomChecks{Terraform: (*scan.TerraformCustomCheck)(nil)}, + RegoPackage: "data.builtin.kubernetes.KSV011", + Frameworks: map[framework.Framework][]string{}, + }, results.GetFailed()[0].Rule()) + + failure := results.GetFailed()[0] + actualCode, err := failure.GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 6, + Content: "spec: ", + IsCause: true, + FirstCause: true, + Annotation: "", + }, + { + Number: 7, + Content: " containers: ", + IsCause: true, + Annotation: "", + }, + { + Number: 8, + Content: " - command: [\"sh\", \"-c\", \"echo 'Hello' && sleep 1h\"]", + IsCause: true, + Annotation: "", + }, + { + Number: 9, + Content: " image: busybox", + IsCause: true, + Annotation: "", + }, + { + Number: 10, + Content: " name: hello", + IsCause: true, + LastCause: true, + Annotation: "", + }, + }, actualCode.Lines) +} + +func Test_FileScan(t *testing.T) { + + results, err := NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + 
image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_FileScan_WithSeparator(t *testing.T) { + + results, err := NewScanner(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +--- +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) +} + +func Test_FileScan_MultiManifests(t *testing.T) { + file := ` +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello1-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello1' && sleep 1h"] + image: busybox + name: hello1 +--- +apiVersion: v1 +kind: Pod +metadata: + name: hello2-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello2' && sleep 1h"] + image: busybox + name: hello2 +` + + results, err := NewScanner( + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithEmbeddedLibraries(true)).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(file)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 1) + fileLines := strings.Split(file, "\n") + for _, failure := range results.GetFailed() { + actualCode, err := failure.GetCode() + require.NoError(t, err) + assert.Greater(t, len(actualCode.Lines), 0) + for _, line := range actualCode.Lines { + assert.Greater(t, len(fileLines), line.Number) + assert.Equal(t, line.Content, fileLines[line.Number-1]) + } + } +} + +func Test_FileScanWithPolicyReader(t *testing.T) { + + results, err := NewScanner(options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + msg = "fail" +} +`))).ScanReader(context.TODO(), "k8s.yaml", strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: 
hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) +} + +func Test_FileScanJSON(t *testing.T) { + + results, err := NewScanner(options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + input.kind == "Pod" + msg = "fail" +} +`))).ScanReader(context.TODO(), "k8s.json", strings.NewReader(` +{ + "kind": "Pod", + "apiVersion": "v1", + "metadata": { + "name": "mongo", + "labels": { + "name": "mongo", + "role": "mongo" + } + }, + "spec": { + "volumes": [ + { + "name": "mongo-disk", + "gcePersistentDisk": { + "pdName": "mongo-disk", + "fsType": "ext4" + } + } + ], + "containers": [ + { + "name": "mongo", + "image": "mongo:latest", + "ports": [ + { + "name": "mongo", + "containerPort": 27017 + } + ], + "volumeMounts": [ + { + "name": "mongo-disk", + "mountPath": "/data/db" + } + ] + } + ] + } +} +`)) + require.NoError(t, err) + + assert.Equal(t, 1, len(results.GetFailed())) +} + +func Test_FileScanWithMetadata(t *testing.T) { + + results, err := NewScanner( + options.ScannerWithDebug(os.Stdout), + options.ScannerWithTrace(os.Stdout), + options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +deny[msg] { + input.kind == "Pod" + msg := { + "msg": "fail", + "startline": 2, + "endline": 2, + "filepath": "chartname/template/serviceAccount.yaml" + } +} +`))).ScanReader( + context.TODO(), + "k8s.yaml", + strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello +`)) + require.NoError(t, err) + + assert.Greater(t, len(results.GetFailed()), 0) + + firstResult := results.GetFailed()[0] + assert.Equal(t, 2, firstResult.Metadata().Range().GetStartLine()) + assert.Equal(t, 2, firstResult.Metadata().Range().GetEndLine()) + assert.Equal(t, 
"chartname/template/serviceAccount.yaml", firstResult.Metadata().Range().GetFilename()) +} + +func Test_FileScanExampleWithResultFunction(t *testing.T) { + + results, err := NewScanner( + options.ScannerWithDebug(os.Stdout), + options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyReader(strings.NewReader(`package defsec + +import data.lib.kubernetes + +default checkCapsDropAll = false + +__rego_metadata__ := { +"id": "KSV003", +"avd_id": "AVD-KSV-0003", +"title": "Default capabilities not dropped", +"short_code": "drop-default-capabilities", +"version": "v1.0.0", +"severity": "LOW", +"type": "Kubernetes Security Check", +"description": "The container should drop all default capabilities and add only those that are needed for its execution.", +"recommended_actions": "Add 'ALL' to containers[].securityContext.capabilities.drop.", +"url": "https://kubesec.io/basics/containers-securitycontext-capabilities-drop-index-all/", +} + +__rego_input__ := { +"combine": false, +"selector": [{"type": "kubernetes"}], +} + +# Get all containers which include 'ALL' in security.capabilities.drop +getCapsDropAllContainers[container] { +allContainers := kubernetes.containers[_] +lower(allContainers.securityContext.capabilities.drop[_]) == "all" +container := allContainers.name +} + +# Get all containers which don't include 'ALL' in security.capabilities.drop +getCapsNoDropAllContainers[container] { +container := kubernetes.containers[_] +not getCapsDropAllContainers[container.name] +} + +deny[res] { +output := getCapsNoDropAllContainers[_] + +msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should add 'ALL' to 'securityContext.capabilities.drop'", [output.name, kubernetes.kind, kubernetes.name])) + +res := result.new(msg, output) +} + +`))).ScanReader( + context.TODO(), + "k8s.yaml", + strings.NewReader(` +apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", 
"echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - nothing +`)) + require.NoError(t, err) + + require.Greater(t, len(results.GetFailed()), 0) + + firstResult := results.GetFailed()[0] + assert.Equal(t, 8, firstResult.Metadata().Range().GetStartLine()) + assert.Equal(t, 14, firstResult.Metadata().Range().GetEndLine()) + assert.Equal(t, "k8s.yaml", firstResult.Metadata().Range().GetFilename()) +} + +/* +// TODO(simar): Uncomment once all k8s policies have subtype selector added +func Test_checkPolicyIsApplicable(t *testing.T) { + srcFS := testutil.CreateFS(t, map[string]string{ + "policies/pod_policy.rego": `# METADATA +# title: "Process can elevate its own privileges" +# description: "A program inside the container can elevate its own privileges and run as root, which might give the program control over the container and node." +# scope: package +# schemas: +# - input: schema["kubernetes"] +# related_resources: +# - https://kubernetes.io/docs/concepts/security/pod-security-standards/#restricted +# custom: +# id: KSV001 +# avd_id: AVD-KSV-0999 +# severity: MEDIUM +# short_code: no-self-privesc +# recommended_action: "Set 'set containers[].securityContext.allowPrivilegeEscalation' to 'false'." +# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: Pod +package builtin.kubernetes.KSV999 + +import data.lib.kubernetes +import data.lib.utils + +default checkAllowPrivilegeEscalation = false + +# getNoPrivilegeEscalationContainers returns the names of all containers which have +# securityContext.allowPrivilegeEscalation set to false. +getNoPrivilegeEscalationContainers[container] { + allContainers := kubernetes.containers[_] + allContainers.securityContext.allowPrivilegeEscalation == false + container := allContainers.name +} + +# getPrivilegeEscalationContainers returns the names of all containers which have +# securityContext.allowPrivilegeEscalation set to true or not set. 
+getPrivilegeEscalationContainers[container] { + containerName := kubernetes.containers[_].name + not getNoPrivilegeEscalationContainers[containerName] + container := kubernetes.containers[_] +} + +deny[res] { + output := getPrivilegeEscalationContainers[_] + msg := kubernetes.format(sprintf("Container '%s' of %s '%s' should set 'securityContext.allowPrivilegeEscalation' to false", [output.name, kubernetes.kind, kubernetes.name])) + res := result.new(msg, output) +} + +`, + "policies/namespace_policy.rego": `# METADATA +# title: "The default namespace should not be used" +# description: "ensure that default namespace should not be used" +# scope: package +# schemas: +# - input: schema["kubernetes"] +# related_resources: +# - https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ +# custom: +# id: KSV110 +# avd_id: AVD-KSV-0888 +# severity: LOW +# short_code: default-namespace-should-not-be-used +# recommended_action: "Ensure that namespaces are created to allow for appropriate segregation of Kubernetes resources and that all new resources are created in a specific namespace." 
+# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: Namespace +package builtin.kubernetes.KSV888 + +import data.lib.kubernetes + +default defaultNamespaceInUse = false + +defaultNamespaceInUse { + kubernetes.namespace == "default" +} + +deny[res] { + defaultNamespaceInUse + msg := sprintf("%s '%s' should not be set with 'default' namespace", [kubernetes.kind, kubernetes.name]) + res := result.new(msg, input.metadata.namespace) +} + +`, + "test/KSV001/pod.yaml": `apiVersion: v1 +kind: Pod +metadata: + name: hello-cpu-limit +spec: + containers: + - command: ["sh", "-c", "echo 'Hello' && sleep 1h"] + image: busybox + name: hello + securityContext: + capabilities: + drop: + - all +`, + }) + + scanner := NewScanner( + //options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyDirs("policies/"), + options.ScannerWithPolicyFilesystem(srcFS), + ) + results, err := scanner.ScanFS(context.TODO(), srcFS, "test/KSV001") + require.NoError(t, err) + + require.NoError(t, err) + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0].Rule() + assert.Equal(t, "Process can elevate its own privileges", failure.Summary) +} +*/ diff --git a/pkg/iac/scanners/scanner.go b/pkg/iac/scanners/scanner.go new file mode 100644 index 000000000000..4d940d029d42 --- /dev/null +++ b/pkg/iac/scanners/scanner.go @@ -0,0 +1,21 @@ +package scanners + +import ( + "context" + "io/fs" + "os" + + "github.com/aquasecurity/defsec/pkg/scan" +) + +type WriteFileFS interface { + WriteFile(name string, data []byte, perm os.FileMode) error +} + +type FSScanner interface { + // Name provides the human-readable name of the scanner e.g. "CloudFormation" + Name() string + // ScanFS scans the given filesystem for issues, starting at the provided directory. + // Use '.' to scan an entire filesystem. 
+ ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) +} diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go new file mode 100644 index 000000000000..f11844c9c301 --- /dev/null +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -0,0 +1,269 @@ +package executor + +import ( + "runtime" + "sort" + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + + adapter "github.com/aquasecurity/trivy/internal/adapters/terraform" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/rules" +) + +// Executor scans HCL blocks by running all registered rules against them +type Executor struct { + enableIgnores bool + excludedRuleIDs []string + excludeIgnoresIDs []string + includedRuleIDs []string + ignoreCheckErrors bool + workspaceName string + useSingleThread bool + debug debug.Logger + resultsFilters []func(scan.Results) scan.Results + alternativeIDProviderFunc func(string) []string + severityOverrides map[string]string + regoScanner *rego.Scanner + regoOnly bool + stateFuncs []func(*state.State) + frameworks []framework.Framework +} + +type Metrics struct { + Timings struct { + Adaptation time.Duration + RunningChecks time.Duration + } + Counts struct { + Ignored int + Failed int + Passed int + Critical int + High int + Medium int + Low int + } +} + +// New creates a new Executor +func New(options ...Option) *Executor { + s := &Executor{ + ignoreCheckErrors: true, + enableIgnores: true, + regoOnly: false, + } + for _, option := range options { + option(s) + } + return s +} + +// Find element in list +func checkInList(id string, altIDs []string, list []string) bool { + for _, codeIgnored := 
range list { + if codeIgnored == id { + return true + } + for _, alt := range altIDs { + if alt == codeIgnored { + return true + } + } + } + return false +} + +func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, error) { + + var metrics Metrics + + e.debug.Log("Adapting modules...") + adaptationTime := time.Now() + infra := adapter.Adapt(modules) + metrics.Timings.Adaptation = time.Since(adaptationTime) + e.debug.Log("Adapted %d module(s) into defsec state data.", len(modules)) + + threads := runtime.NumCPU() + if threads > 1 { + threads-- + } + if e.useSingleThread { + threads = 1 + } + e.debug.Log("Using max routines of %d", threads) + + e.debug.Log("Applying state modifier functions...") + for _, f := range e.stateFuncs { + f(infra) + } + + checksTime := time.Now() + registeredRules := rules.GetRegistered(e.frameworks...) + e.debug.Log("Initialised %d rule(s).", len(registeredRules)) + + pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) + e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) + results, err := pool.Run() + if err != nil { + return nil, metrics, err + } + metrics.Timings.RunningChecks = time.Since(checksTime) + e.debug.Log("Finished applying rules.") + + if e.enableIgnores { + e.debug.Log("Applying ignores...") + var ignores terraform.Ignores + for _, module := range modules { + ignores = append(ignores, module.Ignores()...) + } + + ignores = e.removeExcludedIgnores(ignores) + + for i, result := range results { + allIDs := []string{ + result.Rule().LongID(), + result.Rule().AVDID, + strings.ToLower(result.Rule().AVDID), + result.Rule().ShortCode, + } + allIDs = append(allIDs, result.Rule().Aliases...) + + if e.alternativeIDProviderFunc != nil { + allIDs = append(allIDs, e.alternativeIDProviderFunc(result.Rule().LongID())...) 
+ } + if ignores.Covering( + modules, + result.Metadata(), + e.workspaceName, + allIDs..., + ) != nil { + e.debug.Log("Ignored '%s' at '%s'.", result.Rule().LongID(), result.Range()) + results[i].OverrideStatus(scan.StatusIgnored) + } + } + } else { + e.debug.Log("Ignores are disabled.") + } + + results = e.updateSeverity(results) + results = e.filterResults(results) + metrics.Counts.Ignored = len(results.GetIgnored()) + metrics.Counts.Passed = len(results.GetPassed()) + metrics.Counts.Failed = len(results.GetFailed()) + + for _, res := range results.GetFailed() { + switch res.Severity() { + case severity.Critical: + metrics.Counts.Critical++ + case severity.High: + metrics.Counts.High++ + case severity.Medium: + metrics.Counts.Medium++ + case severity.Low: + metrics.Counts.Low++ + } + } + + e.sortResults(results) + return results, metrics, nil +} + +func (e *Executor) removeExcludedIgnores(ignores terraform.Ignores) terraform.Ignores { + var filteredIgnores terraform.Ignores + for _, ignore := range ignores { + if !contains(e.excludeIgnoresIDs, ignore.RuleID) { + filteredIgnores = append(filteredIgnores, ignore) + } + } + return filteredIgnores +} + +func contains(arr []string, s string) bool { + for _, elem := range arr { + if elem == s { + return true + } + } + return false +} + +func (e *Executor) updateSeverity(results []scan.Result) scan.Results { + if len(e.severityOverrides) == 0 { + return results + } + + var overriddenResults scan.Results + for _, res := range results { + for code, sev := range e.severityOverrides { + + var altMatch bool + if e.alternativeIDProviderFunc != nil { + alts := e.alternativeIDProviderFunc(res.Rule().LongID()) + for _, alt := range alts { + if alt == code { + altMatch = true + break + } + } + } + + if altMatch || res.Rule().LongID() == code { + overrides := scan.Results([]scan.Result{res}) + override := res.Rule() + override.Severity = severity.Severity(sev) + overrides.SetRule(override) + res = overrides[0] + } + } + 
overriddenResults = append(overriddenResults, res) + } + + return overriddenResults +} + +func (e *Executor) filterResults(results scan.Results) scan.Results { + includedOnly := len(e.includedRuleIDs) > 0 + for i, result := range results { + id := result.Rule().LongID() + var altIDs []string + if e.alternativeIDProviderFunc != nil { + altIDs = e.alternativeIDProviderFunc(id) + } + if (includedOnly && !checkInList(id, altIDs, e.includedRuleIDs)) || checkInList(id, altIDs, e.excludedRuleIDs) { + e.debug.Log("Excluding '%s' at '%s'.", result.Rule().LongID(), result.Range()) + results[i].OverrideStatus(scan.StatusIgnored) + } + } + + if len(e.resultsFilters) > 0 && len(results) > 0 { + before := len(results.GetIgnored()) + e.debug.Log("Applying %d results filters to %d results...", len(results), before) + for _, filter := range e.resultsFilters { + results = filter(results) + } + e.debug.Log("Filtered out %d results.", len(results.GetIgnored())-before) + } + + return results +} + +func (e *Executor) sortResults(results []scan.Result) { + sort.Slice(results, func(i, j int) bool { + switch { + case results[i].Rule().LongID() < results[j].Rule().LongID(): + return true + case results[i].Rule().LongID() > results[j].Rule().LongID(): + return false + default: + return results[i].Range().String() > results[j].Range().String() + } + }) +} diff --git a/pkg/iac/scanners/terraform/executor/executor_test.go b/pkg/iac/scanners/terraform/executor/executor_test.go new file mode 100644 index 000000000000..dac05d430a83 --- /dev/null +++ b/pkg/iac/scanners/terraform/executor/executor_test.go @@ -0,0 +1,124 @@ +package executor + +import ( + "context" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + 
"github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var panicRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{"resource"}, + RequiredLabels: []string{"problem"}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + if resourceBlock.GetAttribute("panic").IsTrue() { + panic("This is fine") + } + return + }, + }, + }, +} + +func Test_PanicInCheckNotAllowed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := New().Execute(modules) + assert.Equal(t, len(results.GetFailed()), 0) +} + +func Test_PanicInCheckAllowed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + assert.Error(t, err) +} + +func Test_PanicNotInCheckNotIncludePassed(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := 
parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + results, _, _ := New().Execute(modules) + assert.Equal(t, len(results.GetFailed()), 0) +} + +func Test_PanicNotInCheckNotIncludePassedStopOnError(t *testing.T) { + + reg := rules.Register(panicRule) + defer rules.Deregister(reg) + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": ` +resource "problem" "this" { + panic = true +} +`, + }) + + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) + err := p.ParseFS(context.TODO(), "project") + require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) + require.NoError(t, err) + + _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + assert.Error(t, err) +} diff --git a/pkg/iac/scanners/terraform/executor/option.go b/pkg/iac/scanners/terraform/executor/option.go new file mode 100644 index 000000000000..7b60905fead6 --- /dev/null +++ b/pkg/iac/scanners/terraform/executor/option.go @@ -0,0 +1,103 @@ +package executor + +import ( + "io" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/trivy/pkg/iac/rego" +) + +type Option func(s *Executor) + +func OptionWithFrameworks(frameworks ...framework.Framework) Option { + return func(s *Executor) { + s.frameworks = frameworks + } +} + +func OptionWithAlternativeIDProvider(f func(string) []string) Option { + return func(s *Executor) { + s.alternativeIDProviderFunc = f + } +} + +func OptionWithResultsFilter(f func(scan.Results) scan.Results) Option { + return func(s *Executor) { + s.resultsFilters = append(s.resultsFilters, f) + } +} + +func OptionWithSeverityOverrides(overrides map[string]string) Option { + return func(s *Executor) { + s.severityOverrides = 
overrides + } +} + +func OptionWithDebugWriter(w io.Writer) Option { + return func(s *Executor) { + s.debug = debug.New(w, "terraform", "executor") + } +} + +func OptionNoIgnores() Option { + return func(s *Executor) { + s.enableIgnores = false + } +} + +func OptionExcludeRules(ruleIDs []string) Option { + return func(s *Executor) { + s.excludedRuleIDs = ruleIDs + } +} + +func OptionExcludeIgnores(ruleIDs []string) Option { + return func(s *Executor) { + s.excludeIgnoresIDs = ruleIDs + } +} + +func OptionIncludeRules(ruleIDs []string) Option { + return func(s *Executor) { + s.includedRuleIDs = ruleIDs + } +} + +func OptionStopOnErrors(stop bool) Option { + return func(s *Executor) { + s.ignoreCheckErrors = !stop + } +} + +func OptionWithWorkspaceName(workspaceName string) Option { + return func(s *Executor) { + s.workspaceName = workspaceName + } +} + +func OptionWithSingleThread(single bool) Option { + return func(s *Executor) { + s.useSingleThread = single + } +} + +func OptionWithRegoScanner(s *rego.Scanner) Option { + return func(e *Executor) { + e.regoScanner = s + } +} + +func OptionWithStateFunc(f ...func(*state.State)) Option { + return func(e *Executor) { + e.stateFuncs = f + } +} + +func OptionWithRegoOnly(regoOnly bool) Option { + return func(e *Executor) { + e.regoOnly = regoOnly + } +} diff --git a/pkg/iac/scanners/terraform/executor/pool.go b/pkg/iac/scanners/terraform/executor/pool.go new file mode 100644 index 000000000000..aa4c5b85a058 --- /dev/null +++ b/pkg/iac/scanners/terraform/executor/pool.go @@ -0,0 +1,299 @@ +package executor + +import ( + "context" + "fmt" + "os" + "path/filepath" + runtimeDebug "runtime/debug" + "strings" + "sync" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +type Pool struct { + size int + modules terraform.Modules 
+ state *state.State + rules []types.RegisteredRule + ignoreErrors bool + rs *rego.Scanner + regoOnly bool +} + +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, state *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { + return &Pool{ + size: size, + rules: rules, + state: state, + modules: modules, + ignoreErrors: ignoreErrors, + rs: regoScanner, + regoOnly: regoOnly, + } +} + +// Run runs the job in the pool - this will only return an error if a job panics +func (p *Pool) Run() (scan.Results, error) { + + outgoing := make(chan Job, p.size*2) + + var workers []*Worker + for i := 0; i < p.size; i++ { + worker := NewWorker(outgoing) + go worker.Start() + workers = append(workers, worker) + } + + if p.rs != nil { + var basePath string + if len(p.modules) > 0 { + basePath = p.modules[0].RootPath() + } + outgoing <- ®oJob{ + state: p.state, + scanner: p.rs, + basePath: basePath, + } + } + + if !p.regoOnly { + for _, r := range p.rules { + if r.GetRule().CustomChecks.Terraform != nil && r.GetRule().CustomChecks.Terraform.Check != nil { + // run local hcl rule + for _, module := range p.modules { + mod := *module + outgoing <- &hclModuleRuleJob{ + module: &mod, + rule: r, + ignoreErrors: p.ignoreErrors, + } + } + } else { + // run defsec rule + outgoing <- &infraRuleJob{ + state: p.state, + rule: r, + ignoreErrors: p.ignoreErrors, + } + } + } + } + + close(outgoing) + + var results scan.Results + for _, worker := range workers { + results = append(results, worker.Wait()...) 
+ if err := worker.Error(); err != nil { + return nil, err + } + } + + return results, nil +} + +type Job interface { + Run() (scan.Results, error) +} + +type infraRuleJob struct { + state *state.State + rule types.RegisteredRule + + ignoreErrors bool +} + +type hclModuleRuleJob struct { + module *terraform.Module + rule types.RegisteredRule + ignoreErrors bool +} + +type regoJob struct { + state *state.State + scanner *rego.Scanner + basePath string +} + +func (h *infraRuleJob) Run() (_ scan.Results, err error) { + if h.ignoreErrors { + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + } + return h.rule.Evaluate(h.state), err +} + +func (h *hclModuleRuleJob) Run() (results scan.Results, err error) { + if h.ignoreErrors { + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + } + customCheck := h.rule.GetRule().CustomChecks.Terraform + for _, block := range h.module.GetBlocks() { + if !isCustomCheckRequiredForBlock(customCheck, block) { + continue + } + results = append(results, customCheck.Check(block, h.module)...) 
+ } + results.SetRule(h.rule.GetRule()) + return +} + +func (h *regoJob) Run() (results scan.Results, err error) { + regoResults, err := h.scanner.ScanInput(context.TODO(), rego.Input{ + Contents: h.state.ToRego(), + Path: h.basePath, + }) + if err != nil { + return nil, fmt.Errorf("rego scan error: %w", err) + } + return regoResults, nil +} + +// nolint +func isCustomCheckRequiredForBlock(custom *scan.TerraformCustomCheck, b *terraform.Block) bool { + + var found bool + for _, requiredType := range custom.RequiredTypes { + if b.Type() == requiredType { + found = true + break + } + } + if !found && len(custom.RequiredTypes) > 0 { + return false + } + + found = false + for _, requiredLabel := range custom.RequiredLabels { + if requiredLabel == "*" || (len(b.Labels()) > 0 && wildcardMatch(requiredLabel, b.TypeLabel())) { + found = true + break + } + } + if !found && len(custom.RequiredLabels) > 0 { + return false + } + + found = false + if len(custom.RequiredSources) > 0 && b.Type() == terraform.TypeModule.Name() { + if sourceAttr := b.GetAttribute("source"); sourceAttr.IsNotNil() { + values := sourceAttr.AsStringValues().AsStrings() + if len(values) == 0 { + return false + } + sourcePath := values[0] + + // resolve module source path to path relative to cwd + if strings.HasPrefix(sourcePath, ".") { + sourcePath = cleanPathRelativeToWorkingDir(filepath.Dir(b.GetMetadata().Range().GetFilename()), sourcePath) + } + + for _, requiredSource := range custom.RequiredSources { + if requiredSource == "*" || wildcardMatch(requiredSource, sourcePath) { + found = true + break + } + } + } + return found + } + + return true +} + +func cleanPathRelativeToWorkingDir(dir, path string) string { + absPath := filepath.Clean(filepath.Join(dir, path)) + wDir, err := os.Getwd() + if err != nil { + return absPath + } + relPath, err := filepath.Rel(wDir, absPath) + if err != nil { + return absPath + } + return relPath +} + +func wildcardMatch(pattern string, subject string) bool { + if 
pattern == "" { + return false + } + parts := strings.Split(pattern, "*") + var lastIndex int + for i, part := range parts { + if part == "" { + continue + } + if i == 0 { + if !strings.HasPrefix(subject, part) { + return false + } + } + if i == len(parts)-1 { + if !strings.HasSuffix(subject, part) { + return false + } + } + newIndex := strings.Index(subject, part) + if newIndex < lastIndex { + return false + } + lastIndex = newIndex + } + return true +} + +type Worker struct { + incoming <-chan Job + mu sync.Mutex + results scan.Results + panic interface{} +} + +func NewWorker(incoming <-chan Job) *Worker { + w := &Worker{ + incoming: incoming, + } + w.mu.Lock() + return w +} + +func (w *Worker) Start() { + defer w.mu.Unlock() + w.results = nil + for job := range w.incoming { + func() { + results, err := job.Run() + if err != nil { + w.panic = err + } + w.results = append(w.results, results...) + }() + } +} + +func (w *Worker) Wait() scan.Results { + w.mu.Lock() + defer w.mu.Unlock() + return w.results +} + +func (w *Worker) Error() error { + if w.panic == nil { + return nil + } + return fmt.Errorf("job failed: %s", w.panic) +} diff --git a/pkg/iac/scanners/terraform/executor/statistics.go b/pkg/iac/scanners/terraform/executor/statistics.go new file mode 100644 index 000000000000..5c2dd1784ea2 --- /dev/null +++ b/pkg/iac/scanners/terraform/executor/statistics.go @@ -0,0 +1,91 @@ +package executor + +import ( + "encoding/json" + "fmt" + "io" + "sort" + "strconv" + "strings" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/olekukonko/tablewriter" +) + +type StatisticsItem struct { + RuleID string `json:"rule_id"` + RuleDescription string `json:"rule_description"` + Links []string `json:"links"` + Count int `json:"count"` +} + +type Statistics []StatisticsItem + +type StatisticsResult struct { + Result Statistics `json:"results"` +} + +func SortStatistics(statistics Statistics) Statistics { + sort.Slice(statistics, func(i, j int) bool { + return 
statistics[i].Count > statistics[j].Count + }) + return statistics +} + +func (statistics Statistics) PrintStatisticsTable(format string, w io.Writer) error { + // lovely is the default so we keep it like that + if format != "lovely" && format != "markdown" && format != "json" { + return fmt.Errorf("you must specify only lovely, markdown or json format with --run-statistics") + } + + sorted := SortStatistics(statistics) + + if format == "json" { + result := StatisticsResult{Result: sorted} + val, err := json.MarshalIndent(result, "", " ") + if err != nil { + return err + } + + _, _ = fmt.Fprintln(w, string(val)) + + return nil + } + + table := tablewriter.NewWriter(w) + table.SetHeader([]string{"Rule ID", "Description", "Link", "Count"}) + table.SetRowLine(true) + + if format == "markdown" { + table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) + table.SetCenterSeparator("|") + } + + for _, item := range sorted { + table.Append([]string{item.RuleID, + item.RuleDescription, + strings.Join(item.Links, "\n"), + strconv.Itoa(item.Count)}) + } + + table.Render() + + return nil +} + +func AddStatisticsCount(statistics Statistics, result scan.Result) Statistics { + for i, statistic := range statistics { + if statistic.RuleID == result.Rule().LongID() { + statistics[i].Count += 1 + return statistics + } + } + statistics = append(statistics, StatisticsItem{ + RuleID: result.Rule().LongID(), + RuleDescription: result.Rule().Summary, + Links: result.Rule().Links, + Count: 1, + }) + + return statistics +} diff --git a/pkg/iac/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go new file mode 100644 index 000000000000..73e31e9f950c --- /dev/null +++ b/pkg/iac/scanners/terraform/options.go @@ -0,0 +1,211 @@ +package terraform + +import ( + "io/fs" + "strings" + + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/severity" + 
"github.com/aquasecurity/defsec/pkg/state" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" +) + +type ConfigurableTerraformScanner interface { + options.ConfigurableScanner + SetForceAllDirs(bool) + AddExecutorOptions(options ...executor.Option) + AddParserOptions(options ...options.ParserOption) +} + +func ScannerWithTFVarsPaths(paths ...string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithTFVarsPaths(paths...)) + } + } +} + +func ScannerWithAlternativeIDProvider(f func(string) []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithAlternativeIDProvider(f)) + } + } +} + +func ScannerWithSeverityOverrides(overrides map[string]string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithSeverityOverrides(overrides)) + } + } +} + +func ScannerWithNoIgnores() options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionNoIgnores()) + } + } +} + +func ScannerWithExcludedRules(ruleIDs []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionExcludeRules(ruleIDs)) + } + } +} + +func ScannerWithExcludeIgnores(ruleIDs []string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionExcludeIgnores(ruleIDs)) + } + } +} + +func ScannerWithIncludedRules(ruleIDs []string) options.ScannerOption { + return 
func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionIncludeRules(ruleIDs)) + } + } +} + +func ScannerWithStopOnRuleErrors(stop bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionStopOnErrors(stop)) + } + } +} + +func ScannerWithWorkspaceName(name string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithWorkspaceName(name)) + tf.AddExecutorOptions(executor.OptionWithWorkspaceName(name)) + } + } +} + +func ScannerWithSingleThread(single bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithSingleThread(single)) + } + } +} + +func ScannerWithAllDirectories(all bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.SetForceAllDirs(all) + } + } +} + +func ScannerWithStopOnHCLError(stop bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionStopOnHCLError(stop)) + } + } +} + +func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if !skip { + return + } + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { + for i, result := range results { + prefix := result.Range().GetSourcePrefix() + switch { + case prefix == "": + case strings.HasPrefix(prefix, "."): + default: + results[i].OverrideStatus(scan.StatusIgnored) + } + } + return results + })) + } + } +} + +func ScannerWithResultsFilter(f 
func(scan.Results) scan.Results) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(f)) + } + } +} + +func ScannerWithMinimumSeverity(minimum severity.Severity) options.ScannerOption { + min := severityAsOrdinal(minimum) + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { + for i, result := range results { + if severityAsOrdinal(result.Severity()) < min { + results[i].OverrideStatus(scan.StatusIgnored) + } + } + return results + })) + } + } +} + +func severityAsOrdinal(sev severity.Severity) int { + switch sev { + case severity.Critical: + return 4 + case severity.High: + return 3 + case severity.Medium: + return 2 + case severity.Low: + return 1 + default: + return 0 + } +} + +func ScannerWithStateFunc(f ...func(*state.State)) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddExecutorOptions(executor.OptionWithStateFunc(f...)) + } + } +} + +func ScannerWithDownloadsAllowed(allowed bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithDownloads(allowed)) + } + } +} + +func ScannerWithSkipCachedModules(b bool) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithDownloads(b)) + } + } +} + +func ScannerWithConfigsFileSystem(fsys fs.FS) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if tf, ok := s.(ConfigurableTerraformScanner); ok { + tf.AddParserOptions(parser.OptionWithConfigsFS(fsys)) + } + } +} diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go 
b/pkg/iac/scanners/terraform/parser/evaluator.go new file mode 100644 index 000000000000..41a656909e0c --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -0,0 +1,508 @@ +package parser + +import ( + "context" + "errors" + "fmt" + "io/fs" + "reflect" + "time" + + "golang.org/x/exp/slices" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" +) + +const ( + maxContextIterations = 32 +) + +type evaluator struct { + filesystem fs.FS + ctx *tfcontext.Context + blocks terraform.Blocks + inputVars map[string]cty.Value + moduleMetadata *modulesMetadata + projectRootPath string // root of the current scan + modulePath string + moduleName string + ignores terraform.Ignores + parentParser *Parser + debug debug.Logger + allowDownloads bool + skipCachedModules bool +} + +func newEvaluator( + target fs.FS, + parentParser *Parser, + projectRootPath string, + modulePath string, + workingDir string, + moduleName string, + blocks terraform.Blocks, + inputVars map[string]cty.Value, + moduleMetadata *modulesMetadata, + workspace string, + ignores []terraform.Ignore, + logger debug.Logger, + allowDownloads bool, + skipCachedModules bool, +) *evaluator { + + // create a context to store variables and make functions available + ctx := tfcontext.NewContext(&hcl.EvalContext{ + Functions: Functions(target, modulePath), + }, nil) + + // these variables are made available by terraform to each module + ctx.SetByDot(cty.StringVal(workspace), "terraform.workspace") + ctx.SetByDot(cty.StringVal(projectRootPath), "path.root") + ctx.SetByDot(cty.StringVal(modulePath), "path.module") + ctx.SetByDot(cty.StringVal(workingDir), "path.cwd") + + // each block 
gets its own scope to define variables in + for _, b := range blocks { + b.OverrideContext(ctx.NewChild()) + } + + return &evaluator{ + filesystem: target, + parentParser: parentParser, + modulePath: modulePath, + moduleName: moduleName, + projectRootPath: projectRootPath, + ctx: ctx, + blocks: blocks, + inputVars: inputVars, + moduleMetadata: moduleMetadata, + ignores: ignores, + debug: logger, + allowDownloads: allowDownloads, + } +} + +func (e *evaluator) evaluateStep() { + + e.ctx.Set(e.getValuesByBlockType("variable"), "var") + e.ctx.Set(e.getValuesByBlockType("locals"), "local") + e.ctx.Set(e.getValuesByBlockType("provider"), "provider") + + resources := e.getValuesByBlockType("resource") + for key, resource := range resources.AsValueMap() { + e.ctx.Set(resource, key) + } + + e.ctx.Set(e.getValuesByBlockType("data"), "data") + e.ctx.Set(e.getValuesByBlockType("output"), "output") +} + +// exportOutputs is used to export module outputs to the parent module +func (e *evaluator) exportOutputs() cty.Value { + data := make(map[string]cty.Value) + for _, block := range e.blocks.OfType("output") { + attr := block.GetAttribute("value") + if attr.IsNil() { + continue + } + data[block.Label()] = attr.Value() + e.debug.Log("Added module output %s=%s.", block.Label(), attr.Value().GoString()) + } + return cty.ObjectVal(data) +} + +func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS, time.Duration) { + + fsKey := types.CreateFSKey(e.filesystem) + e.debug.Log("Filesystem key is '%s'", fsKey) + + fsMap := make(map[string]fs.FS) + fsMap[fsKey] = e.filesystem + + var parseDuration time.Duration + + var lastContext hcl.EvalContext + start := time.Now() + e.debug.Log("Starting module evaluation...") + for i := 0; i < maxContextIterations; i++ { + + e.evaluateStep() + + // if ctx matches the last evaluation, we can bail, nothing left to resolve + if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { + break + } + + 
if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { + lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + } + for k, v := range e.ctx.Inner().Variables { + lastContext.Variables[k] = v + } + } + + // expand out resources and modules via count (not a typo, we do this twice so every order is processed) + e.blocks = e.expandBlocks(e.blocks) + e.blocks = e.expandBlocks(e.blocks) + + parseDuration += time.Since(start) + + e.debug.Log("Starting submodule evaluation...") + var modules terraform.Modules + for _, definition := range e.loadModules(ctx) { + submodules, outputs, err := definition.Parser.EvaluateAll(ctx) + if err != nil { + e.debug.Log("Failed to evaluate submodule '%s': %s.", definition.Name, err) + continue + } + // export module outputs + e.ctx.Set(outputs, "module", definition.Name) + modules = append(modules, submodules...) + for key, val := range definition.Parser.GetFilesystemMap() { + fsMap[key] = val + } + } + e.debug.Log("Finished processing %d submodule(s).", len(modules)) + + e.debug.Log("Starting post-submodule evaluation...") + for i := 0; i < maxContextIterations; i++ { + + e.evaluateStep() + + // if ctx matches the last evaluation, we can bail, nothing left to resolve + if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { + break + } + + if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { + lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + } + for k, v := range e.ctx.Inner().Variables { + lastContext.Variables[k] = v + } + } + + e.debug.Log("Module evaluation complete.") + parseDuration += time.Since(start) + rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores, e.isModuleLocal()) + for _, m := range modules { + m.SetParent(rootModule) + } + return append(terraform.Modules{rootModule}, modules...), fsMap, parseDuration +} + +func (e *evaluator) isModuleLocal() bool { + // the module source is empty 
only for local modules + return e.parentParser.moduleSource == "" +} + +func (e *evaluator) expandBlocks(blocks terraform.Blocks) terraform.Blocks { + return e.expandDynamicBlocks(e.expandBlockForEaches(e.expandBlockCounts(blocks))...) +} + +func (e *evaluator) expandDynamicBlocks(blocks ...*terraform.Block) terraform.Blocks { + for _, b := range blocks { + e.expandDynamicBlock(b) + } + return blocks +} + +func (e *evaluator) expandDynamicBlock(b *terraform.Block) { + for _, sub := range b.AllBlocks() { + e.expandDynamicBlock(sub) + } + for _, sub := range b.AllBlocks().OfType("dynamic") { + blockName := sub.TypeLabel() + expanded := e.expandBlockForEaches(terraform.Blocks{sub}) + for _, ex := range expanded { + if content := ex.GetBlock("content"); content.IsNotNil() { + _ = e.expandDynamicBlocks(content) + b.InjectBlock(content, blockName) + } + } + } +} + +func validateForEachArg(arg cty.Value) error { + if arg.IsNull() { + return errors.New("arg is null") + } + + ty := arg.Type() + + if !arg.IsKnown() || ty.Equals(cty.DynamicPseudoType) || arg.LengthInt() == 0 { + return nil + } + + if !(ty.IsSetType() || ty.IsObjectType() || ty.IsMapType()) { + return fmt.Errorf("%s type is not supported: arg is not set or map", ty.FriendlyName()) + } + + if ty.IsSetType() { + if !ty.ElementType().Equals(cty.String) { + return errors.New("arg is not set of strings") + } + + it := arg.ElementIterator() + for it.Next() { + key, _ := it.Element() + if key.IsNull() { + return errors.New("arg is set of strings, but contains null") + } + + if !key.IsKnown() { + return errors.New("arg is set of strings, but contains unknown value") + } + } + } + + return nil +} + +func isBlockSupportsForEachMetaArgument(block *terraform.Block) bool { + return slices.Contains([]string{"module", "resource", "data", "dynamic"}, block.Type()) +} + +func (e *evaluator) expandBlockForEaches(blocks terraform.Blocks) terraform.Blocks { + var forEachFiltered terraform.Blocks + + for _, block := range blocks { 
+ + forEachAttr := block.GetAttribute("for_each") + + if forEachAttr.IsNil() || block.IsCountExpanded() || !isBlockSupportsForEachMetaArgument(block) { + forEachFiltered = append(forEachFiltered, block) + continue + } + + forEachVal := forEachAttr.Value() + + if err := validateForEachArg(forEachVal); err != nil { + e.debug.Log(`"for_each" argument is invalid: %s`, err.Error()) + continue + } + + clones := make(map[string]cty.Value) + _ = forEachAttr.Each(func(key cty.Value, val cty.Value) { + + if !key.Type().Equals(cty.String) { + e.debug.Log( + `Invalid "for-each" argument: map key (or set value) is not a string, but %s`, + key.Type().FriendlyName(), + ) + return + } + + clone := block.Clone(key) + + ctx := clone.Context() + + e.copyVariables(block, clone) + + ctx.SetByDot(key, "each.key") + ctx.SetByDot(val, "each.value") + + ctx.Set(key, block.TypeLabel(), "key") + ctx.Set(val, block.TypeLabel(), "value") + + forEachFiltered = append(forEachFiltered, clone) + + clones[key.AsString()] = clone.Values() + metadata := clone.GetMetadata() + e.ctx.SetByDot(clone.Values(), metadata.Reference()) + }) + metadata := block.GetMetadata() + if len(clones) == 0 { + e.ctx.SetByDot(cty.EmptyTupleVal, metadata.Reference()) + } else { + e.ctx.SetByDot(cty.MapVal(clones), metadata.Reference()) + } + e.debug.Log("Expanded block '%s' into %d clones via 'for_each' attribute.", block.LocalName(), len(clones)) + } + + return forEachFiltered +} + +func isBlockSupportsCountMetaArgument(block *terraform.Block) bool { + return slices.Contains([]string{"module", "resource", "data"}, block.Type()) +} + +func (e *evaluator) expandBlockCounts(blocks terraform.Blocks) terraform.Blocks { + var countFiltered terraform.Blocks + for _, block := range blocks { + countAttr := block.GetAttribute("count") + if countAttr.IsNil() || block.IsCountExpanded() || !isBlockSupportsCountMetaArgument(block) { + countFiltered = append(countFiltered, block) + continue + } + count := 1 + countAttrVal := 
countAttr.Value() + if !countAttrVal.IsNull() && countAttrVal.IsKnown() && countAttrVal.Type() == cty.Number { + count = int(countAttr.AsNumber()) + } + + var clones []cty.Value + for i := 0; i < count; i++ { + clone := block.Clone(cty.NumberIntVal(int64(i))) + clones = append(clones, clone.Values()) + countFiltered = append(countFiltered, clone) + metadata := clone.GetMetadata() + e.ctx.SetByDot(clone.Values(), metadata.Reference()) + } + metadata := block.GetMetadata() + if len(clones) == 0 { + e.ctx.SetByDot(cty.EmptyTupleVal, metadata.Reference()) + } else { + e.ctx.SetByDot(cty.TupleVal(clones), metadata.Reference()) + } + e.debug.Log("Expanded block '%s' into %d clones via 'count' attribute.", block.LocalName(), len(clones)) + } + + return countFiltered +} + +func (e *evaluator) copyVariables(from, to *terraform.Block) { + + var fromBase string + var fromRel string + var toRel string + + switch from.Type() { + case "resource": + fromBase = from.TypeLabel() + fromRel = from.NameLabel() + toRel = to.NameLabel() + case "module": + fromBase = from.Type() + fromRel = from.TypeLabel() + toRel = to.TypeLabel() + default: + return + } + + srcValue := e.ctx.Root().Get(fromBase, fromRel) + if srcValue == cty.NilVal { + return + } + e.ctx.Root().Set(srcValue, fromBase, toRel) +} + +func (e *evaluator) evaluateVariable(b *terraform.Block) (cty.Value, error) { + if b.Label() == "" { + return cty.NilVal, errors.New("empty label - cannot resolve") + } + + attributes := b.Attributes() + if attributes == nil { + return cty.NilVal, errors.New("cannot resolve variable with no attributes") + } + + var valType cty.Type + var defaults *typeexpr.Defaults + if typeAttr, exists := attributes["type"]; exists { + ty, def, err := typeAttr.DecodeVarType() + if err != nil { + return cty.NilVal, err + } + valType = ty + defaults = def + } + + var val cty.Value + + if override, exists := e.inputVars[b.Label()]; exists { + val = override + } else if def, exists := attributes["default"]; 
exists { + val = def.NullableValue() + } else { + return cty.NilVal, errors.New("no value found") + } + + if valType != cty.NilType { + if defaults != nil { + val = defaults.Apply(val) + } + + typedVal, err := convert.Convert(val, valType) + if err != nil { + return cty.NilVal, err + } + return typedVal, nil + } + + return val, nil + +} + +func (e *evaluator) evaluateOutput(b *terraform.Block) (cty.Value, error) { + if b.Label() == "" { + return cty.NilVal, errors.New("empty label - cannot resolve") + } + + attribute := b.GetAttribute("value") + if attribute.IsNil() { + return cty.NilVal, errors.New("cannot resolve output with no attributes") + } + return attribute.Value(), nil +} + +// returns true if all evaluations were successful +func (e *evaluator) getValuesByBlockType(blockType string) cty.Value { + + blocksOfType := e.blocks.OfType(blockType) + values := make(map[string]cty.Value) + + for _, b := range blocksOfType { + + switch b.Type() { + case "variable": // variables are special in that their value comes from the "default" attribute + val, err := e.evaluateVariable(b) + if err != nil { + continue + } + values[b.Label()] = val + case "output": + val, err := e.evaluateOutput(b) + if err != nil { + continue + } + values[b.Label()] = val + case "locals", "moved", "import": + for key, val := range b.Values().AsValueMap() { + values[key] = val + } + case "provider", "module", "check": + if b.Label() == "" { + continue + } + values[b.Label()] = b.Values() + case "resource", "data": + if len(b.Labels()) < 2 { + continue + } + + blockMap, ok := values[b.Labels()[0]] + if !ok { + values[b.Labels()[0]] = cty.ObjectVal(make(map[string]cty.Value)) + blockMap = values[b.Labels()[0]] + } + + valueMap := blockMap.AsValueMap() + if valueMap == nil { + valueMap = make(map[string]cty.Value) + } + + valueMap[b.Labels()[1]] = b.Values() + values[b.Labels()[0]] = cty.ObjectVal(valueMap) + } + } + + return cty.ObjectVal(values) +} diff --git 
a/pkg/iac/scanners/terraform/parser/evaluator_test.go b/pkg/iac/scanners/terraform/parser/evaluator_test.go new file mode 100644 index 000000000000..8d3ef7b0f6e0 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/evaluator_test.go @@ -0,0 +1,94 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/zclconf/go-cty/cty" +) + +func TestValidateForEachArg(t *testing.T) { + tests := []struct { + name string + arg cty.Value + expectedError string + }{ + { + name: "empty set", + arg: cty.SetValEmpty(cty.String), + }, + { + name: "set of strings", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.StringVal("val2")}), + }, + { + name: "set of non-strings", + arg: cty.SetVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2)}), + expectedError: "is not set of strings", + }, + { + name: "set with null", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.NullVal(cty.String)}), + expectedError: "arg is set of strings, but contains null", + }, + { + name: "set with unknown", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.UnknownVal(cty.String)}), + expectedError: "arg is set of strings, but contains unknown", + }, + { + name: "set with unknown", + arg: cty.SetVal([]cty.Value{cty.StringVal("val1"), cty.UnknownVal(cty.String)}), + expectedError: "arg is set of strings, but contains unknown", + }, + { + name: "non empty map", + arg: cty.MapVal(map[string]cty.Value{ + "val1": cty.StringVal("..."), + "val2": cty.StringVal("..."), + }), + }, + { + name: "map with unknown", + arg: cty.MapVal(map[string]cty.Value{ + "val1": cty.UnknownVal(cty.String), + "val2": cty.StringVal("..."), + }), + }, + { + name: "empty obj", + arg: cty.EmptyObjectVal, + }, + { + name: "obj with strings", + arg: cty.ObjectVal(map[string]cty.Value{ + "val1": cty.StringVal("..."), + "val2": cty.StringVal("..."), + }), + }, + { + name: "null", + arg: cty.NullVal(cty.Set(cty.String)), + expectedError: "arg is null", + }, + { + name: 
"unknown", + arg: cty.UnknownVal(cty.Set(cty.String)), + }, + { + name: "dynamic", + arg: cty.DynamicVal, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateForEachArg(tt.arg) + if tt.expectedError != "" && err != nil { + assert.ErrorContains(t, err, tt.expectedError) + return + } + assert.NoError(t, err) + }) + } +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/cidr.go b/pkg/iac/scanners/terraform/parser/funcs/cidr.go new file mode 100644 index 000000000000..5f1504c0a8a1 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/cidr.go @@ -0,0 +1,212 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + "math/big" + "net" + + "github.com/apparentlymart/go-cidr/cidr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/gocty" +) + +// CidrHostFunc constructs a function that calculates a full host IP address +// within a given IP network address prefix. +var CidrHostFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + { + Name: "hostnum", + Type: cty.Number, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var hostNum *big.Int + if err := gocty.FromCtyValue(args[1], &hostNum); err != nil { + return cty.UnknownVal(cty.String), err + } + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + ip, err := cidr.HostBig(network, hostNum) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(ip.String()), nil + }, +}) + +// CidrNetmaskFunc constructs a function that converts an IPv4 address prefix given +// in CIDR notation into a subnet mask address. 
+var CidrNetmaskFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + return cty.StringVal(net.IP(network.Mask).String()), nil + }, +}) + +// CidrSubnetFunc constructs a function that calculates a subnet address within +// a given IP network address prefix. +var CidrSubnetFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + { + Name: "newbits", + Type: cty.Number, + }, + { + Name: "netnum", + Type: cty.Number, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var newbits int + if err := gocty.FromCtyValue(args[1], &newbits); err != nil { + return cty.UnknownVal(cty.String), err + } + var netnum *big.Int + if err := gocty.FromCtyValue(args[2], &netnum); err != nil { + return cty.UnknownVal(cty.String), err + } + + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) + } + + newNetwork, err := cidr.SubnetBig(network, newbits, netnum) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(newNetwork.String()), nil + }, +}) + +// CidrSubnetsFunc is similar to CidrSubnetFunc but calculates many consecutive +// subnet addresses at once, rather than just a single subnet extension. 
+var CidrSubnetsFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "prefix", + Type: cty.String, + }, + }, + VarParam: &function.Parameter{ + Name: "newbits", + Type: cty.Number, + }, + Type: function.StaticReturnType(cty.List(cty.String)), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + _, network, err := net.ParseCIDR(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid CIDR expression: %s", err) + } + startPrefixLen, _ := network.Mask.Size() + + prefixLengthArgs := args[1:] + if len(prefixLengthArgs) == 0 { + return cty.ListValEmpty(cty.String), nil + } + + var firstLength int + if err := gocty.FromCtyValue(prefixLengthArgs[0], &firstLength); err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(1, err) + } + firstLength += startPrefixLen + + retVals := make([]cty.Value, len(prefixLengthArgs)) + + current, _ := cidr.PreviousSubnet(network, firstLength) + for i, lengthArg := range prefixLengthArgs { + var length int + if err := gocty.FromCtyValue(lengthArg, &length); err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(i+1, err) + } + + if length < 1 { + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "must extend prefix by at least one bit") + } + // For portability with 32-bit systems where the subnet number + // will be a 32-bit int, we only allow extension of 32 bits in + // one call even if we're running on a 64-bit machine. + // (Of course, this is significant only for IPv6.) 
+ if length > 32 { + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "may not extend prefix by more than 32 bits") + } + length += startPrefixLen + if length > (len(network.IP) * 8) { + protocol := "IP" + switch len(network.IP) * 8 { + case 32: + protocol = "IPv4" + case 128: + protocol = "IPv6" + } + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "would extend prefix to %d bits, which is too long for an %s address", length, protocol) + } + + next, rollover := cidr.NextSubnet(current, length) + if rollover || !network.Contains(next.IP) { + // If we run out of suffix bits in the base CIDR prefix then + // NextSubnet will start incrementing the prefix bits, which + // we don't allow because it would then allocate addresses + // outside of the caller's given prefix. + return cty.UnknownVal(cty.String), function.NewArgErrorf(i+1, "not enough remaining address space for a subnet with a prefix of %d bits after %s", length, current.String()) + } + + current = next + retVals[i] = cty.StringVal(current.String()) + } + + return cty.ListVal(retVals), nil + }, +}) + +// CidrHost calculates a full host IP address within a given IP network address prefix. +func CidrHost(prefix, hostnum cty.Value) (cty.Value, error) { + return CidrHostFunc.Call([]cty.Value{prefix, hostnum}) +} + +// CidrNetmask converts an IPv4 address prefix given in CIDR notation into a subnet mask address. +func CidrNetmask(prefix cty.Value) (cty.Value, error) { + return CidrNetmaskFunc.Call([]cty.Value{prefix}) +} + +// CidrSubnet calculates a subnet address within a given IP network address prefix. +func CidrSubnet(prefix, newbits, netnum cty.Value) (cty.Value, error) { + return CidrSubnetFunc.Call([]cty.Value{prefix, newbits, netnum}) +} + +// CidrSubnets calculates a sequence of consecutive subnet prefixes that may +// be of different prefix lengths under a common base prefix. 
+func CidrSubnets(prefix cty.Value, newbits ...cty.Value) (cty.Value, error) { + args := make([]cty.Value, len(newbits)+1) + args[0] = prefix + copy(args[1:], newbits) + return CidrSubnetsFunc.Call(args) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/collection.go b/pkg/iac/scanners/terraform/parser/funcs/collection.go new file mode 100644 index 000000000000..693b8912f618 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/collection.go @@ -0,0 +1,711 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "errors" + "fmt" + "math/big" + "sort" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/function/stdlib" + "github.com/zclconf/go-cty/cty/gocty" +) + +var LengthFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowDynamicType: true, + AllowUnknown: true, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + collTy := args[0].Type() + switch { + case collTy == cty.String || collTy.IsTupleType() || collTy.IsObjectType() || collTy.IsListType() || collTy.IsMapType() || collTy.IsSetType() || collTy == cty.DynamicPseudoType: + return cty.Number, nil + default: + return cty.Number, errors.New("argument must be a string, a collection type, or a structural type") + } + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + coll := args[0] + collTy := args[0].Type() + marks := coll.Marks() + switch { + case collTy == cty.DynamicPseudoType: + return cty.UnknownVal(cty.Number).WithMarks(marks), nil + case collTy.IsTupleType(): + l := len(collTy.TupleElementTypes()) + return cty.NumberIntVal(int64(l)).WithMarks(marks), nil + case collTy.IsObjectType(): + l := len(collTy.AttributeTypes()) + return cty.NumberIntVal(int64(l)).WithMarks(marks), nil + case collTy == cty.String: + 
// We'll delegate to the cty stdlib strlen function here, because + // it deals with all of the complexities of tokenizing unicode + // grapheme clusters. + return stdlib.Strlen(coll) + case collTy.IsListType() || collTy.IsSetType() || collTy.IsMapType(): + return coll.Length(), nil + default: + // Should never happen, because of the checks in our Type func above + return cty.UnknownVal(cty.Number), errors.New("impossible value type for length(...)") + } + }, +}) + +// AllTrueFunc constructs a function that returns true if all elements of the +// list are true. If the list is empty, return true. +var AllTrueFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.List(cty.Bool), + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result := cty.True + for it := args[0].ElementIterator(); it.Next(); { + _, v := it.Element() + if !v.IsKnown() { + return cty.UnknownVal(cty.Bool), nil + } + if v.IsNull() { + return cty.False, nil + } + result = result.And(v) + if result.False() { + return cty.False, nil + } + } + return result, nil + }, +}) + +// AnyTrueFunc constructs a function that returns true if any element of the +// list is true. If the list is empty, return false. 
+var AnyTrueFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.List(cty.Bool), + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result := cty.False + var hasUnknown bool + for it := args[0].ElementIterator(); it.Next(); { + _, v := it.Element() + if !v.IsKnown() { + hasUnknown = true + continue + } + if v.IsNull() { + continue + } + result = result.Or(v) + if result.True() { + return cty.True, nil + } + } + if hasUnknown { + return cty.UnknownVal(cty.Bool), nil + } + return result, nil + }, +}) + +// CoalesceFunc constructs a function that takes any number of arguments and +// returns the first one that isn't empty. This function was copied from go-cty +// stdlib and modified so that it returns the first *non-empty* non-null element +// from a sequence, instead of merely the first non-null. +var CoalesceFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + argTypes := make([]cty.Type, len(args)) + for i, val := range args { + argTypes[i] = val.Type() + } + retType, _ := convert.UnifyUnsafe(argTypes) + if retType == cty.NilType { + return cty.NilType, errors.New("all arguments must have the same type") + } + return retType, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + for _, argVal := range args { + // We already know this will succeed because of the checks in our Type func above + argVal, _ = convert.Convert(argVal, retType) + if !argVal.IsKnown() { + return cty.UnknownVal(retType), nil + } + if argVal.IsNull() { + continue + } + if retType == cty.String && argVal.RawEquals(cty.StringVal("")) { + continue + } + + return argVal, nil + } + return cty.NilVal, 
errors.New("no non-null, non-empty-string arguments") + }, +}) + +// IndexFunc constructs a function that finds the element index for a given value in a list. +var IndexFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.DynamicPseudoType, + }, + { + Name: "value", + Type: cty.DynamicPseudoType, + }, + }, + Type: function.StaticReturnType(cty.Number), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + if !(args[0].Type().IsListType() || args[0].Type().IsTupleType()) { + return cty.NilVal, errors.New("argument must be a list or tuple") + } + + if !args[0].IsKnown() { + return cty.UnknownVal(cty.Number), nil + } + + if args[0].LengthInt() == 0 { // Easy path + return cty.NilVal, errors.New("cannot search an empty list") + } + + for it := args[0].ElementIterator(); it.Next(); { + i, v := it.Element() + eq, err := stdlib.Equal(v, args[1]) + if err != nil { + return cty.NilVal, err + } + if !eq.IsKnown() { + return cty.UnknownVal(cty.Number), nil + } + if eq.True() { + return i, nil + } + } + return cty.NilVal, errors.New("item not found") + + }, +}) + +// LookupFunc constructs a function that performs dynamic lookups of map types. 
+var LookupFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "inputMap", + Type: cty.DynamicPseudoType, + AllowMarked: true, + }, + { + Name: "key", + Type: cty.String, + AllowMarked: true, + }, + }, + VarParam: &function.Parameter{ + Name: "default", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + AllowMarked: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + if len(args) < 1 || len(args) > 3 { + return cty.NilType, fmt.Errorf("lookup() takes two or three arguments, got %d", len(args)) + } + + ty := args[0].Type() + + switch { + case ty.IsObjectType(): + if !args[1].IsKnown() { + return cty.DynamicPseudoType, nil + } + + keyVal, _ := args[1].Unmark() + key := keyVal.AsString() + if ty.HasAttribute(key) { + return args[0].GetAttr(key).Type(), nil + } else if len(args) == 3 { + // if the key isn't found but a default is provided, + // return the default type + return args[2].Type(), nil + } + return cty.DynamicPseudoType, function.NewArgErrorf(0, "the given object has no attribute %q", key) + case ty.IsMapType(): + if len(args) == 3 { + _, err = convert.Convert(args[2], ty.ElementType()) + if err != nil { + return cty.NilType, function.NewArgErrorf(2, "the default value must have the same type as the map elements") + } + } + return ty.ElementType(), nil + default: + return cty.NilType, function.NewArgErrorf(0, "lookup() requires a map as the first argument") + } + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + var defaultVal cty.Value + defaultValueSet := false + + if len(args) == 3 { + // intentionally leave default value marked + defaultVal = args[2] + defaultValueSet = true + } + + // keep track of marks from the collection and key + var markses []cty.ValueMarks + + // unmark collection, retain marks to reapply later + mapVar, mapMarks := args[0].Unmark() + markses = append(markses, mapMarks) + + // include marks on the key 
in the result + keyVal, keyMarks := args[1].Unmark() + if len(keyMarks) > 0 { + markses = append(markses, keyMarks) + } + lookupKey := keyVal.AsString() + + if !mapVar.IsKnown() { + return cty.UnknownVal(retType).WithMarks(markses...), nil + } + + if mapVar.Type().IsObjectType() { + if mapVar.Type().HasAttribute(lookupKey) { + return mapVar.GetAttr(lookupKey).WithMarks(markses...), nil + } + } else if mapVar.HasIndex(cty.StringVal(lookupKey)) == cty.True { + return mapVar.Index(cty.StringVal(lookupKey)).WithMarks(markses...), nil + } + + if defaultValueSet { + defaultVal, err = convert.Convert(defaultVal, retType) + if err != nil { + return cty.NilVal, err + } + return defaultVal.WithMarks(markses...), nil + } + + return cty.UnknownVal(cty.DynamicPseudoType).WithMarks(markses...), fmt.Errorf( + "lookup failed to find '%s'", lookupKey) + }, +}) + +// MatchkeysFunc constructs a function that constructs a new list by taking a +// subset of elements from one list whose indexes match the corresponding +// indexes of values in another list. 
+var MatchkeysFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "values", + Type: cty.List(cty.DynamicPseudoType), + }, + { + Name: "keys", + Type: cty.List(cty.DynamicPseudoType), + }, + { + Name: "searchset", + Type: cty.List(cty.DynamicPseudoType), + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + ty, _ := convert.UnifyUnsafe([]cty.Type{args[1].Type(), args[2].Type()}) + if ty == cty.NilType { + return cty.NilType, errors.New("keys and searchset must be of the same type") + } + + // the return type is based on args[0] (values) + return args[0].Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + if !args[0].IsKnown() { + return cty.UnknownVal(cty.List(retType.ElementType())), nil + } + + if args[0].LengthInt() != args[1].LengthInt() { + return cty.ListValEmpty(retType.ElementType()), errors.New("length of keys and values should be equal") + } + + output := make([]cty.Value, 0) + values := args[0] + + // Keys and searchset must be the same type. + // We can skip error checking here because we've already verified that + // they can be unified in the Type function + ty, _ := convert.UnifyUnsafe([]cty.Type{args[1].Type(), args[2].Type()}) + keys, _ := convert.Convert(args[1], ty) + searchset, _ := convert.Convert(args[2], ty) + + // if searchset is empty, return an empty list. 
+ if searchset.LengthInt() == 0 { + return cty.ListValEmpty(retType.ElementType()), nil + } + + if !values.IsWhollyKnown() || !keys.IsWhollyKnown() { + return cty.UnknownVal(retType), nil + } + + i := 0 + for it := keys.ElementIterator(); it.Next(); { + _, key := it.Element() + for iter := searchset.ElementIterator(); iter.Next(); { + _, search := iter.Element() + eq, err := stdlib.Equal(key, search) + if err != nil { + return cty.NilVal, err + } + if !eq.IsKnown() { + return cty.ListValEmpty(retType.ElementType()), nil + } + if eq.True() { + v := values.Index(cty.NumberIntVal(int64(i))) + output = append(output, v) + break + } + } + i++ + } + + // if we haven't matched any key, then output is an empty list. + if len(output) == 0 { + return cty.ListValEmpty(retType.ElementType()), nil + } + return cty.ListVal(output), nil + }, +}) + +// OneFunc returns either the first element of a one-element list, or null +// if given a zero-element list. +var OneFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.DynamicPseudoType, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + ty := args[0].Type() + switch { + case ty.IsListType() || ty.IsSetType(): + return ty.ElementType(), nil + case ty.IsTupleType(): + etys := ty.TupleElementTypes() + switch len(etys) { + case 0: + // No specific type information, so we'll ultimately return + // a null value of unknown type. + return cty.DynamicPseudoType, nil + case 1: + return etys[0], nil + } + } + return cty.NilType, function.NewArgErrorf(0, "must be a list, set, or tuple value with either zero or one elements") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + val := args[0] + ty := val.Type() + + // Our parameter spec above doesn't set AllowUnknown or AllowNull, + // so we can assume our top-level collection is both known and non-null + // in here. 
+ + switch { + case ty.IsListType() || ty.IsSetType(): + lenVal := val.Length() + if !lenVal.IsKnown() { + return cty.UnknownVal(retType), nil + } + var l int + err := gocty.FromCtyValue(lenVal, &l) + if err != nil { + // It would be very strange to get here, because that would + // suggest that the length is either not a number or isn't + // an integer, which would suggest a bug in cty. + return cty.NilVal, fmt.Errorf("invalid collection length: %s", err) + } + switch l { + case 0: + return cty.NullVal(retType), nil + case 1: + var ret cty.Value + // We'll use an iterator here because that works for both lists + // and sets, whereas indexing directly would only work for lists. + // Since we've just checked the length, we should only actually + // run this loop body once. + for it := val.ElementIterator(); it.Next(); { + _, ret = it.Element() + } + return ret, nil + } + case ty.IsTupleType(): + etys := ty.TupleElementTypes() + switch len(etys) { + case 0: + return cty.NullVal(retType), nil + case 1: + ret := val.Index(cty.NumberIntVal(0)) + return ret, nil + } + } + return cty.NilVal, function.NewArgErrorf(0, "must be a list, set, or tuple value with either zero or one elements") + }, +}) + +// SumFunc constructs a function that returns the sum of all +// numbers provided in a list +var SumFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "list", + Type: cty.DynamicPseudoType, + }, + }, + Type: function.StaticReturnType(cty.Number), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + + if !args[0].CanIterateElements() { + return cty.NilVal, function.NewArgErrorf(0, "cannot sum noniterable") + } + + if args[0].LengthInt() == 0 { // Easy path + return cty.NilVal, function.NewArgErrorf(0, "cannot sum an empty list") + } + + arg := args[0].AsValueSlice() + ty := args[0].Type() + + if !ty.IsListType() && !ty.IsSetType() && !ty.IsTupleType() { + return cty.NilVal, function.NewArgErrorf(0, fmt.Sprintf("argument 
must be list, set, or tuple. Received %s", ty.FriendlyName())) + } + + if !args[0].IsWhollyKnown() { + return cty.UnknownVal(cty.Number), nil + } + + // big.Float.Add can panic if the input values are opposing infinities, + // so we must catch that here in order to remain within + // the cty Function abstraction. + defer func() { + if r := recover(); r != nil { + if _, ok := r.(big.ErrNaN); ok { + ret = cty.NilVal + err = fmt.Errorf("can't compute sum of opposing infinities") + } else { + // not a panic we recognize + panic(r) + } + } + }() + + s := arg[0] + if s.IsNull() { + return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") + } + for _, v := range arg[1:] { + if v.IsNull() { + return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") + } + v, err = convert.Convert(v, cty.Number) + if err != nil { + return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") + } + s = s.Add(v) + } + + return s, nil + }, +}) + +// TransposeFunc constructs a function that takes a map of lists of strings and +// swaps the keys and values to produce a new map of lists of strings. 
+var TransposeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "values", + Type: cty.Map(cty.List(cty.String)), + }, + }, + Type: function.StaticReturnType(cty.Map(cty.List(cty.String))), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + inputMap := args[0] + if !inputMap.IsWhollyKnown() { + return cty.UnknownVal(retType), nil + } + + outputMap := make(map[string]cty.Value) + tmpMap := make(map[string][]string) + + for it := inputMap.ElementIterator(); it.Next(); { + inKey, inVal := it.Element() + for iter := inVal.ElementIterator(); iter.Next(); { + _, val := iter.Element() + if !val.Type().Equals(cty.String) { + return cty.MapValEmpty(cty.List(cty.String)), errors.New("input must be a map of lists of strings") + } + + outKey := val.AsString() + if _, ok := tmpMap[outKey]; !ok { + tmpMap[outKey] = make([]string, 0) + } + outVal := tmpMap[outKey] + outVal = append(outVal, inKey.AsString()) + sort.Strings(outVal) + tmpMap[outKey] = outVal + } + } + + for outKey, outVal := range tmpMap { + values := make([]cty.Value, 0) + for _, v := range outVal { + values = append(values, cty.StringVal(v)) + } + outputMap[outKey] = cty.ListVal(values) + } + + if len(outputMap) == 0 { + return cty.MapValEmpty(cty.List(cty.String)), nil + } + + return cty.MapVal(outputMap), nil + }, +}) + +// ListFunc constructs a function that takes an arbitrary number of arguments +// and returns a list containing those values in the same order. +// +// This function is deprecated in Terraform v0.12 +var ListFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + return cty.DynamicPseudoType, fmt.Errorf("the \"list\" function was deprecated in Terraform v0.12 and is no longer available; use tolist([ ... 
]) syntax to write a literal list") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + return cty.DynamicVal, fmt.Errorf("the \"list\" function was deprecated in Terraform v0.12 and is no longer available; use tolist([ ... ]) syntax to write a literal list") + }, +}) + +// MapFunc constructs a function that takes an even number of arguments and +// returns a map whose elements are constructed from consecutive pairs of arguments. +// +// This function is deprecated in Terraform v0.12 +var MapFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + VarParam: &function.Parameter{ + Name: "vals", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowDynamicType: true, + AllowNull: true, + }, + Type: func(args []cty.Value) (ret cty.Type, err error) { + return cty.DynamicPseudoType, fmt.Errorf("the \"map\" function was deprecated in Terraform v0.12 and is no longer available; use tomap({ ... }) syntax to write a literal map") + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + return cty.DynamicVal, fmt.Errorf("the \"map\" function was deprecated in Terraform v0.12 and is no longer available; use tomap({ ... }) syntax to write a literal map") + }, +}) + +// Length returns the number of elements in the given collection or number of +// Unicode characters in the given string. +func Length(collection cty.Value) (cty.Value, error) { + return LengthFunc.Call([]cty.Value{collection}) +} + +// AllTrue returns true if all elements of the list are true. If the list is empty, +// return true. +func AllTrue(collection cty.Value) (cty.Value, error) { + return AllTrueFunc.Call([]cty.Value{collection}) +} + +// AnyTrue returns true if any element of the list is true. If the list is empty, +// return false. 
+func AnyTrue(collection cty.Value) (cty.Value, error) {
+	return AnyTrueFunc.Call([]cty.Value{collection})
+}
+
+// Coalesce takes any number of arguments and returns the first one that isn't empty.
+func Coalesce(args ...cty.Value) (cty.Value, error) {
+	return CoalesceFunc.Call(args)
+}
+
+// Index finds the element index for a given value in a list.
+func Index(list, value cty.Value) (cty.Value, error) {
+	return IndexFunc.Call([]cty.Value{list, value})
+}
+
+// List takes any number of list arguments and returns a list containing those
+//
+// values in the same order.
+func List(args ...cty.Value) (cty.Value, error) {
+	return ListFunc.Call(args)
+}
+
+// Lookup performs a dynamic lookup into a map.
+// There are two required arguments, map and key, plus an optional default,
+// which is a value to return if no key is found in map.
+func Lookup(args ...cty.Value) (cty.Value, error) {
+	return LookupFunc.Call(args)
+}
+
+// Map takes an even number of arguments and returns a map whose elements are constructed
+// from consecutive pairs of arguments.
+func Map(args ...cty.Value) (cty.Value, error) {
+	return MapFunc.Call(args)
+}
+
+// Matchkeys constructs a new list by taking a subset of elements from one list
+// whose indexes match the corresponding indexes of values in another list.
+func Matchkeys(values, keys, searchset cty.Value) (cty.Value, error) {
+	return MatchkeysFunc.Call([]cty.Value{values, keys, searchset})
+}
+
+// One returns either the first element of a one-element list, or null
+// if given a zero-element list.
+func One(list cty.Value) (cty.Value, error) {
+	return OneFunc.Call([]cty.Value{list})
+}
+
+// Sum adds numbers in a list, set, or tuple.
+func Sum(list cty.Value) (cty.Value, error) {
+	return SumFunc.Call([]cty.Value{list})
+}
+
+// Transpose takes a map of lists of strings and swaps the keys and values to
+// produce a new map of lists of strings.
+func Transpose(values cty.Value) (cty.Value, error) { + return TransposeFunc.Call([]cty.Value{values}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/conversion.go b/pkg/iac/scanners/terraform/parser/funcs/conversion.go new file mode 100644 index 000000000000..02fb3164a6f0 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/conversion.go @@ -0,0 +1,223 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + "sort" + "strconv" + "strings" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" +) + +// MakeToFunc constructs a "to..." function, like "tostring", which converts +// its argument to a specific type or type kind. +// +// The given type wantTy can be any type constraint that cty's "convert" package +// would accept. In particular, this means that you can pass +// cty.List(cty.DynamicPseudoType) to mean "list of any single type", which +// will then cause cty to attempt to unify all of the element types when given +// a tuple. +func MakeToFunc(wantTy cty.Type) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "v", + // We use DynamicPseudoType rather than wantTy here so that + // all values will pass through the function API verbatim and + // we can handle the conversion logic within the Type and + // Impl functions. This allows us to customize the error + // messages to be more appropriate for an explicit type + // conversion, whereas the cty function system produces + // messages aimed at _implicit_ type conversions. 
+ Type: cty.DynamicPseudoType, + AllowNull: true, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + gotTy := args[0].Type() + if gotTy.Equals(wantTy) { + return wantTy, nil + } + conv := convert.GetConversionUnsafe(args[0].Type(), wantTy) + if conv == nil { + // We'll use some specialized errors for some trickier cases, + // but most we can handle in a simple way. + switch { + case gotTy.IsTupleType() && wantTy.IsTupleType(): + return cty.NilType, function.NewArgErrorf(0, "incompatible tuple type for conversion: %s", convert.MismatchMessage(gotTy, wantTy)) + case gotTy.IsObjectType() && wantTy.IsObjectType(): + return cty.NilType, function.NewArgErrorf(0, "incompatible object type for conversion: %s", convert.MismatchMessage(gotTy, wantTy)) + default: + return cty.NilType, function.NewArgErrorf(0, "cannot convert %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + } + } + // If a conversion is available then everything is fine. + return wantTy, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + // We didn't set "AllowUnknown" on our argument, so it is guaranteed + // to be known here but may still be null. + ret, err := convert.Convert(args[0], retType) + if err != nil { + val, _ := args[0].UnmarkDeep() + // Because we used GetConversionUnsafe above, conversion can + // still potentially fail in here. For example, if the user + // asks to convert the string "a" to bool then we'll + // optimistically permit it during type checking but fail here + // once we note that the value isn't either "true" or "false". + gotTy := val.Type() + switch { + case Contains(args[0], MarkedSensitive): + // Generic message so we won't inadvertently disclose + // information about sensitive values. 
+ return cty.NilVal, function.NewArgErrorf(0, "cannot convert this sensitive %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + + case gotTy == cty.String && wantTy == cty.Bool: + what := "string" + if !val.IsNull() { + what = strconv.Quote(val.AsString()) + } + return cty.NilVal, function.NewArgErrorf(0, `cannot convert %s to bool; only the strings "true" or "false" are allowed`, what) + case gotTy == cty.String && wantTy == cty.Number: + what := "string" + if !val.IsNull() { + what = strconv.Quote(val.AsString()) + } + return cty.NilVal, function.NewArgErrorf(0, `cannot convert %s to number; given string must be a decimal representation of a number`, what) + default: + return cty.NilVal, function.NewArgErrorf(0, "cannot convert %s to %s", gotTy.FriendlyName(), wantTy.FriendlyNameForConstraint()) + } + } + return ret, nil + }, + }) +} + +var TypeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowDynamicType: true, + AllowUnknown: true, + AllowNull: true, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(TypeString(args[0].Type())).Mark(MarkedRaw), nil + }, +}) + +// Modified copy of TypeString from go-cty: +// https://github.com/zclconf/go-cty-debug/blob/master/ctydebug/type_string.go +// +// TypeString returns a string representation of a given type that is +// reminiscent of Go syntax calling into the cty package but is mainly +// intended for easy human inspection of values in tests, debug output, etc. +// +// The resulting string will include newlines and indentation in order to +// increase the readability of complex structures. It always ends with a +// newline, so you can print this result directly to your output. 
+func TypeString(ty cty.Type) string { + var b strings.Builder + writeType(ty, &b, 0) + return b.String() +} + +func writeType(ty cty.Type, b *strings.Builder, indent int) { + switch { + case ty == cty.NilType: + b.WriteString("nil") + return + case ty.IsObjectType(): + atys := ty.AttributeTypes() + if len(atys) == 0 { + b.WriteString("object({})") + return + } + attrNames := make([]string, 0, len(atys)) + for name := range atys { + attrNames = append(attrNames, name) + } + sort.Strings(attrNames) + b.WriteString("object({\n") + indent++ + for _, name := range attrNames { + aty := atys[name] + b.WriteString(indentSpaces(indent)) + fmt.Fprintf(b, "%s: ", name) + writeType(aty, b, indent) + b.WriteString(",\n") + } + indent-- + b.WriteString(indentSpaces(indent)) + b.WriteString("})") + case ty.IsTupleType(): + etys := ty.TupleElementTypes() + if len(etys) == 0 { + b.WriteString("tuple([])") + return + } + b.WriteString("tuple([\n") + indent++ + for _, ety := range etys { + b.WriteString(indentSpaces(indent)) + writeType(ety, b, indent) + b.WriteString(",\n") + } + indent-- + b.WriteString(indentSpaces(indent)) + b.WriteString("])") + case ty.IsCollectionType(): + ety := ty.ElementType() + switch { + case ty.IsListType(): + b.WriteString("list(") + case ty.IsMapType(): + b.WriteString("map(") + case ty.IsSetType(): + b.WriteString("set(") + default: + // At the time of writing there are no other collection types, + // but we'll be robust here and just pass through the GoString + // of anything we don't recognize. + b.WriteString(ty.FriendlyName()) + return + } + // Because object and tuple types render split over multiple + // lines, a collection type container around them can end up + // being hard to see when scanning, so we'll generate some extra + // indentation to make a collection of structural type more visually + // distinct from the structural type alone. 
+ complexElem := ety.IsObjectType() || ety.IsTupleType() + if complexElem { + indent++ + b.WriteString("\n") + b.WriteString(indentSpaces(indent)) + } + writeType(ty.ElementType(), b, indent) + if complexElem { + indent-- + b.WriteString(",\n") + b.WriteString(indentSpaces(indent)) + } + b.WriteString(")") + default: + // For any other type we'll just use its GoString and assume it'll + // follow the usual GoString conventions. + b.WriteString(ty.FriendlyName()) + } +} + +func indentSpaces(level int) string { + return strings.Repeat(" ", level) +} + +func Type(input []cty.Value) (cty.Value, error) { + return TypeFunc.Call(input) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/crypto.go b/pkg/iac/scanners/terraform/parser/funcs/crypto.go new file mode 100644 index 000000000000..424c4c184763 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/crypto.go @@ -0,0 +1,335 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "crypto/md5" + "crypto/rsa" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/asn1" + "encoding/base64" + "encoding/hex" + "fmt" + "hash" + "io" + "io/fs" + "strings" + + uuidv5 "github.com/google/uuid" + uuid "github.com/hashicorp/go-uuid" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/gocty" + "golang.org/x/crypto/bcrypt" + "golang.org/x/crypto/ssh" +) + +var UUIDFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + result, err := uuid.GenerateUUID() + if err != nil { + return cty.UnknownVal(cty.String), err + } + return cty.StringVal(result), nil + }, +}) + +var UUIDV5Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "namespace", + Type: cty.String, + }, + { + Name: "name", + Type: cty.String, + }, + }, + Type: 
function.StaticReturnType(cty.String),
+	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
+		var namespace uuidv5.UUID
+		switch {
+		case args[0].AsString() == "dns":
+			namespace = uuidv5.NameSpaceDNS
+		case args[0].AsString() == "url":
+			namespace = uuidv5.NameSpaceURL
+		case args[0].AsString() == "oid":
+			namespace = uuidv5.NameSpaceOID
+		case args[0].AsString() == "x500":
+			namespace = uuidv5.NameSpaceX500
+		default:
+			if namespace, err = uuidv5.Parse(args[0].AsString()); err != nil {
+				return cty.UnknownVal(cty.String), fmt.Errorf("uuidv5() doesn't support namespace %s (%v)", args[0].AsString(), err)
+			}
+		}
+		val := args[1].AsString()
+		return cty.StringVal(uuidv5.NewSHA1(namespace, []byte(val)).String()), nil
+	},
+})
+
+// Base64Sha256Func constructs a function that computes the SHA256 hash of a given string
+// and encodes it with Base64.
+var Base64Sha256Func = makeStringHashFunction(sha256.New, base64.StdEncoding.EncodeToString)
+
+// MakeFileBase64Sha256Func constructs a function that is like Base64Sha256Func but reads the
+// contents of a file rather than hashing a given literal string.
+func MakeFileBase64Sha256Func(target fs.FS, baseDir string) function.Function {
+	return makeFileHashFunction(target, baseDir, sha256.New, base64.StdEncoding.EncodeToString)
+}
+
+// Base64Sha512Func constructs a function that computes the SHA512 hash of a given string
+// and encodes it with Base64.
+var Base64Sha512Func = makeStringHashFunction(sha512.New, base64.StdEncoding.EncodeToString)
+
+// MakeFileBase64Sha512Func constructs a function that is like Base64Sha512Func but reads the
+// contents of a file rather than hashing a given literal string.
+func MakeFileBase64Sha512Func(target fs.FS, baseDir string) function.Function {
+	return makeFileHashFunction(target, baseDir, sha512.New, base64.StdEncoding.EncodeToString)
+}
+
+// BcryptFunc constructs a function that computes a hash of the given string using the Blowfish cipher.
+var BcryptFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + VarParam: &function.Parameter{ + Name: "cost", + Type: cty.Number, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + defaultCost := 10 + + if len(args) > 1 { + var val int + if err := gocty.FromCtyValue(args[1], &val); err != nil { + return cty.UnknownVal(cty.String), err + } + defaultCost = val + } + + if len(args) > 2 { + return cty.UnknownVal(cty.String), fmt.Errorf("bcrypt() takes no more than two arguments") + } + + input := args[0].AsString() + out, err := bcrypt.GenerateFromPassword([]byte(input), defaultCost) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("error occurred generating password %s", err.Error()) + } + + return cty.StringVal(string(out)), nil + }, +}) + +// Md5Func constructs a function that computes the MD5 hash of a given string and encodes it with hexadecimal digits. +var Md5Func = makeStringHashFunction(md5.New, hex.EncodeToString) + +// MakeFileMd5Func constructs a function that is like Md5Func but reads the +// contents of a file rather than hashing a given literal string. +func MakeFileMd5Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, md5.New, hex.EncodeToString) +} + +// RsaDecryptFunc constructs a function that decrypts an RSA-encrypted ciphertext. 
+var RsaDecryptFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "ciphertext", + Type: cty.String, + }, + { + Name: "privatekey", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + s := args[0].AsString() + key := args[1].AsString() + + b, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "failed to decode input %q: cipher text must be base64-encoded", s) + } + + rawKey, err := ssh.ParseRawPrivateKey([]byte(key)) + if err != nil { + var errStr string + switch e := err.(type) { + case asn1.SyntaxError: + errStr = strings.ReplaceAll(e.Error(), "asn1: syntax error", "invalid ASN1 data in the given private key") + case asn1.StructuralError: + errStr = strings.ReplaceAll(e.Error(), "asn1: structure error", "invalid ASN1 data in the given private key") + default: + errStr = fmt.Sprintf("invalid private key: %s", e) + } + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, errStr) + } + privateKey, ok := rawKey.(*rsa.PrivateKey) + if !ok { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "invalid private key type %t", rawKey) + } + + out, err := rsa.DecryptPKCS1v15(nil, privateKey, b) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to decrypt: %s", err) + } + + return cty.StringVal(string(out)), nil + }, +}) + +// Sha1Func constructs a function that computes the SHA1 hash of a given string +// and encodes it with hexadecimal digits. +var Sha1Func = makeStringHashFunction(sha1.New, hex.EncodeToString) + +// MakeFileSha1Func constructs a function that is like Sha1Func but reads the +// contents of a file rather than hashing a given literal string. 
+func MakeFileSha1Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha1.New, hex.EncodeToString) +} + +// Sha256Func constructs a function that computes the SHA256 hash of a given string +// and encodes it with hexadecimal digits. +var Sha256Func = makeStringHashFunction(sha256.New, hex.EncodeToString) + +// MakeFileSha256Func constructs a function that is like Sha256Func but reads the +// contents of a file rather than hashing a given literal string. +func MakeFileSha256Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha256.New, hex.EncodeToString) +} + +// Sha512Func constructs a function that computes the SHA512 hash of a given string +// and encodes it with hexadecimal digits. +var Sha512Func = makeStringHashFunction(sha512.New, hex.EncodeToString) + +// MakeFileSha512Func constructs a function that is like Sha512Func but reads the +// contents of a file rather than hashing a given literal string. 
+func MakeFileSha512Func(target fs.FS, baseDir string) function.Function { + return makeFileHashFunction(target, baseDir, sha512.New, hex.EncodeToString) +} + +func makeStringHashFunction(hf func() hash.Hash, enc func([]byte) string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + s := args[0].AsString() + h := hf() + h.Write([]byte(s)) + rv := enc(h.Sum(nil)) + return cty.StringVal(rv), nil + }, + }) +} + +func makeFileHashFunction(target fs.FS, baseDir string, hf func() hash.Hash, enc func([]byte) string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + path := args[0].AsString() + f, err := openFile(target, baseDir, path) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + h := hf() + _, err = io.Copy(h, f) + if err != nil { + return cty.UnknownVal(cty.String), err + } + rv := enc(h.Sum(nil)) + return cty.StringVal(rv), nil + }, + }) +} + +// UUID generates and returns a Type-4 UUID in the standard hexadecimal string +// format. +// +// This is not a pure function: it will generate a different result for each +// call. It must therefore be registered as an impure function in the function +// table in the "lang" package. +func UUID() (cty.Value, error) { + return UUIDFunc.Call(nil) +} + +// UUIDV5 generates and returns a Type-5 UUID in the standard hexadecimal string +// format. +func UUIDV5(namespace cty.Value, name cty.Value) (cty.Value, error) { + return UUIDV5Func.Call([]cty.Value{namespace, name}) +} + +// Base64Sha256 computes the SHA256 hash of a given string and encodes it with +// Base64. 
+// +// The given string is first encoded as UTF-8 and then the SHA256 algorithm is applied +// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning. +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +func Base64Sha256(str cty.Value) (cty.Value, error) { + return Base64Sha256Func.Call([]cty.Value{str}) +} + +// Base64Sha512 computes the SHA512 hash of a given string and encodes it with +// Base64. +// +// The given string is first encoded as UTF-8 and then the SHA256 algorithm is applied +// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning. +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4 +func Base64Sha512(str cty.Value) (cty.Value, error) { + return Base64Sha512Func.Call([]cty.Value{str}) +} + +// Bcrypt computes a hash of the given string using the Blowfish cipher, +// returning a string in the Modular Crypt Format +// usually expected in the shadow password file on many Unix systems. +func Bcrypt(str cty.Value, cost ...cty.Value) (cty.Value, error) { + args := make([]cty.Value, len(cost)+1) + args[0] = str + copy(args[1:], cost) + return BcryptFunc.Call(args) +} + +// Md5 computes the MD5 hash of a given string and encodes it with hexadecimal digits. +func Md5(str cty.Value) (cty.Value, error) { + return Md5Func.Call([]cty.Value{str}) +} + +// RsaDecrypt decrypts an RSA-encrypted ciphertext, returning the corresponding +// cleartext. +func RsaDecrypt(ciphertext, privatekey cty.Value) (cty.Value, error) { + return RsaDecryptFunc.Call([]cty.Value{ciphertext, privatekey}) +} + +// Sha1 computes the SHA1 hash of a given string and encodes it with hexadecimal digits. +func Sha1(str cty.Value) (cty.Value, error) { + return Sha1Func.Call([]cty.Value{str}) +} + +// Sha256 computes the SHA256 hash of a given string and encodes it with hexadecimal digits. 
+func Sha256(str cty.Value) (cty.Value, error) { + return Sha256Func.Call([]cty.Value{str}) +} + +// Sha512 computes the SHA512 hash of a given string and encodes it with hexadecimal digits. +func Sha512(str cty.Value) (cty.Value, error) { + return Sha512Func.Call([]cty.Value{str}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/datetime.go b/pkg/iac/scanners/terraform/parser/funcs/datetime.go new file mode 100644 index 000000000000..253e59eef018 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/datetime.go @@ -0,0 +1,71 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "time" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// TimestampFunc constructs a function that returns a string representation of the current date and time. +var TimestampFunc = function.New(&function.Spec{ + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(time.Now().UTC().Format(time.RFC3339)), nil + }, +}) + +// TimeAddFunc constructs a function that adds a duration to a timestamp, returning a new timestamp. +var TimeAddFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "timestamp", + Type: cty.String, + }, + { + Name: "duration", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + ts, err := time.Parse(time.RFC3339, args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), err + } + duration, err := time.ParseDuration(args[1].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(ts.Add(duration).Format(time.RFC3339)), nil + }, +}) + +// Timestamp returns a string representation of the current date and time. 
+// +// In the Terraform language, timestamps are conventionally represented as +// strings using RFC 3339 "Date and Time format" syntax, and so timestamp +// returns a string in this format. +func Timestamp() (cty.Value, error) { + return TimestampFunc.Call([]cty.Value{}) +} + +// TimeAdd adds a duration to a timestamp, returning a new timestamp. +// +// In the Terraform language, timestamps are conventionally represented as +// strings using RFC 3339 "Date and Time format" syntax. Timeadd requires +// the timestamp argument to be a string conforming to this syntax. +// +// `duration` is a string representation of a time difference, consisting of +// sequences of number and unit pairs, like `"1.5h"` or `1h30m`. The accepted +// units are `ns`, `us` (or `µs`), `"ms"`, `"s"`, `"m"`, and `"h"`. The first +// number may be negative to indicate a negative duration, like `"-2h5m"`. +// +// The result is a string, also in RFC 3339 format, representing the result +// of adding the given direction to the given timestamp. +func TimeAdd(timestamp cty.Value, duration cty.Value) (cty.Value, error) { + return TimeAddFunc.Call([]cty.Value{timestamp, duration}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/defaults.go b/pkg/iac/scanners/terraform/parser/funcs/defaults.go new file mode 100644 index 000000000000..4467b81e35ce --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/defaults.go @@ -0,0 +1,288 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" +) + +// DefaultsFunc is a helper function for substituting default values in +// place of null values in a given data structure. +// +// See the documentation for function Defaults for more information. 
+var DefaultsFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "input", + Type: cty.DynamicPseudoType, + AllowNull: true, + AllowMarked: true, + }, + { + Name: "defaults", + Type: cty.DynamicPseudoType, + AllowMarked: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + // The result type is guaranteed to be the same as the input type, + // since all we're doing is replacing null values with non-null + // values of the same type. + retType := args[0].Type() + defaultsType := args[1].Type() + + // This function is aimed at filling in object types or collections + // of object types where some of the attributes might be null, so + // it doesn't make sense to use a primitive type directly with it. + // (The "coalesce" function may be appropriate for such cases.) + if retType.IsPrimitiveType() { + // This error message is a bit of a fib because we can actually + // apply defaults to tuples too, but we expect that to be so + // unusual as to not be worth mentioning here, because mentioning + // it would require using some less-well-known Terraform language + // terminology in the message (tuple types, structural types). + return cty.DynamicPseudoType, function.NewArgErrorf(1, "only object types and collections of object types can have defaults applied") + } + + defaultsPath := make(cty.Path, 0, 4) // some capacity so that most structures won't reallocate + if err := defaultsAssertSuitableFallback(retType, defaultsType, defaultsPath); err != nil { + errMsg := err.Error() + return cty.DynamicPseudoType, function.NewArgErrorf(1, "%s", errMsg) + } + + return retType, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + if args[0].Type().HasDynamicTypes() { + // If the types our input object aren't known yet for some reason + // then we'll defer all of our work here, because our + // interpretation of the defaults depends on the types in + // the input. 
+ return cty.UnknownVal(retType), nil + } + + v := defaultsApply(args[0], args[1]) + return v, nil + }, +}) + +func defaultsApply(input, fallback cty.Value) cty.Value { + wantTy := input.Type() + + umInput, inputMarks := input.Unmark() + umFb, fallbackMarks := fallback.Unmark() + + // If neither are known, we very conservatively return an unknown value + // with the union of marks on both input and default. + if !(umInput.IsKnown() && umFb.IsKnown()) { + return cty.UnknownVal(wantTy).WithMarks(inputMarks).WithMarks(fallbackMarks) + } + + // For the rest of this function we're assuming that the given defaults + // will always be valid, because we expect to have caught any problems + // during the type checking phase. Any inconsistencies that reach here are + // therefore considered to be implementation bugs, and so will panic. + + // Our strategy depends on the kind of type we're working with. + switch { + case wantTy.IsPrimitiveType(): + // For leaf primitive values the rule is relatively simple: use the + // input if it's non-null, or fallback if input is null. + if !umInput.IsNull() { + return input + } + v, err := convert.Convert(umFb, wantTy) + if err != nil { + // Should not happen because we checked in defaultsAssertSuitableFallback + panic(err.Error()) + } + return v.WithMarks(fallbackMarks) + + case wantTy.IsObjectType(): + // For structural types, a null input value must be passed through. We + // do not apply default values for missing optional structural values, + // only their contents. + // + // We also pass through the input if the fallback value is null. This + // can happen if the given defaults do not include a value for this + // attribute. 
+ if umInput.IsNull() || umFb.IsNull() { + return input + } + atys := wantTy.AttributeTypes() + ret := map[string]cty.Value{} + for attr, aty := range atys { + inputSub := umInput.GetAttr(attr) + fallbackSub := cty.NullVal(aty) + if umFb.Type().HasAttribute(attr) { + fallbackSub = umFb.GetAttr(attr) + } + ret[attr] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) + } + return cty.ObjectVal(ret) + + case wantTy.IsTupleType(): + // For structural types, a null input value must be passed through. We + // do not apply default values for missing optional structural values, + // only their contents. + // + // We also pass through the input if the fallback value is null. This + // can happen if the given defaults do not include a value for this + // attribute. + if umInput.IsNull() || umFb.IsNull() { + return input + } + + l := wantTy.Length() + ret := make([]cty.Value, l) + for i := 0; i < l; i++ { + inputSub := umInput.Index(cty.NumberIntVal(int64(i))) + fallbackSub := umFb.Index(cty.NumberIntVal(int64(i))) + ret[i] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) + } + return cty.TupleVal(ret) + + case wantTy.IsCollectionType(): + // For collection types we apply a single fallback value to each + // element of the input collection, because in the situations this + // function is intended for we assume that the number of elements + // is the caller's decision, and so we'll just apply the same defaults + // to all of the elements. 
+ ety := wantTy.ElementType() + switch { + case wantTy.IsMapType(): + newVals := map[string]cty.Value{} + + if !umInput.IsNull() { + for it := umInput.ElementIterator(); it.Next(); { + k, v := it.Element() + newVals[k.AsString()] = defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) + } + } + + if len(newVals) == 0 { + return cty.MapValEmpty(ety) + } + return cty.MapVal(newVals) + case wantTy.IsListType(), wantTy.IsSetType(): + var newVals []cty.Value + + if !umInput.IsNull() { + for it := umInput.ElementIterator(); it.Next(); { + _, v := it.Element() + newV := defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) + newVals = append(newVals, newV) + } + } + + if len(newVals) == 0 { + if wantTy.IsSetType() { + return cty.SetValEmpty(ety) + } + return cty.ListValEmpty(ety) + } + if wantTy.IsSetType() { + return cty.SetVal(newVals) + } + return cty.ListVal(newVals) + default: + // There are no other collection types, so this should not happen + panic(fmt.Sprintf("invalid collection type %#v", wantTy)) + } + default: + // We should've caught anything else in defaultsAssertSuitableFallback, + // so this should not happen. + panic(fmt.Sprintf("invalid target type %#v", wantTy)) + } +} + +func defaultsAssertSuitableFallback(wantTy, fallbackTy cty.Type, fallbackPath cty.Path) error { + // If the type we want is a collection type then we need to keep peeling + // away collection type wrappers until we find the non-collection-type + // that's underneath, which is what the fallback will actually be applied + // to. + inCollection := false + for wantTy.IsCollectionType() { + wantTy = wantTy.ElementType() + inCollection = true + } + + switch { + case wantTy.IsPrimitiveType(): + // The fallback is valid if it's equal to or convertible to what we want. 
+ if fallbackTy.Equals(wantTy) { + return nil + } + conversion := convert.GetConversion(fallbackTy, wantTy) + if conversion == nil { + msg := convert.MismatchMessage(fallbackTy, wantTy) + return fallbackPath.NewErrorf("invalid default value for %s: %s", wantTy.FriendlyName(), msg) + } + return nil + case wantTy.IsObjectType(): + if !fallbackTy.IsObjectType() { + if inCollection { + return fallbackPath.NewErrorf("the default value for a collection of an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) + } + return fallbackPath.NewErrorf("the default value for an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) + } + for attr, wantAty := range wantTy.AttributeTypes() { + if !fallbackTy.HasAttribute(attr) { + continue // it's always okay to not have a default value + } + fallbackSubpath := fallbackPath.GetAttr(attr) + fallbackSubTy := fallbackTy.AttributeType(attr) + err := defaultsAssertSuitableFallback(wantAty, fallbackSubTy, fallbackSubpath) + if err != nil { + return err + } + } + for attr := range fallbackTy.AttributeTypes() { + if !wantTy.HasAttribute(attr) { + fallbackSubpath := fallbackPath.GetAttr(attr) + return fallbackSubpath.NewErrorf("target type does not expect an attribute named %q", attr) + } + } + return nil + case wantTy.IsTupleType(): + if !fallbackTy.IsTupleType() { + if inCollection { + return fallbackPath.NewErrorf("the default value for a collection of a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) + } + return fallbackPath.NewErrorf("the default value for a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) + } + wantEtys := wantTy.TupleElementTypes() + fallbackEtys := fallbackTy.TupleElementTypes() + if got, want := len(wantEtys), len(fallbackEtys); got != want { + return fallbackPath.NewErrorf("the default value for a tuple type of length %d must also have length %d, not %d", want, want, got) + } + for i := 0; i < len(wantEtys); 
i++ { + fallbackSubpath := fallbackPath.IndexInt(i) + wantSubTy := wantEtys[i] + fallbackSubTy := fallbackEtys[i] + err := defaultsAssertSuitableFallback(wantSubTy, fallbackSubTy, fallbackSubpath) + if err != nil { + return err + } + } + return nil + default: + // No other types are supported right now. + return fallbackPath.NewErrorf("cannot apply defaults to %s", wantTy.FriendlyName()) + } +} + +// Defaults is a helper function for substituting default values in +// place of null values in a given data structure. +// +// This is primarily intended for use with a module input variable that +// has an object type constraint (or a collection thereof) that has optional +// attributes, so that the receiver of a value that omits those attributes +// can insert non-null default values in place of the null values caused by +// omitting the attributes. +func Defaults(input, defaults cty.Value) (cty.Value, error) { + return DefaultsFunc.Call([]cty.Value{input, defaults}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/encoding.go b/pkg/iac/scanners/terraform/parser/funcs/encoding.go new file mode 100644 index 000000000000..f74a508fb7ed --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/encoding.go @@ -0,0 +1,254 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "bytes" + "compress/gzip" + "encoding/base64" + "fmt" + "log" + "net/url" + "unicode/utf8" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "golang.org/x/text/encoding/ianaindex" +) + +// Base64DecodeFunc constructs a function that decodes a string containing a base64 sequence. 
+var Base64DecodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + s := args[0].AsString() + sDec, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) + } + if !utf8.Valid([]byte(sDec)) { + log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) + return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") + } + return cty.StringVal(string(sDec)), nil + }, +}) + +// Base64EncodeFunc constructs a function that encodes a string to a base64 sequence. +var Base64EncodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(base64.StdEncoding.EncodeToString([]byte(args[0].AsString()))), nil + }, +}) + +// TextEncodeBase64Func constructs a function that encodes a string to a target encoding and then to a base64 sequence. 
+var TextEncodeBase64Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "string", + Type: cty.String, + }, + { + Name: "encoding", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) + if err != nil || encoding == nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "%q is not a supported IANA encoding name or alias in this Terraform version", args[1].AsString()) + } + + encName, err := ianaindex.IANA.Name(encoding) + if err != nil { // would be weird, since we just read this encoding out + encName = args[1].AsString() + } + + encoder := encoding.NewEncoder() + encodedInput, err := encoder.Bytes([]byte(args[0].AsString())) + if err != nil { + // The string representations of "err" disclose implementation + // details of the underlying library, and the main error we might + // like to return a special message for is unexported as + // golang.org/x/text/encoding/internal.RepertoireError, so this + // is just a generic error message for now. + // + // We also don't include the string itself in the message because + // it can typically be very large, contain newline characters, + // etc. + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given string contains characters that cannot be represented in %s", encName) + } + + return cty.StringVal(base64.StdEncoding.EncodeToString(encodedInput)), nil + }, +}) + +// TextDecodeBase64Func constructs a function that decodes a base64 sequence to a target encoding. 
+var TextDecodeBase64Func = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "source", + Type: cty.String, + }, + { + Name: "encoding", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) + if err != nil || encoding == nil { + return cty.UnknownVal(cty.String), function.NewArgErrorf(1, "%q is not a supported IANA encoding name or alias in this Terraform version", args[1].AsString()) + } + + encName, err := ianaindex.IANA.Name(encoding) + if err != nil { // would be weird, since we just read this encoding out + encName = args[1].AsString() + } + + s := args[0].AsString() + sDec, err := base64.StdEncoding.DecodeString(s) + if err != nil { + switch err := err.(type) { + case base64.CorruptInputError: + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given value is has an invalid base64 symbol at offset %d", int(err)) + default: + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source string: %T", err) + } + + } + + decoder := encoding.NewDecoder() + decoded, err := decoder.Bytes(sDec) + if err != nil || bytes.ContainsRune(decoded, '�') { + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given string contains symbols that are not defined for %s", encName) + } + + return cty.StringVal(string(decoded)), nil + }, +}) + +// Base64GzipFunc constructs a function that compresses a string with gzip and then encodes the result in +// Base64 encoding. 
+var Base64GzipFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + s := args[0].AsString() + + var b bytes.Buffer + gz := gzip.NewWriter(&b) + if _, err := gz.Write([]byte(s)); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: '%s'", s) + } + if err := gz.Flush(); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: '%s'", s) + } + if err := gz.Close(); err != nil { + return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: '%s'", s) + } + return cty.StringVal(base64.StdEncoding.EncodeToString(b.Bytes())), nil + }, +}) + +// URLEncodeFunc constructs a function that applies URL encoding to a given string. +var URLEncodeFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(url.QueryEscape(args[0].AsString())), nil + }, +}) + +// Base64Decode decodes a string containing a base64 sequence. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will also interpret the resulting bytes as +// UTF-8. If the bytes after Base64 decoding are _not_ valid UTF-8, this function +// produces an error. +func Base64Decode(str cty.Value) (cty.Value, error) { + return Base64DecodeFunc.Call([]cty.Value{str}) +} + +// Base64Encode applies Base64 encoding to a string. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. 
+// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, and then apply Base64 encoding to the result. +func Base64Encode(str cty.Value) (cty.Value, error) { + return Base64EncodeFunc.Call([]cty.Value{str}) +} + +// Base64Gzip compresses a string with gzip and then encodes the result in +// Base64 encoding. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, then apply gzip compression, and then finally apply Base64 encoding. +func Base64Gzip(str cty.Value) (cty.Value, error) { + return Base64GzipFunc.Call([]cty.Value{str}) +} + +// URLEncode applies URL encoding to a given string. +// +// This function identifies characters in the given string that would have a +// special meaning when included as a query string argument in a URL and +// escapes them using RFC 3986 "percent encoding". +// +// If the given string contains non-ASCII characters, these are first encoded as +// UTF-8 and then percent encoding is applied separately to each UTF-8 byte. +func URLEncode(str cty.Value) (cty.Value, error) { + return URLEncodeFunc.Call([]cty.Value{str}) +} + +// TextEncodeBase64 applies Base64 encoding to a string that was encoded before with a target encoding. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// First step is to apply the target IANA encoding (e.g. UTF-16LE). +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will first encode the characters from the string +// as UTF-8, and then apply Base64 encoding to the result. 
+func TextEncodeBase64(str, enc cty.Value) (cty.Value, error) { + return TextEncodeBase64Func.Call([]cty.Value{str, enc}) +} + +// TextDecodeBase64 decodes a string containing a base64 sequence whereas a specific encoding of the string is expected. +// +// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. +// +// Strings in the Terraform language are sequences of unicode characters rather +// than bytes, so this function will also interpret the resulting bytes as +// the target encoding. +func TextDecodeBase64(str, enc cty.Value) (cty.Value, error) { + return TextDecodeBase64Func.Call([]cty.Value{str, enc}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/filesystem.go b/pkg/iac/scanners/terraform/parser/funcs/filesystem.go new file mode 100644 index 000000000000..910e17f325c6 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/filesystem.go @@ -0,0 +1,467 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "encoding/base64" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "unicode/utf8" + + "github.com/bmatcuk/doublestar/v4" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/mitchellh/go-homedir" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// MakeFileFunc constructs a function that takes a file path and returns the +// contents of that file, either directly as a string (where valid UTF-8 is +// required) or as a string containing base64 bytes. 
+func MakeFileFunc(target fs.FS, baseDir string, encBase64 bool) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + path := args[0].AsString() + src, err := readFileBytes(target, baseDir, path) + if err != nil { + err = function.NewArgError(0, err) + return cty.UnknownVal(cty.String), err + } + + switch { + case encBase64: + enc := base64.StdEncoding.EncodeToString(src) + return cty.StringVal(enc), nil + default: + if !utf8.Valid(src) { + return cty.UnknownVal(cty.String), fmt.Errorf("contents of %s are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead", path) + } + return cty.StringVal(string(src)), nil + } + }, + }) +} + +// MakeTemplateFileFunc constructs a function that takes a file path and +// an arbitrary object of named values and attempts to render the referenced +// file as a template using HCL template syntax. +// +// The template itself may recursively call other functions so a callback +// must be provided to get access to those functions. The template cannot, +// however, access any variables defined in the scope: it is restricted only to +// those variables provided in the second function argument, to ensure that all +// dependencies on other graph nodes can be seen before executing this function. +// +// As a special exception, a referenced template file may not recursively call +// the templatefile function, since that would risk the same file being +// included into itself indefinitely. 
+func MakeTemplateFileFunc(target fs.FS, baseDir string, funcsCb func() map[string]function.Function) function.Function { + + params := []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + { + Name: "vars", + Type: cty.DynamicPseudoType, + }, + } + + loadTmpl := func(fn string) (hcl.Expression, error) { + // We re-use File here to ensure the same filename interpretation + // as it does, along with its other safety checks. + tmplVal, err := File(target, baseDir, cty.StringVal(fn)) + if err != nil { + return nil, err + } + + expr, diags := hclsyntax.ParseTemplate([]byte(tmplVal.AsString()), fn, hcl.Pos{Line: 1, Column: 1}) + if diags.HasErrors() { + return nil, diags + } + + return expr, nil + } + + renderTmpl := func(expr hcl.Expression, varsVal cty.Value) (cty.Value, error) { + if varsTy := varsVal.Type(); !(varsTy.IsMapType() || varsTy.IsObjectType()) { + return cty.DynamicVal, function.NewArgErrorf(1, "invalid vars value: must be a map") // or an object, but we don't strongly distinguish these most of the time + } + + ctx := &hcl.EvalContext{ + Variables: varsVal.AsValueMap(), + } + + // We require all of the variables to be valid HCL identifiers, because + // otherwise there would be no way to refer to them in the template + // anyway. Rejecting this here gives better feedback to the user + // than a syntax error somewhere in the template itself. + for n := range ctx.Variables { + if !hclsyntax.ValidIdentifier(n) { + // This error message intentionally doesn't describe _all_ of + // the different permutations that are technically valid as an + // HCL identifier, but rather focuses on what we might + // consider to be an "idiomatic" variable name. 
+ return cty.DynamicVal, function.NewArgErrorf(1, "invalid template variable name %q: must start with a letter, followed by zero or more letters, digits, and underscores", n) + } + } + + // We'll pre-check references in the template here so we can give a + // more specialized error message than HCL would by default, so it's + // clearer that this problem is coming from a templatefile call. + for _, traversal := range expr.Variables() { + root := traversal.RootName() + if _, ok := ctx.Variables[root]; !ok { + return cty.DynamicVal, function.NewArgErrorf(1, "vars map does not contain key %q, referenced at %s", root, traversal[0].SourceRange()) + } + } + + givenFuncs := funcsCb() // this callback indirection is to avoid chicken/egg problems + funcs := make(map[string]function.Function, len(givenFuncs)) + for name, fn := range givenFuncs { + if name == "templatefile" { + // We stub this one out to prevent recursive calls. + funcs[name] = function.New(&function.Spec{ + Params: params, + Type: func(args []cty.Value) (cty.Type, error) { + return cty.NilType, fmt.Errorf("cannot recursively call templatefile from inside templatefile call") + }, + }) + continue + } + funcs[name] = fn + } + ctx.Functions = funcs + + val, diags := expr.Value(ctx) + if diags.HasErrors() { + return cty.DynamicVal, diags + } + return val, nil + } + + return function.New(&function.Spec{ + Params: params, + Type: func(args []cty.Value) (cty.Type, error) { + if !(args[0].IsKnown() && args[1].IsKnown()) { + return cty.DynamicPseudoType, nil + } + + // We'll render our template now to see what result type it produces. + // A template consisting only of a single interpolation an potentially + // return any type. + expr, err := loadTmpl(args[0].AsString()) + if err != nil { + return cty.DynamicPseudoType, err + } + + // This is safe even if args[1] contains unknowns because the HCL + // template renderer itself knows how to short-circuit those. 
+ val, err := renderTmpl(expr, args[1]) + return val.Type(), err + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + expr, err := loadTmpl(args[0].AsString()) + if err != nil { + return cty.DynamicVal, err + } + return renderTmpl(expr, args[1]) + }, + }) + +} + +// MakeFileExistsFunc constructs a function that takes a path +// and determines whether a file exists at that path +func MakeFileExistsFunc(baseDir string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + path := args[0].AsString() + path, err := homedir.Expand(path) + if err != nil { + return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to expand ~: %s", err) + } + + if !filepath.IsAbs(path) { + path = filepath.Join(baseDir, path) + } + + // Ensure that the path is canonical for the host OS + path = filepath.Clean(path) + + fi, err := os.Stat(path) + if err != nil { + if os.IsNotExist(err) { + return cty.False, nil + } + return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to stat %s", path) + } + + if fi.Mode().IsRegular() { + return cty.True, nil + } + + return cty.False, fmt.Errorf("%s is not a regular file, but %q", + path, fi.Mode().String()) + }, + }) +} + +// MakeFileSetFunc constructs a function that takes a glob pattern +// and enumerates a file set from that pattern +func MakeFileSetFunc(baseDir string) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + { + Name: "pattern", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Set(cty.String)), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + path := args[0].AsString() + pattern := args[1].AsString() + + if !filepath.IsAbs(path) { + path = filepath.Join(baseDir, path) + } + + // Join the path to 
the glob pattern, while ensuring the full + // pattern is canonical for the host OS. The joined path is + // automatically cleaned during this operation. + pattern = filepath.Join(path, pattern) + + matches, err := doublestar.Glob(os.DirFS(path), pattern) + if err != nil { + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern (%s): %s", pattern, err) + } + + var matchVals []cty.Value + for _, match := range matches { + fi, err := os.Stat(match) + + if err != nil { + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to stat (%s): %s", match, err) + } + + if !fi.Mode().IsRegular() { + continue + } + + // Remove the path and file separator from matches. + match, err = filepath.Rel(path, match) + + if err != nil { + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to trim path of match (%s): %s", match, err) + } + + // Replace any remaining file separators with forward slash (/) + // separators for cross-system compatibility. + match = filepath.ToSlash(match) + + matchVals = append(matchVals, cty.StringVal(match)) + } + + if len(matchVals) == 0 { + return cty.SetValEmpty(cty.String), nil + } + + return cty.SetVal(matchVals), nil + }, + }) +} + +// BasenameFunc constructs a function that takes a string containing a filesystem path +// and removes all except the last portion from it. +var BasenameFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(filepath.Base(args[0].AsString())), nil + }, +}) + +// DirnameFunc constructs a function that takes a string containing a filesystem path +// and removes the last portion from it. 
+var DirnameFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(filepath.Dir(args[0].AsString())), nil + }, +}) + +// AbsPathFunc constructs a function that converts a filesystem path to an absolute path +var AbsPathFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + absPath, err := filepath.Abs(args[0].AsString()) + return cty.StringVal(filepath.ToSlash(absPath)), err + }, +}) + +// PathExpandFunc constructs a function that expands a leading ~ character to the current user's home directory. +var PathExpandFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "path", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + + homePath, err := homedir.Expand(args[0].AsString()) + return cty.StringVal(homePath), err + }, +}) + +func openFile(target fs.FS, baseDir, path string) (fs.File, error) { + path, err := homedir.Expand(path) + if err != nil { + return nil, fmt.Errorf("failed to expand ~: %s", err) + } + + if !filepath.IsAbs(path) { + path = filepath.Join(baseDir, path) + } + + // Ensure that the path is canonical for the host OS + path = filepath.Clean(path) + + if target != nil { + return target.Open(path) + } + return os.Open(path) +} + +func readFileBytes(target fs.FS, baseDir, path string) ([]byte, error) { + f, err := openFile(target, baseDir, path) + if err != nil { + if os.IsNotExist(err) { + // An extra Terraform-specific hint for this situation + return nil, fmt.Errorf("no file exists at %s; this function works only with files that are distributed as 
part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource", path) + } + return nil, err + } + + src, err := io.ReadAll(f) + if err != nil { + return nil, fmt.Errorf("failed to read %s", path) + } + + return src, nil +} + +// File reads the contents of the file at the given path. +// +// The file must contain valid UTF-8 bytes, or this function will return an error. +// +// The underlying function implementation works relative to a particular base +// directory, so this wrapper takes a base directory string and uses it to +// construct the underlying function before calling it. +func File(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) { + fn := MakeFileFunc(target, baseDir, false) + return fn.Call([]cty.Value{path}) +} + +// FileExists determines whether a file exists at the given path. +// +// The underlying function implementation works relative to a particular base +// directory, so this wrapper takes a base directory string and uses it to +// construct the underlying function before calling it. +func FileExists(baseDir string, path cty.Value) (cty.Value, error) { + fn := MakeFileExistsFunc(baseDir) + return fn.Call([]cty.Value{path}) +} + +// FileSet enumerates a set of files given a glob pattern +// +// The underlying function implementation works relative to a particular base +// directory, so this wrapper takes a base directory string and uses it to +// construct the underlying function before calling it. +func FileSet(baseDir string, path, pattern cty.Value) (cty.Value, error) { + fn := MakeFileSetFunc(baseDir) + return fn.Call([]cty.Value{path, pattern}) +} + +// FileBase64 reads the contents of the file at the given path. +// +// The bytes from the file are encoded as base64 before returning. 
+// +// The underlying function implementation works relative to a particular base +// directory, so this wrapper takes a base directory string and uses it to +// construct the underlying function before calling it. +func FileBase64(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) { + fn := MakeFileFunc(target, baseDir, true) + return fn.Call([]cty.Value{path}) +} + +// Basename takes a string containing a filesystem path and removes all except the last portion from it. +// +// The underlying function implementation works only with the path string and does not access the filesystem itself. +// It is therefore unable to take into account filesystem features such as symlinks. +// +// If the path is empty then the result is ".", representing the current working directory. +func Basename(path cty.Value) (cty.Value, error) { + return BasenameFunc.Call([]cty.Value{path}) +} + +// Dirname takes a string containing a filesystem path and removes the last portion from it. +// +// The underlying function implementation works only with the path string and does not access the filesystem itself. +// It is therefore unable to take into account filesystem features such as symlinks. +// +// If the path is empty then the result is ".", representing the current working directory. +func Dirname(path cty.Value) (cty.Value, error) { + return DirnameFunc.Call([]cty.Value{path}) +} + +// Pathexpand takes a string that might begin with a `~` segment, and if so it replaces that segment with +// the current user's home directory path. +// +// The underlying function implementation works only with the path string and does not access the filesystem itself. +// It is therefore unable to take into account filesystem features such as symlinks. +// +// If the leading segment in the path is not `~` then the given path is returned unmodified. 
+func Pathexpand(path cty.Value) (cty.Value, error) {
+	return PathExpandFunc.Call([]cty.Value{path})
+}
diff --git a/pkg/iac/scanners/terraform/parser/funcs/marks.go b/pkg/iac/scanners/terraform/parser/funcs/marks.go
new file mode 100644
index 000000000000..ca368c113c5c
--- /dev/null
+++ b/pkg/iac/scanners/terraform/parser/funcs/marks.go
@@ -0,0 +1,44 @@
+// Copied from github.com/hashicorp/terraform/internal/lang/marks
+package funcs
+
+import (
+	"github.com/zclconf/go-cty/cty"
+	"golang.org/x/text/cases"
+	"golang.org/x/text/language"
+)
+
+// valueMarks allow creating strictly typed values for use as cty.Value marks.
+// The variable name for new values should be the title-cased format of the
+// value to better match the GoString output for debugging.
+type valueMark string
+
+func (m valueMark) GoString() string {
+	return "marks." + cases.Title(language.English).String(string(m))
+}
+
+// Has returns true if and only if the cty.Value has the given mark.
+func Has(val cty.Value, mark valueMark) bool {
+	return val.HasMark(mark)
+}
+
+// Contains returns true if the cty.Value or any value within it contains
+// the given mark.
+func Contains(val cty.Value, mark valueMark) bool {
+	ret := false
+	_ = cty.Walk(val, func(_ cty.Path, v cty.Value) (bool, error) {
+		if v.HasMark(mark) {
+			ret = true
+			return false, nil
+		}
+		return true, nil
+	})
+	return ret
+}
+
+// MarkedSensitive indicates that this value is marked as sensitive in the context of
+// Terraform.
+var MarkedSensitive = valueMark("sensitive")
+
+// MarkedRaw is used to indicate to the repl that the value should be written without
+// any formatting.
+var MarkedRaw = valueMark("raw")
diff --git a/pkg/iac/scanners/terraform/parser/funcs/number.go b/pkg/iac/scanners/terraform/parser/funcs/number.go
new file mode 100644
index 000000000000..6c8f5dc3b6d9
--- /dev/null
+++ b/pkg/iac/scanners/terraform/parser/funcs/number.go
@@ -0,0 +1,170 @@
+// Copied from github.com/hashicorp/terraform/internal/lang/funcs
+package funcs
+
+import (
+	"math"
+	"math/big"
+
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+	"github.com/zclconf/go-cty/cty/gocty"
+)
+
+// LogFunc constructs a function that returns the logarithm of a given number in a given base.
+var LogFunc = function.New(&function.Spec{
+	Params: []function.Parameter{
+		{
+			Name: "num",
+			Type: cty.Number,
+		},
+		{
+			Name: "base",
+			Type: cty.Number,
+		},
+	},
+	Type: function.StaticReturnType(cty.Number),
+	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
+		var num float64
+		if err := gocty.FromCtyValue(args[0], &num); err != nil {
+			return cty.UnknownVal(cty.String), err
+		}
+
+		var base float64
+		if err := gocty.FromCtyValue(args[1], &base); err != nil {
+			return cty.UnknownVal(cty.String), err
+		}
+
+		return cty.NumberFloatVal(math.Log(num) / math.Log(base)), nil
+	},
+})
+
+// PowFunc constructs a function that raises a given number to a given power.
+var PowFunc = function.New(&function.Spec{
+	Params: []function.Parameter{
+		{
+			Name: "num",
+			Type: cty.Number,
+		},
+		{
+			Name: "power",
+			Type: cty.Number,
+		},
+	},
+	Type: function.StaticReturnType(cty.Number),
+	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
+		var num float64
+		if err := gocty.FromCtyValue(args[0], &num); err != nil {
+			return cty.UnknownVal(cty.String), err
+		}
+
+		var power float64
+		if err := gocty.FromCtyValue(args[1], &power); err != nil {
+			return cty.UnknownVal(cty.String), err
+		}
+
+		return cty.NumberFloatVal(math.Pow(num, power)), nil
+	},
+})
+
+// SignumFunc constructs a function that determines the sign of a number, returning
+// -1, 0 or +1 to represent the sign.
+var SignumFunc = function.New(&function.Spec{
+	Params: []function.Parameter{
+		{
+			Name: "num",
+			Type: cty.Number,
+		},
+	},
+	Type: function.StaticReturnType(cty.Number),
+	Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
+		var num int
+		if err := gocty.FromCtyValue(args[0], &num); err != nil {
+			return cty.UnknownVal(cty.String), err
+		}
+		switch {
+		case num < 0:
+			return cty.NumberIntVal(-1), nil
+		case num > 0:
+			return cty.NumberIntVal(+1), nil
+		default:
+			return cty.NumberIntVal(0), nil
+		}
+	},
+})
+
+// ParseIntFunc constructs a function that parses a string argument and returns an integer of the specified base.
+var ParseIntFunc = function.New(&function.Spec{
+	Params: []function.Parameter{
+		{
+			Name: "number",
+			Type: cty.DynamicPseudoType,
+		},
+		{
+			Name: "base",
+			Type: cty.Number,
+		},
+	},
+
+	Type: func(args []cty.Value) (cty.Type, error) {
+		if !args[0].Type().Equals(cty.String) {
+			return cty.Number, function.NewArgErrorf(0, "first argument must be a string, not %s", args[0].Type().FriendlyName())
+		}
+		return cty.Number, nil
+	},
+
+	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+		var numstr string
+		var base int
+		var err error
+
+		if err = gocty.FromCtyValue(args[0], &numstr); err != nil {
+			return cty.UnknownVal(cty.String), function.NewArgError(0, err)
+		}
+
+		if err = gocty.FromCtyValue(args[1], &base); err != nil {
+			return cty.UnknownVal(cty.Number), function.NewArgError(1, err)
+		}
+
+		if base < 2 || base > 62 {
+			return cty.UnknownVal(cty.Number), function.NewArgErrorf(
+				1,
+				"base must be a whole number between 2 and 62 inclusive",
+			)
+		}
+
+		num, ok := (&big.Int{}).SetString(numstr, base)
+		if !ok {
+			return cty.UnknownVal(cty.Number), function.NewArgErrorf(
+				0,
+				"cannot parse %q as a base %d integer",
+				numstr,
+				base,
+			)
+		}
+
+		parsedNum := cty.NumberVal((&big.Float{}).SetInt(num))
+
+		return parsedNum, nil
+	},
+})
+
+// Log returns the logarithm of a given number in a given base.
+func Log(num, base cty.Value) (cty.Value, error) {
+	return LogFunc.Call([]cty.Value{num, base})
+}
+
+// Pow returns a given number raised to a given power.
+func Pow(num, power cty.Value) (cty.Value, error) {
+	return PowFunc.Call([]cty.Value{num, power})
+}
+
+// Signum determines the sign of a number, returning a number between -1 and
+// 1 to represent the sign.
+func Signum(num cty.Value) (cty.Value, error) {
+	return SignumFunc.Call([]cty.Value{num})
+}
+
+// ParseInt parses a string argument and returns an integer of the specified base.
+func ParseInt(num cty.Value, base cty.Value) (cty.Value, error) { + return ParseIntFunc.Call([]cty.Value{num, base}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/sensitive.go b/pkg/iac/scanners/terraform/parser/funcs/sensitive.go new file mode 100644 index 000000000000..c67ed13e6e7b --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/sensitive.go @@ -0,0 +1,67 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// SensitiveFunc returns a value identical to its argument except that +// Terraform will consider it to be sensitive. +var SensitiveFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowNull: true, + AllowMarked: true, + AllowDynamicType: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + // This function only affects the value's marks, so the result + // type is always the same as the argument type. + return args[0].Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + val, _ := args[0].Unmark() + return val.Mark(MarkedSensitive), nil + }, +}) + +// NonsensitiveFunc takes a sensitive value and returns the same value without +// the sensitive marking, effectively exposing the value. +var NonsensitiveFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowNull: true, + AllowMarked: true, + AllowDynamicType: true, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + // This function only affects the value's marks, so the result + // type is always the same as the argument type. 
+ return args[0].Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + if args[0].IsKnown() && !args[0].HasMark(MarkedSensitive) { + return cty.DynamicVal, function.NewArgErrorf(0, "the given value is not sensitive, so this call is redundant") + } + v, m := args[0].Unmark() + delete(m, MarkedSensitive) // remove the sensitive marking + return v.WithMarks(m), nil + }, +}) + +func Sensitive(v cty.Value) (cty.Value, error) { + return SensitiveFunc.Call([]cty.Value{v}) +} + +func Nonsensitive(v cty.Value) (cty.Value, error) { + return NonsensitiveFunc.Call([]cty.Value{v}) +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/string.go b/pkg/iac/scanners/terraform/parser/funcs/string.go new file mode 100644 index 000000000000..49696784e872 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/string.go @@ -0,0 +1,54 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "regexp" + "strings" + + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// ReplaceFunc constructs a function that searches a given string for another +// given substring, and replaces each occurrence with a given replacement string. +var ReplaceFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + { + Name: "substr", + Type: cty.String, + }, + { + Name: "replace", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + str := args[0].AsString() + substr := args[1].AsString() + replace := args[2].AsString() + + // We search/replace using a regexp if the string is surrounded + // in forward slashes. 
+ if len(substr) > 1 && substr[0] == '/' && substr[len(substr)-1] == '/' { + re, err := regexp.Compile(substr[1 : len(substr)-1]) + if err != nil { + return cty.UnknownVal(cty.String), err + } + + return cty.StringVal(re.ReplaceAllString(str, replace)), nil + } + + return cty.StringVal(strings.Replace(str, substr, replace, -1)), nil + }, +}) + +// Replace searches a given string for another given substring, +// and replaces all occurrences with a given replacement string. +func Replace(str, substr, replace cty.Value) (cty.Value, error) { + return ReplaceFunc.Call([]cty.Value{str, substr, replace}) +} diff --git a/pkg/iac/scanners/terraform/parser/functions.go b/pkg/iac/scanners/terraform/parser/functions.go new file mode 100644 index 000000000000..f8553d150f16 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/functions.go @@ -0,0 +1,123 @@ +package parser + +import ( + "io/fs" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" + "github.com/hashicorp/hcl/v2/ext/tryfunc" + ctyyaml "github.com/zclconf/go-cty-yaml" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/function/stdlib" +) + +// Functions returns the set of functions that should be used to when evaluating +// expressions in the receiving scope. 
+func Functions(target fs.FS, baseDir string) map[string]function.Function { + return map[string]function.Function{ + "abs": stdlib.AbsoluteFunc, + "abspath": funcs.AbsPathFunc, + "basename": funcs.BasenameFunc, + "base64decode": funcs.Base64DecodeFunc, + "base64encode": funcs.Base64EncodeFunc, + "base64gzip": funcs.Base64GzipFunc, + "base64sha256": funcs.Base64Sha256Func, + "base64sha512": funcs.Base64Sha512Func, + "bcrypt": funcs.BcryptFunc, + "can": tryfunc.CanFunc, + "ceil": stdlib.CeilFunc, + "chomp": stdlib.ChompFunc, + "cidrhost": funcs.CidrHostFunc, + "cidrnetmask": funcs.CidrNetmaskFunc, + "cidrsubnet": funcs.CidrSubnetFunc, + "cidrsubnets": funcs.CidrSubnetsFunc, + "coalesce": funcs.CoalesceFunc, + "coalescelist": stdlib.CoalesceListFunc, + "compact": stdlib.CompactFunc, + "concat": stdlib.ConcatFunc, + "contains": stdlib.ContainsFunc, + "csvdecode": stdlib.CSVDecodeFunc, + "dirname": funcs.DirnameFunc, + "distinct": stdlib.DistinctFunc, + "element": stdlib.ElementFunc, + "chunklist": stdlib.ChunklistFunc, + "file": funcs.MakeFileFunc(target, baseDir, false), + "fileexists": funcs.MakeFileExistsFunc(baseDir), + "fileset": funcs.MakeFileSetFunc(baseDir), + "filebase64": funcs.MakeFileFunc(target, baseDir, true), + "filebase64sha256": funcs.MakeFileBase64Sha256Func(target, baseDir), + "filebase64sha512": funcs.MakeFileBase64Sha512Func(target, baseDir), + "filemd5": funcs.MakeFileMd5Func(target, baseDir), + "filesha1": funcs.MakeFileSha1Func(target, baseDir), + "filesha256": funcs.MakeFileSha256Func(target, baseDir), + "filesha512": funcs.MakeFileSha512Func(target, baseDir), + "flatten": stdlib.FlattenFunc, + "floor": stdlib.FloorFunc, + "format": stdlib.FormatFunc, + "formatdate": stdlib.FormatDateFunc, + "formatlist": stdlib.FormatListFunc, + "indent": stdlib.IndentFunc, + "index": funcs.IndexFunc, // stdlib.IndexFunc is not compatible + "join": stdlib.JoinFunc, + "jsondecode": stdlib.JSONDecodeFunc, + "jsonencode": stdlib.JSONEncodeFunc, + "keys": 
stdlib.KeysFunc, + "length": funcs.LengthFunc, + "list": funcs.ListFunc, + "log": stdlib.LogFunc, + "lookup": funcs.LookupFunc, + "lower": stdlib.LowerFunc, + "map": funcs.MapFunc, + "matchkeys": funcs.MatchkeysFunc, + "max": stdlib.MaxFunc, + "md5": funcs.Md5Func, + "merge": stdlib.MergeFunc, + "min": stdlib.MinFunc, + "parseint": stdlib.ParseIntFunc, + "pathexpand": funcs.PathExpandFunc, + "pow": stdlib.PowFunc, + "range": stdlib.RangeFunc, + "regex": stdlib.RegexFunc, + "regexall": stdlib.RegexAllFunc, + "replace": funcs.ReplaceFunc, + "reverse": stdlib.ReverseListFunc, + "rsadecrypt": funcs.RsaDecryptFunc, + "setintersection": stdlib.SetIntersectionFunc, + "setproduct": stdlib.SetProductFunc, + "setsubtract": stdlib.SetSubtractFunc, + "setunion": stdlib.SetUnionFunc, + "sha1": funcs.Sha1Func, + "sha256": funcs.Sha256Func, + "sha512": funcs.Sha512Func, + "signum": stdlib.SignumFunc, + "slice": stdlib.SliceFunc, + "sort": stdlib.SortFunc, + "split": stdlib.SplitFunc, + "strrev": stdlib.ReverseFunc, + "substr": stdlib.SubstrFunc, + "timestamp": funcs.TimestampFunc, + "timeadd": stdlib.TimeAddFunc, + "title": stdlib.TitleFunc, + "tostring": funcs.MakeToFunc(cty.String), + "tonumber": funcs.MakeToFunc(cty.Number), + "tobool": funcs.MakeToFunc(cty.Bool), + "toset": funcs.MakeToFunc(cty.Set(cty.DynamicPseudoType)), + "tolist": funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)), + "tomap": funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)), + "transpose": funcs.TransposeFunc, + "trim": stdlib.TrimFunc, + "trimprefix": stdlib.TrimPrefixFunc, + "trimspace": stdlib.TrimSpaceFunc, + "trimsuffix": stdlib.TrimSuffixFunc, + "try": tryfunc.TryFunc, + "upper": stdlib.UpperFunc, + "urlencode": funcs.URLEncodeFunc, + "uuid": funcs.UUIDFunc, + "uuidv5": funcs.UUIDV5Func, + "values": stdlib.ValuesFunc, + "yamldecode": ctyyaml.YAMLDecodeFunc, + "yamlencode": ctyyaml.YAMLEncodeFunc, + "zipmap": stdlib.ZipmapFunc, + } + +} diff --git a/pkg/iac/scanners/terraform/parser/load_blocks.go 
b/pkg/iac/scanners/terraform/parser/load_blocks.go new file mode 100644 index 000000000000..88bd9de47f3b --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_blocks.go @@ -0,0 +1,130 @@ +package parser + +import ( + "fmt" + "regexp" + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/hashicorp/hcl/v2" +) + +func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []terraform.Ignore, error) { + ignores := parseIgnores(file.file.Bytes, file.path, moduleSource) + contents, diagnostics := file.file.Body.Content(terraform.Schema) + if diagnostics != nil && diagnostics.HasErrors() { + return nil, nil, diagnostics + } + if contents == nil { + return nil, nil, nil + } + return contents.Blocks, ignores, nil +} + +func parseIgnores(data []byte, path string, moduleSource string) []terraform.Ignore { + var ignores []terraform.Ignore + for i, line := range strings.Split(string(data), "\n") { + line = strings.TrimSpace(line) + lineIgnores := parseIgnoresFromLine(line) + for _, lineIgnore := range lineIgnores { + lineIgnore.Range = types.NewRange(path, i+1, i+1, moduleSource, nil) + ignores = append(ignores, lineIgnore) + } + } + for a, ignoreA := range ignores { + if !ignoreA.Block { + continue + } + for _, ignoreB := range ignores { + if !ignoreB.Block { + continue + } + if ignoreA.Range.GetStartLine()+1 == ignoreB.Range.GetStartLine() { + ignoreA.Range = ignoreB.Range + ignores[a] = ignoreA + } + } + } + return ignores + +} + +var commentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*tfsec:`) +var trivyCommentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*trivy:`) + +func parseIgnoresFromLine(input string) []terraform.Ignore { + + var ignores []terraform.Ignore + + input = commentPattern.ReplaceAllString(input, "tfsec:") + input = trivyCommentPattern.ReplaceAllString(input, "trivy:") + + bits := strings.Split(strings.TrimSpace(input), " ") + for i, bit := range 
bits { + bit := strings.TrimSpace(bit) + bit = strings.TrimPrefix(bit, "#") + bit = strings.TrimPrefix(bit, "//") + bit = strings.TrimPrefix(bit, "/*") + + if strings.HasPrefix(bit, "tfsec:") || strings.HasPrefix(bit, "trivy:") { + ignore, err := parseIgnoreFromComment(bit) + if err != nil { + continue + } + ignore.Block = i == 0 + ignores = append(ignores, *ignore) + } + } + + return ignores +} + +func parseIgnoreFromComment(input string) (*terraform.Ignore, error) { + var ignore terraform.Ignore + if !strings.HasPrefix(input, "tfsec:") && !strings.HasPrefix(input, "trivy:") { + return nil, fmt.Errorf("invalid ignore") + } + + input = input[6:] + + segments := strings.Split(input, ":") + + for i := 0; i < len(segments)-1; i += 2 { + key := segments[i] + val := segments[i+1] + switch key { + case "ignore": + ignore.RuleID, ignore.Params = parseIDWithParams(val) + case "exp": + parsed, err := time.Parse("2006-01-02", val) + if err != nil { + return &ignore, err + } + ignore.Expiry = &parsed + case "ws": + ignore.Workspace = val + } + } + + return &ignore, nil +} + +func parseIDWithParams(input string) (string, map[string]string) { + params := make(map[string]string) + if !strings.Contains(input, "[") { + return input, params + } + parts := strings.Split(input, "[") + id := parts[0] + paramStr := strings.TrimSuffix(parts[1], "]") + for _, pair := range strings.Split(paramStr, ",") { + parts := strings.Split(pair, "=") + if len(parts) != 2 { + continue + } + params[parts[0]] = parts[1] + } + return id, params +} diff --git a/pkg/iac/scanners/terraform/parser/load_blocks_test.go b/pkg/iac/scanners/terraform/parser/load_blocks_test.go new file mode 100644 index 000000000000..e32d19a75044 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_blocks_test.go @@ -0,0 +1,13 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParsingDoubleComment(t *testing.T) { + ignores := parseIgnoresFromLine("## tfsec:ignore:abc") + 
assert.Equal(t, 1, len(ignores)) + assert.Truef(t, ignores[0].Block, "Expected ignore to be a block") +} diff --git a/pkg/iac/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go new file mode 100644 index 000000000000..9d6c7adcdcd1 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -0,0 +1,183 @@ +package parser + +import ( + "context" + "errors" + "fmt" + "io/fs" + "path/filepath" + "strings" + + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" + + "github.com/zclconf/go-cty/cty" +) + +type moduleLoadError struct { + source string + err error +} + +func (m *moduleLoadError) Error() string { + return fmt.Sprintf("failed to load module '%s': %s", m.source, m.err) +} + +type ModuleDefinition struct { + Name string + Path string + FileSystem fs.FS + Definition *terraform.Block + Parser *Parser + External bool +} + +// LoadModules reads all module blocks and loads the underlying modules, adding blocks to e.moduleBlocks +func (e *evaluator) loadModules(ctx context.Context) []*ModuleDefinition { + + blocks := e.blocks + + var moduleDefinitions []*ModuleDefinition + + expanded := e.expandBlocks(blocks.OfType("module")) + + var loadErrors []*moduleLoadError + + for _, moduleBlock := range expanded { + if moduleBlock.Label() == "" { + continue + } + moduleDefinition, err := e.loadModule(ctx, moduleBlock) + if err != nil { + var loadErr *moduleLoadError + if errors.As(err, &loadErr) { + var found bool + for _, fm := range loadErrors { + if fm.source == loadErr.source { + found = true + break + } + } + if !found { + loadErrors = append(loadErrors, loadErr) + } + continue + } + e.debug.Log("Failed to load module '%s'. 
Maybe try 'terraform init'?", err) + continue + } + e.debug.Log("Loaded module '%s' from '%s'.", moduleDefinition.Name, moduleDefinition.Path) + moduleDefinitions = append(moduleDefinitions, moduleDefinition) + } + + return moduleDefinitions +} + +// takes in a module "x" {} block and loads resources etc. into e.moduleBlocks - additionally returns variables to add to ["module.x.*"] variables +func (e *evaluator) loadModule(ctx context.Context, b *terraform.Block) (*ModuleDefinition, error) { + + metadata := b.GetMetadata() + + if b.Label() == "" { + return nil, fmt.Errorf("module without label at %s", metadata.Range()) + } + + var source string + attrs := b.Attributes() + for _, attr := range attrs { + if attr.Name() == "source" { + sourceVal := attr.Value() + if sourceVal.Type() == cty.String { + source = sourceVal.AsString() + } + } + } + if source == "" { + return nil, fmt.Errorf("could not read module source attribute at %s", metadata.Range().String()) + } + + if def, err := e.loadModuleFromTerraformCache(ctx, b, source); err == nil { + e.debug.Log("found module '%s' in .terraform/modules", source) + return def, nil + } + + // we don't have the module installed via 'terraform init' so we need to grab it... + return e.loadExternalModule(ctx, b, source) +} + +func (e *evaluator) loadModuleFromTerraformCache(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) { + var modulePath string + if e.moduleMetadata != nil { + // if we have module metadata we can parse all the modules as they'll be cached locally! 
+ name := b.ModuleName() + for _, module := range e.moduleMetadata.Modules { + if module.Key == name { + modulePath = filepath.Clean(filepath.Join(e.projectRootPath, module.Dir)) + break + } + } + } + if modulePath == "" { + return nil, fmt.Errorf("failed to load module from .terraform/modules") + } + if strings.HasPrefix(source, ".") { + source = "" + } + + if prefix, relativeDir, ok := strings.Cut(source, "//"); ok && !strings.HasSuffix(prefix, ":") && strings.Count(prefix, "/") == 2 { + if !strings.HasSuffix(modulePath, relativeDir) { + modulePath = fmt.Sprintf("%s/%s", modulePath, relativeDir) + } + } + + e.debug.Log("Module '%s' resolved to path '%s' in filesystem '%s' using modules.json", b.FullName(), modulePath, e.filesystem) + moduleParser := e.parentParser.newModuleParser(e.filesystem, source, modulePath, b.Label(), b) + if err := moduleParser.ParseFS(ctx, modulePath); err != nil { + return nil, err + } + return &ModuleDefinition{ + Name: b.Label(), + Path: modulePath, + Definition: b, + Parser: moduleParser, + FileSystem: e.filesystem, + }, nil +} + +func (e *evaluator) loadExternalModule(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) { + + e.debug.Log("locating non-initialised module '%s'...", source) + + version := b.GetAttribute("version").AsStringValueOrDefault("", b).Value() + opt := resolvers.Options{ + Source: source, + OriginalSource: source, + Version: version, + OriginalVersion: version, + WorkingDir: e.projectRootPath, + Name: b.FullName(), + ModulePath: e.modulePath, + DebugLogger: e.debug.Extend("resolver"), + AllowDownloads: e.allowDownloads, + SkipCache: e.skipCachedModules, + } + + filesystem, prefix, path, err := resolveModule(ctx, e.filesystem, opt) + if err != nil { + return nil, err + } + prefix = filepath.Join(e.parentParser.moduleSource, prefix) + e.debug.Log("Module '%s' resolved to path '%s' in filesystem '%s' with prefix '%s'", b.FullName(), path, filesystem, prefix) + moduleParser := 
e.parentParser.newModuleParser(filesystem, prefix, path, b.Label(), b) + if err := moduleParser.ParseFS(ctx, path); err != nil { + return nil, err + } + return &ModuleDefinition{ + Name: b.Label(), + Path: path, + Definition: b, + Parser: moduleParser, + FileSystem: filesystem, + External: true, + }, nil +} diff --git a/pkg/iac/scanners/terraform/parser/load_module_metadata.go b/pkg/iac/scanners/terraform/parser/load_module_metadata.go new file mode 100644 index 000000000000..9d06402a76fc --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_module_metadata.go @@ -0,0 +1,33 @@ +package parser + +import ( + "encoding/json" + "io/fs" + "path/filepath" +) + +type modulesMetadata struct { + Modules []struct { + Key string `json:"Key"` + Source string `json:"Source"` + Version string `json:"Version"` + Dir string `json:"Dir"` + } `json:"Modules"` +} + +func loadModuleMetadata(target fs.FS, fullPath string) (*modulesMetadata, string, error) { + metadataPath := filepath.Join(fullPath, ".terraform/modules/modules.json") + + f, err := target.Open(metadataPath) + if err != nil { + return nil, metadataPath, err + } + defer func() { _ = f.Close() }() + + var metadata modulesMetadata + if err := json.NewDecoder(f).Decode(&metadata); err != nil { + return nil, metadataPath, err + } + + return &metadata, metadataPath, nil +} diff --git a/pkg/iac/scanners/terraform/parser/load_vars.go b/pkg/iac/scanners/terraform/parser/load_vars.go new file mode 100644 index 000000000000..58f67ce93910 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_vars.go @@ -0,0 +1,83 @@ +package parser + +import ( + "fmt" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + hcljson "github.com/hashicorp/hcl/v2/json" + "github.com/zclconf/go-cty/cty" +) + +func loadTFVars(srcFS fs.FS, filenames []string) (map[string]cty.Value, error) { + combinedVars := make(map[string]cty.Value) + + for _, env := range 
os.Environ() { + split := strings.Split(env, "=") + key := split[0] + if !strings.HasPrefix(key, "TF_VAR_") { + continue + } + key = strings.TrimPrefix(key, "TF_VAR_") + var val string + if len(split) > 1 { + val = split[1] + } + combinedVars[key] = cty.StringVal(val) + } + + for _, filename := range filenames { + vars, err := loadTFVarsFile(srcFS, filename) + if err != nil { + return nil, fmt.Errorf("failed to load tfvars from %s: %w", filename, err) + } + for k, v := range vars { + combinedVars[k] = v + } + } + + return combinedVars, nil +} + +func loadTFVarsFile(srcFS fs.FS, filename string) (map[string]cty.Value, error) { + inputVars := make(map[string]cty.Value) + if filename == "" { + return inputVars, nil + } + + src, err := fs.ReadFile(srcFS, filepath.ToSlash(filename)) + if err != nil { + return nil, err + } + + var attrs hcl.Attributes + if strings.HasSuffix(filename, ".json") { + variableFile, err := hcljson.Parse(src, filename) + if err != nil { + return nil, err + } + attrs, err = variableFile.Body.JustAttributes() + if err != nil { + return nil, err + } + } else { + variableFile, err := hclsyntax.ParseConfig(src, filename, hcl.Pos{Line: 1, Column: 1}) + if err != nil { + return nil, err + } + attrs, err = variableFile.Body.JustAttributes() + if err != nil { + return nil, err + } + } + + for _, attr := range attrs { + inputVars[attr.Name], _ = attr.Expr.Value(&hcl.EvalContext{}) + } + + return inputVars, nil +} diff --git a/pkg/iac/scanners/terraform/parser/load_vars_test.go b/pkg/iac/scanners/terraform/parser/load_vars_test.go new file mode 100644 index 000000000000..f6e6792206a8 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/load_vars_test.go @@ -0,0 +1,46 @@ +package parser + +import ( + "testing" + + "github.com/aquasecurity/trivy/test/testutil" + + "github.com/zclconf/go-cty/cty" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_TFVarsFile(t *testing.T) { + t.Run("tfvars file", 
func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tfvars": `instance_type = "t2.large"`, + }) + + vars, err := loadTFVars(fs, []string{"test.tfvars"}) + require.NoError(t, err) + assert.Equal(t, "t2.large", vars["instance_type"].AsString()) + }) + + t.Run("tfvars json file", func(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tfvars.json": `{ + "variable": { + "foo": { + "default": "bar" + }, + "baz": "qux" + }, + "foo2": true, + "foo3": 3 +}`, + }) + + vars, err := loadTFVars(fs, []string{"test.tfvars.json"}) + require.NoError(t, err) + assert.Equal(t, "bar", vars["variable"].GetAttr("foo").GetAttr("default").AsString()) + assert.Equal(t, "qux", vars["variable"].GetAttr("baz").AsString()) + assert.Equal(t, true, vars["foo2"].True()) + assert.Equal(t, true, vars["foo3"].Equals(cty.NumberIntVal(3)).True()) + }) +} diff --git a/pkg/iac/scanners/terraform/parser/module_retrieval.go b/pkg/iac/scanners/terraform/parser/module_retrieval.go new file mode 100644 index 000000000000..e96108a212cf --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/module_retrieval.go @@ -0,0 +1,33 @@ +package parser + +import ( + "context" + "fmt" + "io/fs" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" +) + +type ModuleResolver interface { + Resolve(context.Context, fs.FS, resolvers.Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) +} + +var defaultResolvers = []ModuleResolver{ + resolvers.Cache, + resolvers.Local, + resolvers.Remote, + resolvers.Registry, +} + +func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix string, downloadPath string, err error) { + opt.Debug("Resolving module '%s' with source: '%s'...", opt.Name, opt.Source) + for _, resolver := range defaultResolvers { + if filesystem, prefix, path, applies, err := resolver.Resolve(ctx, current, opt); err != nil { + return nil, "", "", err + } else 
if applies { + opt.Debug("Module path is %s", path) + return filesystem, prefix, path, nil + } + } + return nil, "", "", fmt.Errorf("failed to resolve module '%s' with source: %s", opt.Name, opt.Source) +} diff --git a/pkg/iac/scanners/terraform/parser/option.go b/pkg/iac/scanners/terraform/parser/option.go new file mode 100644 index 000000000000..a37e20da1888 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/option.go @@ -0,0 +1,67 @@ +package parser + +import ( + "io/fs" + + "github.com/aquasecurity/defsec/pkg/scanners/options" +) + +type ConfigurableTerraformParser interface { + options.ConfigurableParser + SetTFVarsPaths(...string) + SetStopOnHCLError(bool) + SetWorkspaceName(string) + SetAllowDownloads(bool) + SetSkipCachedModules(bool) + SetConfigsFS(fsys fs.FS) +} + +type Option func(p ConfigurableTerraformParser) + +func OptionWithTFVarsPaths(paths ...string) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetTFVarsPaths(paths...) 
+ } + } +} + +func OptionStopOnHCLError(stop bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetStopOnHCLError(stop) + } + } +} + +func OptionWithWorkspaceName(workspaceName string) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetWorkspaceName(workspaceName) + } + } +} + +func OptionWithDownloads(allowed bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetAllowDownloads(allowed) + } + } +} + +func OptionWithSkipCachedModules(b bool) options.ParserOption { + return func(p options.ConfigurableParser) { + if tf, ok := p.(ConfigurableTerraformParser); ok { + tf.SetSkipCachedModules(b) + } + } +} + +func OptionWithConfigsFS(fsys fs.FS) options.ParserOption { + return func(s options.ConfigurableParser) { + if p, ok := s.(ConfigurableTerraformParser); ok { + p.SetConfigsFS(fsys) + } + } +} diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go new file mode 100644 index 000000000000..fde407e5a696 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -0,0 +1,349 @@ +package parser + +import ( + "context" + "io" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/zclconf/go-cty/cty" + + "github.com/aquasecurity/trivy/pkg/iac/extrafs" +) + +type sourceFile struct { + file *hcl.File + path string +} + +type Metrics struct { + Timings struct { + DiskIODuration time.Duration + ParseDuration time.Duration + } + Counts struct { + Blocks 
// sourceFile pairs a parsed HCL file with the path it was read from.
type sourceFile struct {
	file *hcl.File
	path string
}

// Metrics aggregates timing and count statistics for a parse run, including
// all child (module) parsers.
type Metrics struct {
	Timings struct {
		DiskIODuration time.Duration
		ParseDuration  time.Duration
	}
	Counts struct {
		Blocks          int
		Modules         int
		ModuleDownloads int
		Files           int
	}
}

var _ ConfigurableTerraformParser = (*Parser)(nil)

// Parser is a tool for parsing terraform templates at a given file system location
type Parser struct {
	projectRoot       string
	moduleName        string
	modulePath        string
	moduleSource      string
	moduleFS          fs.FS
	moduleBlock       *terraform.Block // the module block that references this module, nil for the root
	files             []sourceFile
	tfvarsPaths       []string
	stopOnHCLError    bool
	workspaceName     string
	underlying        *hclparse.Parser
	children          []*Parser // one child parser per loaded module
	metrics           Metrics
	options           []options.ParserOption
	debug             debug.Logger
	allowDownloads    bool
	skipCachedModules bool
	fsMap             map[string]fs.FS
	skipRequired      bool
	configsFS         fs.FS // filesystem used for tfvars/config files; defaults to moduleFS
}

// SetDebugWriter enables debug logging, tagged with this parser's module name.
func (p *Parser) SetDebugWriter(writer io.Writer) {
	p.debug = debug.New(writer, "terraform", "parser", "<"+p.moduleName+">")
}

// SetTFVarsPaths sets the tfvars files to load input variables from.
func (p *Parser) SetTFVarsPaths(s ...string) {
	p.tfvarsPaths = s
}

// SetStopOnHCLError makes parsing abort on the first HCL error.
func (p *Parser) SetStopOnHCLError(b bool) {
	p.stopOnHCLError = b
}

// SetWorkspaceName sets the terraform workspace used during evaluation.
func (p *Parser) SetWorkspaceName(s string) {
	p.workspaceName = s
}

// SetAllowDownloads toggles fetching of non-initialised external modules.
func (p *Parser) SetAllowDownloads(b bool) {
	p.allowDownloads = b
}

// SetSkipCachedModules forces re-resolution of already-cached modules.
func (p *Parser) SetSkipCachedModules(b bool) {
	p.skipCachedModules = b
}

// SetSkipRequiredCheck skips the required-files check when set.
func (p *Parser) SetSkipRequiredCheck(b bool) {
	p.skipRequired = b
}

// SetConfigsFS sets the filesystem used to read tfvars/config files.
func (p *Parser) SetConfigsFS(fsys fs.FS) {
	p.configsFS = fsys
}

// New creates a new Parser
func New(moduleFS fs.FS, moduleSource string, opts ...options.ParserOption) *Parser {
	p := &Parser{
		workspaceName:  "default",
		underlying:     hclparse.NewParser(),
		options:        opts,
		moduleName:     "root",
		allowDownloads: true,
		moduleFS:       moduleFS,
		moduleSource:   moduleSource,
		configsFS:      moduleFS,
	}

	for _, option := range opts {
		option(p)
	}

	return p
}

// newModuleParser creates a child parser for a loaded module, inheriting the
// parent's project root and options.
func (p *Parser) newModuleParser(moduleFS fs.FS, moduleSource, modulePath, moduleName string, moduleBlock *terraform.Block) *Parser {
	mp := New(moduleFS, moduleSource)
	mp.modulePath = modulePath
	mp.moduleBlock = moduleBlock
	mp.moduleName = moduleName
	mp.projectRoot = p.projectRoot
	p.children = append(p.children, mp)
	for _, option := range p.options {
		option(mp)
	}
	return mp
}

// Metrics returns this parser's metrics summed with those of all children.
func (p *Parser) Metrics() Metrics {
	total := p.metrics
	for _, child := range p.children {
		metrics := child.Metrics()
		total.Counts.Files += metrics.Counts.Files
		total.Counts.Blocks += metrics.Counts.Blocks
		total.Timings.ParseDuration += metrics.Timings.ParseDuration
		total.Timings.DiskIODuration += metrics.Timings.DiskIODuration
		// NOTE: we don't add module count - this has already propagated to the top level
	}
	return total
}

// ParseFile parses a single .tf or .tf.json file into p.files; any other
// extension is silently skipped. The first file parsed pins the project root.
func (p *Parser) ParseFile(_ context.Context, fullPath string) error {
	diskStart := time.Now()

	// .tf.json must be checked first: it would also match a naive .tf suffix test elsewhere
	isJSON := strings.HasSuffix(fullPath, ".tf.json")
	isHCL := strings.HasSuffix(fullPath, ".tf")
	if !isJSON && !isHCL {
		return nil
	}

	p.debug.Log("Parsing '%s'...", fullPath)
	f, err := p.moduleFS.Open(filepath.ToSlash(fullPath))
	if err != nil {
		return err
	}
	defer func() { _ = f.Close() }()

	data, err := io.ReadAll(f)
	if err != nil {
		return err
	}
	p.metrics.Timings.DiskIODuration += time.Since(diskStart)
	if dir := filepath.Dir(fullPath); p.projectRoot == "" {
		p.debug.Log("Setting project/module root to '%s'", dir)
		p.projectRoot = dir
		p.modulePath = dir
	}

	start := time.Now()
	var file *hcl.File
	var diag hcl.Diagnostics

	if isHCL {
		file, diag = p.underlying.ParseHCL(data, fullPath)
	} else {
		file, diag = p.underlying.ParseJSON(data, fullPath)
	}
	if diag != nil && diag.HasErrors() {
		return diag
	}
	p.files = append(p.files, sourceFile{
		file: file,
		path: fullPath,
	})
	p.metrics.Counts.Files++
	p.metrics.Timings.ParseDuration += time.Since(start)
	p.debug.Log("Added file %s.", fullPath)
	return nil
}

// ParseFS parses a root module, where it exists at the root of the provided filesystem
func (p *Parser) ParseFS(ctx context.Context, dir string) error {

	dir = filepath.Clean(dir)

	if p.projectRoot == "" {
		p.debug.Log("Setting project/module root to '%s'", dir)
		p.projectRoot = dir
		p.modulePath = dir
	}

	slashed := filepath.ToSlash(dir)
	p.debug.Log("Parsing FS from '%s'", slashed)
	fileInfos, err := fs.ReadDir(p.moduleFS, slashed)
	if err != nil {
		return err
	}

	// collect regular files, resolving symlinks where the fs supports it
	var paths []string
	for _, info := range fileInfos {
		realPath := filepath.Join(dir, info.Name())
		if info.Type()&os.ModeSymlink != 0 {
			extra, ok := p.moduleFS.(extrafs.FS)
			if !ok {
				// we can't handle symlinks in this fs type for now
				p.debug.Log("Cannot resolve symlink '%s' in '%s' for this fs type", info.Name(), dir)
				continue
			}
			realPath, err = extra.ResolveSymlink(info.Name(), dir)
			if err != nil {
				p.debug.Log("Failed to resolve symlink '%s' in '%s': %s", info.Name(), dir, err)
				continue
			}
			info, err := extra.Stat(realPath)
			if err != nil {
				p.debug.Log("Failed to stat resolved symlink '%s': %s", realPath, err)
				continue
			}
			if info.IsDir() {
				continue
			}
			p.debug.Log("Resolved symlink '%s' in '%s' to '%s'", info.Name(), dir, realPath)
		} else if info.IsDir() {
			continue
		}
		paths = append(paths, realPath)
	}
	// sort for deterministic parse order across platforms
	sort.Strings(paths)
	for _, path := range paths {
		if err := p.ParseFile(ctx, path); err != nil {
			if p.stopOnHCLError {
				return err
			}
			p.debug.Log("error parsing '%s': %s", path, err)
			continue
		}
	}

	return nil
}

// EvaluateAll reads blocks and ignores from the parsed files, loads input
// variables (from the module block, or tfvars + TF_VAR_ env for the root),
// and evaluates the module tree, returning all modules and the root outputs.
func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) {

	p.debug.Log("Evaluating module...")

	if len(p.files) == 0 {
		p.debug.Log("No files found, nothing to do.")
		return nil, cty.NilVal, nil
	}

	blocks, ignores, err := p.readBlocks(p.files)
	if err != nil {
		return nil, cty.NilVal, err
	}
	p.debug.Log("Read %d block(s) and %d ignore(s) for module '%s' (%d file[s])...", len(blocks), len(ignores), p.moduleName, len(p.files))

	p.metrics.Counts.Blocks = len(blocks)

	var inputVars map[string]cty.Value
	if p.moduleBlock != nil {
		// child module: inputs come from the referencing module block
		inputVars = p.moduleBlock.Values().AsValueMap()
		p.debug.Log("Added %d input variables from module definition.", len(inputVars))
	} else {
		// root module: inputs come from tfvars files and TF_VAR_ env vars
		inputVars, err = loadTFVars(p.configsFS, p.tfvarsPaths)
		if err != nil {
			return nil, cty.NilVal, err
		}
		p.debug.Log("Added %d variables from tfvars.", len(inputVars))
	}

	// missing modules.json is not fatal - modules may be resolved externally
	modulesMetadata, metadataPath, err := loadModuleMetadata(p.moduleFS, p.projectRoot)
	if err != nil {
		p.debug.Log("Error loading module metadata: %s.", err)
	} else {
		p.debug.Log("Loaded module metadata for %d module(s) from '%s'.", len(modulesMetadata.Modules), metadataPath)
	}

	workingDir, err := os.Getwd()
	if err != nil {
		return nil, cty.NilVal, err
	}
	p.debug.Log("Working directory for module evaluation is '%s'", workingDir)
	evaluator := newEvaluator(
		p.moduleFS,
		p,
		p.projectRoot,
		p.modulePath,
		workingDir,
		p.moduleName,
		blocks,
		inputVars,
		modulesMetadata,
		p.workspaceName,
		ignores,
		p.debug.Extend("evaluator"),
		p.allowDownloads,
		p.skipCachedModules,
	)
	modules, fsMap, parseDuration := evaluator.EvaluateAll(ctx)
	p.metrics.Counts.Modules = len(modules)
	p.metrics.Timings.ParseDuration = parseDuration
	p.debug.Log("Finished parsing module '%s'.", p.moduleName)
	p.fsMap = fsMap
	return modules, evaluator.exportOutputs(), nil
}

// GetFilesystemMap returns the module-source to filesystem mapping built
// during evaluation; never nil.
func (p *Parser) GetFilesystemMap() map[string]fs.FS {
	if p.fsMap == nil {
		return make(map[string]fs.FS)
	}
	return p.fsMap
}

// readBlocks converts parsed files into terraform blocks and collects ignore
// directives; HCL errors are skipped unless stopOnHCLError is set.
func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ignores, error) {
	var blocks terraform.Blocks
	var ignores terraform.Ignores
	moduleCtx := tfcontext.NewContext(&hcl.EvalContext{}, nil)
	for _, file := range files {
		fileBlocks, fileIgnores, err := loadBlocksFromFile(file, p.moduleSource)
		if err != nil {
			if p.stopOnHCLError {
				return nil, nil, err
			}
			p.debug.Log("Encountered HCL parse error: %s", err)
			continue
		}
		for _, fileBlock := range fileBlocks {
			blocks = append(blocks, terraform.NewBlock(fileBlock, moduleCtx, p.moduleBlock, nil, p.moduleSource, p.moduleFS))
		}
		ignores = append(ignores, fileIgnores...)
	}

	sortBlocksByHierarchy(blocks)
	return blocks, ignores, nil
}

// Integration test: resolving a module from the default terraform registry
// requires network access; skipped under -short.
func Test_DefaultRegistry(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}
	fs := testutil.CreateFS(t, map[string]string{
		"code/test.tf": `
module "registry" {
	source = "terraform-aws-modules/vpc/aws"
}
`,
	})

	parser := New(fs, "", OptionStopOnHCLError(true), OptionWithSkipCachedModules(true))
	if err := parser.ParseFS(context.TODO(), "code"); err != nil {
		t.Fatal(err)
	}
	modules, _, err := parser.EvaluateAll(context.TODO())
	require.NoError(t, err)
	require.Len(t, modules, 2)
}

// Integration test: same as above but with an explicit registry hostname.
func Test_SpecificRegistry(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}
	fs := testutil.CreateFS(t, map[string]string{
		"code/test.tf": `
module "registry" {
	source = "registry.terraform.io/terraform-aws-modules/vpc/aws"
}
`,
	})

	parser := New(fs, "", OptionStopOnHCLError(true), OptionWithSkipCachedModules(true))
	if err := parser.ParseFS(context.TODO(), "code"); err != nil {
		t.Fatal(err)
	}
	modules, _, err := parser.EvaluateAll(context.TODO())
	require.NoError(t, err)
	require.Len(t, modules, 2)
}
b/pkg/iac/scanners/terraform/parser/parser_test.go @@ -0,0 +1,1026 @@ +package parser + +import ( + "context" + "os" + "sort" + "testing" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/zclconf/go-cty/cty" +) + +func Test_BasicParsing(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` + +locals { + proxy = var.cats_mother +} + +variable "cats_mother" { + default = "boots" +} + +provider "cats" { + +} + +moved { + +} + +import { + to = cats_cat.mittens + id = "mittens" +} + +resource "cats_cat" "mittens" { + name = "mittens" + special = true +} + +resource "cats_kitten" "the-great-destroyer" { + name = "the great destroyer" + parent = cats_cat.mittens.name +} + +data "cats_cat" "the-cats-mother" { + name = local.proxy +} + +check "cats_mittens_is_special" { + data "cats_cat" "mittens" { + name = "mittens" + } + + assert { + condition = data.cats_cat.mittens.special == true + error_message = "${data.cats_cat.mittens.name} must be special" + } +} + +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + + blocks := modules[0].GetBlocks() + + // variable + variables := blocks.OfType("variable") + require.Len(t, variables, 1) + assert.Equal(t, "variable", variables[0].Type()) + require.Len(t, variables[0].Labels(), 1) + assert.Equal(t, "cats_mother", variables[0].TypeLabel()) + defaultVal := variables[0].GetAttribute("default") + require.NotNil(t, defaultVal) + assert.Equal(t, cty.String, defaultVal.Value().Type()) + assert.Equal(t, "boots", defaultVal.Value().AsString()) + + // provider + providerBlocks := blocks.OfType("provider") + require.Len(t, providerBlocks, 1) + assert.Equal(t, "provider", providerBlocks[0].Type()) + 
require.Len(t, providerBlocks[0].Labels(), 1) + assert.Equal(t, "cats", providerBlocks[0].TypeLabel()) + + // resources + resourceBlocks := blocks.OfType("resource") + + sort.Slice(resourceBlocks, func(i, j int) bool { + return resourceBlocks[i].TypeLabel() < resourceBlocks[j].TypeLabel() + }) + + require.Len(t, resourceBlocks, 2) + require.Len(t, resourceBlocks[0].Labels(), 2) + + assert.Equal(t, "resource", resourceBlocks[0].Type()) + assert.Equal(t, "cats_cat", resourceBlocks[0].TypeLabel()) + assert.Equal(t, "mittens", resourceBlocks[0].NameLabel()) + + assert.Equal(t, "mittens", resourceBlocks[0].GetAttribute("name").Value().AsString()) + assert.True(t, resourceBlocks[0].GetAttribute("special").Value().True()) + + assert.Equal(t, "resource", resourceBlocks[1].Type()) + assert.Equal(t, "cats_kitten", resourceBlocks[1].TypeLabel()) + assert.Equal(t, "the great destroyer", resourceBlocks[1].GetAttribute("name").Value().AsString()) + assert.Equal(t, "mittens", resourceBlocks[1].GetAttribute("parent").Value().AsString()) + + // import + importBlocks := blocks.OfType("import") + + assert.Equal(t, "import", importBlocks[0].Type()) + require.NotNil(t, importBlocks[0].GetAttribute("to")) + assert.Equal(t, "mittens", importBlocks[0].GetAttribute("id").Value().AsString()) + + // data + dataBlocks := blocks.OfType("data") + require.Len(t, dataBlocks, 1) + require.Len(t, dataBlocks[0].Labels(), 2) + + assert.Equal(t, "data", dataBlocks[0].Type()) + assert.Equal(t, "cats_cat", dataBlocks[0].TypeLabel()) + assert.Equal(t, "the-cats-mother", dataBlocks[0].NameLabel()) + + assert.Equal(t, "boots", dataBlocks[0].GetAttribute("name").Value().AsString()) + + // check + checkBlocks := blocks.OfType("check") + require.Len(t, checkBlocks, 1) + require.Len(t, checkBlocks[0].Labels(), 1) + + assert.Equal(t, "check", checkBlocks[0].Type()) + assert.Equal(t, "cats_mittens_is_special", checkBlocks[0].TypeLabel()) + + require.NotNil(t, checkBlocks[0].GetBlock("data")) + require.NotNil(t, 
checkBlocks[0].GetBlock("assert")) +} + +func Test_Modules(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +module "my-mod" { + source = "../module" + input = "ok" +} + +output "result" { + value = module.my-mod.mod_result +} +`, + "module/module.tf": ` +variable "input" { + default = "?" +} + +output "mod_result" { + value = var.input +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + + require.Len(t, modules, 2) + rootModule := modules[0] + childModule := modules[1] + + moduleBlocks := rootModule.GetBlocks().OfType("module") + require.Len(t, moduleBlocks, 1) + + assert.Equal(t, "module", moduleBlocks[0].Type()) + assert.Equal(t, "module.my-mod", moduleBlocks[0].FullName()) + inputAttr := moduleBlocks[0].GetAttribute("input") + require.NotNil(t, inputAttr) + require.Equal(t, cty.String, inputAttr.Value().Type()) + assert.Equal(t, "ok", inputAttr.Value().AsString()) + + rootOutputs := rootModule.GetBlocks().OfType("output") + require.Len(t, rootOutputs, 1) + assert.Equal(t, "output.result", rootOutputs[0].FullName()) + valAttr := rootOutputs[0].GetAttribute("value") + require.NotNil(t, valAttr) + require.Equal(t, cty.String, valAttr.Type()) + assert.Equal(t, "ok", valAttr.Value().AsString()) + + childOutputs := childModule.GetBlocks().OfType("output") + require.Len(t, childOutputs, 1) + assert.Equal(t, "module.my-mod.output.mod_result", childOutputs[0].FullName()) + childValAttr := childOutputs[0].GetAttribute("value") + require.NotNil(t, childValAttr) + require.Equal(t, cty.String, childValAttr.Type()) + assert.Equal(t, "ok", childValAttr.Value().AsString()) + +} + +func Test_NestedParentModule(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +module "my-mod" { + source = "../." 
+ input = "ok" +} + +output "result" { + value = module.my-mod.mod_result +} +`, + "root.tf": ` +variable "input" { + default = "?" +} + +output "mod_result" { + value = var.input +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 2) + rootModule := modules[0] + childModule := modules[1] + + moduleBlocks := rootModule.GetBlocks().OfType("module") + require.Len(t, moduleBlocks, 1) + + assert.Equal(t, "module", moduleBlocks[0].Type()) + assert.Equal(t, "module.my-mod", moduleBlocks[0].FullName()) + inputAttr := moduleBlocks[0].GetAttribute("input") + require.NotNil(t, inputAttr) + require.Equal(t, cty.String, inputAttr.Value().Type()) + assert.Equal(t, "ok", inputAttr.Value().AsString()) + + rootOutputs := rootModule.GetBlocks().OfType("output") + require.Len(t, rootOutputs, 1) + assert.Equal(t, "output.result", rootOutputs[0].FullName()) + valAttr := rootOutputs[0].GetAttribute("value") + require.NotNil(t, valAttr) + require.Equal(t, cty.String, valAttr.Type()) + assert.Equal(t, "ok", valAttr.Value().AsString()) + + childOutputs := childModule.GetBlocks().OfType("output") + require.Len(t, childOutputs, 1) + assert.Equal(t, "module.my-mod.output.mod_result", childOutputs[0].FullName()) + childValAttr := childOutputs[0].GetAttribute("value") + require.NotNil(t, childValAttr) + require.Equal(t, cty.String, childValAttr.Type()) + assert.Equal(t, "ok", childValAttr.Value().AsString()) +} + +func Test_UndefinedModuleOutputReference(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +resource "something" "blah" { + value = module.x.y +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, 
modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, false, attr.IsResolvable()) +} + +func Test_UndefinedModuleOutputReferenceInSlice(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` +resource "something" "blah" { + value = ["first", module.x.y, "last"] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "first", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, false, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "last", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_TemplatedSliceValue(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "hello" +} + +resource "something" "blah" { + value = ["first", "${var.x}-${var.x}", "last"] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) 
+ + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "first", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "hello-hello", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "last", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_SliceOfVars(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "1" +} + +variable "y" { + default = "2" +} + +resource "something" "blah" { + value = [var.x, var.y] +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 2) + + assert.Equal(t, "1", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "2", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) +} + +func Test_VarSlice(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = ["a", "b", "c"] +} + +resource "something" "blah" { + value = var.x +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := 
rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_LocalSliceNested(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = "a" +} + +locals { + y = [var.x, "b", "c"] +} + +resource "something" "blah" { + value = local.y +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_FunctionCall(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/test.tf": ` + +variable "x" { + default = ["a", "b"] +} + +resource "something" "blah" { + value = concat(var.x, ["c"]) +} +`, + }) + + parser := New(fs, "", 
OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), "code")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + + require.Len(t, modules, 1) + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("something") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("value") + require.NotNil(t, attr) + + assert.Equal(t, true, attr.IsResolvable()) + + values := attr.AsStringValueSliceOrEmpty() + require.Len(t, values, 3) + + assert.Equal(t, "a", values[0].Value()) + assert.Equal(t, true, values[0].GetMetadata().IsResolvable()) + + assert.Equal(t, "b", values[1].Value()) + assert.Equal(t, true, values[1].GetMetadata().IsResolvable()) + + assert.Equal(t, "c", values[2].Value()) + assert.Equal(t, true, values[2].GetMetadata().IsResolvable()) +} + +func Test_NullDefaultValueForVar(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` +variable "bucket_name" { + type = string + default = null +} + +resource "aws_s3_bucket" "default" { + bucket = var.bucket_name != null ? 
var.bucket_name : "default" +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + require.NoError(t, err) + require.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + require.Len(t, blocks, 1) + block := blocks[0] + + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Equal(t, "default", attr.Value().AsString()) +} + +func Test_MultipleInstancesOfSameResource(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test.tf": ` + +resource "aws_kms_key" "key1" { + description = "Key #1" + enable_key_rotation = true +} + +resource "aws_kms_key" "key2" { + description = "Key #2" + enable_key_rotation = true +} + +resource "aws_s3_bucket" "this" { + bucket = "test" + } + + +resource "aws_s3_bucket_server_side_encryption_configuration" "this1" { + bucket = aws_s3_bucket.this.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.key1.arn + sse_algorithm = "aws:kms" + } + } +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "this2" { + bucket = aws_s3_bucket.this.id + + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = aws_kms_key.key2.arn + sse_algorithm = "aws:kms" + } + } +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket_server_side_encryption_configuration") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr, parent := block.GetNestedAttribute("rule.apply_server_side_encryption_by_default.kms_master_key_id") + assert.Equal(t, "apply_server_side_encryption_by_default", parent.Type()) + 
assert.NotNil(t, attr) + assert.NotEmpty(t, attr.Value().AsString()) + } +} + +func Test_IfConfigFsIsNotSet_ThenUseModuleFsForVars(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "bucket_name" { + type = string +} +resource "aws_s3_bucket" "main" { + bucket = var.bucket_name +} +`, + "main.tfvars": `bucket_name = "test_bucket"`, + }) + parser := New(fs, "", + OptionStopOnHCLError(true), + OptionWithTFVarsPaths("main.tfvars"), + ) + + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + require.Len(t, blocks, 1) + + block := blocks[0] + + assert.Equal(t, "test_bucket", block.GetAttribute("bucket").AsStringValueOrDefault("", block).Value()) +} + +func Test_ForEachRefToLocals(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +locals { + buckets = toset([ + "foo", + "bar", + ]) +} + +resource "aws_s3_bucket" "this" { + for_each = local.buckets + bucket = each.key +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Contains(t, []string{"foo", "bar"}, attr.AsStringValueOrDefault("", block).Value()) + } +} + +func Test_ForEachRefToVariableWithDefault(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "buckets" { + type = set(string) + default = ["foo", "bar"] +} + +resource "aws_s3_bucket" "this" { + for_each = var.buckets + bucket = each.key +} +`, + }) + + parser 
:= New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("aws_s3_bucket") + assert.Len(t, blocks, 2) + + for _, block := range blocks { + attr := block.GetAttribute("bucket") + require.NotNil(t, attr) + assert.Contains(t, []string{"foo", "bar"}, attr.AsStringValueOrDefault("", block).Value()) + } +} + +func Test_ForEachRefToVariableFromFile(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +variable "policy_rules" { + type = object({ + secure_tags = optional(map(object({ + session_matcher = optional(string) + priority = number + enabled = optional(bool, true) + })), {}) + }) +} + +resource "google_network_security_gateway_security_policy_rule" "secure_tag_rules" { + for_each = var.policy_rules.secure_tags + provider = google-beta + project = "test" + name = each.key + enabled = each.value.enabled + priority = each.value.priority + session_matcher = each.value.session_matcher +} +`, + "main.tfvars": ` +policy_rules = { + secure_tags = { + secure-tag-1 = { + session_matcher = "host() != 'google.com'" + priority = 1001 + } + } +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true), OptionWithTFVarsPaths("main.tfvars")) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + blocks := rootModule.GetResourcesByType("google_network_security_gateway_security_policy_rule") + assert.Len(t, blocks, 1) + + block := blocks[0] + + assert.Equal(t, "secure-tag-1", block.GetAttribute("name").AsStringValueOrDefault("", block).Value()) + assert.Equal(t, true, block.GetAttribute("enabled").AsBoolValueOrDefault(false, block).Value()) + assert.Equal(t, "host() != 'google.com'", 
block.GetAttribute("session_matcher").AsStringValueOrDefault("", block).Value()) + assert.Equal(t, 1001, block.GetAttribute("priority").AsIntValueOrDefault(0, block).Value()) +} + +func Test_ForEachRefersToMapThatContainsSameStringValues(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": `locals { + buckets = { + bucket1 = "test1" + bucket2 = "test1" + } +} + +resource "aws_s3_bucket" "this" { + for_each = local.buckets + bucket = each.key +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + bucketBlocks := modules.GetResourcesByType("aws_s3_bucket") + assert.Len(t, bucketBlocks, 2) + + var labels []string + + for _, b := range bucketBlocks { + labels = append(labels, b.Label()) + } + + expectedLabels := []string{ + `aws_s3_bucket.this["bucket1"]`, + `aws_s3_bucket.this["bucket2"]`, + } + assert.Equal(t, expectedLabels, labels) +} + +func TestDataSourceWithCountMetaArgument(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +data "http" "example" { + count = 2 +} +`, + }) + + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + rootModule := modules[0] + + httpDataSources := rootModule.GetDatasByType("http") + assert.Len(t, httpDataSources, 2) + + var labels []string + for _, b := range httpDataSources { + labels = append(labels, b.Label()) + } + + expectedLabels := []string{ + `http.example[0]`, + `http.example[1]`, + } + assert.Equal(t, expectedLabels, labels) +} + +func TestDataSourceWithForEachMetaArgument(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +locals { + ports = ["80", "8080"] +} +data "http" "example" { + for_each = 
toset(local.ports)
+  url = "localhost:${each.key}"
+}
+`,
+	})
+
+	parser := New(fs, "", OptionStopOnHCLError(true))
+	require.NoError(t, parser.ParseFS(context.TODO(), "."))
+
+	modules, _, err := parser.EvaluateAll(context.TODO())
+	assert.NoError(t, err)
+	assert.Len(t, modules, 1)
+
+	rootModule := modules[0]
+
+	httpDataSources := rootModule.GetDatasByType("http")
+	assert.Len(t, httpDataSources, 2)
+}
+
+// TestForEach checks how many resource instances are expanded for the
+// different argument types accepted (or rejected) by for_each.
+func TestForEach(t *testing.T) {
+
+	tests := []struct {
+		name          string
+		source        string
+		expectedCount int
+	}{
+		{
+			name: "arg is list of strings",
+			source: `locals {
+  buckets = ["bucket1", "bucket2"]
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket = each.key
+}`,
+			// for_each over a plain list is invalid in Terraform, so no
+			// resource instances are expanded.
+			expectedCount: 0,
+		},
+		{
+			name: "arg is empty set",
+			source: `locals {
+  buckets = toset([])
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket = each.key
+}`,
+			expectedCount: 0,
+		},
+		{
+			name: "arg is set of strings",
+			source: `locals {
+  buckets = ["bucket1", "bucket2"]
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = toset(local.buckets)
+  bucket = each.key
+}`,
+			expectedCount: 2,
+		},
+		{
+			name: "arg is map",
+			source: `locals {
+  buckets = {
+    1 = {}
+    2 = {}
+  }
+}
+
+resource "aws_s3_bucket" "this" {
+  for_each = local.buckets
+  bucket = each.key
+}`,
+			expectedCount: 2,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			fs := testutil.CreateFS(t, map[string]string{
+				"main.tf": tt.source,
+			})
+			parser := New(fs, "", OptionStopOnHCLError(true))
+			require.NoError(t, parser.ParseFS(context.TODO(), "."))
+
+			modules, _, err := parser.EvaluateAll(context.TODO())
+			assert.NoError(t, err)
+			assert.Len(t, modules, 1)
+
+			bucketBlocks := modules.GetResourcesByType("aws_s3_bucket")
+			assert.Len(t, bucketBlocks, tt.expectedCount)
+		})
+	}
+}
+
+func TestForEachRefToResource(t *testing.T) {
+	fs := testutil.CreateFS(t, map[string]string{
+		"main.tf": `
+	locals {
+	vpcs = {
+		"test1" = {
+			cidr_block = 
"192.168.0.0/28" + } + "test2" = { + cidr_block = "192.168.1.0/28" + } + } +} + +resource "aws_vpc" "example" { + for_each = local.vpcs + cidr_block = each.value.cidr_block +} + +resource "aws_internet_gateway" "example" { + for_each = aws_vpc.example + vpc_id = each.key +} +`, + }) + parser := New(fs, "", OptionStopOnHCLError(true)) + require.NoError(t, parser.ParseFS(context.TODO(), ".")) + + modules, _, err := parser.EvaluateAll(context.TODO()) + assert.NoError(t, err) + assert.Len(t, modules, 1) + + blocks := modules.GetResourcesByType("aws_internet_gateway") + assert.Len(t, blocks, 2) + + var vpcIds []string + for _, b := range blocks { + vpcIds = append(vpcIds, b.GetAttribute("vpc_id").Value().AsString()) + } + + expectedVpcIds := []string{"test1", "test2"} + assert.Equal(t, expectedVpcIds, vpcIds) +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/cache.go b/pkg/iac/scanners/terraform/parser/resolvers/cache.go new file mode 100644 index 000000000000..1314d538a60a --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/cache.go @@ -0,0 +1,62 @@ +package resolvers + +import ( + "context" + "crypto/md5" // nolint + "fmt" + "io/fs" + "os" + "path/filepath" +) + +type cacheResolver struct{} + +var Cache = &cacheResolver{} + +const tempDirName = ".aqua" + +func locateCacheFS() (fs.FS, error) { + dir, err := locateCacheDir() + if err != nil { + return nil, err + } + return os.DirFS(dir), nil +} + +func locateCacheDir() (string, error) { + cacheDir := filepath.Join(os.TempDir(), tempDirName, "cache") + if err := os.MkdirAll(cacheDir, 0o755); err != nil { + return "", err + } + if !isWritable(cacheDir) { + return "", fmt.Errorf("cache directory is not writable") + } + return cacheDir, nil +} + +func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + if opt.SkipCache { + opt.Debug("Cache is disabled.") + return nil, "", "", false, nil + } + cacheFS, 
err := locateCacheFS() + if err != nil { + opt.Debug("No cache filesystem is available on this machine.") + return nil, "", "", false, nil + } + key := cacheKey(opt.Source, opt.Version, opt.RelativePath) + opt.Debug("Trying to resolve: %s", key) + if info, err := fs.Stat(cacheFS, filepath.ToSlash(key)); err == nil && info.IsDir() { + opt.Debug("Module '%s' resolving via cache...", opt.Name) + cacheDir, err := locateCacheDir() + if err != nil { + return nil, "", "", true, err + } + return os.DirFS(filepath.Join(cacheDir, key)), opt.OriginalSource, ".", true, nil + } + return nil, "", "", false, nil +} + +func cacheKey(source, version, relativePath string) string { + return fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%s:%s:%s", source, version, relativePath)))) // nolint +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/local.go b/pkg/iac/scanners/terraform/parser/resolvers/local.go new file mode 100644 index 000000000000..94d92099b6c3 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/local.go @@ -0,0 +1,26 @@ +package resolvers + +import ( + "context" + "io/fs" + "path/filepath" +) + +type localResolver struct{} + +var Local = &localResolver{} + +func (r *localResolver) Resolve(_ context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + if !opt.hasPrefix(".", "..") { + return nil, "", "", false, nil + } + joined := filepath.Clean(filepath.Join(opt.ModulePath, opt.Source)) + if _, err := fs.Stat(target, filepath.ToSlash(joined)); err == nil { + opt.Debug("Module '%s' resolved locally to %s", opt.Name, joined) + return target, "", joined, true, nil + } + + clean := filepath.Clean(opt.Source) + opt.Debug("Module '%s' resolved locally to %s", opt.Name, clean) + return target, "", clean, true, nil +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/options.go b/pkg/iac/scanners/terraform/parser/resolvers/options.go new file mode 100644 index 000000000000..61f720e8cc9e 
--- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/options.go @@ -0,0 +1,28 @@ +package resolvers + +import ( + "strings" + + "github.com/aquasecurity/defsec/pkg/debug" +) + +type Options struct { + Source, OriginalSource, Version, OriginalVersion, WorkingDir, Name, ModulePath string + DebugLogger debug.Logger + AllowDownloads bool + SkipCache bool + RelativePath string +} + +func (o *Options) hasPrefix(prefixes ...string) bool { + for _, prefix := range prefixes { + if strings.HasPrefix(o.Source, prefix) { + return true + } + } + return false +} + +func (o *Options) Debug(format string, args ...interface{}) { + o.DebugLogger.Log(format, args...) +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/registry.go b/pkg/iac/scanners/terraform/parser/resolvers/registry.go new file mode 100644 index 000000000000..5623e9064e06 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/registry.go @@ -0,0 +1,165 @@ +package resolvers + +import ( + "context" + "encoding/json" + "fmt" + "io/fs" + "net/http" + "os" + "sort" + "strings" + "time" + + "github.com/Masterminds/semver" +) + +type registryResolver struct { + client *http.Client +} + +var Registry = ®istryResolver{ + client: &http.Client{ + // give it a maximum 5 seconds to resolve the module + Timeout: time.Second * 5, + }, +} + +type moduleVersions struct { + Modules []struct { + Versions []struct { + Version string `json:"version"` + } `json:"versions"` + } `json:"modules"` +} + +const registryHostname = "registry.terraform.io" + +// nolint +func (r *registryResolver) Resolve(ctx context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { + + if !opt.AllowDownloads { + return + } + + inputVersion := opt.Version + source, relativePath, _ := strings.Cut(opt.Source, "//") + parts := strings.Split(source, "/") + if len(parts) < 3 || len(parts) > 4 { + return + } + + hostname := registryHostname + var token string + if 
len(parts) == 4 { + hostname = parts[0] + parts = parts[1:] + + envVar := fmt.Sprintf("TF_TOKEN_%s", strings.ReplaceAll(hostname, ".", "_")) + token = os.Getenv(envVar) + if token != "" { + opt.Debug("Found a token for the registry at %s", hostname) + } else { + opt.Debug("No token was found for the registry at %s", hostname) + } + } + + moduleName := strings.Join(parts, "/") + + if opt.Version != "" { + versionUrl := fmt.Sprintf("https://%s/v1/modules/%s/versions", hostname, moduleName) + opt.Debug("Requesting module versions from registry using '%s'...", versionUrl) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, versionUrl, nil) + if err != nil { + return nil, "", "", true, err + } + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + resp, err := r.client.Do(req) + if err != nil { + return nil, "", "", true, err + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode != http.StatusOK { + return nil, "", "", true, fmt.Errorf("unexpected status code for versions endpoint: %d", resp.StatusCode) + } + var availableVersions moduleVersions + if err := json.NewDecoder(resp.Body).Decode(&availableVersions); err != nil { + return nil, "", "", true, err + } + + opt.Version, err = resolveVersion(inputVersion, availableVersions) + if err != nil { + return nil, "", "", true, err + } + opt.Debug("Found version '%s' for constraint '%s'", opt.Version, inputVersion) + } + + var url string + if opt.Version == "" { + url = fmt.Sprintf("https://%s/v1/modules/%s/download", hostname, moduleName) + } else { + url = fmt.Sprintf("https://%s/v1/modules/%s/%s/download", hostname, moduleName, opt.Version) + } + + opt.Debug("Requesting module source from registry using '%s'...", url) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, "", "", true, err + } + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + if opt.Version != "" { + 
req.Header.Set("X-Terraform-Version", opt.Version) + } + + resp, err := r.client.Do(req) + if err != nil { + return nil, "", "", true, err + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode != http.StatusNoContent { + return nil, "", "", true, fmt.Errorf("unexpected status code: %d", resp.StatusCode) + } + + opt.Source = resp.Header.Get("X-Terraform-Get") + opt.Debug("Module '%s' resolved via registry to new source: '%s'", opt.Name, opt.Source) + opt.RelativePath = relativePath + filesystem, prefix, downloadPath, _, err = Remote.Resolve(ctx, target, opt) + if err != nil { + return nil, "", "", true, err + } + + return filesystem, prefix, downloadPath, true, nil +} + +func resolveVersion(input string, versions moduleVersions) (string, error) { + if len(versions.Modules) != 1 { + return "", fmt.Errorf("1 module expected, found %d", len(versions.Modules)) + } + if len(versions.Modules[0].Versions) == 0 { + return "", fmt.Errorf("no available versions for module") + } + constraints, err := semver.NewConstraint(input) + if err != nil { + return "", err + } + var realVersions semver.Collection + for _, rawVersion := range versions.Modules[0].Versions { + realVersion, err := semver.NewVersion(rawVersion.Version) + if err != nil { + continue + } + realVersions = append(realVersions, realVersion) + } + sort.Sort(sort.Reverse(realVersions)) + for _, realVersion := range realVersions { + if constraints.Check(realVersion) { + return realVersion.String(), nil + } + } + return "", fmt.Errorf("no available versions for module constraint '%s'", input) +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/remote.go b/pkg/iac/scanners/terraform/parser/resolvers/remote.go new file mode 100644 index 000000000000..4c1a96437e65 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/remote.go @@ -0,0 +1,92 @@ +package resolvers + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + "sync/atomic" + + "github.com/hashicorp/go-getter" +) + 
+type remoteResolver struct {
+	count int32
+}
+
+var Remote = &remoteResolver{
+	count: 0,
+}
+
+// incrementCount atomically bumps the number of remote module downloads
+// performed so far.
+func (r *remoteResolver) incrementCount(o Options) {
+	o.Debug("Incrementing the download counter")
+	// Use an atomic add: the previous
+	// CompareAndSwapInt32(&r.count, r.count, r.count+1) pattern read the
+	// counter non-atomically and could silently lose an increment (a failed
+	// CAS was never retried) when modules resolve concurrently. AddInt32
+	// also returns the new value, so the debug line reports it reliably.
+	o.Debug("Download counter is now %d", atomic.AddInt32(&r.count, 1))
+}
+
+// GetDownloadCount reports how many remote downloads have been performed.
+func (r *remoteResolver) GetDownloadCount() int {
+	return int(atomic.LoadInt32(&r.count))
+}
+
+// Resolve downloads a remote module source into the local cache directory
+// and returns a filesystem rooted at the downloaded copy. It only applies
+// to recognised remote source prefixes and when downloads are allowed.
+func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) {
+	if !opt.hasPrefix("github.com/", "bitbucket.org/", "s3:", "git@", "git:", "hg:", "https:", "gcs:") {
+		return nil, "", "", false, nil
+	}
+
+	if !opt.AllowDownloads {
+		return nil, "", "", false, nil
+	}
+
+	// The cache key is derived from the original (pre-registry-rewrite)
+	// source and version so later runs can resolve via the cache resolver.
+	key := cacheKey(opt.OriginalSource, opt.OriginalVersion, opt.RelativePath)
+	opt.Debug("Storing with cache key %s", key)
+
+	baseCacheDir, err := locateCacheDir()
+	if err != nil {
+		return nil, "", "", true, fmt.Errorf("failed to locate cache directory: %w", err)
+	}
+	cacheDir := filepath.Join(baseCacheDir, key)
+	if err := r.download(ctx, opt, cacheDir); err != nil {
+		return nil, "", "", true, err
+	}
+
+	r.incrementCount(opt)
+	opt.Debug("Successfully downloaded %s from %s", opt.Name, opt.Source)
+	opt.Debug("Module '%s' resolved via remote download.", opt.Name)
+	return os.DirFS(cacheDir), opt.Source, filepath.Join(".", opt.RelativePath), true, nil
+}
+
+// download fetches opt.Source into dst using go-getter, replacing any
+// previous contents of dst.
+func (r *remoteResolver) download(ctx context.Context, opt Options, dst string) error {
+	_ = os.RemoveAll(dst)
+	if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil {
+		return err
+	}
+
+	var opts []getter.ClientOption
+
+	// Overwrite the file getter so that a file will be copied
+	getter.Getters["file"] = &getter.FileGetter{Copy: true}
+
+	opt.Debug("Downloading %s...", opt.Source)
+
+	// Build the client
+	client := &getter.Client{
+		Ctx:     ctx,
+		Src:     opt.Source,
+		Dst:     dst,
+		Pwd:     opt.WorkingDir,
+		Getters: getter.Getters,
+		Mode:    
getter.ClientModeAny, + Options: opts, + } + + if err := client.Get(); err != nil { + return fmt.Errorf("failed to download: %w", err) + } + + return nil +} + +func (r *remoteResolver) GetSourcePrefix(source string) string { + return source +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/writable.go b/pkg/iac/scanners/terraform/parser/resolvers/writable.go new file mode 100644 index 000000000000..84f471f779c2 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/writable.go @@ -0,0 +1,36 @@ +//go:build !windows +// +build !windows + +package resolvers + +import ( + "os" + "syscall" +) + +func isWritable(path string) bool { + info, err := os.Stat(path) + if err != nil { + return false + } + + if !info.IsDir() { + return false + } + + // Check if the user bit is enabled in file permission + if info.Mode().Perm()&(1<<(uint(7))) == 0 { + return false + } + + var stat syscall.Stat_t + if err = syscall.Stat(path, &stat); err != nil { + return false + } + + if uint32(os.Geteuid()) != stat.Uid { + return false + } + + return true +} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go b/pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go new file mode 100644 index 000000000000..69cb3c7169b1 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/resolvers/writable_windows.go @@ -0,0 +1,24 @@ +package resolvers + +import ( + "os" +) + +func isWritable(path string) bool { + + info, err := os.Stat(path) + if err != nil { + return false + } + + if !info.IsDir() { + return false + } + + // Check if the user bit is enabled in file permission + if info.Mode().Perm()&(1<<(uint(7))) == 0 { + return false + } + + return true +} diff --git a/pkg/iac/scanners/terraform/parser/sort.go b/pkg/iac/scanners/terraform/parser/sort.go new file mode 100644 index 000000000000..d43e86b4e740 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/sort.go @@ -0,0 +1,58 @@ +package parser + +import ( + "sort" + + 
"github.com/aquasecurity/defsec/pkg/terraform" +) + +func sortBlocksByHierarchy(blocks terraform.Blocks) { + c := &counter{ + cache: make(map[string]int), + } + sort.Slice(blocks, func(i, j int) bool { + a := blocks[i] + b := blocks[j] + iDepth, jDepth := c.countBlockRecursion(a, blocks, 0), c.countBlockRecursion(b, blocks, 0) + switch { + case iDepth < jDepth: + return true + case iDepth > jDepth: + return false + default: + return blocks[i].FullName() < blocks[j].FullName() + } + }) +} + +type counter struct { + cache map[string]int +} + +func (c *counter) countBlockRecursion(block *terraform.Block, blocks terraform.Blocks, count int) int { + metadata := block.GetMetadata() + if cached, ok := c.cache[metadata.Reference()]; ok { + return cached + } + var maxCount int + var hasRecursion bool + for _, attrName := range []string{"for_each", "count"} { + if attr := block.GetAttribute(attrName); attr.IsNotNil() { + hasRecursion = true + for _, other := range blocks { + if attr.ReferencesBlock(other) { + depth := c.countBlockRecursion(other, blocks, count) + if depth > maxCount { + maxCount = depth + } + } + } + } + } + if hasRecursion { + maxCount++ + } + result := maxCount + count + c.cache[metadata.Reference()] = result + return result +} diff --git a/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars new file mode 100644 index 000000000000..23fee69e2bb1 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars @@ -0,0 +1 @@ +instance_type = "t2.large" \ No newline at end of file diff --git a/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json new file mode 100644 index 000000000000..bde0e75763b1 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/testdata/tfvars/terraform.tfvars.json @@ -0,0 +1,10 @@ +{ + "variable": { + "foo": { + "default": "bar" + }, + 
"baz": "qux" + }, + "foo2": true, + "foo3": 3 +} \ No newline at end of file diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go new file mode 100644 index 000000000000..ecee5f883fb7 --- /dev/null +++ b/pkg/iac/scanners/terraform/scanner.go @@ -0,0 +1,379 @@ +package terraform + +import ( + "context" + "io" + "io/fs" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" + "golang.org/x/exp/slices" + + "github.com/aquasecurity/trivy/pkg/iac/extrafs" + "github.com/aquasecurity/trivy/pkg/iac/rego" + "github.com/aquasecurity/trivy/pkg/iac/scanners" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" +) + +var _ scanners.FSScanner = (*Scanner)(nil) +var _ options.ConfigurableScanner = (*Scanner)(nil) +var _ ConfigurableTerraformScanner = (*Scanner)(nil) + +type Scanner struct { + sync.Mutex + options []options.ScannerOption + parserOpt []options.ParserOption + executorOpt []executor.Option + dirs map[string]struct{} + forceAllDirs bool + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + execLock sync.RWMutex + debug debug.Logger + frameworks []framework.Framework + spec string + loadEmbeddedLibraries bool + loadEmbeddedPolicies bool +} + +func (s *Scanner) SetSpec(spec string) { + s.spec = spec +} + +func (s *Scanner) SetRegoOnly(regoOnly bool) { + s.executorOpt = append(s.executorOpt, executor.OptionWithRegoOnly(regoOnly)) +} + +func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { + s.frameworks = 
frameworks +} + +func (s *Scanner) SetUseEmbeddedPolicies(b bool) { + s.loadEmbeddedPolicies = b +} + +func (s *Scanner) SetUseEmbeddedLibraries(b bool) { + s.loadEmbeddedLibraries = b +} + +func (s *Scanner) Name() string { + return "Terraform" +} + +func (s *Scanner) SetForceAllDirs(b bool) { + s.forceAllDirs = b +} + +func (s *Scanner) AddParserOptions(options ...options.ParserOption) { + s.parserOpt = append(s.parserOpt, options...) +} + +func (s *Scanner) AddExecutorOptions(options ...executor.Option) { + s.executorOpt = append(s.executorOpt, options...) +} + +func (s *Scanner) SetPolicyReaders(readers []io.Reader) { + s.policyReaders = readers +} + +func (s *Scanner) SetSkipRequiredCheck(skip bool) { + s.parserOpt = append(s.parserOpt, options.ParserWithSkipRequiredCheck(skip)) +} + +func (s *Scanner) SetDebugWriter(writer io.Writer) { + s.parserOpt = append(s.parserOpt, options.ParserWithDebug(writer)) + s.executorOpt = append(s.executorOpt, executor.OptionWithDebugWriter(writer)) + s.debug = debug.New(writer, "terraform", "scanner") +} + +func (s *Scanner) SetTraceWriter(_ io.Writer) { +} + +func (s *Scanner) SetPerResultTracingEnabled(_ bool) { +} + +func (s *Scanner) SetPolicyDirs(dirs ...string) { + s.policyDirs = dirs +} + +func (s *Scanner) SetDataDirs(_ ...string) {} +func (s *Scanner) SetPolicyNamespaces(_ ...string) {} + +func (s *Scanner) SetPolicyFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} + +func (s *Scanner) SetDataFilesystem(_ fs.FS) { + // handled by rego when option is passed on +} +func (s *Scanner) SetRegoErrorLimit(_ int) {} + +type Metrics struct { + Parser parser.Metrics + Executor executor.Metrics + Timings struct { + Total time.Duration + } +} + +func New(options ...options.ScannerOption) *Scanner { + s := &Scanner{ + dirs: make(map[string]struct{}), + options: options, + } + for _, opt := range options { + opt(s) + } + return s +} + +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) 
(scan.Results, error) { + results, _, err := s.ScanFSWithMetrics(ctx, target, dir) + return results, err +} + +func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { + s.Lock() + defer s.Unlock() + if s.regoScanner != nil { + return s.regoScanner, nil + } + regoScanner := rego.NewScanner(types.SourceCloud, s.options...) + regoScanner.SetParentDebugLogger(s.debug) + + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return nil, err + } + s.regoScanner = regoScanner + return regoScanner, nil +} + +// terraformRootModule represents the module to be used as the root module for Terraform deployment. +type terraformRootModule struct { + rootPath string + childs terraform.Modules + fsMap map[string]fs.FS +} + +func excludeNonRootModules(modules []terraformRootModule) []terraformRootModule { + var result []terraformRootModule + var childPaths []string + + for _, module := range modules { + childPaths = append(childPaths, module.childs.ChildModulesPaths()...) 
+ } + + for _, module := range modules { + // if the path of the root module matches the path of the child module, + // then we should not scan it + if !slices.Contains(childPaths, module.rootPath) { + result = append(result, module) + } + } + return result +} + +func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir string) (scan.Results, Metrics, error) { + + var metrics Metrics + + s.debug.Log("Scanning [%s] at '%s'...", target, dir) + + // find directories which directly contain tf files (and have no parent containing tf files) + rootDirs := s.findRootModules(target, dir, dir) + sort.Strings(rootDirs) + + if len(rootDirs) == 0 { + s.debug.Log("no root modules found") + return nil, metrics, nil + } + + regoScanner, err := s.initRegoScanner(target) + if err != nil { + return nil, metrics, err + } + + s.execLock.Lock() + s.executorOpt = append(s.executorOpt, executor.OptionWithRegoScanner(regoScanner), executor.OptionWithFrameworks(s.frameworks...)) + s.execLock.Unlock() + + var allResults scan.Results + + // parse all root module directories + var rootModules []terraformRootModule + for _, dir := range rootDirs { + + s.debug.Log("Scanning root module '%s'...", dir) + + p := parser.New(target, "", s.parserOpt...) 
+ + if err := p.ParseFS(ctx, dir); err != nil { + return nil, metrics, err + } + + modules, _, err := p.EvaluateAll(ctx) + if err != nil { + return nil, metrics, err + } + + parserMetrics := p.Metrics() + metrics.Parser.Counts.Blocks += parserMetrics.Counts.Blocks + metrics.Parser.Counts.Modules += parserMetrics.Counts.Modules + metrics.Parser.Counts.Files += parserMetrics.Counts.Files + metrics.Parser.Timings.DiskIODuration += parserMetrics.Timings.DiskIODuration + metrics.Parser.Timings.ParseDuration += parserMetrics.Timings.ParseDuration + + rootModules = append(rootModules, terraformRootModule{ + rootPath: dir, + childs: modules, + fsMap: p.GetFilesystemMap(), + }) + } + + rootModules = excludeNonRootModules(rootModules) + + for _, module := range rootModules { + s.execLock.RLock() + e := executor.New(s.executorOpt...) + s.execLock.RUnlock() + results, execMetrics, err := e.Execute(module.childs) + if err != nil { + return nil, metrics, err + } + + for i, result := range results { + if result.Metadata().Range().GetFS() != nil { + continue + } + key := result.Metadata().Range().GetFSKey() + if key == "" { + continue + } + if filesystem, ok := module.fsMap[key]; ok { + override := scan.Results{ + result, + } + override.SetSourceAndFilesystem(result.Range().GetSourcePrefix(), filesystem, false) + results[i] = override[0] + } + } + + metrics.Executor.Counts.Passed += execMetrics.Counts.Passed + metrics.Executor.Counts.Failed += execMetrics.Counts.Failed + metrics.Executor.Counts.Ignored += execMetrics.Counts.Ignored + metrics.Executor.Counts.Critical += execMetrics.Counts.Critical + metrics.Executor.Counts.High += execMetrics.Counts.High + metrics.Executor.Counts.Medium += execMetrics.Counts.Medium + metrics.Executor.Counts.Low += execMetrics.Counts.Low + metrics.Executor.Timings.Adaptation += execMetrics.Timings.Adaptation + metrics.Executor.Timings.RunningChecks += execMetrics.Timings.RunningChecks + + allResults = append(allResults, results...) 
+ } + + metrics.Parser.Counts.ModuleDownloads = resolvers.Remote.GetDownloadCount() + + metrics.Timings.Total += metrics.Parser.Timings.DiskIODuration + metrics.Timings.Total += metrics.Parser.Timings.ParseDuration + metrics.Timings.Total += metrics.Executor.Timings.Adaptation + metrics.Timings.Total += metrics.Executor.Timings.RunningChecks + + return allResults, metrics, nil +} + +func (s *Scanner) removeNestedDirs(dirs []string) []string { + if s.forceAllDirs { + return dirs + } + var clean []string + for _, dirA := range dirs { + dirOK := true + for _, dirB := range dirs { + if dirA == dirB { + continue + } + if str, err := filepath.Rel(dirB, dirA); err == nil && !strings.HasPrefix(str, "..") { + dirOK = false + break + } + } + if dirOK { + clean = append(clean, dirA) + } + } + return clean +} + +func (s *Scanner) findRootModules(target fs.FS, scanDir string, dirs ...string) []string { + + var roots []string + var others []string + + for _, dir := range dirs { + if s.isRootModule(target, dir) { + roots = append(roots, dir) + if !s.forceAllDirs { + continue + } + } + + // if this isn't a root module, look at directories inside it + files, err := fs.ReadDir(target, filepath.ToSlash(dir)) + if err != nil { + continue + } + for _, file := range files { + realPath := filepath.Join(dir, file.Name()) + if symFS, ok := target.(extrafs.ReadLinkFS); ok { + realPath, err = symFS.ResolveSymlink(realPath, scanDir) + if err != nil { + s.debug.Log("failed to resolve symlink '%s': %s", file.Name(), err) + continue + } + } + if file.IsDir() { + others = append(others, realPath) + } else if statFS, ok := target.(fs.StatFS); ok { + info, err := statFS.Stat(filepath.ToSlash(realPath)) + if err != nil { + continue + } + if info.IsDir() { + others = append(others, realPath) + } + } + } + } + + if (len(roots) == 0 || s.forceAllDirs) && len(others) > 0 { + roots = append(roots, s.findRootModules(target, scanDir, others...)...) 
+ } + + return s.removeNestedDirs(roots) +} + +func (s *Scanner) isRootModule(target fs.FS, dir string) bool { + files, err := fs.ReadDir(target, filepath.ToSlash(dir)) + if err != nil { + s.debug.Log("failed to read dir '%s' from filesystem [%s]: %s", dir, target, err) + return false + } + for _, file := range files { + if strings.HasSuffix(file.Name(), ".tf") || strings.HasSuffix(file.Name(), ".tf.json") { + return true + } + } + return false +} diff --git a/pkg/iac/scanners/terraform/scanner_integration_test.go b/pkg/iac/scanners/terraform/scanner_integration_test.go new file mode 100644 index 000000000000..70b912bf3065 --- /dev/null +++ b/pkg/iac/scanners/terraform/scanner_integration_test.go @@ -0,0 +1,132 @@ +package terraform + +import ( + "bytes" + "context" + "fmt" + "testing" + + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_ScanRemoteModule(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +module "s3_bucket" { + source = "terraform-aws-modules/s3-bucket/aws" + + bucket = "my-s3-bucket" +} +`, + "/rules/bucket_name.rego": ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +}`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(false), + options.ScannerWithRegoOnly(true), + 
ScannerWithAllDirectories(true), + ScannerWithSkipCachedModules(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, ".") + require.NoError(t, err) + + assert.Len(t, results.GetPassed(), 1) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_ScanChildUseRemoteModule(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + fs := testutil.CreateFS(t, map[string]string{ + "main.tf": ` +module "this" { + source = "./modules/s3" + bucket = "my-s3-bucket" +} +`, + "modules/s3/main.tf": ` +variable "bucket" { + type = string +} + +module "s3_bucket" { + source = "github.com/terraform-aws-modules/terraform-aws-s3-bucket?ref=v3.15.1" + bucket = var.bucket +} +`, + "rules/bucket_name.rego": ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +}`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(false), + options.ScannerWithRegoOnly(true), + ScannerWithAllDirectories(true), + ScannerWithSkipCachedModules(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, ".") + require.NoError(t, err) + + assert.Len(t, results.GetPassed(), 1) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go new file mode 100644 index 000000000000..43ac135968a2 --- /dev/null +++ b/pkg/iac/scanners/terraform/scanner_test.go @@ -0,0 +1,1360 @@ +package terraform 
+ +import ( + "bytes" + "context" + "fmt" + "strconv" + "testing" + + "github.com/aquasecurity/defsec/pkg/providers" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/severity" + "github.com/aquasecurity/defsec/pkg/state" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/test/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var alwaysFailRule = scan.Rule{ + Provider: providers.AWSProvider, + Service: "service", + ShortCode: "abc", + Severity: severity.High, + CustomChecks: scan.CustomChecks{ + Terraform: &scan.TerraformCustomCheck{ + RequiredTypes: []string{}, + RequiredLabels: []string{}, + Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { + results.Add("oh no", resourceBlock) + return + }, + }, + }, +} + +const emptyBucketRule = ` +# METADATA +# schemas: +# - input: schema.input +# custom: +# avd_id: AVD-AWS-0001 +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package defsec.test.aws1 +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("The name of the bucket must not be empty", bucket) +} +` + +func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) scan.Results { + + fs := testutil.CreateFS(t, map[string]string{ + "project/main.tf": code, + }) + + scanner := New(opt...) 
+ results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "project") + require.NoError(t, err) + return results +} + +func Test_OptionWithAlternativeIDProvider(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithAlternativeIDProvider(func(s string) []string { + return []string{"something", "altid", "blah"} + }), + } + results := scanWithOptions(t, ` +//tfsec:ignore:altid +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_TrivyOptionWithAlternativeIDProvider(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithAlternativeIDProvider(func(s string) []string { + return []string{"something", "altid", "blah"} + }), + } + results := scanWithOptions(t, ` +//trivy:ignore:altid +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithSeverityOverrides(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithSeverityOverrides(map[string]string{"aws-service-abc": "LOW"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 1) + assert.Equal(t, severity.Low, results.GetFailed()[0].Severity()) +} + +func Test_OptionWithDebugWriter(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + buffer := bytes.NewBuffer([]byte{}) + + scannerOpts := []options.ScannerOption{ + options.ScannerWithDebug(buffer), + } + _ = scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) 
+ require.Greater(t, buffer.Len(), 0) +} + +func Test_OptionNoIgnores(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithNoIgnores(), + } + results := scanWithOptions(t, ` +//tfsec:ignore:aws-service-abc +resource "something" "else" {} +`, scannerOpts...) + require.Len(t, results.GetFailed(), 1) + require.Len(t, results.GetIgnored(), 0) + +} + +func Test_OptionExcludeRules(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + options := []options.ScannerOption{ + ScannerWithExcludedRules([]string{"aws-service-abc"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, options...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionIncludeRules(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithIncludedRules([]string{"this-only"}), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) + require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithMinimumSeverity(t *testing.T) { + reg := rules.Register(alwaysFailRule) + defer rules.Deregister(reg) + + scannerOpts := []options.ScannerOption{ + ScannerWithMinimumSeverity(severity.Critical), + } + results := scanWithOptions(t, ` +resource "something" "else" {} +`, scannerOpts...) 
+ require.Len(t, results.GetFailed(), 0) + require.Len(t, results.GetIgnored(), 1) + +} + +func Test_OptionWithPolicyDirs(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + cause := bucket.name +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyFilesystem(fs), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + + failure := results.GetFailed()[0] + + assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) + + actualCode, err := failure.GetCode() + require.NoError(t, err) + for i := range actualCode.Lines { + actualCode.Lines[i].Highlighted = "" + } + assert.Equal(t, []scan.Line{ + { + Number: 2, + Content: "resource \"aws_s3_bucket\" \"my-bucket\" {", + IsCause: false, + FirstCause: false, + LastCause: false, + Annotation: "", + }, + { + Number: 3, + Content: "\tbucket = \"evil\"", + IsCause: true, + FirstCause: true, + LastCause: true, + Annotation: "", + }, + { + Number: 4, + Content: "}", + IsCause: false, + FirstCause: false, + LastCause: false, + Annotation: "", + }, + }, 
actualCode.Lines) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + +} + +func Test_OptionWithPolicyNamespaces(t *testing.T) { + + tests := []struct { + includedNamespaces []string + policyNamespace string + wantFailure bool + }{ + { + includedNamespaces: nil, + policyNamespace: "blah", + wantFailure: false, + }, + { + includedNamespaces: nil, + policyNamespace: "appshield.something", + wantFailure: true, + }, + { + includedNamespaces: nil, + policyNamespace: "defsec.blah", + wantFailure: true, + }, + { + includedNamespaces: []string{"user"}, + policyNamespace: "users", + wantFailure: false, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "something.users", + wantFailure: false, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "users", + wantFailure: true, + }, + { + includedNamespaces: []string{"users"}, + policyNamespace: "users.my_rule", + wantFailure: true, + }, + { + includedNamespaces: []string{"a", "users", "b"}, + policyNamespace: "users", + wantFailure: true, + }, + { + includedNamespaces: []string{"user"}, + policyNamespace: "defsec", + wantFailure: true, + }, + } + + for i, test := range tests { + + t.Run(strconv.Itoa(i), func(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": fmt.Sprintf(` +# METADATA +# custom: +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package %s + +deny[cause] { +bucket := input.aws.s3.buckets[_] +bucket.name.value == "evil" +cause := bucket.name +} + + `, test.policyNamespace), + }) + + scanner := New( + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithPolicyNamespaces(test.includedNamespaces...), + ) + + results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + require.NoError(t, err) + + var found bool + for _, result := range results.GetFailed() { + if 
result.RegoNamespace() == test.policyNamespace && result.RegoRule() == "deny" { + found = true + break + } + } + assert.Equal(t, test.wantFailure, found) + + }) + } + +} + +func Test_OptionWithStateFunc(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + }) + + var actual state.State + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + ScannerWithStateFunc(func(s *state.State) { + require.NotNil(t, s) + actual = *s + }), + ) + + _, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + require.NoError(t, err) + + assert.Equal(t, 1, len(actual.AWS.S3.Buckets)) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + +} + +func Test_OptionWithRegoOnly(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + cause := bucket.name +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + 
assert.Equal(t, "AVD-TEST-0123", results[0].Rule().AVDID) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_OptionWithRegoOnly_CodeHighlighting(t *testing.T) { + + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_s3_bucket" "my-bucket" { + bucket = "evil" +} +`, + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_metadata__ := { + "id": "TEST123", + "avd_id": "AVD-TEST-0123", + "title": "Buckets should not be evil", + "short_code": "no-evil-buckets", + "severity": "CRITICAL", + "type": "DefSec Security Check", + "description": "You should not allow buckets to be evil", + "recommended_actions": "Use a good bucket instead", + "url": "https://google.com/search?q=is+my+bucket+evil", +} + +__rego_input__ := { + "combine": false, + "selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "evil" + res := result.new("oh no", bucket.name) +} +`, + }) + + debugLog := bytes.NewBuffer([]byte{}) + scanner := New( + options.ScannerWithDebug(debugLog), + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedLibraries(true), + ) + + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) + + require.Len(t, results.GetFailed(), 1) + assert.Equal(t, "AVD-TEST-0123", results[0].Rule().AVDID) + assert.NotNil(t, results[0].Metadata().Range().GetFS()) + + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } +} + +func Test_OptionWithSkipDownloaded(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "test/main.tf": ` +module "s3-bucket" { + source = "terraform-aws-modules/s3-bucket/aws" + version = "3.14.0" + bucket = mybucket +} +`, + // creating our own rule for the reliability of the test + "/rules/test.rego": ` +package defsec.abcdefg + +__rego_input__ := { + "combine": false, + 
"selector": [{"type": "defsec", "subtypes": [{"service": "s3", "provider": "aws"}]}], +} + +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "mybucket" + cause := bucket.name +}`, + }) + + scanner := New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + results, err := scanner.ScanFS(context.TODO(), fs, "test") + assert.NoError(t, err) + assert.Greater(t, len(results.GetFailed()), 0) + + scanner = New(ScannerWithSkipDownloaded(true)) + results, err = scanner.ScanFS(context.TODO(), fs, "test") + assert.NoError(t, err) + assert.Len(t, results.GetFailed(), 0) + +} + +func Test_IAMPolicyRego(t *testing.T) { + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tf": ` +resource "aws_sqs_queue_policy" "bad_example" { + queue_url = aws_sqs_queue.q.id + + policy = < 0 { + if _, ok := v[0].(map[string]interface{}); ok { + maps := make([]map[string]interface{}, len(v)) + for i := range v { + maps[i] = v[i].(map[string]interface{}) + } + input[k] = purgeMetadataSlice(maps) + } + } + } + } + return input +} + +func purgeMetadataSlice(input []map[string]interface{}) []map[string]interface{} { + for i := range input { + input[i] = purgeMetadata(input[i]) + } + return input +} From 254e96ba3a9bd6e2476cfea1ff89dc7fdc7eab10 Mon Sep 17 00:00:00 2001 From: Simar Date: Fri, 1 Dec 2023 19:59:54 -0700 Subject: [PATCH 2/5] fix lints --- internal/adapters/arm/adapt.go | 5 ++--- internal/adapters/arm/storage/adapt.go | 3 +-- .../aws/accessanalyzer/accessanalyzer.go | 2 +- .../cloudformation/aws/apigateway/apigateway.go | 2 +- .../adapters/cloudformation/aws/athena/athena.go | 2 +- .../cloudformation/aws/cloudfront/cloudfront.go | 2 +- .../cloudformation/aws/cloudtrail/cloudtrail.go | 2 +- .../cloudformation/aws/cloudwatch/cloudwatch.go | 2 +- .../cloudformation/aws/codebuild/codebuild.go | 2 +- .../adapters/cloudformation/aws/config/config.go | 2 +- .../cloudformation/aws/documentdb/documentdb.go | 2 +- 
.../cloudformation/aws/dynamodb/dynamodb.go | 2 +- internal/adapters/cloudformation/aws/ec2/nacl.go | 4 +--- internal/adapters/cloudformation/aws/ecr/ecr.go | 2 +- .../cloudformation/aws/ecr/repository.go | 8 +++----- internal/adapters/cloudformation/aws/ecs/ecs.go | 2 +- internal/adapters/cloudformation/aws/efs/efs.go | 2 +- internal/adapters/cloudformation/aws/eks/eks.go | 2 +- .../aws/elasticache/elasticache.go | 2 +- .../aws/elasticsearch/elasticsearch.go | 2 +- internal/adapters/cloudformation/aws/elb/elb.go | 2 +- internal/adapters/cloudformation/aws/iam/iam.go | 2 +- .../adapters/cloudformation/aws/iam/policy.go | 3 ++- .../cloudformation/aws/kinesis/kinesis.go | 2 +- .../adapters/cloudformation/aws/lambda/lambda.go | 2 +- internal/adapters/cloudformation/aws/mq/mq.go | 2 +- internal/adapters/cloudformation/aws/msk/msk.go | 2 +- .../cloudformation/aws/neptune/neptune.go | 2 +- internal/adapters/cloudformation/aws/rds/rds.go | 2 +- .../cloudformation/aws/redshift/redshift.go | 2 +- .../adapters/cloudformation/aws/s3/bucket.go | 3 +-- internal/adapters/cloudformation/aws/s3/s3.go | 2 +- .../adapters/cloudformation/aws/sam/function.go | 3 ++- internal/adapters/cloudformation/aws/sam/sam.go | 2 +- .../cloudformation/aws/sam/state_machines.go | 3 ++- internal/adapters/cloudformation/aws/sns/sns.go | 2 +- .../adapters/cloudformation/aws/sqs/queue.go | 8 +++----- internal/adapters/cloudformation/aws/sqs/sqs.go | 2 +- internal/adapters/cloudformation/aws/ssm/ssm.go | 2 +- .../cloudformation/aws/workspaces/workspaces.go | 2 +- .../adapters/terraform/aws/ec2/autoscaling.go | 6 ++---- internal/adapters/terraform/aws/ecr/adapt.go | 3 ++- internal/adapters/terraform/aws/iam/convert.go | 8 +++----- internal/adapters/terraform/aws/iam/passwords.go | 6 ++---- internal/adapters/terraform/aws/iam/policies.go | 3 ++- internal/adapters/terraform/aws/sqs/adapt.go | 10 +++++----- .../adapters/terraform/azure/compute/adapt.go | 16 ++++++++-------- 
.../adapters/terraform/azure/keyvault/adapt.go | 6 ++---- .../adapters/terraform/azure/network/adapt.go | 8 +++----- .../adapters/terraform/azure/storage/adapt.go | 3 +-- .../terraform/cloudstack/compute/adapt.go | 6 ++---- .../terraform/digitalocean/compute/adapt.go | 4 ++-- .../terraform/digitalocean/spaces/adapt.go | 3 ++- .../terraform/google/compute/instances.go | 3 ++- .../terraform/google/compute/metadata.go | 3 ++- .../terraform/google/compute/networks.go | 3 +-- internal/adapters/terraform/google/gke/adapt.go | 5 +++-- internal/adapters/terraform/google/iam/adapt.go | 3 ++- .../adapters/terraform/google/iam/folder_iam.go | 8 +++++--- .../adapters/terraform/google/iam/folders.go | 8 +++++--- .../adapters/terraform/google/iam/org_iam.go | 9 +++++---- .../adapters/terraform/google/iam/project_iam.go | 14 +++++++------- .../adapters/terraform/google/iam/projects.go | 4 ++-- internal/adapters/terraform/google/kms/adapt.go | 6 ++---- internal/adapters/terraform/google/sql/adapt.go | 6 ++---- .../adapters/terraform/google/storage/adapt.go | 2 +- .../adapters/terraform/google/storage/iam.go | 8 +++++--- .../adapters/terraform/openstack/networking.go | 3 ++- .../adapters/terraform/tftestutil/testutil.go | 1 - internal/rules/register.go | 1 - pkg/iac/detection/detect.go | 2 +- pkg/iac/rego/build.go | 7 ++++--- pkg/iac/rego/embed.go | 3 ++- pkg/iac/rego/load.go | 2 +- pkg/iac/rego/metadata.go | 9 +++++---- pkg/iac/rego/result.go | 3 ++- pkg/iac/rego/scanner.go | 10 +++++----- pkg/iac/rego/schemas/builder.go | 14 +++++++------- pkg/iac/rego/store.go | 2 +- pkg/iac/rules/register.go | 1 - pkg/iac/rules/rules.go | 1 + .../azure/arm/parser/armjson/decode_object.go | 2 +- .../azure/arm/parser/armjson/parse_object.go | 2 +- pkg/iac/scanners/azure/arm/parser/parser.go | 3 +-- pkg/iac/scanners/azure/arm/parser/template.go | 3 +-- pkg/iac/scanners/azure/arm/scanner.go | 4 +--- pkg/iac/scanners/azure/functions/copy_index.go | 2 +- .../scanners/azure/functions/create_object.go 
| 2 +- pkg/iac/scanners/azure/functions/intersection.go | 2 +- pkg/iac/scanners/azure/functions/max.go | 3 +-- pkg/iac/scanners/azure/functions/min.go | 3 +-- pkg/iac/scanners/azure/functions/replace.go | 6 +++--- pkg/iac/scanners/azure/functions/split.go | 4 +--- pkg/iac/scanners/azure/functions/union.go | 5 ++--- pkg/iac/scanners/azure/value.go | 3 ++- .../scanners/cloudformation/parser/fn_builtin.go | 4 ++-- pkg/iac/scanners/cloudformation/parser/parser.go | 4 ++-- .../scanners/cloudformation/parser/property.go | 9 ++++----- .../cloudformation/parser/property_helpers.go | 1 - .../scanners/cloudformation/parser/resource.go | 4 ++-- pkg/iac/scanners/cloudformation/parser/util.go | 10 +++++----- pkg/iac/scanners/cloudformation/scanner.go | 1 - pkg/iac/scanners/dockerfile/parser/parser.go | 4 ++-- pkg/iac/scanners/dockerfile/scanner.go | 3 +-- pkg/iac/scanners/helm/parser/parser.go | 5 ++--- pkg/iac/scanners/helm/parser/parser_tar.go | 3 ++- pkg/iac/scanners/helm/parser/vals.go | 4 ++-- pkg/iac/scanners/helm/scanner.go | 6 +++--- pkg/iac/scanners/json/parser/parser.go | 4 ++-- pkg/iac/scanners/kubernetes/parser/manifest.go | 2 +- pkg/iac/scanners/kubernetes/parser/parser.go | 12 ++++++------ pkg/iac/scanners/kubernetes/scanner.go | 3 ++- pkg/iac/scanners/terraform/executor/executor.go | 3 +-- .../scanners/terraform/executor/statistics.go | 3 ++- pkg/iac/scanners/terraform/options.go | 1 - pkg/iac/scanners/terraform/parser/evaluator.go | 8 ++++---- .../terraform/parser/funcs/collection.go | 8 ++++---- .../scanners/terraform/parser/funcs/crypto.go | 7 +++---- .../scanners/terraform/parser/funcs/datetime.go | 2 +- .../scanners/terraform/parser/funcs/defaults.go | 5 +++-- .../scanners/terraform/parser/funcs/encoding.go | 2 +- .../scanners/terraform/parser/funcs/number.go | 4 ++-- .../scanners/terraform/parser/funcs/string.go | 2 +- pkg/iac/scanners/terraform/parser/functions.go | 3 ++- pkg/iac/scanners/terraform/parser/load_blocks.go | 3 ++- 
pkg/iac/scanners/terraform/parser/load_module.go | 6 +++--- pkg/iac/scanners/terraform/parser/parser.go | 8 ++++---- .../scanners/terraform/parser/resolvers/cache.go | 2 +- .../terraform/parser/resolvers/remote.go | 2 +- pkg/iac/scanners/terraform/scanner.go | 4 ++-- pkg/iac/scanners/terraformplan/parser/parser.go | 6 +++--- pkg/iac/scanners/terraformplan/scanner.go | 3 ++- pkg/iac/scanners/toml/parser/parser.go | 1 + pkg/iac/scanners/yaml/parser/parser.go | 7 ++++--- test/testutil/util.go | 3 ++- 135 files changed, 264 insertions(+), 277 deletions(-) mode change 100755 => 100644 internal/rules/register.go diff --git a/internal/adapters/arm/adapt.go b/internal/adapters/arm/adapt.go index 44f012146cd3..df317c429e85 100644 --- a/internal/adapters/arm/adapt.go +++ b/internal/adapters/arm/adapt.go @@ -3,6 +3,8 @@ package arm import ( "context" + "github.com/aquasecurity/defsec/pkg/providers/azure" + "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/trivy/internal/adapters/arm/appservice" "github.com/aquasecurity/trivy/internal/adapters/arm/authorization" "github.com/aquasecurity/trivy/internal/adapters/arm/compute" @@ -16,9 +18,6 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/arm/securitycenter" "github.com/aquasecurity/trivy/internal/adapters/arm/storage" "github.com/aquasecurity/trivy/internal/adapters/arm/synapse" - - "github.com/aquasecurity/defsec/pkg/providers/azure" - "github.com/aquasecurity/defsec/pkg/state" scanner "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) diff --git a/internal/adapters/arm/storage/adapt.go b/internal/adapters/arm/storage/adapt.go index 31dd0bec0dd8..10d60b1f3cd0 100644 --- a/internal/adapters/arm/storage/adapt.go +++ b/internal/adapters/arm/storage/adapt.go @@ -4,9 +4,8 @@ import ( "strings" "github.com/aquasecurity/defsec/pkg/providers/azure/storage" - "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" - "github.com/aquasecurity/defsec/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) func Adapt(deployment azure.Deployment) storage.Storage { diff --git a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go index 687e657ed49d..c2fda50bd4a4 100644 --- a/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go +++ b/internal/adapters/cloudformation/aws/accessanalyzer/accessanalyzer.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts accessanalyzer resources func Adapt(cfFile parser.FileContext) accessanalyzer.AccessAnalyzer { return accessanalyzer.AccessAnalyzer{ Analyzers: getAccessAnalyzer(cfFile), diff --git a/internal/adapters/cloudformation/aws/apigateway/apigateway.go b/internal/adapters/cloudformation/aws/apigateway/apigateway.go index e71444059838..7744f7e04751 100644 --- a/internal/adapters/cloudformation/aws/apigateway/apigateway.go +++ b/internal/adapters/cloudformation/aws/apigateway/apigateway.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts apigateway resources func Adapt(cfFile parser.FileContext) apigateway.APIGateway { return apigateway.APIGateway{ V1: v1.APIGateway{ diff --git a/internal/adapters/cloudformation/aws/athena/athena.go b/internal/adapters/cloudformation/aws/athena/athena.go index 5408c4660301..53c94fb03b9d 100644 --- a/internal/adapters/cloudformation/aws/athena/athena.go +++ b/internal/adapters/cloudformation/aws/athena/athena.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts athena resources func Adapt(cfFile parser.FileContext) athena.Athena { return athena.Athena{ Databases: nil, diff --git a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go index e12dd2529036..85cc9ac3a009 100644 --- a/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go +++ b/internal/adapters/cloudformation/aws/cloudfront/cloudfront.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts cloudfront resources func Adapt(cfFile parser.FileContext) cloudfront.Cloudfront { return cloudfront.Cloudfront{ Distributions: getDistributions(cfFile), diff --git a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go index 848b124de9bf..c62f875ecad4 100644 --- a/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go +++ b/internal/adapters/cloudformation/aws/cloudtrail/cloudtrail.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts cloudtrail resources func Adapt(cfFile parser.FileContext) cloudtrail.CloudTrail { return cloudtrail.CloudTrail{ Trails: getCloudTrails(cfFile), diff --git a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go index 0acfede18143..c0b5ad1d53bf 100644 --- a/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go +++ b/internal/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts cloudwatch resources func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { return cloudwatch.CloudWatch{ LogGroups: getLogGroups(cfFile), diff --git a/internal/adapters/cloudformation/aws/codebuild/codebuild.go b/internal/adapters/cloudformation/aws/codebuild/codebuild.go index e9aa90180bd1..5f54ea4c10a3 100644 --- a/internal/adapters/cloudformation/aws/codebuild/codebuild.go +++ b/internal/adapters/cloudformation/aws/codebuild/codebuild.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts codebuild resources func Adapt(cfFile parser.FileContext) codebuild.CodeBuild { return codebuild.CodeBuild{ Projects: getProjects(cfFile), diff --git a/internal/adapters/cloudformation/aws/config/config.go b/internal/adapters/cloudformation/aws/config/config.go index 819b20250266..b968300a1c86 100644 --- a/internal/adapters/cloudformation/aws/config/config.go +++ b/internal/adapters/cloudformation/aws/config/config.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts config resources func Adapt(cfFile parser.FileContext) config.Config { return config.Config{ ConfigurationAggregrator: getConfigurationAggregator(cfFile), diff --git a/internal/adapters/cloudformation/aws/documentdb/documentdb.go b/internal/adapters/cloudformation/aws/documentdb/documentdb.go index 91439139d3f8..272530ecd366 100644 --- a/internal/adapters/cloudformation/aws/documentdb/documentdb.go +++ b/internal/adapters/cloudformation/aws/documentdb/documentdb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts documentdb resources func Adapt(cfFile parser.FileContext) documentdb.DocumentDB { return documentdb.DocumentDB{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go index 270aadc02176..d9dda85d5431 100644 --- a/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go +++ b/internal/adapters/cloudformation/aws/dynamodb/dynamodb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts dynamodb resources func Adapt(cfFile parser.FileContext) dynamodb.DynamoDB { return dynamodb.DynamoDB{ DAXClusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/ec2/nacl.go b/internal/adapters/cloudformation/aws/ec2/nacl.go index db90e02d12a6..6c6de06e9559 100644 --- a/internal/adapters/cloudformation/aws/ec2/nacl.go +++ b/internal/adapters/cloudformation/aws/ec2/nacl.go @@ -3,10 +3,8 @@ package ec2 import ( "strconv" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" - + defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) diff --git a/internal/adapters/cloudformation/aws/ecr/ecr.go b/internal/adapters/cloudformation/aws/ecr/ecr.go index 1b15e98eef09..b8f976578ad4 100644 --- a/internal/adapters/cloudformation/aws/ecr/ecr.go +++ b/internal/adapters/cloudformation/aws/ecr/ecr.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts ecr resources func Adapt(cfFile parser.FileContext) ecr.ECR { return ecr.ECR{ Repositories: getRepositories(cfFile), diff --git a/internal/adapters/cloudformation/aws/ecr/repository.go b/internal/adapters/cloudformation/aws/ecr/repository.go index c6ccf3c2becb..cc21d82e0458 100644 --- a/internal/adapters/cloudformation/aws/ecr/repository.go +++ b/internal/adapters/cloudformation/aws/ecr/repository.go @@ -3,14 +3,12 @@ package ecr import ( "fmt" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - - "github.com/liamg/iamgo" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { diff --git a/internal/adapters/cloudformation/aws/ecs/ecs.go b/internal/adapters/cloudformation/aws/ecs/ecs.go index b9aa6ea9c755..ba0dad58d02c 100644 --- a/internal/adapters/cloudformation/aws/ecs/ecs.go +++ b/internal/adapters/cloudformation/aws/ecs/ecs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts ecs resources func Adapt(cfFile parser.FileContext) ecs.ECS { return ecs.ECS{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/efs/efs.go b/internal/adapters/cloudformation/aws/efs/efs.go index 0be8584b70d2..54285879359f 100644 --- a/internal/adapters/cloudformation/aws/efs/efs.go +++ b/internal/adapters/cloudformation/aws/efs/efs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts efs resources func Adapt(cfFile parser.FileContext) efs.EFS { return efs.EFS{ FileSystems: getFileSystems(cfFile), diff --git a/internal/adapters/cloudformation/aws/eks/eks.go b/internal/adapters/cloudformation/aws/eks/eks.go index c43c613c2f73..df694ed23634 100644 --- a/internal/adapters/cloudformation/aws/eks/eks.go +++ b/internal/adapters/cloudformation/aws/eks/eks.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts eks resources func Adapt(cfFile parser.FileContext) eks.EKS { return eks.EKS{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/elasticache/elasticache.go b/internal/adapters/cloudformation/aws/elasticache/elasticache.go index bc382616d3b3..265f3402767f 100644 --- a/internal/adapters/cloudformation/aws/elasticache/elasticache.go +++ b/internal/adapters/cloudformation/aws/elasticache/elasticache.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts elasticache resources func Adapt(cfFile parser.FileContext) elasticache.ElastiCache { return elasticache.ElastiCache{ Clusters: getClusterGroups(cfFile), diff --git a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go index e7f5bc700916..96f40aa9a2e1 100644 --- a/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go +++ b/internal/adapters/cloudformation/aws/elasticsearch/elasticsearch.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts elasticsearch resources func Adapt(cfFile parser.FileContext) elasticsearch.Elasticsearch { return elasticsearch.Elasticsearch{ Domains: getDomains(cfFile), diff --git a/internal/adapters/cloudformation/aws/elb/elb.go b/internal/adapters/cloudformation/aws/elb/elb.go index 0bf80ed98424..8449a7e54614 100644 --- a/internal/adapters/cloudformation/aws/elb/elb.go +++ b/internal/adapters/cloudformation/aws/elb/elb.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts elb resources func Adapt(cfFile parser.FileContext) elb.ELB { return elb.ELB{ LoadBalancers: getLoadBalancers(cfFile), diff --git a/internal/adapters/cloudformation/aws/iam/iam.go b/internal/adapters/cloudformation/aws/iam/iam.go index c87cf5c04425..76c04c844849 100644 --- a/internal/adapters/cloudformation/aws/iam/iam.go +++ b/internal/adapters/cloudformation/aws/iam/iam.go @@ -6,7 +6,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts iam resources func Adapt(cfFile parser.FileContext) iam.IAM { return iam.IAM{ PasswordPolicy: iam.PasswordPolicy{ diff --git a/internal/adapters/cloudformation/aws/iam/policy.go b/internal/adapters/cloudformation/aws/iam/policy.go index 586a008fe564..d5e189816711 100644 --- a/internal/adapters/cloudformation/aws/iam/policy.go +++ b/internal/adapters/cloudformation/aws/iam/policy.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { diff --git a/internal/adapters/cloudformation/aws/kinesis/kinesis.go b/internal/adapters/cloudformation/aws/kinesis/kinesis.go index 8b30ee219ccf..3ba12ff2beca 100644 --- a/internal/adapters/cloudformation/aws/kinesis/kinesis.go +++ b/internal/adapters/cloudformation/aws/kinesis/kinesis.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts kinesis resources func Adapt(cfFile parser.FileContext) kinesis.Kinesis { return kinesis.Kinesis{ Streams: getStreams(cfFile), diff --git a/internal/adapters/cloudformation/aws/lambda/lambda.go b/internal/adapters/cloudformation/aws/lambda/lambda.go index f1ca0aef6206..0df6a30b93e1 100644 --- a/internal/adapters/cloudformation/aws/lambda/lambda.go +++ b/internal/adapters/cloudformation/aws/lambda/lambda.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts lambda resources func Adapt(cfFile parser.FileContext) lambda.Lambda { return lambda.Lambda{ Functions: getFunctions(cfFile), diff --git a/internal/adapters/cloudformation/aws/mq/mq.go b/internal/adapters/cloudformation/aws/mq/mq.go index d6fe7470e875..5a651c124ac9 100644 --- a/internal/adapters/cloudformation/aws/mq/mq.go +++ b/internal/adapters/cloudformation/aws/mq/mq.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts mq resources func Adapt(cfFile parser.FileContext) mq.MQ { return mq.MQ{ Brokers: getBrokers(cfFile), diff --git a/internal/adapters/cloudformation/aws/msk/msk.go b/internal/adapters/cloudformation/aws/msk/msk.go index 4462f7c6f8ca..bc1b2500decd 100644 --- a/internal/adapters/cloudformation/aws/msk/msk.go +++ b/internal/adapters/cloudformation/aws/msk/msk.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts msk resources func Adapt(cfFile parser.FileContext) msk.MSK { return msk.MSK{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/neptune/neptune.go b/internal/adapters/cloudformation/aws/neptune/neptune.go index d9fc9e49faf6..e068517d49cb 100644 --- a/internal/adapters/cloudformation/aws/neptune/neptune.go +++ b/internal/adapters/cloudformation/aws/neptune/neptune.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts neptune resources func Adapt(cfFile parser.FileContext) neptune.Neptune { return neptune.Neptune{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/rds/rds.go b/internal/adapters/cloudformation/aws/rds/rds.go index 7bcf26716d5d..9692df9a50fe 100644 --- a/internal/adapters/cloudformation/aws/rds/rds.go +++ b/internal/adapters/cloudformation/aws/rds/rds.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts rds resources func Adapt(cfFile parser.FileContext) rds.RDS { clusters, orphans := getClustersAndInstances(cfFile) return rds.RDS{ diff --git a/internal/adapters/cloudformation/aws/redshift/redshift.go b/internal/adapters/cloudformation/aws/redshift/redshift.go index fccd7b20f60b..35462caa0efd 100644 --- a/internal/adapters/cloudformation/aws/redshift/redshift.go +++ b/internal/adapters/cloudformation/aws/redshift/redshift.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts redshift resources func Adapt(cfFile parser.FileContext) redshift.Redshift { return redshift.Redshift{ Clusters: getClusters(cfFile), diff --git a/internal/adapters/cloudformation/aws/s3/bucket.go b/internal/adapters/cloudformation/aws/s3/bucket.go index d22f25c338fe..1089456c126e 100644 --- a/internal/adapters/cloudformation/aws/s3/bucket.go +++ b/internal/adapters/cloudformation/aws/s3/bucket.go @@ -4,9 +4,8 @@ import ( "regexp" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/aws/s3" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) diff --git a/internal/adapters/cloudformation/aws/s3/s3.go b/internal/adapters/cloudformation/aws/s3/s3.go index 6074cd4bc912..4cdd1e099263 100644 --- a/internal/adapters/cloudformation/aws/s3/s3.go +++ b/internal/adapters/cloudformation/aws/s3/s3.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts s3 resources func Adapt(cfFile parser.FileContext) s3.S3 { return s3.S3{ Buckets: getBuckets(cfFile), diff --git a/internal/adapters/cloudformation/aws/sam/function.go b/internal/adapters/cloudformation/aws/sam/function.go index e4f0e8901d3c..40e7ff2f6d34 100644 --- a/internal/adapters/cloudformation/aws/sam/function.go +++ b/internal/adapters/cloudformation/aws/sam/function.go @@ -1,11 +1,12 @@ package sam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { diff --git a/internal/adapters/cloudformation/aws/sam/sam.go b/internal/adapters/cloudformation/aws/sam/sam.go index a5fbe0eea81a..7f971a596615 100644 --- a/internal/adapters/cloudformation/aws/sam/sam.go +++ b/internal/adapters/cloudformation/aws/sam/sam.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts sam resources func Adapt(cfFile parser.FileContext) sam.SAM { return sam.SAM{ APIs: getApis(cfFile), diff --git a/internal/adapters/cloudformation/aws/sam/state_machines.go b/internal/adapters/cloudformation/aws/sam/state_machines.go index a591d4418e95..ec01f3470e03 100644 --- a/internal/adapters/cloudformation/aws/sam/state_machines.go +++ b/internal/adapters/cloudformation/aws/sam/state_machines.go @@ -1,11 +1,12 @@ package sam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sam" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/liamg/iamgo" ) func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { diff --git a/internal/adapters/cloudformation/aws/sns/sns.go b/internal/adapters/cloudformation/aws/sns/sns.go index fc89a1f1ccb6..a0461e98b20b 100644 --- a/internal/adapters/cloudformation/aws/sns/sns.go +++ b/internal/adapters/cloudformation/aws/sns/sns.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts sns resources func Adapt(cfFile parser.FileContext) sns.SNS { return sns.SNS{ Topics: getTopics(cfFile), diff --git a/internal/adapters/cloudformation/aws/sqs/queue.go b/internal/adapters/cloudformation/aws/sqs/queue.go index 45e241153d1b..bbfeb02a08b2 100644 --- a/internal/adapters/cloudformation/aws/sqs/queue.go +++ b/internal/adapters/cloudformation/aws/sqs/queue.go @@ -3,14 +3,12 @@ package sqs import ( "fmt" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" - - "github.com/liamg/iamgo" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { diff --git a/internal/adapters/cloudformation/aws/sqs/sqs.go b/internal/adapters/cloudformation/aws/sqs/sqs.go index 1a1b3f94931d..8c0648126fec 100644 --- a/internal/adapters/cloudformation/aws/sqs/sqs.go +++ b/internal/adapters/cloudformation/aws/sqs/sqs.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts sqs resources func Adapt(cfFile parser.FileContext) sqs.SQS { return sqs.SQS{ Queues: getQueues(cfFile), diff --git a/internal/adapters/cloudformation/aws/ssm/ssm.go b/internal/adapters/cloudformation/aws/ssm/ssm.go index e85bcfeabbaf..c9d9eddd2d26 100644 --- a/internal/adapters/cloudformation/aws/ssm/ssm.go +++ b/internal/adapters/cloudformation/aws/ssm/ssm.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt adapts ssm resources func Adapt(cfFile parser.FileContext) ssm.SSM { return ssm.SSM{ Secrets: getSecrets(cfFile), diff --git a/internal/adapters/cloudformation/aws/workspaces/workspaces.go b/internal/adapters/cloudformation/aws/workspaces/workspaces.go index 6b563257d056..cca60d15e289 100644 --- a/internal/adapters/cloudformation/aws/workspaces/workspaces.go +++ b/internal/adapters/cloudformation/aws/workspaces/workspaces.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt adapts workspaces resources func Adapt(cfFile parser.FileContext) workspaces.WorkSpaces { return workspaces.WorkSpaces{ WorkSpaces: getWorkSpaces(cfFile), diff --git a/internal/adapters/terraform/aws/ec2/autoscaling.go b/internal/adapters/terraform/aws/ec2/autoscaling.go index 9298897f4e34..c291ce87abaa 100644 --- a/internal/adapters/terraform/aws/ec2/autoscaling.go +++ b/internal/adapters/terraform/aws/ec2/autoscaling.go @@ -3,11 +3,9 @@ package ec2 import ( "encoding/base64" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/aws/ec2" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func adaptLaunchTemplates(modules terraform.Modules) (templates []ec2.LaunchTemplate) { diff --git a/internal/adapters/terraform/aws/ecr/adapt.go b/internal/adapters/terraform/aws/ecr/adapt.go index 83741a7e1e33..0aca6c6da7cb 100644 --- a/internal/adapters/terraform/aws/ecr/adapt.go +++ b/internal/adapters/terraform/aws/ecr/adapt.go @@ -1,12 +1,13 @@ package ecr import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/ecr" iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" 
"github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" - "github.com/liamg/iamgo" ) func Adapt(modules terraform.Modules) ecr.ECR { diff --git a/internal/adapters/terraform/aws/iam/convert.go b/internal/adapters/terraform/aws/iam/convert.go index 66464b7b1b7c..6acff0a521e4 100644 --- a/internal/adapters/terraform/aws/iam/convert.go +++ b/internal/adapters/terraform/aws/iam/convert.go @@ -3,13 +3,11 @@ package iam import ( "strings" - "github.com/aquasecurity/defsec/pkg/scan" - - "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/liamg/iamgo" "github.com/aquasecurity/defsec/pkg/providers/aws/iam" - - "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/terraform" ) type wrappedDocument struct { diff --git a/internal/adapters/terraform/aws/iam/passwords.go b/internal/adapters/terraform/aws/iam/passwords.go index 73a09700adb0..10db50f1e94b 100644 --- a/internal/adapters/terraform/aws/iam/passwords.go +++ b/internal/adapters/terraform/aws/iam/passwords.go @@ -3,11 +3,9 @@ package iam import ( "math" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/aws/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func adaptPasswordPolicy(modules terraform.Modules) iam.PasswordPolicy { diff --git a/internal/adapters/terraform/aws/iam/policies.go b/internal/adapters/terraform/aws/iam/policies.go index e44addeeaaea..de852ef6e81c 100644 --- a/internal/adapters/terraform/aws/iam/policies.go +++ b/internal/adapters/terraform/aws/iam/policies.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/liamg/iamgo" + "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - 
"github.com/liamg/iamgo" ) func parsePolicy(policyBlock *terraform.Block, modules terraform.Modules) (iam.Policy, error) { diff --git a/internal/adapters/terraform/aws/sqs/adapt.go b/internal/adapters/terraform/aws/sqs/adapt.go index 04bca31e7101..5fa3520a7114 100644 --- a/internal/adapters/terraform/aws/sqs/adapt.go +++ b/internal/adapters/terraform/aws/sqs/adapt.go @@ -1,14 +1,14 @@ package sqs import ( + "github.com/google/uuid" + "github.com/liamg/iamgo" + iamp "github.com/aquasecurity/defsec/pkg/providers/aws/iam" "github.com/aquasecurity/defsec/pkg/providers/aws/sqs" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/internal/adapters/terraform/aws/iam" - "github.com/liamg/iamgo" - - "github.com/google/uuid" ) func Adapt(modules terraform.Modules) sqs.SQS { @@ -49,7 +49,7 @@ func (a *adapter) adaptQueues() []sqs.Queue { } policy.Document.Parsed = *parsed policy.Document.Metadata = attr.GetMetadata() - } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { + } else if dataBlock.Type() == "data" && dataBlock.TypeLabel() == "aws_iam_policy_document" { //nolint:goconst if doc, err := iam.ConvertTerraformDocument(a.modules, dataBlock); err == nil { policy.Document.Parsed = doc.Document policy.Document.Metadata = doc.Source.GetMetadata() @@ -57,7 +57,7 @@ func (a *adapter) adaptQueues() []sqs.Queue { } } } else if refBlock, err := a.modules.GetReferencedBlock(attr, policyBlock); err == nil { - if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { + if refBlock.Type() == "data" && refBlock.TypeLabel() == "aws_iam_policy_document" { //nolint:goconst if doc, err := iam.ConvertTerraformDocument(a.modules, refBlock); err == nil { policy.Document.Parsed = doc.Document policy.Document.Metadata = doc.Source.GetMetadata() diff --git a/internal/adapters/terraform/azure/compute/adapt.go 
b/internal/adapters/terraform/azure/compute/adapt.go index 87b151d99848..b5eb379147ac 100644 --- a/internal/adapters/terraform/azure/compute/adapt.go +++ b/internal/adapters/terraform/azure/compute/adapt.go @@ -3,13 +3,13 @@ package compute import ( "encoding/base64" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/azure/compute" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) +const AzureVirtualMachine = "azurerm_virtual_machine" + func Adapt(modules terraform.Modules) compute.Compute { return adaptCompute(modules) } @@ -28,7 +28,7 @@ func adaptCompute(modules terraform.Modules) compute.Compute { for _, resource := range module.GetResourcesByType("azurerm_windows_virtual_machine") { windowsVirtualMachines = append(windowsVirtualMachines, adaptWindowsVM(resource)) } - for _, resource := range module.GetResourcesByType("azurerm_virtual_machine") { + for _, resource := range module.GetResourcesByType(AzureVirtualMachine) { if resource.HasChild("os_profile_linux_config") { linuxVirtualMachines = append(linuxVirtualMachines, adaptLinuxVM(resource)) } else if resource.HasChild("os_profile_windows_config") { @@ -71,7 +71,7 @@ func adaptManagedDisk(resource *terraform.Block) compute.ManagedDisk { func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { workingBlock := resource - if resource.TypeLabel() == "azurerm_virtual_machine" { + if resource.TypeLabel() == AzureVirtualMachine { if b := resource.GetBlock("os_profile"); b.IsNotNil() { workingBlock = b } @@ -86,7 +86,7 @@ func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { customDataVal = defsecTypes.String(string(encoded), customDataAttr.GetMetadata()) } - if resource.TypeLabel() == "azurerm_virtual_machine" { + if resource.TypeLabel() == AzureVirtualMachine { workingBlock = 
resource.GetBlock("os_profile_linux_config") } disablePasswordAuthAttr := workingBlock.GetAttribute("disable_password_authentication") @@ -108,7 +108,7 @@ func adaptLinuxVM(resource *terraform.Block) compute.LinuxVirtualMachine { func adaptWindowsVM(resource *terraform.Block) compute.WindowsVirtualMachine { workingBlock := resource - if resource.TypeLabel() == "azurerm_virtual_machine" { + if resource.TypeLabel() == AzureVirtualMachine { if b := resource.GetBlock("os_profile"); b.IsNotNil() { workingBlock = b } diff --git a/internal/adapters/terraform/azure/keyvault/adapt.go b/internal/adapters/terraform/azure/keyvault/adapt.go index c78d39115bff..2e7a0f18a63a 100644 --- a/internal/adapters/terraform/azure/keyvault/adapt.go +++ b/internal/adapters/terraform/azure/keyvault/adapt.go @@ -3,11 +3,9 @@ package keyvault import ( "time" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/azure/keyvault" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) keyvault.KeyVault { diff --git a/internal/adapters/terraform/azure/network/adapt.go b/internal/adapters/terraform/azure/network/adapt.go index 899c0fe767d5..60d117fde8aa 100644 --- a/internal/adapters/terraform/azure/network/adapt.go +++ b/internal/adapters/terraform/azure/network/adapt.go @@ -4,13 +4,11 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/google/uuid" "github.com/aquasecurity/defsec/pkg/providers/azure/network" - - "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) network.Network { diff --git a/internal/adapters/terraform/azure/storage/adapt.go 
b/internal/adapters/terraform/azure/storage/adapt.go index 4519460b5eb2..ce150103bdb8 100644 --- a/internal/adapters/terraform/azure/storage/adapt.go +++ b/internal/adapters/terraform/azure/storage/adapt.go @@ -1,10 +1,9 @@ package storage import ( - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/azure/storage" "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) storage.Storage { diff --git a/internal/adapters/terraform/cloudstack/compute/adapt.go b/internal/adapters/terraform/cloudstack/compute/adapt.go index 7104f74e2846..06ce13be5195 100644 --- a/internal/adapters/terraform/cloudstack/compute/adapt.go +++ b/internal/adapters/terraform/cloudstack/compute/adapt.go @@ -3,11 +3,9 @@ package compute import ( "encoding/base64" - "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/cloudstack/compute" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) compute.Compute { diff --git a/internal/adapters/terraform/digitalocean/compute/adapt.go b/internal/adapters/terraform/digitalocean/compute/adapt.go index a66e28083e0f..a2996acf29c7 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt.go +++ b/internal/adapters/terraform/digitalocean/compute/adapt.go @@ -41,7 +41,7 @@ func adaptFirewalls(module terraform.Modules) []compute.Firewall { inboundRules := block.GetBlocks("inbound_rule") outboundRules := block.GetBlocks("outbound_rule") - inboundFirewallRules := []compute.InboundFirewallRule{} + var inboundFirewallRules []compute.InboundFirewallRule for _, inBoundRule := range inboundRules { inboundFirewallRule := compute.InboundFirewallRule{ Metadata: inBoundRule.GetMetadata(), @@ -52,7 +52,7 @@ func adaptFirewalls(module 
terraform.Modules) []compute.Firewall { inboundFirewallRules = append(inboundFirewallRules, inboundFirewallRule) } - outboundFirewallRules := []compute.OutboundFirewallRule{} + var outboundFirewallRules []compute.OutboundFirewallRule for _, outBoundRule := range outboundRules { outboundFirewallRule := compute.OutboundFirewallRule{ Metadata: outBoundRule.GetMetadata(), diff --git a/internal/adapters/terraform/digitalocean/spaces/adapt.go b/internal/adapters/terraform/digitalocean/spaces/adapt.go index 567768d3f764..93214777a41a 100644 --- a/internal/adapters/terraform/digitalocean/spaces/adapt.go +++ b/internal/adapters/terraform/digitalocean/spaces/adapt.go @@ -1,10 +1,11 @@ package spaces import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/digitalocean/spaces" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) func Adapt(modules terraform.Modules) spaces.Spaces { diff --git a/internal/adapters/terraform/google/compute/instances.go b/internal/adapters/terraform/google/compute/instances.go index 0ecf120d4f06..2fb8177b6a1c 100644 --- a/internal/adapters/terraform/google/compute/instances.go +++ b/internal/adapters/terraform/google/compute/instances.go @@ -1,10 +1,11 @@ package compute import ( + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/zclconf/go-cty/cty" ) func adaptInstances(modules terraform.Modules) (instances []compute.Instance) { diff --git a/internal/adapters/terraform/google/compute/metadata.go b/internal/adapters/terraform/google/compute/metadata.go index c69947d3cda3..2cc58839a367 100644 --- a/internal/adapters/terraform/google/compute/metadata.go +++ b/internal/adapters/terraform/google/compute/metadata.go @@ -1,10 +1,11 @@ package compute import ( 
+ "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/zclconf/go-cty/cty" ) func adaptProjectMetadata(modules terraform.Modules) compute.ProjectMetadata { diff --git a/internal/adapters/terraform/google/compute/networks.go b/internal/adapters/terraform/google/compute/networks.go index ef59c360eef8..978b2f49a0bf 100644 --- a/internal/adapters/terraform/google/compute/networks.go +++ b/internal/adapters/terraform/google/compute/networks.go @@ -4,10 +4,9 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/providers/google/compute" "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) const ( diff --git a/internal/adapters/terraform/google/gke/adapt.go b/internal/adapters/terraform/google/gke/adapt.go index 6703671f4257..4d281c90bad9 100644 --- a/internal/adapters/terraform/google/gke/adapt.go +++ b/internal/adapters/terraform/google/gke/adapt.go @@ -1,11 +1,12 @@ package gke import ( + "github.com/google/uuid" + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/providers/google/gke" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" - "github.com/zclconf/go-cty/cty" ) func Adapt(modules terraform.Modules) gke.GKE { diff --git a/internal/adapters/terraform/google/iam/adapt.go b/internal/adapters/terraform/google/iam/adapt.go index 45d082af945b..b28650342319 100644 --- a/internal/adapters/terraform/google/iam/adapt.go +++ b/internal/adapters/terraform/google/iam/adapt.go @@ -1,10 +1,11 @@ package iam import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/google/iam" 
"github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) func Adapt(modules terraform.Modules) iam.IAM { diff --git a/internal/adapters/terraform/google/iam/folder_iam.go b/internal/adapters/terraform/google/iam/folder_iam.go index 51b09f185ba8..706729852282 100644 --- a/internal/adapters/terraform/google/iam/folder_iam.go +++ b/internal/adapters/terraform/google/iam/folder_iam.go @@ -12,12 +12,14 @@ func (a *adapter) adaptFolderIAM() { a.adaptFolderBindings() } +const GoogleFolder = "google_folder" + func (a *adapter) adaptFolderMembers() { for _, iamBlock := range a.modules.GetResourcesByType("google_folder_iam_member") { member := a.adaptMember(iamBlock) folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { @@ -59,7 +61,7 @@ func (a *adapter) adaptFolderBindings() { folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { @@ -89,7 +91,7 @@ func (a *adapter) adaptFolderBindings() { binding := a.adaptBinding(iamBlock) folderAttr := iamBlock.GetAttribute("folder") if refBlock, err := a.modules.GetReferencedBlock(folderAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_folder" { + if refBlock.TypeLabel() == GoogleFolder { var foundFolder bool for i, folder := range a.folders { if folder.blockID == refBlock.ID() { diff --git a/internal/adapters/terraform/google/iam/folders.go b/internal/adapters/terraform/google/iam/folders.go index 6e8de9641c6f..0b649458677d 100644 --- 
a/internal/adapters/terraform/google/iam/folders.go +++ b/internal/adapters/terraform/google/iam/folders.go @@ -4,6 +4,8 @@ import ( "github.com/aquasecurity/defsec/pkg/providers/google/iam" ) +const GoogleOrganization = "google_organization" + type parentedFolder struct { blockID string parentBlockID string @@ -12,7 +14,7 @@ type parentedFolder struct { } func (a *adapter) adaptFolders() { - for _, folderBlock := range a.modules.GetResourcesByType("google_folder") { + for _, folderBlock := range a.modules.GetResourcesByType(GoogleFolder) { var folder parentedFolder parentAttr := folderBlock.GetAttribute("parent") if parentAttr.IsNil() { @@ -26,10 +28,10 @@ func (a *adapter) adaptFolders() { } if referencedBlock, err := a.modules.GetReferencedBlock(parentAttr, folderBlock); err == nil { - if referencedBlock.TypeLabel() == "google_folder" { + if referencedBlock.TypeLabel() == GoogleFolder { folder.parentBlockID = referencedBlock.ID() } - if referencedBlock.TypeLabel() == "google_organization" { + if referencedBlock.TypeLabel() == GoogleOrganization { folder.parentBlockID = referencedBlock.ID() a.addOrg(folder.parentBlockID) } diff --git a/internal/adapters/terraform/google/iam/org_iam.go b/internal/adapters/terraform/google/iam/org_iam.go index bf56dabd3866..c2f97a01905e 100644 --- a/internal/adapters/terraform/google/iam/org_iam.go +++ b/internal/adapters/terraform/google/iam/org_iam.go @@ -1,9 +1,10 @@ package iam import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/google/iam" "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) // see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_organization_iam @@ -22,7 +23,7 @@ func (a *adapter) adaptOrganizationMembers() { } if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { 
a.addOrg(refBlock.ID()) org, ok := a.orgs[refBlock.ID()] if !ok { @@ -67,7 +68,7 @@ func (a *adapter) adaptOrganizationBindings() { orgAttr := iamBlock.GetAttribute("organization") if refBlock, err := a.modules.GetReferencedBlock(orgAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { if org, ok := a.orgs[refBlock.ID()]; ok { org.Bindings = append(org.Bindings, bindings...) a.orgs[refBlock.ID()] = org @@ -93,7 +94,7 @@ func (a *adapter) adaptOrganizationBindings() { } if refBlock, err := a.modules.GetReferencedBlock(organizationAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_organization" { + if refBlock.TypeLabel() == GoogleOrganization { a.addOrg(refBlock.ID()) org := a.orgs[refBlock.ID()] org.Bindings = append(org.Bindings, binding) diff --git a/internal/adapters/terraform/google/iam/project_iam.go b/internal/adapters/terraform/google/iam/project_iam.go index bac596af7569..bc2941904aa2 100644 --- a/internal/adapters/terraform/google/iam/project_iam.go +++ b/internal/adapters/terraform/google/iam/project_iam.go @@ -3,15 +3,15 @@ package iam import ( "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/iam" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) // see https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_project_iam +const GoogleProject = "google_project" + func (a *adapter) adaptProjectIAM() { a.adaptProjectMembers() a.adaptProjectBindings() @@ -77,7 +77,7 @@ func (a *adapter) adaptProjectMembers() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects 
{ if project.blockID == refBlock.ID() { @@ -189,7 +189,7 @@ func (a *adapter) adaptProjectDataBindings() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects { if project.blockID == refBlock.ID() { @@ -244,7 +244,7 @@ func (a *adapter) adaptProjectBindings() { } if refBlock, err := a.modules.GetReferencedBlock(projectAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_project" { + if refBlock.TypeLabel() == GoogleProject { var foundProject bool for i, project := range a.projects { if project.blockID == refBlock.ID() { diff --git a/internal/adapters/terraform/google/iam/projects.go b/internal/adapters/terraform/google/iam/projects.go index e064dc8d0bd8..55d435b66baf 100644 --- a/internal/adapters/terraform/google/iam/projects.go +++ b/internal/adapters/terraform/google/iam/projects.go @@ -15,7 +15,7 @@ type parentedProject struct { } func (a *adapter) adaptProjects() { - for _, projectBlock := range a.modules.GetResourcesByType("google_project") { + for _, projectBlock := range a.modules.GetResourcesByType(GoogleProject) { var project parentedProject project.project.Metadata = projectBlock.GetMetadata() idAttr := projectBlock.GetAttribute("project_id") @@ -40,7 +40,7 @@ func (a *adapter) adaptProjects() { if orgAttr.IsNotNil() { if referencedBlock, err := a.modules.GetReferencedBlock(orgAttr, projectBlock); err == nil { - if referencedBlock.TypeLabel() == "google_organization" { + if referencedBlock.TypeLabel() == GoogleOrganization { project.orgBlockID = referencedBlock.ID() a.addOrg(project.orgBlockID) } diff --git a/internal/adapters/terraform/google/kms/adapt.go b/internal/adapters/terraform/google/kms/adapt.go index 1b76b7d8b501..10d46ecf1a48 100644 --- a/internal/adapters/terraform/google/kms/adapt.go +++ b/internal/adapters/terraform/google/kms/adapt.go @@ 
-3,11 +3,9 @@ package kms import ( "strconv" - "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/kms" + "github.com/aquasecurity/defsec/pkg/terraform" + "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) kms.KMS { diff --git a/internal/adapters/terraform/google/sql/adapt.go b/internal/adapters/terraform/google/sql/adapt.go index b6cb39f39c3c..7f00b717c266 100644 --- a/internal/adapters/terraform/google/sql/adapt.go +++ b/internal/adapters/terraform/google/sql/adapt.go @@ -3,11 +3,9 @@ package sql import ( "strconv" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/terraform" - "github.com/aquasecurity/defsec/pkg/providers/google/sql" + "github.com/aquasecurity/defsec/pkg/terraform" + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) func Adapt(modules terraform.Modules) sql.SQL { diff --git a/internal/adapters/terraform/google/storage/adapt.go b/internal/adapters/terraform/google/storage/adapt.go index 36aff1b0e2a9..a927a0f6cd78 100644 --- a/internal/adapters/terraform/google/storage/adapt.go +++ b/internal/adapters/terraform/google/storage/adapt.go @@ -30,7 +30,7 @@ func (a *adapter) adaptBuckets() []storage.Bucket { var buckets []storage.Bucket for _, module := range a.modules { - for _, resource := range module.GetResourcesByType("google_storage_bucket") { + for _, resource := range module.GetResourcesByType(GoogleStorageBucket) { buckets = append(buckets, a.adaptBucketResource(resource)) } } diff --git a/internal/adapters/terraform/google/storage/iam.go b/internal/adapters/terraform/google/storage/iam.go index 24b88a657f94..fde9704f62b4 100644 --- a/internal/adapters/terraform/google/storage/iam.go +++ b/internal/adapters/terraform/google/storage/iam.go @@ -5,6 +5,8 @@ import ( "github.com/aquasecurity/trivy/internal/adapters/terraform/google/iam" 
) +const GoogleStorageBucket = "google_storage_bucket" + type parentedBinding struct { blockID string bucketID string @@ -31,7 +33,7 @@ func (a *adapter) adaptBindings() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } @@ -62,7 +64,7 @@ func (a *adapter) adaptBindings() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } @@ -85,7 +87,7 @@ func (a *adapter) adaptMembers() { } if refBlock, err := a.modules.GetReferencedBlock(bucketAttr, iamBlock); err == nil { - if refBlock.TypeLabel() == "google_storage_bucket" { + if refBlock.TypeLabel() == GoogleStorageBucket { parented.bucketBlockID = refBlock.ID() } } diff --git a/internal/adapters/terraform/openstack/networking.go b/internal/adapters/terraform/openstack/networking.go index c77029b4fb9f..dd56a82b2d1d 100644 --- a/internal/adapters/terraform/openstack/networking.go +++ b/internal/adapters/terraform/openstack/networking.go @@ -1,10 +1,11 @@ package openstack import ( + "github.com/google/uuid" + "github.com/aquasecurity/defsec/pkg/providers/openstack" "github.com/aquasecurity/defsec/pkg/terraform" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/google/uuid" ) func adaptNetworking(modules terraform.Modules) openstack.Networking { diff --git a/internal/adapters/terraform/tftestutil/testutil.go b/internal/adapters/terraform/tftestutil/testutil.go index 52331b1f01a9..387ba8e2fe0c 100644 --- a/internal/adapters/terraform/tftestutil/testutil.go +++ b/internal/adapters/terraform/tftestutil/testutil.go @@ -6,7 +6,6 @@ import ( "github.com/aquasecurity/defsec/pkg/terraform" 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/test/testutil" ) diff --git a/internal/rules/register.go b/internal/rules/register.go old mode 100755 new mode 100644 index 5b76118c623c..adb74386a145 --- a/internal/rules/register.go +++ b/internal/rules/register.go @@ -9,7 +9,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" dftypes "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy-policies/rules/specs" - "github.com/aquasecurity/trivy/pkg/iac/types" ) diff --git a/pkg/iac/detection/detect.go b/pkg/iac/detection/detect.go index fcf615a6c54d..9050a735172a 100644 --- a/pkg/iac/detection/detect.go +++ b/pkg/iac/detection/detect.go @@ -29,7 +29,7 @@ const ( FileTypeAzureARM FileType = "azure-arm" ) -var matchers = map[FileType]func(name string, r io.ReadSeeker) bool{} +var matchers = make(map[FileType]func(name string, r io.ReadSeeker) bool) // nolint func init() { diff --git a/pkg/iac/rego/build.go b/pkg/iac/rego/build.go index 4d51b9a2d164..3123ad673190 100644 --- a/pkg/iac/rego/build.go +++ b/pkg/iac/rego/build.go @@ -5,15 +5,16 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/util" + + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" ) func BuildSchemaSetFromPolicies(policies map[string]*ast.Module, paths []string, fsys fs.FS) (*ast.SchemaSet, bool, error) { schemaSet := ast.NewSchemaSet() - schemaSet.Put(ast.MustParseRef("schema.input"), map[string]interface{}{}) // for backwards compat only + schemaSet.Put(ast.MustParseRef("schema.input"), make(map[string]interface{})) // for backwards compat only var customFound bool for _, policy := range policies { for _, annotation := range policy.Annotations { diff --git a/pkg/iac/rego/embed.go 
b/pkg/iac/rego/embed.go index 6ab9c4f2ac09..4b05c615a19b 100644 --- a/pkg/iac/rego/embed.go +++ b/pkg/iac/rego/embed.go @@ -6,9 +6,10 @@ import ( "path/filepath" "strings" + "github.com/open-policy-agent/opa/ast" + rules2 "github.com/aquasecurity/trivy-policies/rules" "github.com/aquasecurity/trivy/pkg/iac/rules" - "github.com/open-policy-agent/opa/ast" ) func init() { diff --git a/pkg/iac/rego/load.go b/pkg/iac/rego/load.go index 909510e8f505..aeef80144472 100644 --- a/pkg/iac/rego/load.go +++ b/pkg/iac/rego/load.go @@ -147,7 +147,7 @@ func (s *Scanner) compilePolicies(srcFS fs.FS, paths []string) error { return err } if custom { - s.inputSchema = nil // discard auto detected input schema in favour of policy defined schema + s.inputSchema = nil // discard auto detected input schema in favor of policy defined schema } compiler := ast.NewCompiler(). diff --git a/pkg/iac/rego/metadata.go b/pkg/iac/rego/metadata.go index ee6b5d1d2dce..ee5f5eb35d44 100644 --- a/pkg/iac/rego/metadata.go +++ b/pkg/iac/rego/metadata.go @@ -5,14 +5,15 @@ import ( "fmt" "strings" + "github.com/mitchellh/mapstructure" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/rego" + "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/providers" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/severity" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/mitchellh/mapstructure" - "github.com/open-policy-agent/opa/ast" - "github.com/open-policy-agent/opa/rego" ) type StaticMetadata struct { @@ -291,7 +292,7 @@ func (m *MetadataRetriever) RetrieveMetadata(ctx context.Context, module *ast.Mo return metadata, nil } -// nolint: cyclop +// nolint:cyclop func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.Module) InputOptions { options := InputOptions{ diff --git a/pkg/iac/rego/result.go b/pkg/iac/rego/result.go index 
94319eee4887..a2f56e3c2801 100644 --- a/pkg/iac/rego/result.go +++ b/pkg/iac/rego/result.go @@ -5,9 +5,10 @@ import ( "io/fs" "strconv" + "github.com/open-policy-agent/opa/rego" + "github.com/aquasecurity/defsec/pkg/scan" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/open-policy-agent/opa/rego" ) type regoResult struct { diff --git a/pkg/iac/rego/scanner.go b/pkg/iac/rego/scanner.go index c88c48df3d95..05c2d8c23f5f 100644 --- a/pkg/iac/rego/scanner.go +++ b/pkg/iac/rego/scanner.go @@ -9,16 +9,16 @@ import ( "io/fs" "strings" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/framework" - "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/rego" "github.com/open-policy-agent/opa/storage" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/framework" + "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" ) var _ options.ConfigurableScanner = (*Scanner)(nil) diff --git a/pkg/iac/rego/schemas/builder.go b/pkg/iac/rego/schemas/builder.go index 11b37d11304f..beeb72f8b2e9 100644 --- a/pkg/iac/rego/schemas/builder.go +++ b/pkg/iac/rego/schemas/builder.go @@ -65,12 +65,12 @@ func (b *builder) fromInput(inputValue reflect.Value) error { func refName(name string, parent, t reflect.Type) string { if t.Name() == "" { // inline struct - return sanitise(parent.PkgPath() + "." + parent.Name() + "." + name) + return sanitize(parent.PkgPath() + "." + parent.Name() + "." + name) } - return sanitise(t.PkgPath() + "." + t.Name()) + return sanitize(t.PkgPath() + "." 
+ t.Name()) } -func sanitise(s string) string { +func sanitize(s string) string { return strings.ReplaceAll(s, "/", ".") } @@ -169,12 +169,12 @@ var converterInterface = reflect.TypeOf((*convert.Converter)(nil)).Elem() func (b *builder) readStruct(name string, parent, inputType reflect.Type, indent int) (*Property, error) { if b.schema.Defs == nil { - b.schema.Defs = map[string]*Property{} + b.schema.Defs = make(map[string]*Property) } def := &Property{ Type: "object", - Properties: map[string]Property{}, + Properties: make(map[string]Property), } if parent != nil { @@ -239,7 +239,7 @@ func (b *builder) readRego(def *Property, name string, parent reflect.Type, typ def.Type = "object" for k, v := range cast { child := &Property{ - Properties: map[string]Property{}, + Properties: make(map[string]Property), } if err := b.readRego(child, k, reflect.TypeOf(raw), reflect.TypeOf(v), v, indent+1); err != nil { return err @@ -250,7 +250,7 @@ func (b *builder) readRego(def *Property, name string, parent reflect.Type, typ def.Type = "object" for k, v := range cast { child := &Property{ - Properties: map[string]Property{}, + Properties: make(map[string]Property), } if err := b.readRego(child, k, reflect.TypeOf(raw), reflect.TypeOf(v), v, indent+1); err != nil { return err diff --git a/pkg/iac/rego/store.go b/pkg/iac/rego/store.go index 127b1d8dd647..c75818d402e7 100644 --- a/pkg/iac/rego/store.go +++ b/pkg/iac/rego/store.go @@ -11,7 +11,7 @@ import ( "github.com/open-policy-agent/opa/storage" ) -// initialise a store populated with OPA data files found in dataPaths +// initialize a store populated with OPA data files found in dataPaths func initStore(dataFS fs.FS, dataPaths, namespaces []string) (storage.Store, error) { // FilteredPaths will recursively find all file paths that contain a valid document // extension from the given list of data paths. 
diff --git a/pkg/iac/rules/register.go b/pkg/iac/rules/register.go index c4e4f27aeba0..125f8a8b995a 100644 --- a/pkg/iac/rules/register.go +++ b/pkg/iac/rules/register.go @@ -3,7 +3,6 @@ package rules import ( "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/trivy/internal/rules" "github.com/aquasecurity/trivy/pkg/iac/types" ) diff --git a/pkg/iac/rules/rules.go b/pkg/iac/rules/rules.go index 3ea9a161aca1..7d6d8222bb77 100644 --- a/pkg/iac/rules/rules.go +++ b/pkg/iac/rules/rules.go @@ -2,6 +2,7 @@ package rules import ( trules "github.com/aquasecurity/trivy-policies/pkg/rules" + _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/accessanalyzer" _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/apigateway" _ "github.com/aquasecurity/trivy-policies/rules/cloud/policies/aws/athena" diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go index 516029b55deb..57b611065242 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go @@ -105,7 +105,7 @@ func (n *node) decodeObjectToStruct(v reflect.Value) error { subject := v.Field(i) - // if fields are nil pointers, initialise them with values of the correct type + // if fields are nil pointers, initialize them with values of the correct type if subject.Kind() == reflect.Ptr { if subject.IsNil() { subject.Set(reflect.New(subject.Type().Elem())) diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go index f87ffc439ac9..f3895df90ffb 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go @@ -32,7 +32,7 @@ func (p *parser) parseObject(parentMetadata *types.Metadata) (Node, error) { } -// nolint: 
cyclop +// nolint:cyclop func (p *parser) iterateObject(nextComments []Node, metadata *types.Metadata, n *node) (Node, error) { for { diff --git a/pkg/iac/scanners/azure/arm/parser/parser.go b/pkg/iac/scanners/azure/arm/parser/parser.go index a4c14cacb8d3..df0535eba389 100644 --- a/pkg/iac/scanners/azure/arm/parser/parser.go +++ b/pkg/iac/scanners/azure/arm/parser/parser.go @@ -9,9 +9,8 @@ import ( "strings" "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/resolver" diff --git a/pkg/iac/scanners/azure/arm/parser/template.go b/pkg/iac/scanners/azure/arm/parser/template.go index 955431bd9dd2..57e82d29da7d 100644 --- a/pkg/iac/scanners/azure/arm/parser/template.go +++ b/pkg/iac/scanners/azure/arm/parser/template.go @@ -70,8 +70,7 @@ func (v *Resource) UnmarshalJSONWithMetadata(node armjson.Node) error { if err := comment.Decode(&str); err != nil { return err } - // TODO - // v.Metadata.Comments = append(v.Metadata.Comments, str) + // v.Metadata.Comments = append(v.Metadata.Comments, str) // TODO look into implementing/removing } return nil diff --git a/pkg/iac/scanners/azure/arm/scanner.go b/pkg/iac/scanners/azure/arm/scanner.go index 8f4a26ef7cbe..7a66b1bb7ed5 100644 --- a/pkg/iac/scanners/azure/arm/scanner.go +++ b/pkg/iac/scanners/azure/arm/scanner.go @@ -3,7 +3,6 @@ package arm import ( "context" "fmt" - "io" "io/fs" "sync" @@ -11,10 +10,9 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" + "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/state" 
"github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/internal/adapters/arm" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" diff --git a/pkg/iac/scanners/azure/functions/copy_index.go b/pkg/iac/scanners/azure/functions/copy_index.go index aee090e79466..d1289cc0a20d 100644 --- a/pkg/iac/scanners/azure/functions/copy_index.go +++ b/pkg/iac/scanners/azure/functions/copy_index.go @@ -1,6 +1,6 @@ package functions -var loopCounter = map[string]int{} +var loopCounter = make(map[string]int) func CopyIndex(args ...interface{}) interface{} { loopName := "default" diff --git a/pkg/iac/scanners/azure/functions/create_object.go b/pkg/iac/scanners/azure/functions/create_object.go index b9fe2d29f6d2..30dc239847f8 100644 --- a/pkg/iac/scanners/azure/functions/create_object.go +++ b/pkg/iac/scanners/azure/functions/create_object.go @@ -1,7 +1,7 @@ package functions func CreateObject(args ...interface{}) interface{} { - obj := map[string]interface{}{} + obj := make(map[string]interface{}) if len(args) == 0 { return obj } diff --git a/pkg/iac/scanners/azure/functions/intersection.go b/pkg/iac/scanners/azure/functions/intersection.go index 5eace2fe0bc7..d137a7c2aec8 100644 --- a/pkg/iac/scanners/azure/functions/intersection.go +++ b/pkg/iac/scanners/azure/functions/intersection.go @@ -19,7 +19,7 @@ func Intersection(args ...interface{}) interface{} { } func intersectionArray(args ...interface{}) interface{} { - result := []interface{}{} + var result []interface{} hash := make(map[interface{}]bool) for _, arg := range args[0].([]interface{}) { diff --git a/pkg/iac/scanners/azure/functions/max.go b/pkg/iac/scanners/azure/functions/max.go index 6cbfd5ba25bc..eb0338a4f894 100644 --- a/pkg/iac/scanners/azure/functions/max.go +++ b/pkg/iac/scanners/azure/functions/max.go @@ -9,8 +9,7 @@ func Max(args ...interface{}) interface{} { } return maxInt(ints) 
case interface{}: - switch iType := args[0].(type) { - case []int: + if iType, ok := args[0].([]int); ok { return maxInt(iType) } } diff --git a/pkg/iac/scanners/azure/functions/min.go b/pkg/iac/scanners/azure/functions/min.go index 35900e26d33d..5147c3bb2769 100644 --- a/pkg/iac/scanners/azure/functions/min.go +++ b/pkg/iac/scanners/azure/functions/min.go @@ -9,8 +9,7 @@ func Min(args ...interface{}) interface{} { } return minInt(ints) case interface{}: - switch iType := args[0].(type) { - case []int: + if iType, ok := args[0].([]int); ok { return minInt(iType) } } diff --git a/pkg/iac/scanners/azure/functions/replace.go b/pkg/iac/scanners/azure/functions/replace.go index 00a7a8a4560f..1af527ea49a8 100644 --- a/pkg/iac/scanners/azure/functions/replace.go +++ b/pkg/iac/scanners/azure/functions/replace.go @@ -12,15 +12,15 @@ func Replace(args ...interface{}) interface{} { return "" } - old, ok := args[1].(string) + o, ok := args[1].(string) if !ok { return "" } - new, ok := args[2].(string) + n, ok := args[2].(string) if !ok { return "" } - return strings.ReplaceAll(input, old, new) + return strings.ReplaceAll(input, o, n) } diff --git a/pkg/iac/scanners/azure/functions/split.go b/pkg/iac/scanners/azure/functions/split.go index 04b7f5779d33..75ce46287427 100644 --- a/pkg/iac/scanners/azure/functions/split.go +++ b/pkg/iac/scanners/azure/functions/split.go @@ -16,8 +16,7 @@ func Split(args ...interface{}) interface{} { case string: return strings.Split(input, separator) case interface{}: - switch separator := separator.(type) { - case []string: + if separator, ok := separator.([]string); ok { m := make(map[rune]int) for _, r := range separator { r := rune(r[0]) @@ -30,7 +29,6 @@ func Split(args ...interface{}) interface{} { return strings.FieldsFunc(input, splitter) } - } return []string{} } diff --git a/pkg/iac/scanners/azure/functions/union.go b/pkg/iac/scanners/azure/functions/union.go index 07bb98f28eeb..37bccbbcd262 100644 --- 
a/pkg/iac/scanners/azure/functions/union.go +++ b/pkg/iac/scanners/azure/functions/union.go @@ -25,8 +25,7 @@ func unionMap(args ...interface{}) interface{} { result := make(map[string]interface{}) for _, arg := range args { - switch iType := arg.(type) { - case map[string]interface{}: + if iType, ok := arg.(map[string]interface{}); ok { for k, v := range iType { result[k] = v } @@ -37,7 +36,7 @@ func unionMap(args ...interface{}) interface{} { } func unionArray(args ...interface{}) interface{} { - result := []interface{}{} + var result []interface{} union := make(map[interface{}]bool) for _, arg := range args { diff --git a/pkg/iac/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go index bbcdfded6860..0d4d19b06c0d 100644 --- a/pkg/iac/scanners/azure/value.go +++ b/pkg/iac/scanners/azure/value.go @@ -4,9 +4,10 @@ import ( "strings" "time" + "k8s.io/utils/strings/slices" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" - "k8s.io/utils/strings/slices" ) type EvalContext struct{} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go index a9786910f58b..378ffdfa81ce 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go @@ -4,9 +4,9 @@ import ( "fmt" "net" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" - "github.com/apparentlymart/go-cidr/cidr" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func GetAzs(property *Property) (*Property, bool) { diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index f4d31ff565d9..9edd3639f111 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -11,11 +11,11 @@ import ( "path/filepath" "strings" - 
"github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/liamg/jfather" "gopkg.in/yaml.v3" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/detection" ) diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index 2de7c937808d..466de3497c85 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -6,12 +6,11 @@ import ( "strconv" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) type EqualityOptions = int @@ -414,7 +413,7 @@ func removeLeftMargin(lines []string) []string { func convert(input interface{}) interface{} { switch x := input.(type) { case map[interface{}]interface{}: - outpMap := map[string]interface{}{} + outpMap := make(map[string]interface{}) for k, v := range x { outpMap[k.(string)] = convert(v) } diff --git a/pkg/iac/scanners/cloudformation/parser/property_helpers.go b/pkg/iac/scanners/cloudformation/parser/property_helpers.go index 86c50dcc9639..c7b9d9efac2d 100644 --- a/pkg/iac/scanners/cloudformation/parser/property_helpers.go +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers.go @@ -5,7 +5,6 @@ import ( "strings" defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) diff --git a/pkg/iac/scanners/cloudformation/parser/resource.go b/pkg/iac/scanners/cloudformation/parser/resource.go index 1258ac3fd5c0..d18bc2212d16 100644 --- a/pkg/iac/scanners/cloudformation/parser/resource.go +++ 
b/pkg/iac/scanners/cloudformation/parser/resource.go @@ -4,10 +4,10 @@ import ( "io/fs" "strings" - defsecTypes "github.com/aquasecurity/defsec/pkg/types" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + defsecTypes "github.com/aquasecurity/defsec/pkg/types" ) type Resource struct { diff --git a/pkg/iac/scanners/cloudformation/parser/util.go b/pkg/iac/scanners/cloudformation/parser/util.go index a5252e354a56..6c286564f685 100644 --- a/pkg/iac/scanners/cloudformation/parser/util.go +++ b/pkg/iac/scanners/cloudformation/parser/util.go @@ -3,10 +3,10 @@ package parser import ( "strconv" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" - "github.com/liamg/jfather" "gopkg.in/yaml.v3" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" ) func setPropertyValueFromJson(node jfather.Node, propertyData *PropertyInner) error { @@ -57,7 +57,7 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro newContent = createNode(node, newContent) - node.Tag = "!!map" + node.Tag = "!!map" //nolint:goconst node.Kind = yaml.MappingNode node.Content = newContent } @@ -80,7 +80,7 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro } switch node.Tag { - case "!!map": + case "!!map": //nolint:goconst var childData map[string]*Property if err := node.Decode(&childData); err != nil { return err @@ -119,7 +119,7 @@ func createNode(node *yaml.Node, newContent []*yaml.Node) []*yaml.Node { case yaml.SequenceNode: newNode.Tag = "!!seq" case yaml.MappingNode: - newNode.Tag = "!!map" + newNode.Tag = "!!map" //nolint:goconst case yaml.ScalarNode: default: newNode.Tag = node.Tag diff --git a/pkg/iac/scanners/cloudformation/scanner.go b/pkg/iac/scanners/cloudformation/scanner.go index 43f1f839426f..c39ebf02e52f 100644 --- a/pkg/iac/scanners/cloudformation/scanner.go +++ b/pkg/iac/scanners/cloudformation/scanner.go @@ -13,7 +13,6 @@ import ( 
"github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/types" - adapter "github.com/aquasecurity/trivy/internal/adapters/cloudformation" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" diff --git a/pkg/iac/scanners/dockerfile/parser/parser.go b/pkg/iac/scanners/dockerfile/parser/parser.go index 6f86e0419031..cd21642ac0a2 100644 --- a/pkg/iac/scanners/dockerfile/parser/parser.go +++ b/pkg/iac/scanners/dockerfile/parser/parser.go @@ -8,11 +8,11 @@ import ( "path/filepath" "strings" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/parser" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/providers/dockerfile" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/detection" ) diff --git a/pkg/iac/scanners/dockerfile/scanner.go b/pkg/iac/scanners/dockerfile/scanner.go index a9d7c6f6f951..46da048e1b2f 100644 --- a/pkg/iac/scanners/dockerfile/scanner.go +++ b/pkg/iac/scanners/dockerfile/scanner.go @@ -9,9 +9,8 @@ import ( "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scanners" "github.com/aquasecurity/trivy/pkg/iac/scanners/dockerfile/parser" diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index cc417e13e16d..5c2b40ed41df 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ 
b/pkg/iac/scanners/helm/parser/parser.go @@ -13,16 +13,15 @@ import ( "sort" "strings" - "gopkg.in/yaml.v3" - - "github.com/aquasecurity/defsec/pkg/debug" "github.com/google/uuid" + "gopkg.in/yaml.v3" "helm.sh/helm/v3/pkg/action" "helm.sh/helm/v3/pkg/chart" "helm.sh/helm/v3/pkg/chart/loader" "helm.sh/helm/v3/pkg/release" "helm.sh/helm/v3/pkg/releaseutil" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/detection" ) diff --git a/pkg/iac/scanners/helm/parser/parser_tar.go b/pkg/iac/scanners/helm/parser/parser_tar.go index ad3abdad82ca..5455ab780683 100644 --- a/pkg/iac/scanners/helm/parser/parser_tar.go +++ b/pkg/iac/scanners/helm/parser/parser_tar.go @@ -11,8 +11,9 @@ import ( "os" "path/filepath" - "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/liamg/memoryfs" + + "github.com/aquasecurity/trivy/pkg/iac/detection" ) var errSkipFS = errors.New("skip parse FS") diff --git a/pkg/iac/scanners/helm/parser/vals.go b/pkg/iac/scanners/helm/parser/vals.go index 300dad819730..b54cd7c3a521 100644 --- a/pkg/iac/scanners/helm/parser/vals.go +++ b/pkg/iac/scanners/helm/parser/vals.go @@ -22,11 +22,11 @@ type ValueOptions struct { // MergeValues merges values from files specified via -f/--values and directly // via --set, --set-string, or --set-file, marshaling them to YAML func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { - base := map[string]interface{}{} + base := make(map[string]interface{}) // User specified a values files via -f/--values for _, filePath := range opts.ValueFiles { - currentMap := map[string]interface{}{} + currentMap := make(map[string]interface{}) bytes, err := readFile(filePath) if err != nil { diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index a4235b6ea279..e81f22208089 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -8,13 +8,13 @@ 
import ( "path/filepath" "strings" + "github.com/liamg/memoryfs" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" - "github.com/aquasecurity/defsec/pkg/types" - "github.com/liamg/memoryfs" - "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/types" "github.com/aquasecurity/trivy/pkg/iac/detection" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scanners" diff --git a/pkg/iac/scanners/json/parser/parser.go b/pkg/iac/scanners/json/parser/parser.go index 7205df351de4..ff3417b8f0b9 100644 --- a/pkg/iac/scanners/json/parser/parser.go +++ b/pkg/iac/scanners/json/parser/parser.go @@ -68,8 +68,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Dockerfile content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/iac/scanners/kubernetes/parser/manifest.go b/pkg/iac/scanners/kubernetes/parser/manifest.go index 3f809a6f9145..0cc8aa092178 100644 --- a/pkg/iac/scanners/kubernetes/parser/manifest.go +++ b/pkg/iac/scanners/kubernetes/parser/manifest.go @@ -14,7 +14,7 @@ type Manifest struct { func (m *Manifest) UnmarshalYAML(value *yaml.Node) error { switch value.Tag { - case "!!map": + case "!!map": //nolint:goconst node := new(ManifestNode) node.Path = m.Path if err := value.Decode(node); err != nil { diff --git a/pkg/iac/scanners/kubernetes/parser/parser.go b/pkg/iac/scanners/kubernetes/parser/parser.go index aa915adc21ee..46d853a9ed08 100644 --- a/pkg/iac/scanners/kubernetes/parser/parser.go +++ 
b/pkg/iac/scanners/kubernetes/parser/parser.go @@ -34,9 +34,9 @@ func (p *Parser) SetSkipRequiredCheck(b bool) { } // New creates a new K8s parser -func New(options ...options.ParserOption) *Parser { +func New(po ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range po { option(p) } return p @@ -73,8 +73,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Kubernetes manifest from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } @@ -82,11 +82,11 @@ func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interfac return p.Parse(f, path) } -func (p *Parser) required(fs fs.FS, path string) bool { +func (p *Parser) required(fsys fs.FS, path string) bool { if p.skipRequired { return true } - f, err := fs.Open(filepath.ToSlash(path)) + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return false } diff --git a/pkg/iac/scanners/kubernetes/scanner.go b/pkg/iac/scanners/kubernetes/scanner.go index 9bbc03aec74c..d8633c97ba7e 100644 --- a/pkg/iac/scanners/kubernetes/scanner.go +++ b/pkg/iac/scanners/kubernetes/scanner.go @@ -8,6 +8,8 @@ import ( "sort" "sync" + "github.com/liamg/memoryfs" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" @@ -16,7 +18,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scanners" "github.com/aquasecurity/trivy/pkg/iac/scanners/kubernetes/parser" - "github.com/liamg/memoryfs" ) var _ scanners.FSScanner = (*Scanner)(nil) diff --git 
a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index f11844c9c301..1e1f283e5614 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -12,7 +12,6 @@ import ( "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" "github.com/aquasecurity/defsec/pkg/terraform" - adapter "github.com/aquasecurity/trivy/internal/adapters/terraform" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" @@ -107,7 +106,7 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er checksTime := time.Now() registeredRules := rules.GetRegistered(e.frameworks...) - e.debug.Log("Initialised %d rule(s).", len(registeredRules)) + e.debug.Log("Initialized %d rule(s).", len(registeredRules)) pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) diff --git a/pkg/iac/scanners/terraform/executor/statistics.go b/pkg/iac/scanners/terraform/executor/statistics.go index 5c2dd1784ea2..6bdaab0fd2a4 100644 --- a/pkg/iac/scanners/terraform/executor/statistics.go +++ b/pkg/iac/scanners/terraform/executor/statistics.go @@ -8,8 +8,9 @@ import ( "strconv" "strings" - "github.com/aquasecurity/defsec/pkg/scan" "github.com/olekukonko/tablewriter" + + "github.com/aquasecurity/defsec/pkg/scan" ) type StatisticsItem struct { diff --git a/pkg/iac/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go index 73e31e9f950c..3fd3ba63579f 100644 --- a/pkg/iac/scanners/terraform/options.go +++ b/pkg/iac/scanners/terraform/options.go @@ -8,7 +8,6 @@ import ( "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/severity" "github.com/aquasecurity/defsec/pkg/state" - 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" ) diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go index 41a656909e0c..32d08304d4f5 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -8,16 +8,16 @@ import ( "reflect" "time" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" "golang.org/x/exp/slices" "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/terraform" tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" "github.com/aquasecurity/defsec/pkg/types" - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/ext/typeexpr" - "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" ) const ( diff --git a/pkg/iac/scanners/terraform/parser/funcs/collection.go b/pkg/iac/scanners/terraform/parser/funcs/collection.go index 693b8912f618..506c711cd2fd 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/collection.go +++ b/pkg/iac/scanners/terraform/parser/funcs/collection.go @@ -353,7 +353,7 @@ var MatchkeysFunc = function.New(&function.Spec{ return cty.ListValEmpty(retType.ElementType()), errors.New("length of keys and values should be equal") } - output := make([]cty.Value, 0) + var output []cty.Value values := args[0] // Keys and searchset must be the same type. 
@@ -582,7 +582,7 @@ var TransposeFunc = function.New(&function.Spec{ } for outKey, outVal := range tmpMap { - values := make([]cty.Value, 0) + var values []cty.Value for _, v := range outVal { values = append(values, cty.StringVal(v)) } @@ -600,7 +600,7 @@ var TransposeFunc = function.New(&function.Spec{ // ListFunc constructs a function that takes an arbitrary number of arguments // and returns a list containing those values in the same order. // -// This function is deprecated in Terraform v0.12 +// This function is deprecated in Terraform v0.12 // nolint:gocritic var ListFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ @@ -621,7 +621,7 @@ var ListFunc = function.New(&function.Spec{ // MapFunc constructs a function that takes an even number of arguments and // returns a map whose elements are constructed from consecutive pairs of arguments. // -// This function is deprecated in Terraform v0.12 +// This function is deprecated in Terraform v0.12 // nolint:gocritic var MapFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ diff --git a/pkg/iac/scanners/terraform/parser/funcs/crypto.go b/pkg/iac/scanners/terraform/parser/funcs/crypto.go index 424c4c184763..786a288e4e2c 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/crypto.go +++ b/pkg/iac/scanners/terraform/parser/funcs/crypto.go @@ -1,10 +1,9 @@ -// Copied from github.com/hashicorp/terraform/internal/lang/funcs package funcs import ( - "crypto/md5" + "crypto/md5" //nolint:gosec "crypto/rsa" - "crypto/sha1" + "crypto/sha1" //nolint:gosec "crypto/sha256" "crypto/sha512" "encoding/asn1" @@ -274,7 +273,7 @@ func UUID() (cty.Value, error) { // UUIDV5 generates and returns a Type-5 UUID in the standard hexadecimal string // format. 
-func UUIDV5(namespace cty.Value, name cty.Value) (cty.Value, error) { +func UUIDV5(namespace, name cty.Value) (cty.Value, error) { return UUIDV5Func.Call([]cty.Value{namespace, name}) } diff --git a/pkg/iac/scanners/terraform/parser/funcs/datetime.go b/pkg/iac/scanners/terraform/parser/funcs/datetime.go index 253e59eef018..b09da879da99 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/datetime.go +++ b/pkg/iac/scanners/terraform/parser/funcs/datetime.go @@ -66,6 +66,6 @@ func Timestamp() (cty.Value, error) { // // The result is a string, also in RFC 3339 format, representing the result // of adding the given direction to the given timestamp. -func TimeAdd(timestamp cty.Value, duration cty.Value) (cty.Value, error) { +func TimeAdd(timestamp, duration cty.Value) (cty.Value, error) { return TimeAddFunc.Call([]cty.Value{timestamp, duration}) } diff --git a/pkg/iac/scanners/terraform/parser/funcs/defaults.go b/pkg/iac/scanners/terraform/parser/funcs/defaults.go index 4467b81e35ce..e32adfb904f2 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/defaults.go +++ b/pkg/iac/scanners/terraform/parser/funcs/defaults.go @@ -69,6 +69,7 @@ var DefaultsFunc = function.New(&function.Spec{ }, }) +// nolint:cyclop func defaultsApply(input, fallback cty.Value) cty.Value { wantTy := input.Type() @@ -113,7 +114,7 @@ func defaultsApply(input, fallback cty.Value) cty.Value { return input } atys := wantTy.AttributeTypes() - ret := map[string]cty.Value{} + ret := make(map[string]cty.Value) for attr, aty := range atys { inputSub := umInput.GetAttr(attr) fallbackSub := cty.NullVal(aty) @@ -154,7 +155,7 @@ func defaultsApply(input, fallback cty.Value) cty.Value { ety := wantTy.ElementType() switch { case wantTy.IsMapType(): - newVals := map[string]cty.Value{} + newVals := make(map[string]cty.Value) if !umInput.IsNull() { for it := umInput.ElementIterator(); it.Next(); { diff --git a/pkg/iac/scanners/terraform/parser/funcs/encoding.go b/pkg/iac/scanners/terraform/parser/funcs/encoding.go 
index f74a508fb7ed..778367fb8fce 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/encoding.go +++ b/pkg/iac/scanners/terraform/parser/funcs/encoding.go @@ -30,7 +30,7 @@ var Base64DecodeFunc = function.New(&function.Spec{ if err != nil { return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) } - if !utf8.Valid([]byte(sDec)) { + if !utf8.Valid(sDec) { log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") } diff --git a/pkg/iac/scanners/terraform/parser/funcs/number.go b/pkg/iac/scanners/terraform/parser/funcs/number.go index 6c8f5dc3b6d9..f68ec4851776 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/number.go +++ b/pkg/iac/scanners/terraform/parser/funcs/number.go @@ -148,7 +148,7 @@ var ParseIntFunc = function.New(&function.Spec{ }, }) -// Log returns returns the logarithm of a given number in a given base. +// Log returns the logarithm of a given number in a given base. func Log(num, base cty.Value) (cty.Value, error) { return LogFunc.Call([]cty.Value{num, base}) } @@ -165,6 +165,6 @@ func Signum(num cty.Value) (cty.Value, error) { } // ParseInt parses a string argument and returns an integer of the specified base. 
-func ParseInt(num cty.Value, base cty.Value) (cty.Value, error) { +func ParseInt(num, base cty.Value) (cty.Value, error) { return ParseIntFunc.Call([]cty.Value{num, base}) } diff --git a/pkg/iac/scanners/terraform/parser/funcs/string.go b/pkg/iac/scanners/terraform/parser/funcs/string.go index 49696784e872..6fe077c1f586 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/string.go +++ b/pkg/iac/scanners/terraform/parser/funcs/string.go @@ -43,7 +43,7 @@ var ReplaceFunc = function.New(&function.Spec{ return cty.StringVal(re.ReplaceAllString(str, replace)), nil } - return cty.StringVal(strings.Replace(str, substr, replace, -1)), nil + return cty.StringVal(strings.ReplaceAll(str, substr, replace)), nil }, }) diff --git a/pkg/iac/scanners/terraform/parser/functions.go b/pkg/iac/scanners/terraform/parser/functions.go index f8553d150f16..5b2cec6986bb 100644 --- a/pkg/iac/scanners/terraform/parser/functions.go +++ b/pkg/iac/scanners/terraform/parser/functions.go @@ -3,12 +3,13 @@ package parser import ( "io/fs" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" "github.com/hashicorp/hcl/v2/ext/tryfunc" ctyyaml "github.com/zclconf/go-cty-yaml" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" + + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" ) // Functions returns the set of functions that should be used to when evaluating diff --git a/pkg/iac/scanners/terraform/parser/load_blocks.go b/pkg/iac/scanners/terraform/parser/load_blocks.go index 88bd9de47f3b..782bfdc85fb6 100644 --- a/pkg/iac/scanners/terraform/parser/load_blocks.go +++ b/pkg/iac/scanners/terraform/parser/load_blocks.go @@ -6,9 +6,10 @@ import ( "strings" "time" + "github.com/hashicorp/hcl/v2" + "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - "github.com/hashicorp/hcl/v2" ) func loadBlocksFromFile(file sourceFile, 
moduleSource string) (hcl.Blocks, []terraform.Ignore, error) { diff --git a/pkg/iac/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go index 9d6c7adcdcd1..7f1ef3060995 100644 --- a/pkg/iac/scanners/terraform/parser/load_module.go +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -8,10 +8,10 @@ import ( "path/filepath" "strings" + "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" - - "github.com/zclconf/go-cty/cty" ) type moduleLoadError struct { @@ -146,7 +146,7 @@ func (e *evaluator) loadModuleFromTerraformCache(ctx context.Context, b *terrafo func (e *evaluator) loadExternalModule(ctx context.Context, b *terraform.Block, source string) (*ModuleDefinition, error) { - e.debug.Log("locating non-initialised module '%s'...", source) + e.debug.Log("locating non-initialized module '%s'...", source) version := b.GetAttribute("version").AsStringValueOrDefault("", b).Value() opt := resolvers.Options{ diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index fde407e5a696..00bba293ad42 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -10,14 +10,14 @@ import ( "strings" "time" - "github.com/aquasecurity/defsec/pkg/debug" - "github.com/aquasecurity/defsec/pkg/scanners/options" - "github.com/aquasecurity/defsec/pkg/terraform" - tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/zclconf/go-cty/cty" + "github.com/aquasecurity/defsec/pkg/debug" + "github.com/aquasecurity/defsec/pkg/scanners/options" + "github.com/aquasecurity/defsec/pkg/terraform" + tfcontext "github.com/aquasecurity/defsec/pkg/terraform/context" "github.com/aquasecurity/trivy/pkg/iac/extrafs" ) diff --git 
a/pkg/iac/scanners/terraform/parser/resolvers/cache.go b/pkg/iac/scanners/terraform/parser/resolvers/cache.go index 1314d538a60a..7aca10de768d 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/cache.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/cache.go @@ -25,7 +25,7 @@ func locateCacheFS() (fs.FS, error) { func locateCacheDir() (string, error) { cacheDir := filepath.Join(os.TempDir(), tempDirName, "cache") - if err := os.MkdirAll(cacheDir, 0o755); err != nil { + if err := os.MkdirAll(cacheDir, 0o750); err != nil { return "", err } if !isWritable(cacheDir) { diff --git a/pkg/iac/scanners/terraform/parser/resolvers/remote.go b/pkg/iac/scanners/terraform/parser/resolvers/remote.go index 4c1a96437e65..51a76e6f6b41 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/remote.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/remote.go @@ -58,7 +58,7 @@ func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (fil func (r *remoteResolver) download(ctx context.Context, opt Options, dst string) error { _ = os.RemoveAll(dst) - if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil { + if err := os.MkdirAll(filepath.Dir(dst), 0o750); err != nil { return err } diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go index ecee5f883fb7..e1ae195dfa30 100644 --- a/pkg/iac/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -10,14 +10,14 @@ import ( "sync" "time" + "golang.org/x/exp/slices" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/defsec/pkg/terraform" "github.com/aquasecurity/defsec/pkg/types" - "golang.org/x/exp/slices" - "github.com/aquasecurity/trivy/pkg/iac/extrafs" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scanners" diff --git 
a/pkg/iac/scanners/terraformplan/parser/parser.go b/pkg/iac/scanners/terraformplan/parser/parser.go index a14eea339ac2..a1fb5e1366da 100644 --- a/pkg/iac/scanners/terraformplan/parser/parser.go +++ b/pkg/iac/scanners/terraformplan/parser/parser.go @@ -8,8 +8,9 @@ import ( "os" "strings" - "github.com/aquasecurity/defsec/pkg/terraform" "github.com/liamg/memoryfs" + + "github.com/aquasecurity/defsec/pkg/terraform" ) type Parser struct { @@ -142,8 +143,7 @@ func getResources(module Module, resourceChanges []ResourceChange, configuration } func unpackConfigurationValue(val interface{}, r Resource) (interface{}, bool) { - switch t := val.(type) { - case map[string]interface{}: + if t, ok := val.(map[string]interface{}); ok { for k, v := range t { switch k { case "references": diff --git a/pkg/iac/scanners/terraformplan/scanner.go b/pkg/iac/scanners/terraformplan/scanner.go index 7417d34e989f..85ec0d6123e5 100644 --- a/pkg/iac/scanners/terraformplan/scanner.go +++ b/pkg/iac/scanners/terraformplan/scanner.go @@ -6,6 +6,8 @@ import ( "io" "io/fs" + "github.com/bmatcuk/doublestar/v4" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/framework" "github.com/aquasecurity/defsec/pkg/scan" @@ -13,7 +15,6 @@ import ( terraformScanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraformplan/parser" - "github.com/bmatcuk/doublestar/v4" ) var tfPlanExts = []string{ diff --git a/pkg/iac/scanners/toml/parser/parser.go b/pkg/iac/scanners/toml/parser/parser.go index fff396f6d0bf..be215dfa6b07 100644 --- a/pkg/iac/scanners/toml/parser/parser.go +++ b/pkg/iac/scanners/toml/parser/parser.go @@ -7,6 +7,7 @@ import ( "path/filepath" "github.com/BurntSushi/toml" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" 
"github.com/aquasecurity/trivy/pkg/iac/detection" diff --git a/pkg/iac/scanners/yaml/parser/parser.go b/pkg/iac/scanners/yaml/parser/parser.go index 8eec82404d94..177d2289dd30 100644 --- a/pkg/iac/scanners/yaml/parser/parser.go +++ b/pkg/iac/scanners/yaml/parser/parser.go @@ -8,10 +8,11 @@ import ( "path/filepath" "strings" + "gopkg.in/yaml.v3" + "github.com/aquasecurity/defsec/pkg/debug" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/detection" - "gopkg.in/yaml.v3" ) var _ options.ConfigurableParser = (*Parser)(nil) @@ -70,8 +71,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses yaml content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) ([]interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/test/testutil/util.go b/test/testutil/util.go index ea29df78a9ae..8d7b957d438d 100644 --- a/test/testutil/util.go +++ b/test/testutil/util.go @@ -7,10 +7,11 @@ import ( "strings" "testing" - "github.com/aquasecurity/defsec/pkg/scan" "github.com/liamg/memoryfs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/aquasecurity/defsec/pkg/scan" ) func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { From 772caa6ff2ce2bc9e59228ace689d262973c2ed4 Mon Sep 17 00:00:00 2001 From: nikpivkin Date: Mon, 4 Dec 2023 13:35:53 +0700 Subject: [PATCH 3/5] chore: fix linter issues --- .../aws/000000000000/eu-central-1/data.json | 1 - internal/adapters/arm/adapt.go | 2 +- internal/adapters/cloudformation/adapt.go | 2 +- internal/adapters/cloudformation/aws/adapt.go | 2 +- .../adapters/cloudformation/aws/ec2/ec2.go | 2 
+- internal/adapters/terraform/aws/s3/bucket.go | 2 +- .../terraform/aws/workspaces/adapt.go | 6 +- .../digitalocean/compute/adapt_test.go | 4 +- .../adapters/terraform/google/iam/adapt.go | 4 +- .../adapters/terraform/tftestutil/testutil.go | 2 +- pkg/iac/detection/detect.go | 18 +++--- pkg/iac/rego/build.go | 2 +- pkg/iac/rego/exceptions.go | 6 +- pkg/iac/rego/metadata.go | 29 +++++----- pkg/iac/rego/metadata_test.go | 6 +- pkg/iac/rego/result.go | 26 ++++----- pkg/iac/rego/result_test.go | 14 ++--- pkg/iac/rego/scanner.go | 35 ++++++------ pkg/iac/rego/scanner_test.go | 42 +++++++------- pkg/iac/rego/schemas/builder.go | 4 +- pkg/iac/rules/providers.go | 2 +- .../azure/arm/parser/armjson/bench_test.go | 4 +- .../azure/arm/parser/armjson/decode.go | 2 +- .../azure/arm/parser/armjson/decode_array.go | 2 +- .../azure/arm/parser/armjson/decode_object.go | 2 +- .../scanners/azure/arm/parser/armjson/node.go | 4 +- .../azure/arm/parser/armjson/parse.go | 2 +- .../arm/parser/armjson/parse_array_test.go | 2 +- .../arm/parser/armjson/parse_boolean_test.go | 5 +- .../arm/parser/armjson/parse_complex_test.go | 8 +-- .../azure/arm/parser/armjson/parse_object.go | 2 +- .../arm/parser/armjson/parse_object_test.go | 8 +-- .../arm/parser/armjson/parse_string_test.go | 2 +- .../azure/arm/parser/armjson/unmarshal.go | 4 +- pkg/iac/scanners/azure/arm/scanner.go | 18 +++--- pkg/iac/scanners/azure/deployment.go | 42 +++++++------- pkg/iac/scanners/azure/expressions/lex.go | 2 +- pkg/iac/scanners/azure/expressions/node.go | 10 ++-- .../azure/functions/intersection_test.go | 4 +- pkg/iac/scanners/azure/functions/union.go | 27 +++++---- pkg/iac/scanners/azure/value.go | 12 ++-- .../cloudformation/parser/file_context.go | 10 ++-- .../cloudformation/parser/fn_builtin.go | 2 +- .../cloudformation/parser/fn_find_in_map.go | 4 +- .../scanners/cloudformation/parser/fn_ref.go | 32 ++++++----- .../cloudformation/parser/fn_split.go | 2 +- .../scanners/cloudformation/parser/fn_sub.go | 4 +- 
.../cloudformation/parser/parameter.go | 56 +++++++++---------- .../scanners/cloudformation/parser/parser.go | 34 +++++------ .../cloudformation/parser/property.go | 16 +++--- .../cloudformation/parser/property_helpers.go | 4 +- .../parser/property_helpers_test.go | 2 +- .../parser/pseudo_parameters.go | 6 +- .../parser/pseudo_parameters_test.go | 2 +- pkg/iac/scanners/cloudformation/scanner.go | 28 +++++----- pkg/iac/scanners/dockerfile/parser/parser.go | 8 +-- pkg/iac/scanners/dockerfile/scanner.go | 38 ++++++------- pkg/iac/scanners/helm/parser/parser.go | 16 +++--- pkg/iac/scanners/helm/parser/vals.go | 16 +++--- pkg/iac/scanners/helm/scanner.go | 10 ++-- pkg/iac/scanners/json/parser/parser.go | 8 +-- pkg/iac/scanners/json/parser/parser_test.go | 6 +- pkg/iac/scanners/json/scanner.go | 34 +++++------ .../scanners/kubernetes/parser/manifest.go | 2 +- .../kubernetes/parser/manifest_node.go | 10 ++-- pkg/iac/scanners/kubernetes/parser/parser.go | 14 ++--- pkg/iac/scanners/kubernetes/scanner.go | 20 +++---- .../scanners/terraform/executor/executor.go | 2 +- pkg/iac/scanners/terraform/executor/pool.go | 8 +-- .../terraform/parser/funcs/collection.go | 6 +- .../terraform/parser/funcs/defaults.go | 2 +- .../scanners/terraform/parser/load_blocks.go | 2 +- .../terraform/parser/load_module_metadata.go | 2 +- .../terraform/parser/module_retrieval.go | 2 +- .../terraform/parser/resolvers/cache.go | 2 +- .../terraform/parser/resolvers/local.go | 2 +- .../terraform/parser/resolvers/options.go | 2 +- .../terraform/parser/resolvers/registry.go | 6 +- .../terraform/parser/resolvers/remote.go | 2 +- pkg/iac/scanners/terraform/scanner.go | 20 +++---- pkg/iac/scanners/terraform/scanner_test.go | 22 ++++---- .../scanners/terraformplan/parser/parser.go | 10 ++-- .../terraformplan/parser/plan_file.go | 6 +- pkg/iac/scanners/terraformplan/scanner.go | 12 ++-- .../terraformplan/test/scanner_test.go | 2 +- pkg/iac/scanners/toml/parser/parser.go | 10 ++-- 
pkg/iac/scanners/toml/parser/parser_test.go | 6 +- pkg/iac/scanners/toml/scanner.go | 34 +++++------ pkg/iac/scanners/universal/scanner.go | 4 +- pkg/iac/scanners/yaml/parser/parser.go | 10 ++-- pkg/iac/scanners/yaml/parser/parser_test.go | 18 +++--- pkg/iac/scanners/yaml/scanner.go | 34 +++++------ pkg/k8s/scanner/scanner_test.go | 20 +++---- test/testutil/util.go | 32 +++++------ 94 files changed, 516 insertions(+), 519 deletions(-) delete mode 100644 integration/cloud/aws/000000000000/eu-central-1/data.json diff --git a/integration/cloud/aws/000000000000/eu-central-1/data.json b/integration/cloud/aws/000000000000/eu-central-1/data.json deleted file mode 100644 index eb6c8a6500de..000000000000 --- a/integration/cloud/aws/000000000000/eu-central-1/data.json +++ /dev/null @@ -1 +0,0 @@ -{"schema_version":2,"state":{"AWS":{"Meta":{"TFProviders":null},"AccessAnalyzer":{"Analyzers":null},"APIGateway":{"V1":{"APIs":null,"DomainNames":null},"V2":{"APIs":null,"DomainNames":null}},"Athena":{"Databases":null,"Workgroups":null},"Cloudfront":{"Distributions":null},"CloudTrail":{"Trails":null},"CloudWatch":{"LogGroups":null,"Alarms":null},"CodeBuild":{"Projects":null},"Config":{"ConfigurationAggregrator":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"SourceAllRegions":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false}}},"DocumentDB":{"Clusters":null},"DynamoDB":{"DAXClusters":null,"Tables":null},"EC2":{"Instances":null,"LaunchConfigurations":null,"LaunchTemplates":null,"VPCs":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey"
:"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"ID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"value":"vpc-ff423fb8"},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"value":true},"SecurityGroups":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":true},"Description":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":
"default VPC security group"},"IngressRules":null,"EgressRules":null,"VPCID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"vpc-ff423fb8"}}],"FlowLogsEnabled":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:vpc/vpc-ff423fb8","unresolvable":false},"value":false}}],"SecurityGroups":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"IsDefault":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":true},"Description":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"default VPC security 
group"},"IngressRules":null,"EgressRules":null,"VPCID":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:security-group/sg-3c8e4bc2412b5f2b7","unresolvable":false},"value":"vpc-ff423fb8"}}],"NetworkACLs":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Rules":[{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"egress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"allow"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:
ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"egress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"deny"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36
d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"ingress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"allow"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:ne
twork-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"0.0.0.0/0"}]},{"Metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"Type":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"ingress"},"Action":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"deny"},"Protocol":{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":"-1"},"CIDRs":[{"metadata":{"default":false,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value
":"0.0.0.0/0"}]}],"IsDefaultRule":{"metadata":{"default":true,"explicit":false,"managed":true,"parent":null,"range":{"endLine":0,"filename":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","fsKey":"","isLogicalSource":false,"sourcePrefix":"remote","startLine":0},"ref":"arn:aws:ec2:eu-central-1:000000000000:network-acl/acl-4f19a36d","unresolvable":false},"value":false}}],"Subnets":null,"Volumes":null},"ECR":{"Repositories":null},"ECS":{"Clusters":null,"TaskDefinitions":null},"EFS":{"FileSystems":null},"EKS":{"Clusters":null},"ElastiCache":{"Clusters":null,"ReplicationGroups":null,"SecurityGroups":null},"Elasticsearch":{"Domains":null},"ELB":{"LoadBalancers":null},"EMR":{"Clusters":null,"SecurityConfiguration":null},"IAM":{"PasswordPolicy":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"ReusePreventionCount":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0},"RequireLowercase":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireUppercase":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireNumbers":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"RequireSymbols":{"metadata":{"default":false,"expli
cit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false},"MaxAgeDays":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0},"MinimumLength":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":0}},"Policies":null,"Groups":null,"Users":null,"Roles":null,"ServerCertificates":null},"Kinesis":{"Streams":null},"KMS":{"Keys":null},"Lambda":{"Functions":null},"MQ":{"Brokers":null},"MSK":{"Clusters":null},"Neptune":{"Clusters":null},"RDS":{"Instances":null,"Clusters":null,"Classic":{"DBSecurityGroups":null},"Snapshots":null,"ParameterGroups":null},"Redshift":{"Clusters":null,"ReservedNodes":null,"ClusterParameters":null,"SecurityGroups":null},"SAM":{"APIs":null,"Applications":null,"Functions":null,"HttpAPIs":null,"SimpleTables":null,"StateMachines":null},"S3":{"Buckets":null},"SNS":{"Topics":null},"SQS":{"Queues":null},"SSM":{"Secrets":null},"WorkSpaces":{"WorkSpaces":null}},"Azure":{"AppService":{"Services":null,"FunctionApps":null},"Authorization":{"RoleDefinitions":null},"Compute":{"LinuxVirtualMachines":null,"WindowsVirtualMachines":null,"ManagedDisks":null},"Container":{"KubernetesClusters":null},"Database":{"MSSQLServers":null,"MariaDBServers":null,"MySQLServers":null,"PostgreSQLServers":null},"DataFactory":{"DataFactories":null},"DataLake":{"Stores":null},"KeyVault":{"Vaults":null},"Monitor":{"LogProfiles":null},"Network":{"SecurityGroups":null,"NetworkWatcherFlowLogs":null},"SecurityCenter":{"Contacts":null,"Subscriptions":null},"Storage":{"Accounts":null},"Synapse":{"Workspaces":null}},"CloudStack":{"Compute"
:{"Instances":null}},"DigitalOcean":{"Compute":{"Firewalls":null,"LoadBalancers":null,"Droplets":null,"KubernetesClusters":null},"Spaces":{"Buckets":null}},"GitHub":{"Repositories":null,"EnvironmentSecrets":null,"BranchProtections":null},"Google":{"BigQuery":{"Datasets":null},"Compute":{"Disks":null,"Networks":null,"SSLPolicies":null,"ProjectMetadata":{"Metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"EnableOSLogin":{"metadata":{"default":false,"explicit":false,"managed":false,"parent":null,"range":{"endLine":0,"filename":"","fsKey":"","isLogicalSource":false,"sourcePrefix":"","startLine":0},"ref":"","unresolvable":false},"value":false}},"Instances":null},"DNS":{"ManagedZones":null},"GKE":{"Clusters":null},"KMS":{"KeyRings":null},"IAM":{"Organizations":null,"WorkloadIdentityPoolProviders":null},"SQL":{"Instances":null},"Storage":{"Buckets":null}},"Kubernetes":{"NetworkPolicies":null},"OpenStack":{"Compute":{"Instances":null,"Firewall":{"AllowRules":null,"DenyRules":null}},"Networking":{"SecurityGroups":null}},"Oracle":{"Compute":{"AddressReservations":null}},"Nifcloud":{"Computing":{"SecurityGroups":null,"Instances":null},"DNS":{"Records":null},"NAS":{"NASSecurityGroups":null,"NASInstances":null},"Network":{"ElasticLoadBalancers":null,"LoadBalancers":null,"Routers":null,"VpnGateways":null},"RDB":{"DBSecurityGroups":null,"DBInstances":null},"SSLCertificate":{"ServerCertificates":null}}},"service_metadata":{"accessanalyzer":{"name":"accessanalyzer","updated":"2023-12-01T16:34:23.965109+07:00"},"api-gateway":{"name":"api-gateway","updated":"2023-12-01T16:34:23.965129+07:00"},"athena":{"name":"athena","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudfront":{"name":"cloudfront","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudtrail":{"name":"cloudtrail","updated":"2023-12-01T16:34:23.965129+07:00"},"cloudwat
ch":{"name":"cloudwatch","updated":"2023-12-01T16:34:23.96513+07:00"},"codebuild":{"name":"codebuild","updated":"2023-12-01T16:34:23.96513+07:00"},"documentdb":{"name":"documentdb","updated":"2023-12-01T16:34:23.96513+07:00"},"dynamodb":{"name":"dynamodb","updated":"2023-12-01T16:34:23.96513+07:00"},"ec2":{"name":"ec2","updated":"2023-12-01T16:34:23.965154+07:00"},"ecr":{"name":"ecr","updated":"2023-12-01T16:34:23.965154+07:00"},"ecs":{"name":"ecs","updated":"2023-12-01T16:34:23.965154+07:00"},"efs":{"name":"efs","updated":"2023-12-01T16:34:23.965154+07:00"},"eks":{"name":"eks","updated":"2023-12-01T16:34:23.965171+07:00"},"elasticache":{"name":"elasticache","updated":"2023-12-01T16:34:23.965171+07:00"},"elasticsearch":{"name":"elasticsearch","updated":"2023-12-01T16:34:23.965171+07:00"},"elb":{"name":"elb","updated":"2023-12-01T16:34:23.965172+07:00"},"emr":{"name":"emr","updated":"2023-12-01T16:34:23.965172+07:00"},"iam":{"name":"iam","updated":"2023-12-01T16:34:23.965172+07:00"},"kinesis":{"name":"kinesis","updated":"2023-12-01T16:34:23.965181+07:00"},"kms":{"name":"kms","updated":"2023-12-01T16:34:23.965172+07:00"},"lambda":{"name":"lambda","updated":"2023-12-01T16:34:23.965172+07:00"},"mq":{"name":"mq","updated":"2023-12-01T16:34:23.965173+07:00"},"msk":{"name":"msk","updated":"2023-12-01T16:34:23.965173+07:00"},"neptune":{"name":"neptune","updated":"2023-12-01T16:34:23.965173+07:00"},"rds":{"name":"rds","updated":"2023-12-01T16:34:23.965173+07:00"},"redshift":{"name":"redshift","updated":"2023-12-01T16:34:23.96518+07:00"},"s3":{"name":"s3","updated":"2023-12-01T16:34:23.965181+07:00"},"sns":{"name":"sns","updated":"2023-12-01T16:34:23.96518+07:00"},"sqs":{"name":"sqs","updated":"2023-12-01T16:34:23.965181+07:00"},"ssm":{"name":"ssm","updated":"2023-12-01T16:34:23.965181+07:00"},"workspaces":{"name":"workspaces","updated":"2023-12-01T16:34:23.965181+07:00"}},"updated":"2023-12-01T16:34:23.965105+07:00"} diff --git a/internal/adapters/arm/adapt.go 
b/internal/adapters/arm/adapt.go index df317c429e85..9dbc9a1ff39c 100644 --- a/internal/adapters/arm/adapt.go +++ b/internal/adapters/arm/adapt.go @@ -21,7 +21,7 @@ import ( scanner "github.com/aquasecurity/trivy/pkg/iac/scanners/azure" ) -// Adapt ... +// Adapt ARM templates func Adapt(ctx context.Context, deployment scanner.Deployment) *state.State { return &state.State{ Azure: adaptAzure(deployment), diff --git a/internal/adapters/cloudformation/adapt.go b/internal/adapters/cloudformation/adapt.go index 5efe2612524e..b24a127c62a6 100644 --- a/internal/adapters/cloudformation/adapt.go +++ b/internal/adapters/cloudformation/adapt.go @@ -6,7 +6,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt CloudFormation func Adapt(cfFile parser.FileContext) *state.State { return &state.State{ AWS: aws.Adapt(cfFile), diff --git a/internal/adapters/cloudformation/aws/adapt.go b/internal/adapters/cloudformation/aws/adapt.go index 03746988ec16..017284db6ef8 100644 --- a/internal/adapters/cloudformation/aws/adapt.go +++ b/internal/adapters/cloudformation/aws/adapt.go @@ -36,7 +36,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... +// Adapt AWS func Adapt(cfFile parser.FileContext) aws.AWS { return aws.AWS{ APIGateway: apigateway.Adapt(cfFile), diff --git a/internal/adapters/cloudformation/aws/ec2/ec2.go b/internal/adapters/cloudformation/aws/ec2/ec2.go index 19eb2080c449..e96257550c25 100644 --- a/internal/adapters/cloudformation/aws/ec2/ec2.go +++ b/internal/adapters/cloudformation/aws/ec2/ec2.go @@ -5,7 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -// Adapt ... 
+// Adapt ec2 resources func Adapt(cfFile parser.FileContext) ec2.EC2 { return ec2.EC2{ LaunchConfigurations: getLaunchConfigurations(cfFile), diff --git a/internal/adapters/terraform/aws/s3/bucket.go b/internal/adapters/terraform/aws/s3/bucket.go index b254e5d56a5b..e26d7c67fe19 100644 --- a/internal/adapters/terraform/aws/s3/bucket.go +++ b/internal/adapters/terraform/aws/s3/bucket.go @@ -59,7 +59,7 @@ func getEncryption(block *terraform.Block, a *adapter) s3.Encryption { } } -func newS3Encryption(root *terraform.Block, sseConfgihuration *terraform.Block) s3.Encryption { +func newS3Encryption(root, sseConfgihuration *terraform.Block) s3.Encryption { return s3.Encryption{ Metadata: root.GetMetadata(), Enabled: isEncrypted(sseConfgihuration), diff --git a/internal/adapters/terraform/aws/workspaces/adapt.go b/internal/adapters/terraform/aws/workspaces/adapt.go index 084bb82faae7..325871aa787b 100644 --- a/internal/adapters/terraform/aws/workspaces/adapt.go +++ b/internal/adapters/terraform/aws/workspaces/adapt.go @@ -13,13 +13,13 @@ func Adapt(modules terraform.Modules) workspaces.WorkSpaces { } func adaptWorkspaces(modules terraform.Modules) []workspaces.WorkSpace { - var workspaces []workspaces.WorkSpace + var res []workspaces.WorkSpace for _, module := range modules { for _, resource := range module.GetResourcesByType("aws_workspaces_workspace") { - workspaces = append(workspaces, adaptWorkspace(resource)) + res = append(res, adaptWorkspace(resource)) } } - return workspaces + return res } func adaptWorkspace(resource *terraform.Block) workspaces.WorkSpace { diff --git a/internal/adapters/terraform/digitalocean/compute/adapt_test.go b/internal/adapters/terraform/digitalocean/compute/adapt_test.go index 34bb11e07f97..dc1520702d08 100644 --- a/internal/adapters/terraform/digitalocean/compute/adapt_test.go +++ b/internal/adapters/terraform/digitalocean/compute/adapt_test.go @@ -138,9 +138,7 @@ func Test_adaptFirewalls(t *testing.T) { `, expected: []compute.Firewall{ 
{ - Metadata: defsecTypes.NewTestMetadata(), - OutboundRules: []compute.OutboundFirewallRule{}, - InboundRules: []compute.InboundFirewallRule{}, + Metadata: defsecTypes.NewTestMetadata(), }, }, }, diff --git a/internal/adapters/terraform/google/iam/adapt.go b/internal/adapters/terraform/google/iam/adapt.go index b28650342319..ab227d8cd00d 100644 --- a/internal/adapters/terraform/google/iam/adapt.go +++ b/internal/adapters/terraform/google/iam/adapt.go @@ -75,12 +75,12 @@ FOLDER_NESTED: if folder.parentBlockID != "" && folder.parentBlockID == existing.blockID { existing.folder.Folders = append(existing.folder.Folders, folder.folder) a.folders[i] = existing - continue FOLDER_NESTED + continue FOLDER_NESTED // nolint:gocritic } } } -FOLDER_ORG: +FOLDER_ORG: // nolint:gocritic for _, folder := range a.folders { if folder.parentBlockID != "" { if org, ok := a.orgs[folder.parentBlockID]; ok { diff --git a/internal/adapters/terraform/tftestutil/testutil.go b/internal/adapters/terraform/tftestutil/testutil.go index 387ba8e2fe0c..362fc427f3c3 100644 --- a/internal/adapters/terraform/tftestutil/testutil.go +++ b/internal/adapters/terraform/tftestutil/testutil.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/test/testutil" ) -func CreateModulesFromSource(t *testing.T, source string, ext string) terraform.Modules { +func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules { fs := testutil.CreateFS(t, map[string]string{ "source" + ext: source, }) diff --git a/pkg/iac/detection/detect.go b/pkg/iac/detection/detect.go index 9050a735172a..b1c48d2625dd 100644 --- a/pkg/iac/detection/detect.go +++ b/pkg/iac/detection/detect.go @@ -43,7 +43,7 @@ func init() { return true } - var content interface{} + var content any return json.NewDecoder(r).Decode(&content) == nil } @@ -56,7 +56,7 @@ func init() { return true } - var content interface{} + var content any return yaml.NewDecoder(r).Decode(&content) == nil } @@ -83,7 +83,7 @@ func init() { return 
false } - contents := make(map[string]interface{}) + contents := make(map[string]any) err := json.NewDecoder(r).Decode(&contents) if err == nil { if _, ok := contents["terraform_version"]; ok { @@ -97,7 +97,7 @@ func init() { matchers[FileTypeCloudFormation] = func(name string, r io.ReadSeeker) bool { sniff := struct { - Resources map[string]map[string]interface{} `json:"Resources" yaml:"Resources"` + Resources map[string]map[string]any `json:"Resources" yaml:"Resources"` }{} switch { @@ -129,9 +129,9 @@ func init() { } sniff := struct { - ContentType string `json:"contentType"` - Parameters map[string]interface{} `json:"parameters"` - Resources []interface{} `json:"resources"` + ContentType string `json:"contentType"` + Parameters map[string]any `json:"parameters"` + Resources []any `json:"resources"` }{} metadata := types.NewUnmanagedMetadata() if err := armjson.UnmarshalFromReader(r, &sniff, &metadata); err != nil { @@ -190,7 +190,7 @@ func init() { return false } - var result map[string]interface{} + var result map[string]any if err := json.NewDecoder(r).Decode(&result); err != nil { return false } @@ -217,7 +217,7 @@ func init() { } for _, partial := range strings.Split(string(data), marker) { - var result map[string]interface{} + var result map[string]any if err := yaml.Unmarshal([]byte(partial), &result); err != nil { continue } diff --git a/pkg/iac/rego/build.go b/pkg/iac/rego/build.go index 3123ad673190..e8be100e4ef5 100644 --- a/pkg/iac/rego/build.go +++ b/pkg/iac/rego/build.go @@ -14,7 +14,7 @@ import ( func BuildSchemaSetFromPolicies(policies map[string]*ast.Module, paths []string, fsys fs.FS) (*ast.SchemaSet, bool, error) { schemaSet := ast.NewSchemaSet() - schemaSet.Put(ast.MustParseRef("schema.input"), make(map[string]interface{})) // for backwards compat only + schemaSet.Put(ast.MustParseRef("schema.input"), make(map[string]any)) // for backwards compat only var customFound bool for _, policy := range policies { for _, annotation := range 
policy.Annotations { diff --git a/pkg/iac/rego/exceptions.go b/pkg/iac/rego/exceptions.go index ab202ec00d19..a43e47147a54 100644 --- a/pkg/iac/rego/exceptions.go +++ b/pkg/iac/rego/exceptions.go @@ -5,7 +5,7 @@ import ( "fmt" ) -func (s *Scanner) isIgnored(ctx context.Context, namespace string, ruleName string, input interface{}) (bool, error) { +func (s *Scanner) isIgnored(ctx context.Context, namespace, ruleName string, input any) (bool, error) { if ignored, err := s.isNamespaceIgnored(ctx, namespace, input); err != nil { return false, err } else if ignored { @@ -14,7 +14,7 @@ func (s *Scanner) isIgnored(ctx context.Context, namespace string, ruleName stri return s.isRuleIgnored(ctx, namespace, ruleName, input) } -func (s *Scanner) isNamespaceIgnored(ctx context.Context, namespace string, input interface{}) (bool, error) { +func (s *Scanner) isNamespaceIgnored(ctx context.Context, namespace string, input any) (bool, error) { exceptionQuery := fmt.Sprintf("data.namespace.exceptions.exception[_] == %q", namespace) result, _, err := s.runQuery(ctx, exceptionQuery, input, true) if err != nil { @@ -23,7 +23,7 @@ func (s *Scanner) isNamespaceIgnored(ctx context.Context, namespace string, inpu return result.Allowed(), nil } -func (s *Scanner) isRuleIgnored(ctx context.Context, namespace string, ruleName string, input interface{}) (bool, error) { +func (s *Scanner) isRuleIgnored(ctx context.Context, namespace, ruleName string, input any) (bool, error) { exceptionQuery := fmt.Sprintf("endswith(%q, data.%s.exception[_][_])", ruleName, namespace) result, _, err := s.runQuery(ctx, exceptionQuery, input, true) if err != nil { diff --git a/pkg/iac/rego/metadata.go b/pkg/iac/rego/metadata.go index ee5f5eb35d44..4d2aa232d12b 100644 --- a/pkg/iac/rego/metadata.go +++ b/pkg/iac/rego/metadata.go @@ -89,13 +89,14 @@ func (sm *StaticMetadata) Update(meta map[string]any) error { } } if raw, ok := meta["related_resources"]; ok { - if relatedResources, ok := raw.([]map[string]any); ok 
{ + switch relatedResources := raw.(type) { + case []map[string]any: for _, relatedResource := range relatedResources { if raw, ok := relatedResource["ref"]; ok { sm.References = append(sm.References, fmt.Sprintf("%s", raw)) } } - } else if relatedResources, ok := raw.([]string); ok { + case []string: sm.References = append(sm.References, relatedResources...) } } @@ -132,10 +133,10 @@ func (sm *StaticMetadata) FromAnnotations(annotations *ast.Annotations) error { return nil } -func NewEngineMetadata(schema string, meta map[string]interface{}) (*scan.EngineMetadata, error) { - var sMap map[string]interface{} +func NewEngineMetadata(schema string, meta map[string]any) (*scan.EngineMetadata, error) { + var sMap map[string]any if raw, ok := meta[schema]; ok { - sMap, ok = raw.(map[string]interface{}) + sMap, ok = raw.(map[string]any) if !ok { return nil, fmt.Errorf("failed to parse %s metadata: not an object", schema) } @@ -280,7 +281,7 @@ func (m *MetadataRetriever) RetrieveMetadata(ctx context.Context, module *ast.Mo return nil, fmt.Errorf("failed to parse metadata: unexpected expression length") } expression := set[0].Expressions[0] - meta, ok := expression.Value.(map[string]interface{}) + meta, ok := expression.Value.(map[string]any) if !ok { return nil, fmt.Errorf("failed to parse metadata: not an object") } @@ -292,7 +293,7 @@ func (m *MetadataRetriever) RetrieveMetadata(ctx context.Context, module *ast.Mo return metadata, nil } -// nolint:cyclop +// nolint:gocyclo func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.Module) InputOptions { options := InputOptions{ @@ -300,12 +301,12 @@ func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.M Selectors: nil, } - var metadata map[string]interface{} + var metadata map[string]any // read metadata from official rego annotations if possible if annotation := m.findPackageAnnotations(module); annotation != nil && annotation.Custom != nil { if input, ok := 
annotation.Custom["input"]; ok { - if mapped, ok := input.(map[string]interface{}); ok { + if mapped, ok := input.(map[string]any); ok { metadata = mapped } } @@ -332,7 +333,7 @@ func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.M return options } expression := set[0].Expressions[0] - meta, ok := expression.Value.(map[string]interface{}) + meta, ok := expression.Value.(map[string]any) if !ok { return options } @@ -346,10 +347,10 @@ func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.M } if raw, ok := metadata["selector"]; ok { - if each, ok := raw.([]interface{}); ok { + if each, ok := raw.([]any); ok { for _, rawSelector := range each { var selector Selector - if selectorMap, ok := rawSelector.(map[string]interface{}); ok { + if selectorMap, ok := rawSelector.(map[string]any); ok { if rawType, ok := selectorMap["type"]; ok { selector.Type = fmt.Sprintf("%s", rawType) // handle backward compatibility for "defsec" source type which is now "cloud" @@ -357,9 +358,9 @@ func (m *MetadataRetriever) queryInputOptions(ctx context.Context, module *ast.M selector.Type = string(defsecTypes.SourceCloud) } } - if subType, ok := selectorMap["subtypes"].([]interface{}); ok { + if subType, ok := selectorMap["subtypes"].([]any); ok { for _, subT := range subType { - if st, ok := subT.(map[string]interface{}); ok { + if st, ok := subT.(map[string]any); ok { s := SubType{} _ = mapstructure.Decode(st, &s) selector.Subtypes = append(selector.Subtypes, s) diff --git a/pkg/iac/rego/metadata_test.go b/pkg/iac/rego/metadata_test.go index 935c027d0c58..64fee56703ec 100644 --- a/pkg/iac/rego/metadata_test.go +++ b/pkg/iac/rego/metadata_test.go @@ -117,8 +117,8 @@ func Test_UpdateStaticMetadata(t *testing.T) { } func Test_getEngineMetadata(t *testing.T) { - inputSchema := map[string]interface{}{ - "terraform": map[string]interface{}{ + inputSchema := map[string]any{ + "terraform": map[string]any{ "good_examples": `resource 
"aws_cloudtrail" "good_example" { is_multi_region_trail = true @@ -133,7 +133,7 @@ func Test_getEngineMetadata(t *testing.T) { } }`, }, - "cloud_formation": map[string]interface{}{"good_examples": `--- + "cloud_formation": map[string]any{"good_examples": `--- Resources: GoodExample: Type: AWS::CloudTrail::Trail diff --git a/pkg/iac/rego/result.go b/pkg/iac/rego/result.go index a2f56e3c2801..ae00f6968d33 100644 --- a/pkg/iac/rego/result.go +++ b/pkg/iac/rego/result.go @@ -43,19 +43,19 @@ func (r regoResult) GetMetadata() defsecTypes.Metadata { return m } -func (r regoResult) GetRawValue() interface{} { +func (r regoResult) GetRawValue() any { return nil } -func parseResult(raw interface{}) *regoResult { +func parseResult(raw any) *regoResult { var result regoResult result.Managed = true switch val := raw.(type) { - case []interface{}: + case []any: var msg string for _, item := range val { switch raw := item.(type) { - case map[string]interface{}: + case map[string]any: result = parseCause(raw) case string: msg = raw @@ -64,7 +64,7 @@ func parseResult(raw interface{}) *regoResult { result.Message = msg case string: result.Message = val - case map[string]interface{}: + case map[string]any: result = parseCause(val) default: result.Message = "Rego policy resulted in DENY" @@ -72,7 +72,7 @@ func parseResult(raw interface{}) *regoResult { return &result } -func parseCause(cause map[string]interface{}) regoResult { +func parseCause(cause map[string]any) regoResult { var result regoResult result.Managed = true if msg, ok := cause["msg"]; ok { @@ -107,7 +107,7 @@ func parseCause(cause map[string]interface{}) regoResult { } } if parent, ok := cause["parent"]; ok { - if m, ok := parent.(map[string]interface{}); ok { + if m, ok := parent.(map[string]any); ok { parentResult := parseCause(m) result.Parent = &parentResult } @@ -115,20 +115,20 @@ func parseCause(cause map[string]interface{}) regoResult { return result } -func parseLineNumber(raw interface{}) int { +func 
parseLineNumber(raw any) int { str := fmt.Sprintf("%s", raw) n, _ := strconv.Atoi(str) return n } -func (s *Scanner) convertResults(set rego.ResultSet, input Input, namespace string, rule string, traces []string) scan.Results { +func (s *Scanner) convertResults(set rego.ResultSet, input Input, namespace, rule string, traces []string) scan.Results { var results scan.Results offset := 0 if input.Contents != nil { - if xx, ok := input.Contents.(map[string]interface{}); ok { + if xx, ok := input.Contents.(map[string]any); ok { if md, ok := xx["__defsec_metadata"]; ok { - if md2, ok := md.(map[string]interface{}); ok { + if md2, ok := md.(map[string]any); ok { if sl, ok := md2["offset"]; ok { offset, _ = sl.(int) } @@ -138,9 +138,9 @@ func (s *Scanner) convertResults(set rego.ResultSet, input Input, namespace stri } for _, result := range set { for _, expression := range result.Expressions { - values, ok := expression.Value.([]interface{}) + values, ok := expression.Value.([]any) if !ok { - values = []interface{}{expression.Value} + values = []any{expression.Value} } for _, value := range values { diff --git a/pkg/iac/rego/result_test.go b/pkg/iac/rego/result_test.go index d958f7962b10..dc6c69b1d720 100644 --- a/pkg/iac/rego/result_test.go +++ b/pkg/iac/rego/result_test.go @@ -9,7 +9,7 @@ import ( func Test_parseResult(t *testing.T) { var testCases = []struct { name string - input interface{} + input any want regoResult }{ { @@ -30,7 +30,7 @@ func Test_parseResult(t *testing.T) { }, { name: "strings", - input: []interface{}{"message"}, + input: []any{"message"}, want: regoResult{ Managed: true, Message: "message", @@ -38,9 +38,9 @@ func Test_parseResult(t *testing.T) { }, { name: "maps", - input: []interface{}{ + input: []any{ "message", - map[string]interface{}{ + map[string]any{ "filepath": "a.out", }, }, @@ -52,7 +52,7 @@ func Test_parseResult(t *testing.T) { }, { name: "map", - input: map[string]interface{}{ + input: map[string]any{ "msg": "message", "filepath": 
"a.out", "fskey": "abcd", @@ -77,9 +77,9 @@ func Test_parseResult(t *testing.T) { }, { name: "parent", - input: map[string]interface{}{ + input: map[string]any{ "msg": "child", - "parent": map[string]interface{}{ + "parent": map[string]any{ "msg": "parent", }, }, diff --git a/pkg/iac/rego/scanner.go b/pkg/iac/rego/scanner.go index 05c2d8c23f5f..25a4440c7cef 100644 --- a/pkg/iac/rego/scanner.go +++ b/pkg/iac/rego/scanner.go @@ -39,7 +39,7 @@ type Scanner struct { dataFS fs.FS frameworks []framework.Framework spec string - inputSchema interface{} // unmarshalled into this from a json schema document + inputSchema any // unmarshalled into this from a json schema document sourceType types.Source } @@ -61,7 +61,7 @@ func (s *Scanner) SetUseEmbeddedPolicies(b bool) { // handled externally } -func (s *Scanner) trace(heading string, input interface{}) { +func (s *Scanner) trace(heading string, input any) { if s.traceWriter == nil { return } @@ -72,12 +72,12 @@ func (s *Scanner) trace(heading string, input interface{}) { _, _ = fmt.Fprintf(s.traceWriter, "REGO %[1]s:\n%s\nEND REGO %[1]s\n\n", heading, string(data)) } -func (s *Scanner) SetPolicyFilesystem(fs fs.FS) { - s.policyFS = fs +func (s *Scanner) SetPolicyFilesystem(fsys fs.FS) { + s.policyFS = fsys } -func (s *Scanner) SetDataFilesystem(fs fs.FS) { - s.dataFS = fs +func (s *Scanner) SetDataFilesystem(fsys fs.FS) { + s.dataFS = fsys } func (s *Scanner) SetPolicyReaders(_ []io.Reader) { @@ -126,7 +126,7 @@ type DynamicMetadata struct { EndLine int } -func NewScanner(source types.Source, options ...options.ScannerOption) *Scanner { +func NewScanner(source types.Source, opts ...options.ScannerOption) *Scanner { schema, ok := schemas.SchemaMap[source] if !ok { schema = schemas.Anything @@ -142,7 +142,7 @@ func NewScanner(source types.Source, options ...options.ScannerOption) *Scanner }, runtimeValues: addRuntimeValues(), } - for _, opt := range options { + for _, opt := range opts { opt(s) } if schema != schemas.None { 
@@ -158,7 +158,7 @@ func (s *Scanner) SetParentDebugLogger(l debug.Logger) { s.debug = l.Extend("rego") } -func (s *Scanner) runQuery(ctx context.Context, query string, input interface{}, disableTracing bool) (rego.ResultSet, []string, error) { +func (s *Scanner) runQuery(ctx context.Context, query string, input any, disableTracing bool) (rego.ResultSet, []string, error) { trace := (s.traceWriter != nil || s.tracePerResult) && !disableTracing @@ -203,9 +203,9 @@ func (s *Scanner) runQuery(ctx context.Context, query string, input interface{}, } type Input struct { - Path string `json:"path"` - FS fs.FS `json:"-"` - Contents interface{} `json:"contents"` + Path string `json:"path"` + FS fs.FS `json:"-"` + Contents any `json:"contents"` } func GetInputsContents(inputs []Input) []any { @@ -284,14 +284,14 @@ func isPolicyWithSubtype(sourceType types.Source) bool { return false } -func checkSubtype(ii map[string]interface{}, provider string, subTypes []SubType) bool { +func checkSubtype(ii map[string]any, provider string, subTypes []SubType) bool { if len(subTypes) == 0 { return true } for _, st := range subTypes { switch services := ii[provider].(type) { - case map[string]interface{}: // cloud + case map[string]any: // cloud for service := range services { if (service == st.Service) && (st.Provider == provider) { return true @@ -311,7 +311,7 @@ func checkSubtype(ii map[string]interface{}, provider string, subTypes []SubType func isPolicyApplicable(staticMetadata *StaticMetadata, inputs ...Input) bool { for _, input := range inputs { - if ii, ok := input.Contents.(map[string]interface{}); ok { + if ii, ok := input.Contents.(map[string]any); ok { for provider := range ii { // TODO(simar): Add other providers if !strings.Contains(strings.Join([]string{"kind", "aws", "azure"}, ","), provider) { @@ -334,8 +334,7 @@ func isPolicyApplicable(staticMetadata *StaticMetadata, inputs ...Input) bool { return false } -func (s *Scanner) applyRule(ctx context.Context, namespace string, 
rule string, inputs []Input, combined bool) (scan.Results, error) { - +func (s *Scanner) applyRule(ctx context.Context, namespace, rule string, inputs []Input, combined bool) (scan.Results, error) { // handle combined evaluations if possible if combined { s.trace("INPUT", inputs) @@ -376,7 +375,7 @@ func (s *Scanner) applyRule(ctx context.Context, namespace string, rule string, return results, nil } -func (s *Scanner) applyRuleCombined(ctx context.Context, namespace string, rule string, inputs []Input) (scan.Results, error) { +func (s *Scanner) applyRuleCombined(ctx context.Context, namespace, rule string, inputs []Input) (scan.Results, error) { if len(inputs) == 0 { return nil, nil } diff --git a/pkg/iac/rego/scanner_test.go b/pkg/iac/rego/scanner_test.go index d1aca5098f65..e9e80c19b2bf 100644 --- a/pkg/iac/rego/scanner_test.go +++ b/pkg/iac/rego/scanner_test.go @@ -50,7 +50,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, FS: srcFS, @@ -85,7 +85,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, FS: srcFS, @@ -120,7 +120,7 @@ warn { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -152,7 +152,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": false, }, }) @@ -195,7 +195,7 @@ exception[ns] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -243,7 +243,7 @@ exception[ns] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: 
map[string]any{ "evil": true, }, }) @@ -280,7 +280,7 @@ exception[rules] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -316,7 +316,7 @@ exception[rules] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -350,7 +350,7 @@ deny_evil { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -381,7 +381,7 @@ deny[msg] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -419,7 +419,7 @@ deny[res] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -461,7 +461,7 @@ deny[res] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -515,7 +515,7 @@ deny[res] { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -564,7 +564,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -598,7 +598,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -629,7 +629,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -664,7 +664,7 @@ deny { results, err := 
scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -698,7 +698,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "evil": true, }, }) @@ -736,7 +736,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "text": "dynamic", }, }) @@ -769,7 +769,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "text": "test", }, }) @@ -816,7 +816,7 @@ deny { results, err := scanner.ScanInput(context.TODO(), Input{ Path: "/evil.lol", - Contents: map[string]interface{}{ + Contents: map[string]any{ "text": "test", }, }) diff --git a/pkg/iac/rego/schemas/builder.go b/pkg/iac/rego/schemas/builder.go index beeb72f8b2e9..9c3c0374bc14 100644 --- a/pkg/iac/rego/schemas/builder.go +++ b/pkg/iac/rego/schemas/builder.go @@ -232,10 +232,10 @@ func (b *builder) readSlice(name string, parent, inputType reflect.Type, indent return prop, nil } -func (b *builder) readRego(def *Property, name string, parent reflect.Type, typ reflect.Type, raw interface{}, indent int) error { +func (b *builder) readRego(def *Property, name string, parent, typ reflect.Type, raw any, indent int) error { switch cast := raw.(type) { - case map[string]interface{}: + case map[string]any: def.Type = "object" for k, v := range cast { child := &Property{ diff --git a/pkg/iac/rules/providers.go b/pkg/iac/rules/providers.go index 60c976fd045b..7c14aa1c627a 100644 --- a/pkg/iac/rules/providers.go +++ b/pkg/iac/rules/providers.go @@ -150,7 +150,7 @@ func GetProviderServiceNames(providerName string) []string { return uniqueServices } -func GetProviderServiceCheckNames(providerName string, serviceName string) []string { +func 
GetProviderServiceCheckNames(providerName, serviceName string) []string { registeredRules := GetRegistered() diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go index f2e56f853f7b..6487c9bc6a2b 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/bench_test.go @@ -10,7 +10,7 @@ import ( ) func BenchmarkUnmarshal_JFather(b *testing.B) { - target := make(map[string]interface{}) + target := make(map[string]any) input := []byte(`{ "glossary": { "title": "example glossary", @@ -41,7 +41,7 @@ func BenchmarkUnmarshal_JFather(b *testing.B) { } func BenchmarkUnmarshal_Traditional(b *testing.B) { - target := make(map[string]interface{}) + target := make(map[string]any) input := []byte(`{ "glossary": { "title": "example glossary", diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode.go index 5dd2f6fd3e1c..cec50b442777 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/decode.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/defsec/pkg/types" ) -func (n *node) Decode(target interface{}) error { +func (n *node) Decode(target any) error { v := reflect.ValueOf(target) return n.decodeToValue(v) } diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go index 75faada57252..483880814383 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_array.go @@ -20,7 +20,7 @@ func (n *node) decodeArray(v reflect.Value) error { v.Set(reflect.MakeSlice(v.Type(), length, length)) case reflect.Interface: original = v - slice := reflect.ValueOf(make([]interface{}, length)) + slice := reflect.ValueOf(make([]any, length)) v = reflect.New(slice.Type()).Elem() v.Set(slice) default: diff --git 
a/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go index 57b611065242..fdc58f6c8e34 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/decode_object.go @@ -13,7 +13,7 @@ func (n *node) decodeObject(v reflect.Value) error { case reflect.Map: return n.decodeObjectToMap(v) case reflect.Interface: - target := reflect.New(reflect.TypeOf(make(map[string]interface{}, len(n.Content())))).Elem() + target := reflect.New(reflect.TypeOf(make(map[string]any, len(n.Content())))).Elem() if err := n.decodeObjectToMap(target); err != nil { return err } diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/node.go b/pkg/iac/scanners/azure/arm/parser/armjson/node.go index 3c398d6ed29c..fbfe74ae26e3 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/node.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/node.go @@ -5,7 +5,7 @@ import "github.com/aquasecurity/defsec/pkg/types" type Node interface { Comments() []Node Range() Range - Decode(target interface{}) error + Decode(target any) error Kind() Kind Content() []Node Metadata() types.Metadata @@ -22,7 +22,7 @@ type Position struct { } type node struct { - raw interface{} + raw any start Position end Position kind Kind diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse.go index ac86f459fced..f30ad13ed635 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse.go @@ -89,7 +89,7 @@ func (p *parser) undo() error { return nil } -func (p *parser) makeError(format string, args ...interface{}) error { +func (p *parser) makeError(format string, args ...any) error { return fmt.Errorf( "error at line %d, column %d: %s", p.position.Line, diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go 
index f1146ab08d87..3ddccffceb16 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_array_test.go @@ -38,7 +38,7 @@ func Test_Array_ToArray(t *testing.T) { func Test_Array_ToInterface(t *testing.T) { example := []byte(`{ "List": [1, 2, 3] }`) target := struct { - List interface{} + List any }{} metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &target, &metadata)) diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go index e1d44db6119c..f4c7f19db162 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_boolean_test.go @@ -46,9 +46,8 @@ func Test_Bool_ToUninitialisedPointer(t *testing.T) { func Test_Bool_ToInterface(t *testing.T) { example := []byte(`true`) - var output interface{} + var output any metadata := types.NewTestMetadata() - err := Unmarshal(example, &output, &metadata) - require.NoError(t, err) + require.NoError(t, Unmarshal(example, &output, &metadata)) assert.True(t, output.(bool)) } diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go index 17c4014b83a3..dbb8de49edfd 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_complex_test.go @@ -9,7 +9,7 @@ import ( ) func Test_Complex(t *testing.T) { - target := make(map[string]interface{}) + target := make(map[string]any) input := `{ "glossary": { "title": "example glossary", @@ -56,8 +56,8 @@ type Parameter struct { } type parameterInner struct { - Type string `json:"Type" yaml:"Type"` - Default interface{} `yaml:"Default"` + Type string `json:"Type" yaml:"Type"` + Default any `yaml:"Default"` } func (p *Parameter) UnmarshalJSONWithMetadata(node Node) error { @@ -73,7 +73,7 @@ 
type CFType string type propertyInner struct { Type CFType - Value interface{} `json:"Value" yaml:"Value"` + Value any `json:"Value" yaml:"Value"` } func (p *Property) UnmarshalJSONWithMetadata(node Node) error { diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go index f3895df90ffb..fc311f373635 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object.go @@ -32,7 +32,7 @@ func (p *parser) parseObject(parentMetadata *types.Metadata) (Node, error) { } -// nolint:cyclop +// nolint:gocyclo func (p *parser) iterateObject(nextComments []Node, metadata *types.Metadata, n *node) (Node, error) { for { diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go index 56985ecbc805..7e5f99e16580 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go @@ -68,7 +68,7 @@ func Test_Object_ToMapStringInterface(t *testing.T) { "Name": "testing" }`) - parent := make(map[string]interface{}) + parent := make(map[string]any) metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &parent, &metadata)) assert.Equal(t, "testing", parent["Name"]) @@ -93,7 +93,7 @@ func Test_Object_ToNestedMapStringInterfaceFromIAM(t *testing.T) { ] }`) - parent := make(map[string]interface{}) + parent := make(map[string]any) metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &parent, &metadata)) } @@ -106,10 +106,10 @@ func Test_Object_ToNestedMapStringInterface(t *testing.T) { "Name": "testing" }`) - parent := make(map[string]interface{}) + parent := make(map[string]any) metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &parent, &metadata)) assert.Equal(t, "testing", parent["Name"]) - child := 
parent["Child"].(map[string]interface{}) + child := parent["Child"].(map[string]any) assert.Equal(t, "password", child["secret"]) } diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go index 83c98cd859fc..43ce5ff1577d 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_string_test.go @@ -29,7 +29,7 @@ func Test_StringToUninitialisedPointer(t *testing.T) { func Test_String_ToInterface(t *testing.T) { example := []byte(`"hello"`) - var output interface{} + var output any metadata := types.NewTestMetadata() err := Unmarshal(example, &output, &metadata) require.NoError(t, err) diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go b/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go index 6e096a694d8a..fc43170d72a1 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/unmarshal.go @@ -15,7 +15,7 @@ type MetadataReceiver interface { SetMetadata(m *types.Metadata) } -func Unmarshal(data []byte, target interface{}, metadata *types.Metadata) error { +func Unmarshal(data []byte, target any, metadata *types.Metadata) error { node, err := newParser(NewPeekReader(bytes.NewReader(data)), Position{1, 1}).parse(metadata) if err != nil { return err @@ -27,7 +27,7 @@ func Unmarshal(data []byte, target interface{}, metadata *types.Metadata) error return nil } -func UnmarshalFromReader(r io.ReadSeeker, target interface{}, metadata *types.Metadata) error { +func UnmarshalFromReader(r io.ReadSeeker, target any, metadata *types.Metadata) error { node, err := newParser(NewPeekReader(r), Position{1, 1}).parse(metadata) if err != nil { return err diff --git a/pkg/iac/scanners/azure/arm/scanner.go b/pkg/iac/scanners/azure/arm/scanner.go index 7a66b1bb7ed5..77750a5a09d1 100644 --- a/pkg/iac/scanners/azure/arm/scanner.go +++ 
b/pkg/iac/scanners/azure/arm/scanner.go @@ -25,6 +25,7 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { + mu sync.Mutex scannerOptions []options.ScannerOption parserOptions []options.ParserOption debug debug.Logger @@ -37,7 +38,6 @@ type Scanner struct { policyReaders []io.Reader regoScanner *rego.Scanner spec string - sync.Mutex } func (s *Scanner) SetSpec(spec string) { @@ -104,8 +104,8 @@ func (s *Scanner) SetPolicyNamespaces(...string) {} func (s *Scanner) SetRegoErrorLimit(_ int) {} func (s *Scanner) initRegoScanner(srcFS fs.FS) error { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return nil } @@ -118,17 +118,17 @@ func (s *Scanner) initRegoScanner(srcFS fs.FS) error { return nil } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { - p := parser.New(fs, s.parserOptions...) +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) { + p := parser.New(fsys, s.parserOptions...) 
deployments, err := p.ParseFS(ctx, dir) if err != nil { return nil, err } - if err := s.initRegoScanner(fs); err != nil { + if err := s.initRegoScanner(fsys); err != nil { return nil, err } - return s.scanDeployments(ctx, deployments, fs) + return s.scanDeployments(ctx, deployments, fsys) } func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deployment, f fs.FS) (scan.Results, error) { @@ -147,7 +147,7 @@ func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deplo return results, nil } -func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fs fs.FS) (scan.Results, error) { +func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fsys fs.FS) (scan.Results, error) { var results scan.Results deploymentState := s.adaptDeployment(ctx, deployment) if !s.regoOnly { @@ -170,7 +170,7 @@ func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deploymen regoResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ Path: deployment.Metadata.Range().GetFilename(), - FS: fs, + FS: fsys, Contents: deploymentState.ToRego(), }) if err != nil { diff --git a/pkg/iac/scanners/azure/deployment.go b/pkg/iac/scanners/azure/deployment.go index 6df8b48d6b6a..4ba48cdb766d 100644 --- a/pkg/iac/scanners/azure/deployment.go +++ b/pkg/iac/scanners/azure/deployment.go @@ -77,7 +77,7 @@ func (r *Resource) GetResourcesByType(t string) []Resource { return resources } -func (d *Deployment) GetParameter(parameterName string) interface{} { +func (d *Deployment) GetParameter(parameterName string) any { for _, parameter := range d.Parameters { if parameter.Name == parameterName { @@ -87,7 +87,7 @@ func (d *Deployment) GetParameter(parameterName string) interface{} { return nil } -func (d *Deployment) GetVariable(variableName string) interface{} { +func (d *Deployment) GetVariable(variableName string) any { for _, variable := range d.Variables { if variable.Name == variableName { @@ -97,7 
+97,7 @@ func (d *Deployment) GetVariable(variableName string) interface{} { return nil } -func (d *Deployment) GetEnvVariable(envVariableName string) interface{} { +func (d *Deployment) GetEnvVariable(envVariableName string) any { if envVariable, exists := os.LookupEnv(envVariableName); exists { return envVariable @@ -105,7 +105,7 @@ func (d *Deployment) GetEnvVariable(envVariableName string) interface{} { return nil } -func (d *Deployment) GetOutput(outputName string) interface{} { +func (d *Deployment) GetOutput(outputName string) any { for _, output := range d.Outputs { if output.Name == outputName { @@ -115,15 +115,15 @@ func (d *Deployment) GetOutput(outputName string) interface{} { return nil } -func (d *Deployment) GetDeployment() interface{} { +func (d *Deployment) GetDeployment() any { type template struct { - Schema string `json:"$schema"` - ContentVersion string `json:"contentVersion"` - Parameters map[string]interface{} `json:"parameters"` - Variables map[string]interface{} `json:"variables"` - Resources []interface{} `json:"resources"` - Outputs map[string]interface{} `json:"outputs"` + Schema string `json:"$schema"` + ContentVersion string `json:"contentVersion"` + Parameters map[string]any `json:"parameters"` + Variables map[string]any `json:"variables"` + Resources []any `json:"resources"` + Outputs map[string]any `json:"outputs"` } type templateLink struct { @@ -131,12 +131,12 @@ func (d *Deployment) GetDeployment() interface{} { } type properties struct { - TemplateLink templateLink `json:"templateLink"` - Template template `json:"template"` - TemplateHash string `json:"templateHash"` - Parameters map[string]interface{} `json:"parameters"` - Mode string `json:"mode"` - ProvisioningState string `json:"provisioningState"` + TemplateLink templateLink `json:"templateLink"` + Template template `json:"template"` + TemplateHash string `json:"templateHash"` + Parameters map[string]any `json:"parameters"` + Mode string `json:"mode"` + ProvisioningState 
string `json:"provisioningState"` } deploymentShell := struct { @@ -151,10 +151,10 @@ func (d *Deployment) GetDeployment() interface{} { Template: template{ Schema: "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", ContentVersion: "", - Parameters: make(map[string]interface{}), - Variables: make(map[string]interface{}), - Resources: make([]interface{}, 0), - Outputs: make(map[string]interface{}), + Parameters: make(map[string]any), + Variables: make(map[string]any), + Resources: make([]any, 0), + Outputs: make(map[string]any), }, }, } diff --git a/pkg/iac/scanners/azure/expressions/lex.go b/pkg/iac/scanners/azure/expressions/lex.go index 09eb7b819eff..f5cfb2a34705 100644 --- a/pkg/iac/scanners/azure/expressions/lex.go +++ b/pkg/iac/scanners/azure/expressions/lex.go @@ -23,7 +23,7 @@ const ( type Token struct { Type TokenType - Data interface{} + Data any } type lexer struct { diff --git a/pkg/iac/scanners/azure/expressions/node.go b/pkg/iac/scanners/azure/expressions/node.go index 843357dd9a62..a66d4e458505 100644 --- a/pkg/iac/scanners/azure/expressions/node.go +++ b/pkg/iac/scanners/azure/expressions/node.go @@ -5,14 +5,14 @@ import ( ) type Node interface { - Evaluate(deploymentProvider functions.DeploymentData) interface{} + Evaluate(deploymentProvider functions.DeploymentData) any } type expressionValue struct { - val interface{} + val any } -func (e expressionValue) Evaluate(deploymentProvider functions.DeploymentData) interface{} { +func (e expressionValue) Evaluate(deploymentProvider functions.DeploymentData) any { if f, ok := e.val.(expression); ok { return f.Evaluate(deploymentProvider) } @@ -24,8 +24,8 @@ type expression struct { args []Node } -func (f expression) Evaluate(deploymentProvider functions.DeploymentData) interface{} { - args := make([]interface{}, len(f.args)) +func (f expression) Evaluate(deploymentProvider functions.DeploymentData) any { + args := make([]any, len(f.args)) for i, arg := range f.args { 
args[i] = arg.Evaluate(deploymentProvider) } diff --git a/pkg/iac/scanners/azure/functions/intersection_test.go b/pkg/iac/scanners/azure/functions/intersection_test.go index 98630fa9687c..fecd2f3f2de1 100644 --- a/pkg/iac/scanners/azure/functions/intersection_test.go +++ b/pkg/iac/scanners/azure/functions/intersection_test.go @@ -36,7 +36,7 @@ func Test_Intersect(t *testing.T) { []interface{}{"a", "b", "c"}, []interface{}{}, }, - expected: []interface{}{}, + expected: *new([]interface{}), }, { name: "intersect two arrays with both empty", @@ -44,7 +44,7 @@ func Test_Intersect(t *testing.T) { []interface{}{}, []interface{}{}, }, - expected: []interface{}{}, + expected: *new([]interface{}), }, { name: "intersect two arrays with both nil", diff --git a/pkg/iac/scanners/azure/functions/union.go b/pkg/iac/scanners/azure/functions/union.go index 37bccbbcd262..bd450a74af47 100644 --- a/pkg/iac/scanners/azure/functions/union.go +++ b/pkg/iac/scanners/azure/functions/union.go @@ -2,30 +2,30 @@ package functions import "sort" -func Union(args ...interface{}) interface{} { +func Union(args ...any) any { if len(args) == 0 { - return []interface{}{} + return []any{} } if len(args) == 1 { return args[0] } switch args[0].(type) { - case map[string]interface{}: + case map[string]any: return unionMap(args...) - case interface{}: + case any: return unionArray(args...) 
} - return []interface{}{} + return []any{} } -func unionMap(args ...interface{}) interface{} { - result := make(map[string]interface{}) +func unionMap(args ...any) any { + result := make(map[string]any) for _, arg := range args { - if iType, ok := arg.(map[string]interface{}); ok { + if iType, ok := arg.(map[string]any); ok { for k, v := range iType { result[k] = v } @@ -35,15 +35,14 @@ func unionMap(args ...interface{}) interface{} { return result } -func unionArray(args ...interface{}) interface{} { - var result []interface{} - union := make(map[interface{}]bool) +func unionArray(args ...any) any { + var result []any + union := make(map[any]struct{}) for _, arg := range args { - switch iType := arg.(type) { - case []interface{}: + if iType, ok := arg.([]any); ok { for _, item := range iType { - union[item] = true + union[item] = struct{}{} } } } diff --git a/pkg/iac/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go index 0d4d19b06c0d..5a8d6a942259 100644 --- a/pkg/iac/scanners/azure/value.go +++ b/pkg/iac/scanners/azure/value.go @@ -27,7 +27,7 @@ const ( type Value struct { types.Metadata - rLit interface{} + rLit any rMap map[string]Value rArr []Value Kind Kind @@ -38,14 +38,14 @@ var NullValue = Value{ Kind: KindNull, } -func NewValue(value interface{}, metadata types.Metadata) Value { +func NewValue(value any, metadata types.Metadata) Value { v := Value{ Metadata: metadata, } switch ty := value.(type) { - case []interface{}: + case []any: v.Kind = KindArray for _, child := range ty { if internal, ok := child.(Value); ok { @@ -58,7 +58,7 @@ func NewValue(value interface{}, metadata types.Metadata) Value { v.Kind = KindArray v.rArr = append(v.rArr, ty...) 
- case map[string]interface{}: + case map[string]any: v.Kind = KindObject v.rMap = make(map[string]Value) for key, val := range ty { @@ -261,7 +261,7 @@ func (v Value) AsBoolValue(defaultValue bool, metadata types.Metadata) types.Boo return types.Bool(v.rLit.(bool), v.GetMetadata()) } -func (v Value) EqualTo(value interface{}) bool { +func (v Value) EqualTo(value any) bool { switch ty := value.(type) { case string: return v.AsString() == ty @@ -302,7 +302,7 @@ func (v Value) AsList() []Value { return v.rArr } -func (v Value) Raw() interface{} { +func (v Value) Raw() any { switch v.Kind { case KindArray: // TODO: recursively build raw array diff --git a/pkg/iac/scanners/cloudformation/parser/file_context.go b/pkg/iac/scanners/cloudformation/parser/file_context.go index 35f4483018f8..18a98a661598 100644 --- a/pkg/iac/scanners/cloudformation/parser/file_context.go +++ b/pkg/iac/scanners/cloudformation/parser/file_context.go @@ -17,11 +17,11 @@ type FileContext struct { filepath string lines []string SourceFormat SourceFormat - Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` - Resources map[string]*Resource `json:"Resources" yaml:"Resources"` - Globals map[string]*Resource `json:"Globals" yaml:"Globals"` - Mappings map[string]interface{} `json:"Mappings,omitempty" yaml:"Mappings"` - Conditions map[string]Property `json:"Conditions,omitempty" yaml:"Conditions"` + Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` + Resources map[string]*Resource `json:"Resources" yaml:"Resources"` + Globals map[string]*Resource `json:"Globals" yaml:"Globals"` + Mappings map[string]any `json:"Mappings,omitempty" yaml:"Mappings"` + Conditions map[string]Property `json:"Conditions,omitempty" yaml:"Conditions"` } func (t *FileContext) GetResourceByLogicalID(name string) *Resource { diff --git a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go index 378ffdfa81ce..3fb21dca82de 100644 --- 
a/pkg/iac/scanners/cloudformation/parser/fn_builtin.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_builtin.go @@ -43,7 +43,7 @@ func GetCidr(property *Property) (*Property, bool) { return property.deriveResolved(cftypes.List, ranges), true } -func calculateCidrs(ipaddress string, count int, bit int, original *Property) ([]*Property, error) { +func calculateCidrs(ipaddress string, count, bit int, original *Property) ([]*Property, error) { var cidrProperties []*Property diff --git a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go index 7767f0126456..b379cba3527e 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map.go @@ -28,14 +28,14 @@ func ResolveFindInMap(property *Property) (resolved *Property, success bool) { return abortIntrinsic(property, "could not find map %s, returning original Property") } - mapContents := m.(map[string]interface{}) + mapContents := m.(map[string]any) k, ok := mapContents[topLevelKey] if !ok { return abortIntrinsic(property, "could not find %s in the %s map, returning original Property", topLevelKey, mapName) } - mapValues := k.(map[string]interface{}) + mapValues := k.(map[string]any) if prop, ok := mapValues[secondaryLevelKey]; !ok { return abortIntrinsic(property, "could not find a value for %s in %s, returning original Property", secondaryLevelKey, topLevelKey) diff --git a/pkg/iac/scanners/cloudformation/parser/fn_ref.go b/pkg/iac/scanners/cloudformation/parser/fn_ref.go index a14740dff91a..0a37f2d2b0b5 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_ref.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_ref.go @@ -25,22 +25,24 @@ func ResolveReference(property *Property) (resolved *Property, success bool) { var param *Parameter for k := range property.ctx.Parameters { - if k == refValue { - param = property.ctx.Parameters[k] - resolvedType := param.Type() - - switch 
param.Default().(type) { - case bool: - resolvedType = cftypes.Bool - case string: - resolvedType = cftypes.String - case int: - resolvedType = cftypes.Int - } - - resolved = property.deriveResolved(resolvedType, param.Default()) - return resolved, true + if k != refValue { + continue } + + param = property.ctx.Parameters[k] + resolvedType := param.Type() + + switch param.Default().(type) { + case bool: + resolvedType = cftypes.Bool + case string: + resolvedType = cftypes.String + case int: + resolvedType = cftypes.Int + } + + resolved = property.deriveResolved(resolvedType, param.Default()) + return resolved, true } for k := range property.ctx.Resources { diff --git a/pkg/iac/scanners/cloudformation/parser/fn_split.go b/pkg/iac/scanners/cloudformation/parser/fn_split.go index fc14d63e93a1..cddda20ef190 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_split.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_split.go @@ -30,7 +30,7 @@ func ResolveSplit(property *Property) (resolved *Property, success bool) { return property.deriveResolved(cftypes.List, propertyList), true } -func createPropertyList(splitProp *Property, delimiterProp *Property, parent *Property) []*Property { +func createPropertyList(splitProp, delimiterProp, parent *Property) []*Property { splitString := splitProp.AsString() delimiter := delimiterProp.AsString() diff --git a/pkg/iac/scanners/cloudformation/parser/fn_sub.go b/pkg/iac/scanners/cloudformation/parser/fn_sub.go index 514680f95ad5..52db66cf9757 100644 --- a/pkg/iac/scanners/cloudformation/parser/fn_sub.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_sub.go @@ -26,7 +26,7 @@ func ResolveSub(property *Property) (resolved *Property, success bool) { return property, false } -func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { +func resolveMapSub(refValue, original *Property) (*Property, bool) { refValues := refValue.AsList() if len(refValues) != 2 { return abortIntrinsic(original, "Fn::Sub with list expects 
2 values, returning original property") @@ -60,7 +60,7 @@ func resolveMapSub(refValue *Property, original *Property) (*Property, bool) { return original.deriveResolved(cftypes.String, workingString), true } -func resolveStringSub(refValue *Property, original *Property) *Property { +func resolveStringSub(refValue, original *Property) *Property { workingString := refValue.AsString() for k, param := range pseudoParameters { diff --git a/pkg/iac/scanners/cloudformation/parser/parameter.go b/pkg/iac/scanners/cloudformation/parser/parameter.go index 2007ca65b2b0..671058a9f83c 100644 --- a/pkg/iac/scanners/cloudformation/parser/parameter.go +++ b/pkg/iac/scanners/cloudformation/parser/parameter.go @@ -18,8 +18,8 @@ type Parameter struct { } type parameterInner struct { - Type string `yaml:"Type"` - Default interface{} `yaml:"Default"` + Type string `yaml:"Type"` + Default any `yaml:"Default"` } func (p *Parameter) UnmarshalYAML(node *yaml.Node) error { @@ -43,11 +43,11 @@ func (p *Parameter) Type() cftypes.CfType { } } -func (p *Parameter) Default() interface{} { +func (p *Parameter) Default() any { return p.inner.Default } -func (p *Parameter) UpdateDefault(inVal interface{}) { +func (p *Parameter) UpdateDefault(inVal any) { passedVal := inVal.(string) switch p.inner.Type { @@ -90,36 +90,34 @@ func (p *Parameters) UnmarshalJSON(data []byte) error { (*p) = params.Params case data[0] == '[' && data[len(data)-1] == ']': // array - { - // Original format - var params []string - - if err := json.Unmarshal(data, ¶ms); err == nil { - for _, param := range params { - parts := strings.Split(param, "=") - if len(parts) != 2 { - return fmt.Errorf("invalid key-value parameter: %q", param) - } - (*p)[parts[0]] = parts[1] + // Original format + var params []string + + if err := json.Unmarshal(data, ¶ms); err == nil { + for _, param := range params { + parts := strings.Split(param, "=") + if len(parts) != 2 { + return fmt.Errorf("invalid key-value parameter: %q", param) } - return nil 
+ (*p)[parts[0]] = parts[1] } + return nil + } - // CloudFormation like format - var cfparams []struct { - ParameterKey string `json:"ParameterKey"` - ParameterValue string `json:"ParameterValue"` - } + // CloudFormation like format + var cfparams []struct { + ParameterKey string `json:"ParameterKey"` + ParameterValue string `json:"ParameterValue"` + } - d := json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&cfparams); err != nil { - return err - } + d := json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&cfparams); err != nil { + return err + } - for _, param := range cfparams { - (*p)[param.ParameterKey] = param.ParameterValue - } + for _, param := range cfparams { + (*p)[param.ParameterKey] = param.ParameterValue } default: return fmt.Errorf("unsupported parameters format") diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index 9edd3639f111..5ba989220544 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -62,9 +62,9 @@ func (p *Parser) SetSkipRequiredCheck(b bool) { p.skipRequired = b } -func New(options ...options.ParserOption) *Parser { +func New(opts ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range opts { option(p) } return p @@ -103,12 +103,12 @@ func (p *Parser) ParseFS(ctx context.Context, fsys fs.FS, dir string) (FileConte return contexts, nil } -func (p *Parser) Required(fs fs.FS, path string) bool { +func (p *Parser) Required(fsys fs.FS, path string) bool { if p.skipRequired { return true } - f, err := fs.Open(filepath.ToSlash(path)) + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return false } @@ -120,7 +120,7 @@ func (p *Parser) Required(fs fs.FS, path string) bool { } -func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (context *FileContext, err 
error) { +func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (fctx *FileContext, err error) { defer func() { if e := recover(); e != nil { err = fmt.Errorf("panic during parse: %s", e) @@ -159,40 +159,40 @@ func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (contex lines := strings.Split(string(content), "\n") - context = &FileContext{ + fctx = &FileContext{ filepath: path, lines: lines, SourceFormat: sourceFmt, } if strings.HasSuffix(strings.ToLower(path), ".json") { - if err := jfather.Unmarshal(content, context); err != nil { + if err := jfather.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } else { - if err := yaml.Unmarshal(content, context); err != nil { + if err := yaml.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } - context.OverrideParameters(p.overridedParameters) + fctx.OverrideParameters(p.overridedParameters) - context.lines = lines - context.SourceFormat = sourceFmt - context.filepath = path + fctx.lines = lines + fctx.SourceFormat = sourceFmt + fctx.filepath = path p.debug.Log("Context loaded from source %s", path) // the context must be set to conditions before resources - for _, c := range context.Conditions { - c.setContext(context) + for _, c := range fctx.Conditions { + c.setContext(fctx) } - for name, r := range context.Resources { - r.ConfigureResource(name, fsys, path, context) + for name, r := range fctx.Resources { + r.ConfigureResource(name, fsys, path, fctx) } - return context, nil + return fctx, nil } func (p *Parser) parseParams() error { diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index 466de3497c85..683fddf4cc3e 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -32,7 +32,7 @@ type Property struct { type PropertyInner struct { Type cftypes.CfType - Value interface{} 
`json:"Value" yaml:"Value"` + Value any `json:"Value" yaml:"Value"` } func (p *Property) Comment() string { @@ -140,7 +140,7 @@ func (p *Property) isFunction() bool { return false } -func (p *Property) RawValue() interface{} { +func (p *Property) RawValue() any { return p.Inner.Value } @@ -273,7 +273,7 @@ func (p *Property) GetProperty(path string) *Property { return &Property{} } -func (p *Property) deriveResolved(propType cftypes.CfType, propValue interface{}) *Property { +func (p *Property) deriveResolved(propType cftypes.CfType, propValue any) *Property { return &Property{ ctx: p.ctx, name: p.name, @@ -380,7 +380,7 @@ func (p *Property) GetJsonBytes(squashList ...bool) []byte { lines = removeLeftMargin(lines) yamlContent := strings.Join(lines, "\n") - var body interface{} + var body any if err := yaml.Unmarshal([]byte(yamlContent), &body); err != nil { return nil } @@ -410,15 +410,15 @@ func removeLeftMargin(lines []string) []string { return lines } -func convert(input interface{}) interface{} { +func convert(input any) any { switch x := input.(type) { - case map[interface{}]interface{}: - outpMap := make(map[string]interface{}) + case map[any]any: + outpMap := make(map[string]any) for k, v := range x { outpMap[k.(string)] = convert(v) } return outpMap - case []interface{}: + case []any: for i, v := range x { x[i] = convert(v) } diff --git a/pkg/iac/scanners/cloudformation/parser/property_helpers.go b/pkg/iac/scanners/cloudformation/parser/property_helpers.go index c7b9d9efac2d..8f44b2d6c74e 100644 --- a/pkg/iac/scanners/cloudformation/parser/property_helpers.go +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers.go @@ -169,7 +169,7 @@ func (p *Property) Len() int { return len(p.AsList()) } -func (p *Property) EqualTo(checkValue interface{}, equalityOptions ...EqualityOptions) bool { +func (p *Property) EqualTo(checkValue any, equalityOptions ...EqualityOptions) bool { var ignoreCase bool for _, option := range equalityOptions { if option == 
IgnoreCase { @@ -235,7 +235,7 @@ func (p *Property) IsEmpty() bool { } } -func (p *Property) Contains(checkVal interface{}) bool { +func (p *Property) Contains(checkVal any) bool { if p == nil || p.IsNil() { return false } diff --git a/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go index 1fa1885a408b..5e26801ba3aa 100644 --- a/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go +++ b/pkg/iac/scanners/cloudformation/parser/property_helpers_test.go @@ -21,7 +21,7 @@ func Test_EqualTo(t *testing.T) { tests := []struct { name string property *Property - checkValue interface{} + checkValue any opts []EqualityOptions isEqual bool }{ diff --git a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go index 3775026678a3..91933cf9e2fa 100644 --- a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go +++ b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters.go @@ -4,8 +4,8 @@ import "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/cftypes" type pseudoParameter struct { t cftypes.CfType - val interface{} - raw interface{} + val any + raw any } var pseudoParameters = map[string]pseudoParameter{ @@ -36,7 +36,7 @@ var pseudoParameters = map[string]pseudoParameter{ "AWS::URLSuffix": {t: cftypes.String, val: "amazonaws.com"}, } -func (p pseudoParameter) getRawValue() interface{} { +func (p pseudoParameter) getRawValue() any { switch p.t { case cftypes.List: return p.raw diff --git a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go index 281bf9083a14..e653b74e65ef 100644 --- a/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go +++ b/pkg/iac/scanners/cloudformation/parser/pseudo_parameters_test.go @@ -10,7 +10,7 @@ func Test_Raw(t *testing.T) { tests := []struct { name string key string - expected 
interface{} + expected any }{ { name: "parameter with a string type value", diff --git a/pkg/iac/scanners/cloudformation/scanner.go b/pkg/iac/scanners/cloudformation/scanner.go index c39ebf02e52f..2c9d3157abbb 100644 --- a/pkg/iac/scanners/cloudformation/scanner.go +++ b/pkg/iac/scanners/cloudformation/scanner.go @@ -48,6 +48,7 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { + mu sync.Mutex debug debug.Logger policyDirs []string policyReaders []io.Reader @@ -61,7 +62,6 @@ type Scanner struct { parserOptions []options.ParserOption frameworks []framework.Framework spec string - sync.Mutex } func (s *Scanner) addParserOptions(opt options.ParserOption) { @@ -136,8 +136,8 @@ func New(opts ...options.ScannerOption) *Scanner { } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } @@ -150,9 +150,9 @@ func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { return regoScanner, nil } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results scan.Results, err error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (results scan.Results, err error) { - contexts, err := s.parser.ParseFS(ctx, fs, dir) + contexts, err := s.parser.ParseFS(ctx, fsys, dir) if err != nil { return nil, err } @@ -161,7 +161,7 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca return nil, nil } - regoScanner, err := s.initRegoScanner(fs) + regoScanner, err := s.initRegoScanner(fsys) if err != nil { return nil, err } @@ -170,7 +170,7 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca if cfCtx == nil { continue } - fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys) if err != nil { return 
nil, err } @@ -182,23 +182,23 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (results sca return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - cfCtx, err := s.parser.ParseFile(ctx, fs, path) + cfCtx, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } - regoScanner, err := s.initRegoScanner(fs) + regoScanner, err := s.initRegoScanner(fsys) if err != nil { return nil, err } - results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fs) + results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys) if err != nil { return nil, err } - results.SetSourceAndFilesystem("", fs, false) + results.SetSourceAndFilesystem("", fsys, false) sort.Slice(results, func(i, j int) bool { return results[i].Rule().AVDID < results[j].Rule().AVDID @@ -206,7 +206,7 @@ func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Res return results, nil } -func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fs fs.FS) (results scan.Results, err error) { +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fsys fs.FS) (results scan.Results, err error) { state := adapter.Adapt(*cfCtx) if state == nil { return nil, nil @@ -241,7 +241,7 @@ func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner } regoResults, err := regoScanner.ScanInput(ctx, rego.Input{ Path: cfCtx.Metadata().Range().GetFilename(), - FS: fs, + FS: fsys, Contents: state.ToRego(), }) if err != nil { diff --git a/pkg/iac/scanners/dockerfile/parser/parser.go b/pkg/iac/scanners/dockerfile/parser/parser.go index cd21642ac0a2..8b13d1e0e61e 100644 --- a/pkg/iac/scanners/dockerfile/parser/parser.go +++ b/pkg/iac/scanners/dockerfile/parser/parser.go @@ -33,9 +33,9 @@ func 
(p *Parser) SetSkipRequiredCheck(b bool) { } // New creates a new Dockerfile parser -func New(options ...options.ParserOption) *Parser { +func New(opts ...options.ParserOption) *Parser { p := &Parser{} - for _, option := range options { + for _, option := range opts { option(p) } return p @@ -73,8 +73,8 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Dockerfile content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (*dockerfile.Dockerfile, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (*dockerfile.Dockerfile, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } diff --git a/pkg/iac/scanners/dockerfile/scanner.go b/pkg/iac/scanners/dockerfile/scanner.go index 46da048e1b2f..f4c7a0670555 100644 --- a/pkg/iac/scanners/dockerfile/scanner.go +++ b/pkg/iac/scanners/dockerfile/scanner.go @@ -20,16 +20,16 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { - debug debug.Logger - policyDirs []string - policyReaders []io.Reader - parser *parser.Parser - regoScanner *rego.Scanner - skipRequired bool - options []options.ScannerOption - frameworks []framework.Framework - spec string - sync.Mutex + mu sync.Mutex + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption + frameworks []framework.Framework + spec string loadEmbeddedLibraries bool loadEmbeddedPolicies bool } @@ -112,9 +112,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := 
s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -127,33 +127,33 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul for path, dfile := range files { inputs = append(inputs, rego.Input{ Path: path, - FS: fs, + FS: fsys, Contents: dfile.ToRego(), }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - dockerfile, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + dockerfile, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: dockerfile.ToRego(), }) } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index 5c2b40ed41df..d60b82069888 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -76,7 +76,7 @@ func (p *Parser) SetAPIVersions(values ...string) { p.apiVersions = values } -func New(path string, options ...options.ParserOption) *Parser { +func New(path string, opts ...options.ParserOption) *Parser { client := action.NewInstall(&action.Configuration{}) client.DryRun = true // don't do anything @@ -88,7 +88,7 @@ func New(path string, options ...options.ParserOption) *Parser { ChartSource: path, } - for _, option := range options { + for _, option := range opts { option(p) } @@ -166,14 +166,14 @@ func (p *Parser) addPaths(paths ...string) 
error { func (p *Parser) extractChartName(chartPath string) error { - chart, err := p.workingFS.Open(chartPath) + f, err := p.workingFS.Open(chartPath) if err != nil { return err } - defer func() { _ = chart.Close() }() + defer func() { _ = f.Close() }() - var chartContent map[string]interface{} - if err := yaml.NewDecoder(chart).Decode(&chartContent); err != nil { + var chartContent map[string]any + if err := yaml.NewDecoder(f).Decode(&chartContent); err != nil { // the chart likely has the name templated and so cannot be parsed as yaml - use a temporary name if dir := filepath.Dir(chartPath); dir != "" && dir != "." { p.helmClient.ReleaseName = dir @@ -223,7 +223,7 @@ func (p *Parser) RenderedChartFiles() ([]ChartFile, error) { return p.getRenderedManifests(manifestsKeys, splitManifests), nil } -func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { +func (p *Parser) getRelease(helmChart *chart.Chart) (*release.Release, error) { opts := &ValueOptions{ ValueFiles: p.valuesFiles, Values: p.values, @@ -235,7 +235,7 @@ func (p *Parser) getRelease(chart *chart.Chart) (*release.Release, error) { if err != nil { return nil, err } - r, err := p.helmClient.RunWithContext(context.Background(), chart, vals) + r, err := p.helmClient.RunWithContext(context.Background(), helmChart, vals) if err != nil { return nil, err } diff --git a/pkg/iac/scanners/helm/parser/vals.go b/pkg/iac/scanners/helm/parser/vals.go index b54cd7c3a521..f2589b3caec7 100644 --- a/pkg/iac/scanners/helm/parser/vals.go +++ b/pkg/iac/scanners/helm/parser/vals.go @@ -21,12 +21,12 @@ type ValueOptions struct { // MergeValues merges values from files specified via -f/--values and directly // via --set, --set-string, or --set-file, marshaling them to YAML -func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { - base := make(map[string]interface{}) +func (opts *ValueOptions) MergeValues() (map[string]any, error) { + base := make(map[string]any) // User specified a values 
files via -f/--values for _, filePath := range opts.ValueFiles { - currentMap := make(map[string]interface{}) + currentMap := make(map[string]any) bytes, err := readFile(filePath) if err != nil { @@ -56,7 +56,7 @@ func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { // User specified a value via --set-file for _, value := range opts.FileValues { - reader := func(rs []rune) (interface{}, error) { + reader := func(rs []rune) (any, error) { bytes, err := readFile(string(rs)) if err != nil { return nil, err @@ -71,15 +71,15 @@ func (opts *ValueOptions) MergeValues() (map[string]interface{}, error) { return base, nil } -func mergeMaps(a, b map[string]interface{}) map[string]interface{} { - out := make(map[string]interface{}, len(a)) +func mergeMaps(a, b map[string]any) map[string]any { + out := make(map[string]any, len(a)) for k, v := range a { out[k] = v } for k, v := range b { - if v, ok := v.(map[string]interface{}); ok { + if v, ok := v.(map[string]any); ok { if bv, ok := out[k]; ok { - if bv, ok := bv.(map[string]interface{}); ok { + if bv, ok := bv.(map[string]any); ok { out[k] = mergeMaps(bv, v) continue } diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index e81f22208089..bacff8eb4c64 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -52,19 +52,19 @@ func (s *Scanner) SetFrameworks(frameworks []framework.Framework) { } // New creates a new Scanner -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ - options: options, + options: opts, } - for _, option := range options { + for _, option := range opts { option(s) } return s } -func (s *Scanner) AddParserOptions(options ...options.ParserOption) { - s.parserOptions = append(s.parserOptions, options...) +func (s *Scanner) AddParserOptions(opts ...options.ParserOption) { + s.parserOptions = append(s.parserOptions, opts...) 
} func (s *Scanner) SetUseEmbeddedPolicies(b bool) { diff --git a/pkg/iac/scanners/json/parser/parser.go b/pkg/iac/scanners/json/parser/parser.go index ff3417b8f0b9..0adbe27cca91 100644 --- a/pkg/iac/scanners/json/parser/parser.go +++ b/pkg/iac/scanners/json/parser/parser.go @@ -36,9 +36,9 @@ func New(opts ...options.ParserOption) *Parser { return p } -func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]interface{}, error) { +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]any, error) { - files := make(map[string]interface{}) + files := make(map[string]any) if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { select { case <-ctx.Done(): @@ -68,13 +68,13 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Dockerfile content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (interface{}, error) { +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (any, error) { f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } defer func() { _ = f.Close() }() - var target interface{} + var target any if err := json.NewDecoder(f).Decode(&target); err != nil { return nil, err } diff --git a/pkg/iac/scanners/json/parser/parser_test.go b/pkg/iac/scanners/json/parser/parser_test.go index 2af3936d6124..ed7b87492d96 100644 --- a/pkg/iac/scanners/json/parser/parser_test.go +++ b/pkg/iac/scanners/json/parser/parser_test.go @@ -19,13 +19,13 @@ func Test_Parser(t *testing.T) { data, err := New().ParseFile(context.TODO(), memfs, "something.json") require.NoError(t, err) - msi, ok := data.(map[string]interface{}) + msi, ok := data.(map[string]any) require.True(t, ok) xObj, ok := msi["x"] require.True(t, ok) - xMsi, ok := xObj.(map[string]interface{}) + xMsi, ok := xObj.(map[string]any) require.True(t, ok) yRaw, ok 
:= xMsi["y"] @@ -39,7 +39,7 @@ func Test_Parser(t *testing.T) { zRaw, ok := xMsi["z"] require.True(t, ok) - z, ok := zRaw.([]interface{}) + z, ok := zRaw.([]any) require.True(t, ok) require.Len(t, z, 3) diff --git a/pkg/iac/scanners/json/scanner.go b/pkg/iac/scanners/json/scanner.go index a1ad82e86690..79d90dc0d65b 100644 --- a/pkg/iac/scanners/json/scanner.go +++ b/pkg/iac/scanners/json/scanner.go @@ -20,14 +20,14 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { - debug debug.Logger - policyDirs []string - policyReaders []io.Reader - parser *parser.Parser - regoScanner *rego.Scanner - skipRequired bool - options []options.ScannerOption - sync.Mutex + mu sync.Mutex + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool + options []options.ScannerOption frameworks []framework.Framework spec string loadEmbeddedPolicies bool @@ -102,9 +102,9 @@ func (s *Scanner) Name() string { return "JSON" } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -117,33 +117,33 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul for path, file := range files { inputs = append(inputs, rego.Input{ Path: path, - FS: fs, + FS: fsys, Contents: file, }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) 
if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } diff --git a/pkg/iac/scanners/kubernetes/parser/manifest.go b/pkg/iac/scanners/kubernetes/parser/manifest.go index 0cc8aa092178..c22bb7ea274b 100644 --- a/pkg/iac/scanners/kubernetes/parser/manifest.go +++ b/pkg/iac/scanners/kubernetes/parser/manifest.go @@ -28,6 +28,6 @@ func (m *Manifest) UnmarshalYAML(value *yaml.Node) error { return nil } -func (m *Manifest) ToRego() interface{} { +func (m *Manifest) ToRego() any { return m.Content.ToRego() } diff --git a/pkg/iac/scanners/kubernetes/parser/manifest_node.go b/pkg/iac/scanners/kubernetes/parser/manifest_node.go index 1f82ca1e3680..cc5694e597c5 100644 --- a/pkg/iac/scanners/kubernetes/parser/manifest_node.go +++ b/pkg/iac/scanners/kubernetes/parser/manifest_node.go @@ -23,12 +23,12 @@ type ManifestNode struct { StartLine int EndLine int Offset int - Value interface{} + Value any Type TagType Path string } -func (r *ManifestNode) ToRego() interface{} { +func (r *ManifestNode) ToRego() any { if r == nil { return nil } @@ -36,14 +36,14 @@ func (r *ManifestNode) ToRego() interface{} { case TagBool, TagInt, TagString, TagStr: return r.Value case TagSlice: - var output []interface{} + var output []any for _, node := range r.Value.([]ManifestNode) { output = append(output, node.ToRego()) } return 
output case TagMap: - output := make(map[string]interface{}) - output["__defsec_metadata"] = map[string]interface{}{ + output := make(map[string]any) + output["__defsec_metadata"] = map[string]any{ "startline": r.StartLine, "endline": r.EndLine, "filepath": r.Path, diff --git a/pkg/iac/scanners/kubernetes/parser/parser.go b/pkg/iac/scanners/kubernetes/parser/parser.go index 46d853a9ed08..721a9f62e31e 100644 --- a/pkg/iac/scanners/kubernetes/parser/parser.go +++ b/pkg/iac/scanners/kubernetes/parser/parser.go @@ -42,8 +42,8 @@ func New(po ...options.ParserOption) *Parser { return p } -func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]interface{}, error) { - files := make(map[string][]interface{}) +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]any, error) { + files := make(map[string][]any) if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { select { case <-ctx.Done(): @@ -73,7 +73,7 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses Kubernetes manifest from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]any, error) { f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err @@ -97,7 +97,7 @@ func (p *Parser) required(fsys fs.FS, path string) bool { return false } -func (p *Parser) Parse(r io.Reader, path string) ([]interface{}, error) { +func (p *Parser) Parse(r io.Reader, path string) ([]any, error) { contents, err := io.ReadAll(r) if err != nil { @@ -109,14 +109,14 @@ func (p *Parser) Parse(r io.Reader, path string) ([]interface{}, error) { } if strings.TrimSpace(string(contents))[0] == '{' { - var target interface{} + var target any if err := json.Unmarshal(contents, &target); err != nil { return nil, err } - return []interface{}{target}, nil + return []any{target}, nil } - var results []interface{} + var results []any re := regexp.MustCompile(`(?m:^---\r?\n)`) pos := 0 diff --git a/pkg/iac/scanners/kubernetes/scanner.go b/pkg/iac/scanners/kubernetes/scanner.go index d8633c97ba7e..9773936f974d 100644 --- a/pkg/iac/scanners/kubernetes/scanner.go +++ b/pkg/iac/scanners/kubernetes/scanner.go @@ -24,14 +24,14 @@ var _ scanners.FSScanner = (*Scanner)(nil) var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { - debug debug.Logger - options []options.ScannerOption - policyDirs []string - policyReaders []io.Reader - regoScanner *rego.Scanner - parser *parser.Parser - skipRequired bool - sync.Mutex + mu sync.Mutex + debug debug.Logger + options []options.ScannerOption + policyDirs []string + policyReaders []io.Reader + regoScanner *rego.Scanner + parser *parser.Parser + skipRequired bool loadEmbeddedPolicies bool frameworks []framework.Framework spec string @@ -107,8 +107,8 @@ func (s *Scanner) Name() string { } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner 
!= nil { return s.regoScanner, nil } diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index 1e1f283e5614..54e140a95db1 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -66,7 +66,7 @@ func New(options ...Option) *Executor { } // Find element in list -func checkInList(id string, altIDs []string, list []string) bool { +func checkInList(id string, altIDs, list []string) bool { for _, codeIgnored := range list { if codeIgnored == id { return true diff --git a/pkg/iac/scanners/terraform/executor/pool.go b/pkg/iac/scanners/terraform/executor/pool.go index aa4c5b85a058..51eaf9fbeb3e 100644 --- a/pkg/iac/scanners/terraform/executor/pool.go +++ b/pkg/iac/scanners/terraform/executor/pool.go @@ -26,11 +26,11 @@ type Pool struct { regoOnly bool } -func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, state *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, s *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { return &Pool{ size: size, rules: rules, - state: state, + state: s, modules: modules, ignoreErrors: ignoreErrors, rs: regoScanner, @@ -227,7 +227,7 @@ func cleanPathRelativeToWorkingDir(dir, path string) string { return relPath } -func wildcardMatch(pattern string, subject string) bool { +func wildcardMatch(pattern, subject string) bool { if pattern == "" { return false } @@ -260,7 +260,7 @@ type Worker struct { incoming <-chan Job mu sync.Mutex results scan.Results - panic interface{} + panic any } func NewWorker(incoming <-chan Job) *Worker { diff --git a/pkg/iac/scanners/terraform/parser/funcs/collection.go b/pkg/iac/scanners/terraform/parser/funcs/collection.go index 506c711cd2fd..1ccec3f68886 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/collection.go +++ 
b/pkg/iac/scanners/terraform/parser/funcs/collection.go @@ -597,10 +597,11 @@ var TransposeFunc = function.New(&function.Spec{ }, }) +// nolint:gocritic // ListFunc constructs a function that takes an arbitrary number of arguments // and returns a list containing those values in the same order. // -// This function is deprecated in Terraform v0.12 // nolint:gocritic +// This function is deprecated in Terraform v0.12 var ListFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ @@ -618,10 +619,11 @@ var ListFunc = function.New(&function.Spec{ }, }) +// nolint:gocritic // MapFunc constructs a function that takes an even number of arguments and // returns a map whose elements are constructed from consecutive pairs of arguments. // -// This function is deprecated in Terraform v0.12 // nolint:gocritic +// This function is deprecated in Terraform v0.12 var MapFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ diff --git a/pkg/iac/scanners/terraform/parser/funcs/defaults.go b/pkg/iac/scanners/terraform/parser/funcs/defaults.go index e32adfb904f2..3bc8bd28b148 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/defaults.go +++ b/pkg/iac/scanners/terraform/parser/funcs/defaults.go @@ -69,7 +69,7 @@ var DefaultsFunc = function.New(&function.Spec{ }, }) -// nolint:cyclop +// nolint:gocyclo func defaultsApply(input, fallback cty.Value) cty.Value { wantTy := input.Type() diff --git a/pkg/iac/scanners/terraform/parser/load_blocks.go b/pkg/iac/scanners/terraform/parser/load_blocks.go index 782bfdc85fb6..34b303e3a91f 100644 --- a/pkg/iac/scanners/terraform/parser/load_blocks.go +++ b/pkg/iac/scanners/terraform/parser/load_blocks.go @@ -24,7 +24,7 @@ func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []ter return contents.Blocks, ignores, nil } -func parseIgnores(data []byte, path string, moduleSource string) []terraform.Ignore { +func parseIgnores(data []byte, 
path, moduleSource string) []terraform.Ignore { var ignores []terraform.Ignore for i, line := range strings.Split(string(data), "\n") { line = strings.TrimSpace(line) diff --git a/pkg/iac/scanners/terraform/parser/load_module_metadata.go b/pkg/iac/scanners/terraform/parser/load_module_metadata.go index 9d06402a76fc..82a48225db5f 100644 --- a/pkg/iac/scanners/terraform/parser/load_module_metadata.go +++ b/pkg/iac/scanners/terraform/parser/load_module_metadata.go @@ -16,7 +16,7 @@ type modulesMetadata struct { } func loadModuleMetadata(target fs.FS, fullPath string) (*modulesMetadata, string, error) { - metadataPath := filepath.Join(fullPath, ".terraform/modules/modules.json") + metadataPath := filepath.Join(fullPath, ".terraform", "modules", "modules.json") f, err := target.Open(metadataPath) if err != nil { diff --git a/pkg/iac/scanners/terraform/parser/module_retrieval.go b/pkg/iac/scanners/terraform/parser/module_retrieval.go index e96108a212cf..2ae6221afc73 100644 --- a/pkg/iac/scanners/terraform/parser/module_retrieval.go +++ b/pkg/iac/scanners/terraform/parser/module_retrieval.go @@ -19,7 +19,7 @@ var defaultResolvers = []ModuleResolver{ resolvers.Registry, } -func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix string, downloadPath string, err error) { +func resolveModule(ctx context.Context, current fs.FS, opt resolvers.Options) (filesystem fs.FS, sourcePrefix, downloadPath string, err error) { opt.Debug("Resolving module '%s' with source: '%s'...", opt.Name, opt.Source) for _, resolver := range defaultResolvers { if filesystem, prefix, path, applies, err := resolver.Resolve(ctx, current, opt); err != nil { diff --git a/pkg/iac/scanners/terraform/parser/resolvers/cache.go b/pkg/iac/scanners/terraform/parser/resolvers/cache.go index 7aca10de768d..6efc15f72dbb 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/cache.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/cache.go @@ -34,7 +34,7 @@ 
func locateCacheDir() (string, error) { return cacheDir, nil } -func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *cacheResolver) Resolve(_ context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix, downloadPath string, applies bool, err error) { if opt.SkipCache { opt.Debug("Cache is disabled.") return nil, "", "", false, nil diff --git a/pkg/iac/scanners/terraform/parser/resolvers/local.go b/pkg/iac/scanners/terraform/parser/resolvers/local.go index 94d92099b6c3..58fe5b9cd084 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/local.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/local.go @@ -10,7 +10,7 @@ type localResolver struct{} var Local = &localResolver{} -func (r *localResolver) Resolve(_ context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *localResolver) Resolve(_ context.Context, target fs.FS, opt Options) (filesystem fs.FS, prefix, downloadPath string, applies bool, err error) { if !opt.hasPrefix(".", "..") { return nil, "", "", false, nil } diff --git a/pkg/iac/scanners/terraform/parser/resolvers/options.go b/pkg/iac/scanners/terraform/parser/resolvers/options.go index 61f720e8cc9e..f567ff03dd47 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/options.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/options.go @@ -23,6 +23,6 @@ func (o *Options) hasPrefix(prefixes ...string) bool { return false } -func (o *Options) Debug(format string, args ...interface{}) { +func (o *Options) Debug(format string, args ...any) { o.DebugLogger.Log(format, args...) 
} diff --git a/pkg/iac/scanners/terraform/parser/resolvers/registry.go b/pkg/iac/scanners/terraform/parser/resolvers/registry.go index 5623e9064e06..584c93a34a53 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/registry.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/registry.go @@ -11,7 +11,7 @@ import ( "strings" "time" - "github.com/Masterminds/semver" + "github.com/aquasecurity/go-version/pkg/semver" // nolint:gomodguard ) type registryResolver struct { @@ -143,13 +143,13 @@ func resolveVersion(input string, versions moduleVersions) (string, error) { if len(versions.Modules[0].Versions) == 0 { return "", fmt.Errorf("no available versions for module") } - constraints, err := semver.NewConstraint(input) + constraints, err := semver.NewConstraints(input) if err != nil { return "", err } var realVersions semver.Collection for _, rawVersion := range versions.Modules[0].Versions { - realVersion, err := semver.NewVersion(rawVersion.Version) + realVersion, err := semver.Parse(rawVersion.Version) if err != nil { continue } diff --git a/pkg/iac/scanners/terraform/parser/resolvers/remote.go b/pkg/iac/scanners/terraform/parser/resolvers/remote.go index 51a76e6f6b41..4a6a26798a8a 100644 --- a/pkg/iac/scanners/terraform/parser/resolvers/remote.go +++ b/pkg/iac/scanners/terraform/parser/resolvers/remote.go @@ -29,7 +29,7 @@ func (r *remoteResolver) GetDownloadCount() int { return int(atomic.LoadInt32(&r.count)) } -func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix string, downloadPath string, applies bool, err error) { +func (r *remoteResolver) Resolve(ctx context.Context, _ fs.FS, opt Options) (filesystem fs.FS, prefix, downloadPath string, applies bool, err error) { if !opt.hasPrefix("github.com/", "bitbucket.org/", "s3:", "git@", "git:", "hg:", "https:", "gcs:") { return nil, "", "", false, nil } diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go index 
e1ae195dfa30..697000827b1c 100644 --- a/pkg/iac/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -31,7 +31,7 @@ var _ options.ConfigurableScanner = (*Scanner)(nil) var _ ConfigurableTerraformScanner = (*Scanner)(nil) type Scanner struct { - sync.Mutex + mu sync.Mutex options []options.ScannerOption parserOpt []options.ParserOption executorOpt []executor.Option @@ -76,12 +76,12 @@ func (s *Scanner) SetForceAllDirs(b bool) { s.forceAllDirs = b } -func (s *Scanner) AddParserOptions(options ...options.ParserOption) { - s.parserOpt = append(s.parserOpt, options...) +func (s *Scanner) AddParserOptions(opts ...options.ParserOption) { + s.parserOpt = append(s.parserOpt, opts...) } -func (s *Scanner) AddExecutorOptions(options ...executor.Option) { - s.executorOpt = append(s.executorOpt, options...) +func (s *Scanner) AddExecutorOptions(opts ...executor.Option) { + s.executorOpt = append(s.executorOpt, opts...) } func (s *Scanner) SetPolicyReaders(readers []io.Reader) { @@ -128,12 +128,12 @@ type Metrics struct { } } -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ dirs: make(map[string]struct{}), - options: options, + options: opts, } - for _, opt := range options { + for _, opt := range opts { opt(s) } return s @@ -145,8 +145,8 @@ func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Re } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go index 43ac135968a2..4e6f9cb8ec35 100644 --- a/pkg/iac/scanners/terraform/scanner_test.go +++ b/pkg/iac/scanners/terraform/scanner_test.go @@ -869,7 +869,7 @@ resource "aws_s3_bucket_public_access_block" "testB" { func Test_RegoInput(t *testing.T) { - var regoInput interface{} + 
var regoInput any opts := []options.ScannerOption{ ScannerWithStateFunc(func(s *state.State) { @@ -893,30 +893,30 @@ resource "aws_security_group" "example_security_group" { } `, opts...) - outer, ok := regoInput.(map[string]interface{}) + outer, ok := regoInput.(map[string]any) require.True(t, ok) - aws, ok := outer["aws"].(map[string]interface{}) + aws, ok := outer["aws"].(map[string]any) require.True(t, ok) - ec2, ok := aws["ec2"].(map[string]interface{}) + ec2, ok := aws["ec2"].(map[string]any) require.True(t, ok) - sgs, ok := ec2["securitygroups"].([]interface{}) + sgs, ok := ec2["securitygroups"].([]any) require.True(t, ok) require.Len(t, sgs, 1) - sg0, ok := sgs[0].(map[string]interface{}) + sg0, ok := sgs[0].(map[string]any) require.True(t, ok) - ingress, ok := sg0["ingressrules"].([]interface{}) + ingress, ok := sg0["ingressrules"].([]any) require.True(t, ok) require.Len(t, ingress, 1) - ingress0, ok := ingress[0].(map[string]interface{}) + ingress0, ok := ingress[0].(map[string]any) require.True(t, ok) - cidrs, ok := ingress0["cidrs"].([]interface{}) + cidrs, ok := ingress0["cidrs"].([]any) require.True(t, ok) require.Len(t, cidrs, 2) - cidr0, ok := cidrs[0].(map[string]interface{}) + cidr0, ok := cidrs[0].(map[string]any) require.True(t, ok) - cidr1, ok := cidrs[1].(map[string]interface{}) + cidr1, ok := cidrs[1].(map[string]any) require.True(t, ok) assert.Equal(t, "1.2.3.4", cidr0["value"]) diff --git a/pkg/iac/scanners/terraformplan/parser/parser.go b/pkg/iac/scanners/terraformplan/parser/parser.go index a1fb5e1366da..c8b7d111ac4c 100644 --- a/pkg/iac/scanners/terraformplan/parser/parser.go +++ b/pkg/iac/scanners/terraformplan/parser/parser.go @@ -101,14 +101,14 @@ func getResources(module Module, resourceChanges []ResourceChange, configuration // process the changes to get the after state for k, v := range changes.After { switch t := v.(type) { - case []interface{}: + case []any: if len(t) == 0 { continue } val := t[0] switch v := val.(type) { // is 
it a HCL block? - case map[string]interface{}: + case map[string]any: res.Blocks[k] = v // just a normal attribute then default: @@ -142,12 +142,12 @@ func getResources(module Module, resourceChanges []ResourceChange, configuration return resources, nil } -func unpackConfigurationValue(val interface{}, r Resource) (interface{}, bool) { - if t, ok := val.(map[string]interface{}); ok { +func unpackConfigurationValue(val any, r Resource) (any, bool) { + if t, ok := val.(map[string]any); ok { for k, v := range t { switch k { case "references": - reference := v.([]interface{})[0].(string) + reference := v.([]any)[0].(string) if strings.HasPrefix(r.Address, "module.") { hashable := strings.TrimSuffix(strings.Split(r.Address, fmt.Sprintf(".%s.", r.Type))[0], ".data") /* #nosec */ diff --git a/pkg/iac/scanners/terraformplan/parser/plan_file.go b/pkg/iac/scanners/terraformplan/parser/plan_file.go index 42dfccb2d579..6275c228182e 100644 --- a/pkg/iac/scanners/terraformplan/parser/plan_file.go +++ b/pkg/iac/scanners/terraformplan/parser/plan_file.go @@ -17,12 +17,12 @@ type ResourceChange struct { type ConfigurationResource struct { Resource - Expressions map[string]interface{} `json:"expressions"` + Expressions map[string]any `json:"expressions"` } type Change struct { - Before map[string]interface{} `json:"before"` - After map[string]interface{} `json:"after"` + Before map[string]any `json:"before"` + After map[string]any `json:"after"` } type Module struct { diff --git a/pkg/iac/scanners/terraformplan/scanner.go b/pkg/iac/scanners/terraformplan/scanner.go index 85ec0d6123e5..0f354b5229b1 100644 --- a/pkg/iac/scanners/terraformplan/scanner.go +++ b/pkg/iac/scanners/terraformplan/scanner.go @@ -115,7 +115,7 @@ func (s *Scanner) ScanFS(ctx context.Context, inputFS fs.FS, dir string) (scan.R var results scan.Results for _, f := range filesFound { - res, err := s.ScanFile(f, inputFS) + res, err := s.ScanFile(inputFS, f) if err != nil { return nil, err } @@ -124,21 +124,21 @@ 
func (s *Scanner) ScanFS(ctx context.Context, inputFS fs.FS, dir string) (scan.R return results, nil } -func New(options ...options.ScannerOption) *Scanner { +func New(opts ...options.ScannerOption) *Scanner { scanner := &Scanner{ parser: *parser.New(), - options: options, + options: opts, } - for _, o := range options { + for _, o := range opts { o(scanner) } return scanner } -func (s *Scanner) ScanFile(filepath string, fs fs.FS) (scan.Results, error) { +func (s *Scanner) ScanFile(fsys fs.FS, filepath string) (scan.Results, error) { s.debug.Log("Scanning file %s", filepath) - file, err := fs.Open(filepath) + file, err := fsys.Open(filepath) if err != nil { return nil, err } diff --git a/pkg/iac/scanners/terraformplan/test/scanner_test.go b/pkg/iac/scanners/terraformplan/test/scanner_test.go index 69024828fd15..6cf150dee307 100644 --- a/pkg/iac/scanners/terraformplan/test/scanner_test.go +++ b/pkg/iac/scanners/terraformplan/test/scanner_test.go @@ -23,7 +23,7 @@ func Test_Scanning_Plan(t *testing.T) { "testdata/plan.json": {Data: b}, } - results, err := scanner.ScanFile("testdata/plan.json", testFS) + results, err := scanner.ScanFile(testFS, "testdata/plan.json") require.NoError(t, err) require.NotNil(t, results) diff --git a/pkg/iac/scanners/toml/parser/parser.go b/pkg/iac/scanners/toml/parser/parser.go index be215dfa6b07..edc6dee3c8c0 100644 --- a/pkg/iac/scanners/toml/parser/parser.go +++ b/pkg/iac/scanners/toml/parser/parser.go @@ -37,9 +37,9 @@ func New(opts ...options.ParserOption) *Parser { return p } -func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]interface{}, error) { +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string]any, error) { - files := make(map[string]interface{}) + files := make(map[string]any) if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { select { case <-ctx.Done(): @@ -69,13 +69,13 @@ func (p *Parser) ParseFS(ctx 
context.Context, target fs.FS, path string) (map[st } // ParseFile parses toml content from the provided filesystem path. -func (p *Parser) ParseFile(_ context.Context, fs fs.FS, path string) (interface{}, error) { - f, err := fs.Open(filepath.ToSlash(path)) +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) (any, error) { + f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err } defer func() { _ = f.Close() }() - var target interface{} + var target any if _, err := toml.NewDecoder(f).Decode(&target); err != nil { return nil, err } diff --git a/pkg/iac/scanners/toml/parser/parser_test.go b/pkg/iac/scanners/toml/parser/parser_test.go index d6ae51d1bf7f..1c9e9bdc341e 100644 --- a/pkg/iac/scanners/toml/parser/parser_test.go +++ b/pkg/iac/scanners/toml/parser/parser_test.go @@ -23,13 +23,13 @@ z = ["a", "b", "c"] data, err := New().ParseFile(context.TODO(), memfs, "something.yaml") require.NoError(t, err) - msi, ok := data.(map[string]interface{}) + msi, ok := data.(map[string]any) require.True(t, ok) xObj, ok := msi["x"] require.True(t, ok) - xMsi, ok := xObj.(map[string]interface{}) + xMsi, ok := xObj.(map[string]any) require.True(t, ok) yRaw, ok := xMsi["y"] @@ -43,7 +43,7 @@ z = ["a", "b", "c"] zRaw, ok := xMsi["z"] require.True(t, ok) - z, ok := zRaw.([]interface{}) + z, ok := zRaw.([]any) require.True(t, ok) require.Len(t, z, 3) diff --git a/pkg/iac/scanners/toml/scanner.go b/pkg/iac/scanners/toml/scanner.go index 374c76b02142..f2c22f16ca87 100644 --- a/pkg/iac/scanners/toml/scanner.go +++ b/pkg/iac/scanners/toml/scanner.go @@ -18,14 +18,14 @@ import ( var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { - debug debug.Logger - options []options.ScannerOption - policyDirs []string - policyReaders []io.Reader - parser *parser.Parser - regoScanner *rego.Scanner - skipRequired bool - sync.Mutex + mu sync.Mutex + debug debug.Logger + options []options.ScannerOption + policyDirs []string + policyReaders 
[]io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool frameworks []framework.Framework spec string loadEmbeddedPolicies bool @@ -96,9 +96,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - files, err := s.parser.ParseFS(ctx, fs, path) + files, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -112,32 +112,32 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul inputs = append(inputs, rego.Input{ Path: path, Contents: file, - FS: fs, + FS: fsys, }) } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } diff --git a/pkg/iac/scanners/universal/scanner.go b/pkg/iac/scanners/universal/scanner.go index 170e76898fda..d289f1961f6c 100644 --- a/pkg/iac/scanners/universal/scanner.go +++ b/pkg/iac/scanners/universal/scanner.go @@ -50,10 +50,10 @@ func (s *Scanner) Name() string { return "Universal" } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, dir string) (scan.Results, error) { +func (s *Scanner) 
ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) { var results scan.Results for _, inner := range s.fsScanners { - innerResults, err := inner.ScanFS(ctx, fs, dir) + innerResults, err := inner.ScanFS(ctx, fsys, dir) if err != nil { return nil, err } diff --git a/pkg/iac/scanners/yaml/parser/parser.go b/pkg/iac/scanners/yaml/parser/parser.go index 177d2289dd30..d03788565638 100644 --- a/pkg/iac/scanners/yaml/parser/parser.go +++ b/pkg/iac/scanners/yaml/parser/parser.go @@ -39,9 +39,9 @@ func New(opts ...options.ParserOption) *Parser { return p } -func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]interface{}, error) { +func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[string][]any, error) { - files := make(map[string][]interface{}) + files := make(map[string][]any) if err := fs.WalkDir(target, filepath.ToSlash(path), func(path string, entry fs.DirEntry, err error) error { select { case <-ctx.Done(): @@ -71,7 +71,7 @@ func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) (map[st } // ParseFile parses yaml content from the provided filesystem path. 
-func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interface{}, error) { +func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]any, error) { f, err := fsys.Open(filepath.ToSlash(path)) if err != nil { return nil, err @@ -83,7 +83,7 @@ func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interf return nil, err } - var results []interface{} + var results []any marker := "\n---\n" altMarker := "\r\n---\r\n" @@ -92,7 +92,7 @@ func (p *Parser) ParseFile(_ context.Context, fsys fs.FS, path string) ([]interf } for _, partial := range strings.Split(string(contents), marker) { - var target interface{} + var target any if err := yaml.Unmarshal([]byte(partial), &target); err != nil { return nil, err } diff --git a/pkg/iac/scanners/yaml/parser/parser_test.go b/pkg/iac/scanners/yaml/parser/parser_test.go index 8d89ce78223b..f7817d5990fa 100644 --- a/pkg/iac/scanners/yaml/parser/parser_test.go +++ b/pkg/iac/scanners/yaml/parser/parser_test.go @@ -28,13 +28,13 @@ x: assert.Len(t, data, 1) - msi, ok := data[0].(map[string]interface{}) + msi, ok := data[0].(map[string]any) require.True(t, ok) xObj, ok := msi["x"] require.True(t, ok) - xMsi, ok := xObj.(map[string]interface{}) + xMsi, ok := xObj.(map[string]any) require.True(t, ok) yRaw, ok := xMsi["y"] @@ -48,7 +48,7 @@ x: zRaw, ok := xMsi["z"] require.True(t, ok) - z, ok := zRaw.([]interface{}) + z, ok := zRaw.([]any) require.True(t, ok) require.Len(t, z, 3) @@ -86,13 +86,13 @@ x: assert.Len(t, data, 2) { - msi, ok := data[0].(map[string]interface{}) + msi, ok := data[0].(map[string]any) require.True(t, ok) xObj, ok := msi["x"] require.True(t, ok) - xMsi, ok := xObj.(map[string]interface{}) + xMsi, ok := xObj.(map[string]any) require.True(t, ok) yRaw, ok := xMsi["y"] @@ -106,7 +106,7 @@ x: zRaw, ok := xMsi["z"] require.True(t, ok) - z, ok := zRaw.([]interface{}) + z, ok := zRaw.([]any) require.True(t, ok) require.Len(t, z, 3) @@ -117,13 +117,13 @@ x: } { - msi, ok 
:= data[1].(map[string]interface{}) + msi, ok := data[1].(map[string]any) require.True(t, ok) xObj, ok := msi["x"] require.True(t, ok) - xMsi, ok := xObj.(map[string]interface{}) + xMsi, ok := xObj.(map[string]any) require.True(t, ok) yRaw, ok := xMsi["y"] @@ -137,7 +137,7 @@ x: zRaw, ok := xMsi["z"] require.True(t, ok) - z, ok := zRaw.([]interface{}) + z, ok := zRaw.([]any) require.True(t, ok) require.Len(t, z, 3) diff --git a/pkg/iac/scanners/yaml/scanner.go b/pkg/iac/scanners/yaml/scanner.go index 710e1f2eb71b..6c1246436ba5 100644 --- a/pkg/iac/scanners/yaml/scanner.go +++ b/pkg/iac/scanners/yaml/scanner.go @@ -18,14 +18,14 @@ import ( var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { - options []options.ScannerOption - debug debug.Logger - policyDirs []string - policyReaders []io.Reader - parser *parser.Parser - regoScanner *rego.Scanner - skipRequired bool - sync.Mutex + mu sync.Mutex + options []options.ScannerOption + debug debug.Logger + policyDirs []string + policyReaders []io.Reader + parser *parser.Parser + regoScanner *rego.Scanner + skipRequired bool frameworks []framework.Framework spec string loadEmbeddedLibraries bool @@ -95,9 +95,9 @@ func NewScanner(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { +func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { - fileset, err := s.parser.ParseFS(ctx, fs, path) + fileset, err := s.parser.ParseFS(ctx, fsys, path) if err != nil { return nil, err } @@ -112,33 +112,33 @@ func (s *Scanner) ScanFS(ctx context.Context, fs fs.FS, path string) (scan.Resul inputs = append(inputs, rego.Input{ Path: path, Contents: file, - FS: fs, + FS: fsys, }) } } - results, err := s.scanRego(ctx, fs, inputs...) + results, err := s.scanRego(ctx, fsys, inputs...) 
if err != nil { return nil, err } return results, nil } -func (s *Scanner) ScanFile(ctx context.Context, fs fs.FS, path string) (scan.Results, error) { - parsed, err := s.parser.ParseFile(ctx, fs, path) +func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.Results, error) { + parsed, err := s.parser.ParseFile(ctx, fsys, path) if err != nil { return nil, err } s.debug.Log("Scanning %s...", path) - return s.scanRego(ctx, fs, rego.Input{ + return s.scanRego(ctx, fsys, rego.Input{ Path: path, Contents: parsed, }) } func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { - s.Lock() - defer s.Unlock() + s.mu.Lock() + defer s.mu.Unlock() if s.regoScanner != nil { return s.regoScanner, nil } diff --git a/pkg/k8s/scanner/scanner_test.go b/pkg/k8s/scanner/scanner_test.go index ec6d78f73c05..566bf4c67544 100644 --- a/pkg/k8s/scanner/scanner_test.go +++ b/pkg/k8s/scanner/scanner_test.go @@ -33,7 +33,7 @@ func TestScanner_Scan(t *testing.T) { Namespace: "kube-system", Kind: "Cluster", Name: "k8s.io/kubernetes", - RawResource: map[string]interface{}{ + RawResource: map[string]any{ "name": "k8s.io/kubernetes", "version": "1.21.1", "type": "ClusterInfo", @@ -47,9 +47,9 @@ func TestScanner_Scan(t *testing.T) { Namespace: "kube-system", Kind: "ControlPlaneComponents", Name: "k8s.io/apiserver", - RawResource: map[string]interface{}{ - "Containers": []interface{}{ - map[string]interface{}{ + RawResource: map[string]any{ + "Containers": []any{ + map[string]any{ "Digest": "18e61c783b41758dd391ab901366ec3546b26fae00eef7e223d1f94da808e02f", "ID": "kube-apiserver:v1.21.1", "Registry": "k8s.gcr.io", @@ -64,7 +64,7 @@ func TestScanner_Scan(t *testing.T) { { Kind: "NodeComponents", Name: "kind-control-plane", - RawResource: map[string]interface{}{ + RawResource: map[string]any{ "ContainerRuntimeVersion": "containerd://1.5.2", "Hostname": "kind-control-plane", "KubeProxyVersion": "6.2.13-300.fc38.aarch64", @@ -509,18 +509,18 @@ func TestFindNodeName(t 
*testing.T) { Namespace: "kube-system", Kind: "Cluster", Name: "k8s.io/kubernetes", - RawResource: map[string]interface{}{}, + RawResource: map[string]any{}, }, { Namespace: "kube-system", Kind: "ControlPlaneComponents", Name: "k8s.io/apiserver", - RawResource: map[string]interface{}{}, + RawResource: map[string]any{}, }, { Kind: "NodeComponents", Name: "kind-control-plane", - RawResource: map[string]interface{}{}, + RawResource: map[string]any{}, }, }, want: "kind-control-plane", @@ -532,13 +532,13 @@ func TestFindNodeName(t *testing.T) { Namespace: "kube-system", Kind: "Cluster", Name: "k8s.io/kubernetes", - RawResource: map[string]interface{}{}, + RawResource: map[string]any{}, }, { Namespace: "kube-system", Kind: "ControlPlaneComponents", Name: "k8s.io/apiserver", - RawResource: map[string]interface{}{}, + RawResource: map[string]any{}, }, }, want: "", diff --git a/test/testutil/util.go b/test/testutil/util.go index 8d7b957d438d..124065663667 100644 --- a/test/testutil/util.go +++ b/test/testutil/util.go @@ -14,9 +14,9 @@ import ( "github.com/aquasecurity/defsec/pkg/scan" ) -func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { +func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message string, args ...any) { found := ruleIDInResults(ruleID, results.GetFailed()) - assert.True(t, found, append([]interface{}{message}, args...)...) + assert.True(t, found, append([]any{message}, args...)...) 
for _, result := range results.GetFailed() { if result.Rule().LongID() == ruleID { m := result.Metadata() @@ -31,9 +31,9 @@ func AssertRuleFound(t *testing.T, ruleID string, results scan.Results, message } } -func AssertRuleNotFound(t *testing.T, ruleID string, results scan.Results, message string, args ...interface{}) { +func AssertRuleNotFound(t *testing.T, ruleID string, results scan.Results, message string, args ...any) { found := ruleIDInResults(ruleID, results.GetFailed()) - assert.False(t, found, append([]interface{}{message}, args...)...) + assert.False(t, found, append([]any{message}, args...)...) } func ruleIDInResults(ruleID string, results scan.Results) bool { @@ -57,24 +57,24 @@ func CreateFS(t *testing.T, files map[string]string) fs.FS { return memfs } -func AssertDefsecEqual(t *testing.T, expected interface{}, actual interface{}) { +func AssertDefsecEqual(t *testing.T, expected, actual any) { expectedJson, err := json.MarshalIndent(expected, "", "\t") require.NoError(t, err) actualJson, err := json.MarshalIndent(actual, "", "\t") require.NoError(t, err) if expectedJson[0] == '[' { - var expectedSlice []map[string]interface{} + var expectedSlice []map[string]any require.NoError(t, json.Unmarshal(expectedJson, &expectedSlice)) - var actualSlice []map[string]interface{} + var actualSlice []map[string]any require.NoError(t, json.Unmarshal(actualJson, &actualSlice)) expectedSlice = purgeMetadataSlice(expectedSlice) actualSlice = purgeMetadataSlice(actualSlice) assert.Equal(t, expectedSlice, actualSlice, "defsec adapted and expected values do not match") } else { - var expectedMap map[string]interface{} + var expectedMap map[string]any require.NoError(t, json.Unmarshal(expectedJson, &expectedMap)) - var actualMap map[string]interface{} + var actualMap map[string]any require.NoError(t, json.Unmarshal(actualJson, &actualMap)) expectedMap = purgeMetadata(expectedMap) actualMap = purgeMetadata(actualMap) @@ -82,21 +82,21 @@ func AssertDefsecEqual(t *testing.T, 
expected interface{}, actual interface{}) { } } -func purgeMetadata(input map[string]interface{}) map[string]interface{} { +func purgeMetadata(input map[string]any) map[string]any { for k, v := range input { if k == "metadata" || k == "Metadata" { delete(input, k) continue } - if v, ok := v.(map[string]interface{}); ok { + if v, ok := v.(map[string]any); ok { input[k] = purgeMetadata(v) } - if v, ok := v.([]interface{}); ok { + if v, ok := v.([]any); ok { if len(v) > 0 { - if _, ok := v[0].(map[string]interface{}); ok { - maps := make([]map[string]interface{}, len(v)) + if _, ok := v[0].(map[string]any); ok { + maps := make([]map[string]any, len(v)) for i := range v { - maps[i] = v[i].(map[string]interface{}) + maps[i] = v[i].(map[string]any) } input[k] = purgeMetadataSlice(maps) } @@ -106,7 +106,7 @@ func purgeMetadata(input map[string]interface{}) map[string]interface{} { return input } -func purgeMetadataSlice(input []map[string]interface{}) []map[string]interface{} { +func purgeMetadataSlice(input []map[string]any) []map[string]any { for i := range input { input[i] = purgeMetadata(input[i]) } From 6314102e095ae88fd61eef8eb875bf1294f59a3d Mon Sep 17 00:00:00 2001 From: nikpivkin Date: Mon, 4 Dec 2023 18:18:06 +0700 Subject: [PATCH 4/5] fix skipping of optional policies in windows --- pkg/iac/rego/embed.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/iac/rego/embed.go b/pkg/iac/rego/embed.go index 4b05c615a19b..2003a7667e4e 100644 --- a/pkg/iac/rego/embed.go +++ b/pkg/iac/rego/embed.go @@ -80,7 +80,7 @@ func LoadPoliciesFromDirs(target fs.FS, paths ...string) (map[string]*ast.Module return nil } - if strings.HasSuffix(filepath.Dir(filepath.ToSlash(path)), "policies/advanced/optional") { + if strings.HasSuffix(filepath.Dir(path), filepath.Join("policies", "advanced", "optional")) { return fs.SkipDir } From 3e518fbc330e6662801842470aee1dc55cefe46c Mon Sep 17 00:00:00 2001 From: nikpivkin Date: Mon, 4 Dec 2023 18:34:41 +0700 Subject: [PATCH 
5/5] test: cleanup helm tests --- pkg/iac/scanners/helm/test/scanner_test.go | 184 ++++++++++----------- 1 file changed, 85 insertions(+), 99 deletions(-) diff --git a/pkg/iac/scanners/helm/test/scanner_test.go b/pkg/iac/scanners/helm/test/scanner_test.go index 5e4178c7120d..6e261502294e 100644 --- a/pkg/iac/scanners/helm/test/scanner_test.go +++ b/pkg/iac/scanners/helm/test/scanner_test.go @@ -8,8 +8,10 @@ import ( "sort" "testing" + "github.com/aquasecurity/defsec/pkg/scan" "github.com/aquasecurity/defsec/pkg/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/helm" + "github.com/samber/lo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -17,13 +19,13 @@ import ( func Test_helm_scanner_with_archive(t *testing.T) { tests := []struct { - testName string + name string chartName string path string archiveName string }{ { - testName: "Parsing tarball 'mysql-8.8.26.tar'", + name: "Parsing tarball 'mysql-8.8.26.tar'", chartName: "mysql", path: filepath.Join("testdata", "mysql-8.8.26.tar"), archiveName: "mysql-8.8.26.tar", @@ -31,55 +33,46 @@ func Test_helm_scanner_with_archive(t *testing.T) { } for _, test := range tests { - t.Logf("Running test: %s", test.testName) - - helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) - - testTemp := t.TempDir() - testFileName := filepath.Join(testTemp, test.archiveName) - require.NoError(t, copyArchive(test.path, testFileName)) - - testFs := os.DirFS(testTemp) - results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") - require.NoError(t, err) - require.NotNil(t, results) - - failed := results.GetFailed() - assert.Equal(t, 19, len(failed)) - - visited := make(map[string]bool) - var errorCodes []string - for _, result := range failed { - id := result.Flatten().RuleID - if _, exists := visited[id]; !exists { - visited[id] = true - errorCodes = append(errorCodes, id) - } - } - assert.Len(t, errorCodes, 14) - 
- sort.Strings(errorCodes) - - assert.Equal(t, []string{ - "AVD-KSV-0001", "AVD-KSV-0003", - "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", - "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", - "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", - "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", - }, errorCodes) + t.Run(test.name, func(t *testing.T) { + helmScanner := helm.New( + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), + ) + + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) + + testFs := os.DirFS(testTemp) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + errorCodes := getRuleIDs(failed) + assert.Len(t, errorCodes, 14) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", + }, errorCodes) + }) } } func Test_helm_scanner_with_missing_name_can_recover(t *testing.T) { tests := []struct { - testName string + name string chartName string path string archiveName string }{ { - testName: "Parsing tarball 'aws-cluster-autoscaler-bad.tar.gz'", + name: "Parsing tarball 'aws-cluster-autoscaler-bad.tar.gz'", chartName: "aws-cluster-autoscaler", path: filepath.Join("testdata", "aws-cluster-autoscaler-bad.tar.gz"), archiveName: "aws-cluster-autoscaler-bad.tar.gz", @@ -87,66 +80,57 @@ func Test_helm_scanner_with_missing_name_can_recover(t *testing.T) { } for _, test := range tests { - t.Logf("Running test: %s", test.testName) - helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + t.Run(test.name, func(t *testing.T) { + helmScanner := 
helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) - testTemp := t.TempDir() - testFileName := filepath.Join(testTemp, test.archiveName) - require.NoError(t, copyArchive(test.path, testFileName)) + testTemp := t.TempDir() + testFileName := filepath.Join(testTemp, test.archiveName) + require.NoError(t, copyArchive(test.path, testFileName)) - testFs := os.DirFS(testTemp) - _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") - require.NoError(t, err) + testFs := os.DirFS(testTemp) + _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + }) } } func Test_helm_scanner_with_dir(t *testing.T) { tests := []struct { - testName string + name string chartName string }{ { - testName: "Parsing directory testchart'", + name: "Parsing directory testchart'", chartName: "testchart", }, } for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) + + testFs := os.DirFS(filepath.Join("testdata", test.chartName)) + results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") + require.NoError(t, err) + require.NotNil(t, results) + + failed := results.GetFailed() + assert.Equal(t, 17, len(failed)) + + errorCodes := getRuleIDs(failed) + + assert.Equal(t, []string{ + "AVD-KSV-0001", "AVD-KSV-0003", + "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", + "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", + "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", + "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", + "AVD-KSV-0117", + }, errorCodes) + }) - t.Logf("Running test: %s", test.testName) - - helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) - - testFs := os.DirFS(filepath.Join("testdata", test.chartName)) - results, err := helmScanner.ScanFS(context.TODO(), testFs, ".") - require.NoError(t, err) - require.NotNil(t, results) - - failed 
:= results.GetFailed() - assert.Equal(t, 17, len(failed)) - - visited := make(map[string]bool) - var errorCodes []string - for _, result := range failed { - id := result.Flatten().RuleID - if _, exists := visited[id]; !exists { - visited[id] = true - errorCodes = append(errorCodes, id) - } - } - - sort.Strings(errorCodes) - - assert.Equal(t, []string{ - "AVD-KSV-0001", "AVD-KSV-0003", - "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", - "AVD-KSV-0015", "AVD-KSV-0016", "AVD-KSV-0018", - "AVD-KSV-0020", "AVD-KSV-0021", "AVD-KSV-0030", - "AVD-KSV-0104", "AVD-KSV-0106", "AVD-KSV-0116", - "AVD-KSV-0117", - }, errorCodes) } } @@ -176,13 +160,13 @@ deny[res] { } ` tests := []struct { - testName string + name string chartName string path string archiveName string }{ { - testName: "Parsing tarball 'mysql-8.8.26.tar'", + name: "Parsing tarball 'mysql-8.8.26.tar'", chartName: "mysql", path: filepath.Join("testdata", "mysql-8.8.26.tar"), archiveName: "mysql-8.8.26.tar", @@ -190,12 +174,14 @@ deny[res] { } for _, test := range tests { - t.Run(test.testName, func(t *testing.T) { - t.Logf("Running test: %s", test.testName) + t.Run(test.name, func(t *testing.T) { - helmScanner := helm.New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true), + helmScanner := helm.New( + options.ScannerWithEmbeddedPolicies(true), + options.ScannerWithEmbeddedLibraries(true), options.ScannerWithPolicyDirs("rules"), - options.ScannerWithPolicyNamespaces("user")) + options.ScannerWithPolicyNamespaces("user"), + ) testTemp := t.TempDir() testFileName := filepath.Join(testTemp, test.archiveName) @@ -214,19 +200,9 @@ deny[res] { failed := results.GetFailed() assert.Equal(t, 21, len(failed)) - visited := make(map[string]bool) - var errorCodes []string - for _, result := range failed { - id := result.Flatten().RuleID - if _, exists := visited[id]; !exists { - visited[id] = true - errorCodes = append(errorCodes, id) - } - } + errorCodes := getRuleIDs(failed) assert.Len(t, 
errorCodes, 15) - sort.Strings(errorCodes) - assert.Equal(t, []string{ "AVD-KSV-0001", "AVD-KSV-0003", "AVD-KSV-0011", "AVD-KSV-0012", "AVD-KSV-0014", @@ -263,3 +239,13 @@ func Test_helm_chart_with_templated_name(t *testing.T) { _, err := helmScanner.ScanFS(context.TODO(), testFs, ".") require.NoError(t, err) } + +func getRuleIDs(results scan.Results) []string { + ids := lo.Uniq(lo.Map(results, func(r scan.Result, _ int) string { + return r.Flatten().RuleID + })) + + sort.Strings(ids) + + return ids +}