feat: Add option to migrate to worker group module
Grzegorz Lisowski committed Feb 8, 2021
1 parent 2221909 commit a429b61
Showing 14 changed files with 1,370 additions and 8 deletions.
13 changes: 13 additions & 0 deletions README.md
@@ -160,6 +160,8 @@ MIT Licensed. See [LICENSE](https://github.com/terraform-aws-modules/terraform-a
| kubernetes | >= 1.11.1 |
| local | >= 1.4 |
| null | >= 2.1 |
| random | >= 2.1 |
| template | >= 2.1 |

## Inputs

@@ -220,6 +222,8 @@ MIT Licensed. See [LICENSE](https://github.com/terraform-aws-modules/terraform-a
| worker\_create\_initial\_lifecycle\_hooks | Whether to create initial lifecycle hooks provided in worker groups. | `bool` | `false` | no |
| worker\_create\_security\_group | Whether to create a security group for the workers or attach the workers to `worker_security_group_id`. | `bool` | `true` | no |
| worker\_groups | A map of maps defining worker group configurations to be defined using AWS Launch Templates. See workers\_group\_defaults for valid keys. | `any` | `{}` | no |
| worker\_groups\_launch\_template\_legacy | A list of maps defining worker group configurations to be defined using AWS Launch Templates. See workers\_group\_defaults for valid keys. | `any` | `[]` | no |
| worker\_groups\_legacy | A list of maps defining worker group configurations to be defined using AWS Launch Configurations. See workers\_group\_defaults for valid keys. | `any` | `[]` | no |
| worker\_security\_group\_id | If provided, all workers will be attached to this security group. If not given, a security group will be created with necessary ingress/egress to work with the EKS cluster. | `string` | `""` | no |
| worker\_sg\_ingress\_from\_port | Minimum port number from which pods will accept communication. Must be changed to a lower value if some pods in your cluster will expose a port lower than 1025 (e.g. 22, 80, or 443). | `number` | `1025` | no |
| workers\_additional\_policies | Additional policies to be added to workers | `list(string)` | `[]` | no |
@@ -256,6 +260,15 @@ MIT Licensed. See [LICENSE](https://github.com/terraform-aws-modules/terraform-a
| worker\_groups | Outputs from EKS worker groups. Map of maps, keyed by var.worker\_groups keys |
| worker\_iam\_instance\_profile\_arns | default IAM instance profile ARN for EKS worker groups |
| worker\_iam\_instance\_profile\_names | default IAM instance profile name for EKS worker groups |
| worker\_iam\_role\_arn | default IAM role ARN for EKS worker groups |
| worker\_iam\_role\_name | default IAM role name for EKS worker groups |
| worker\_security\_group\_id | Security group ID attached to the EKS workers. |
| workers\_asg\_arns | IDs of the autoscaling groups containing workers. |
| workers\_asg\_names | Names of the autoscaling groups containing workers. |
| workers\_default\_ami\_id | ID of the default worker group AMI |
| workers\_launch\_template\_arns | ARNs of the worker launch templates. |
| workers\_launch\_template\_ids | IDs of the worker launch templates. |
| workers\_launch\_template\_latest\_versions | Latest versions of the worker launch templates. |
| workers\_user\_data | User data of worker groups |

<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
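Taken together, the new inputs documented above form the migration path this commit introduces: existing list-based definitions keep working through `worker_groups_legacy` (launch configurations) and `worker_groups_launch_template_legacy` (launch templates), while the new map-of-maps `worker_groups` input is handled by the dedicated worker group module. A minimal sketch of what that migration could look like in a consumer configuration (group name and settings are illustrative, not taken from this commit):

# Before: legacy list-based definition, now passed via the *_legacy input.
worker_groups_launch_template_legacy = [
  {
    name          = "workers"
    instance_type = "t3.small"
  },
]

# After: map of maps keyed by group name, consumed by the new worker group module.
worker_groups = {
  workers = {
    instance_type = "t3.small"
  }
}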
44 changes: 43 additions & 1 deletion aws_auth.tf
@@ -1,9 +1,51 @@
data "aws_caller_identity" "current" {}

locals {
  ## DEPRECATED section, to be removed once users have finished migrating to
  ## worker nodes managed via maps. When updating, remember to make the corresponding update in modules/worker_groups

  auth_launch_template_worker_roles = [
    for index in range(0, var.create_eks ? local.worker_group_launch_template_legacy_count : 0) : {
      worker_role_arn = "arn:${data.aws_partition.current.partition}:iam::${data.aws_caller_identity.current.account_id}:role/${element(
        coalescelist(
          aws_iam_instance_profile.workers_launch_template.*.role,
          data.aws_iam_instance_profile.custom_worker_group_launch_template_iam_instance_profile.*.role_name,
          [""]
        ),
        index
      )}"
      platform = lookup(
        var.worker_groups_launch_template_legacy[index],
        "platform",
        local.workers_group_defaults["platform"]
      )
    }
  ]

  auth_worker_roles = [
    for index in range(0, var.create_eks ? local.worker_group_count_legacy : 0) : {
      worker_role_arn = "arn:${data.aws_partition.current.partition}:iam::${data.aws_caller_identity.current.account_id}:role/${element(
        coalescelist(
          aws_iam_instance_profile.workers.*.role,
          data.aws_iam_instance_profile.custom_worker_group_iam_instance_profile.*.role_name,
          [""]
        ),
        index,
      )}"
      platform = lookup(
        var.worker_groups_legacy[index],
        "platform",
        local.workers_group_defaults["platform"]
      )
    }
  ]
  ## ~DEPRECATED

  # Convert to format needed by aws-auth ConfigMap
  configmap_roles = [
    for role in concat(
      local.auth_launch_template_worker_roles,
      local.auth_worker_roles,
      module.worker_groups.aws_auth_roles,
      module.node_groups.aws_auth_roles,
      module.fargate.aws_auth_roles,
@@ -35,7 +77,7 @@ resource "kubernetes_config_map" "aws_auth" {
    labels = merge(
      {
        "app.kubernetes.io/managed-by" = "Terraform"
        # / are replaced by . because label validator fails in this lib
        # https://github.com/kubernetes/apimachinery/blob/1bdd76d09076d4dc0362456e59c8f551f5f24a72/pkg/util/validation/validation.go#L166
        "terraform.io/module" = "terraform-aws-modules.eks.aws"
      },
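The role ARNs above use a `coalescelist`/`element` pattern: `coalescelist` returns its first non-empty list argument, and `element` then picks the entry for the given worker group index, with `[""]` as a fallback so the expression stays valid when no instance profiles exist (for example when `create_eks = false`). A standalone sketch of that behavior with hypothetical values:

locals {
  created_roles = []                 # what aws_iam_instance_profile.workers.*.role yields when none are managed
  custom_roles  = ["my-worker-role"] # what the data source yields when profiles are user-supplied
  picked_role   = element(coalescelist(local.created_roles, local.custom_roles, [""]), 0)
  # picked_role == "my-worker-role"
}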
144 changes: 143 additions & 1 deletion data.tf
@@ -29,8 +29,150 @@ data "aws_iam_policy_document" "cluster_assume_role_policy" {
}

data "aws_iam_role" "custom_cluster_iam_role" {
count = !var.manage_cluster_iam_resources && var.create_eks ? 1 : 0
count = ! var.manage_cluster_iam_resources && var.create_eks ? 1 : 0
name = var.cluster_iam_role_name
}

data "aws_partition" "current" {}

## DEPRECATED section, to be removed once users have finished migrating to
## worker nodes managed via maps. When updating, remember to make the corresponding update in modules/worker_groups

data "aws_ami" "eks_worker" {
  filter {
    name   = "name"
    values = [local.worker_ami_name_filter]
  }

  most_recent = true

  owners = [var.worker_ami_owner_id]
}

data "aws_ami" "eks_worker_windows" {
  filter {
    name   = "name"
    values = [local.worker_ami_name_filter_windows]
  }

  filter {
    name   = "platform"
    values = ["windows"]
  }

  most_recent = true

  owners = [var.worker_ami_owner_id_windows]
}

data "template_file" "userdata" {
count = var.create_eks ? local.worker_group_count_legacy : 0
template = lookup(
var.worker_groups_legacy[count.index],
"userdata_template_file",
file(
lookup(var.worker_groups_legacy[count.index], "platform", local.workers_group_defaults["platform"]) == "windows"
? "${path.module}/templates/userdata_windows.tpl"
: "${path.module}/templates/userdata.sh.tpl"
)
)

vars = merge({
platform = lookup(var.worker_groups_legacy[count.index], "platform", local.workers_group_defaults["platform"])
cluster_name = coalescelist(aws_eks_cluster.this[*].name, [""])[0]
endpoint = coalescelist(aws_eks_cluster.this[*].endpoint, [""])[0]
cluster_auth_base64 = coalescelist(aws_eks_cluster.this[*].certificate_authority[0].data, [""])[0]
pre_userdata = lookup(
var.worker_groups_legacy[count.index],
"pre_userdata",
local.workers_group_defaults["pre_userdata"],
)
additional_userdata = lookup(
var.worker_groups_legacy[count.index],
"additional_userdata",
local.workers_group_defaults["additional_userdata"],
)
bootstrap_extra_args = lookup(
var.worker_groups_legacy[count.index],
"bootstrap_extra_args",
local.workers_group_defaults["bootstrap_extra_args"],
)
kubelet_extra_args = lookup(
var.worker_groups_legacy[count.index],
"kubelet_extra_args",
local.workers_group_defaults["kubelet_extra_args"],
)
},
lookup(
var.worker_groups_legacy[count.index],
"userdata_template_extra_args",
local.workers_group_defaults["userdata_template_extra_args"]
)
)
}

data "template_file" "launch_template_userdata" {
count = var.create_eks ? local.worker_group_launch_template_legacy_count : 0
template = lookup(
var.worker_groups_launch_template_legacy[count.index],
"userdata_template_file",
file(
lookup(var.worker_groups_launch_template_legacy[count.index], "platform", local.workers_group_defaults["platform"]) == "windows"
? "${path.module}/templates/userdata_windows.tpl"
: "${path.module}/templates/userdata.sh.tpl"
)
)

vars = merge({
platform = lookup(var.worker_groups_launch_template_legacy[count.index], "platform", local.workers_group_defaults["platform"])
cluster_name = coalescelist(aws_eks_cluster.this[*].name, [""])[0]
endpoint = coalescelist(aws_eks_cluster.this[*].endpoint, [""])[0]
cluster_auth_base64 = coalescelist(aws_eks_cluster.this[*].certificate_authority[0].data, [""])[0]
pre_userdata = lookup(
var.worker_groups_launch_template_legacy[count.index],
"pre_userdata",
local.workers_group_defaults["pre_userdata"],
)
additional_userdata = lookup(
var.worker_groups_launch_template_legacy[count.index],
"additional_userdata",
local.workers_group_defaults["additional_userdata"],
)
bootstrap_extra_args = lookup(
var.worker_groups_launch_template_legacy[count.index],
"bootstrap_extra_args",
local.workers_group_defaults["bootstrap_extra_args"],
)
kubelet_extra_args = lookup(
var.worker_groups_launch_template_legacy[count.index],
"kubelet_extra_args",
local.workers_group_defaults["kubelet_extra_args"],
)
},
lookup(
var.worker_groups_launch_template_legacy[count.index],
"userdata_template_extra_args",
local.workers_group_defaults["userdata_template_extra_args"]
)
)
}

data "aws_iam_instance_profile" "custom_worker_group_iam_instance_profile" {
count = var.manage_worker_iam_resources ? 0 : local.worker_group_count_legacy
name = lookup(
var.worker_groups_legacy[count.index],
"iam_instance_profile_name",
local.workers_group_defaults["iam_instance_profile_name"],
)
}

data "aws_iam_instance_profile" "custom_worker_group_launch_template_iam_instance_profile" {
count = var.manage_worker_iam_resources ? 0 : local.worker_group_launch_template_legacy_count
name = lookup(
var.worker_groups_launch_template_legacy[count.index],
"iam_instance_profile_name",
local.workers_group_defaults["iam_instance_profile_name"],
)
}

## ~DEPRECATED
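For legacy groups, the `template_file` data sources above render userdata from per-group keys such as `userdata_template_file` and `userdata_template_extra_args`, falling back to the bundled Linux or Windows templates. A hedged sketch of how a consumer might override the template for one legacy group (the group name, template path, and extra argument are made up for illustration):

worker_groups_legacy = [
  {
    name                   = "custom-userdata"
    instance_type          = "t3.small"
    userdata_template_file = file("./templates/custom-userdata.sh.tpl")
    userdata_template_extra_args = {
      node_labels = "role=worker" # merged into the template vars shown above
    }
  },
]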
19 changes: 19 additions & 0 deletions examples/basic/main.tf
@@ -136,6 +136,25 @@ module "eks" {

  vpc_id = module.vpc.vpc_id

  // worker_groups_legacy = [
  //   {
  //     name                          = "worker-group-1"
  //     instance_type                 = "t3.small"
  //     additional_userdata           = "echo foo bar"
  //     asg_desired_capacity          = 2
  //     additional_security_group_ids = [aws_security_group.worker_group_mgmt_one.id]
  //     root_volume_type              = "gp2"
  //   },
  //   {
  //     name                          = "worker-group-2"
  //     instance_type                 = "t3.medium"
  //     additional_userdata           = "echo foo bar"
  //     additional_security_group_ids = [aws_security_group.worker_group_mgmt_two.id]
  //     asg_desired_capacity          = 1
  //     root_volume_type              = "gp2"
  //   },
  // ]

  worker_groups = {
    worker-group-1 = {
      instance_type = "t3.small"
15 changes: 15 additions & 0 deletions examples/launch_templates/main.tf
@@ -76,6 +76,21 @@ module "eks" {
  subnets = module.vpc.public_subnets
  vpc_id  = module.vpc.vpc_id

  // worker_groups_launch_template_legacy = [
  //   {
  //     name                 = "worker-group-1"
  //     instance_type        = "t3.small"
  //     asg_desired_capacity = 2
  //     public_ip            = true
  //   },
  //   {
  //     name                 = "worker-group-2"
  //     instance_type        = "t3.medium"
  //     asg_desired_capacity = 1
  //     public_ip            = true
  //   },
  // ]

  worker_groups = {
    worker-group-1 = {
      instance_type = "t3.small"
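The `worker_groups` block above is truncated in this diff view. A possible map-based equivalent of the commented-out legacy list (keys mirror the legacy entries; the full block in the repository may differ) would be:

worker_groups = {
  worker-group-1 = {
    instance_type        = "t3.small"
    asg_desired_capacity = 2
    public_ip            = true
  }
  worker-group-2 = {
    instance_type        = "t3.medium"
    asg_desired_capacity = 1
    public_ip            = true
  }
}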