From 9af159f7d8d53acb385fd94828c2c04fe7192191 Mon Sep 17 00:00:00 2001
From: Justin Bronn
Date: Fri, 15 Dec 2023 16:04:32 -0700
Subject: [PATCH] Add ability to use Karpenter.

---
 karpenter.tf | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 main.tf      | 17 ++++++++++++++++-
 variables.tf | 24 ++++++++++++++++++------
 3 files changed, 86 insertions(+), 7 deletions(-)
 create mode 100644 karpenter.tf

diff --git a/karpenter.tf b/karpenter.tf
new file mode 100644
index 0000000..e753a5a
--- /dev/null
+++ b/karpenter.tf
@@ -0,0 +1,52 @@
+module "karpenter" {
+  count   = var.karpenter ? 1 : 0
+  source  = "terraform-aws-modules/eks/aws//modules/karpenter"
+  version = "19.21.0"
+
+  cluster_name           = var.cluster_name
+  irsa_oidc_provider_arn = module.eks.oidc_provider_arn
+  tags                   = var.tags
+}
+
+resource "helm_release" "karpenter" {
+  count            = var.karpenter ? 1 : 0
+  namespace        = "karpenter"
+  create_namespace = true
+
+  name                = "karpenter"
+  repository          = "oci://public.ecr.aws/karpenter"
+  repository_username = data.aws_ecrpublic_authorization_token.current.user_name
+  repository_password = data.aws_ecrpublic_authorization_token.current.password
+  chart               = "karpenter"
+  version             = "v${var.karpenter_version}"
+
+  set {
+    name  = "settings.aws.clusterName"
+    value = var.cluster_name
+  }
+
+  set {
+    name  = "settings.aws.clusterEndpoint"
+    value = module.eks.cluster_endpoint
+  }
+
+  set {
+    name  = "serviceAccount.annotations.eks\\.amazonaws\\.com/role-arn"
+    value = module.karpenter[0].irsa_arn
+  }
+
+  set {
+    name  = "settings.aws.defaultInstanceProfile"
+    value = module.karpenter[0].instance_profile_name
+  }
+
+  set {
+    name  = "settings.aws.interruptionQueueName"
+    value = module.karpenter[0].queue_name
+  }
+
+  depends_on = [
+    module.eks,
+    module.karpenter,
+  ]
+}
diff --git a/main.tf b/main.tf
index e64478b..d3d472b 100644
--- a/main.tf
+++ b/main.tf
@@ -6,6 +6,16 @@ locals {
   aws_account_id = data.aws_caller_identity.current.account_id
   aws_partition  = data.aws_partition.current.partition
   aws_region     = data.aws_region.current.name
+  aws_auth_karpenter_roles = var.karpenter ? [
+    {
+      rolearn  = module.karpenter[0].role_arn
+      username = "system:node:{{EC2PrivateDNSName}}"
+      groups = [
+        "system:bootstrappers",
+        "system:nodes",
+      ]
+    },
+  ] : []
   aws_auth_roles = concat(
     [
       for role in var.system_masters_roles : {
@@ -14,6 +24,7 @@
         groups   = ["system:masters"]
       }
     ],
+    local.aws_auth_karpenter_roles,
     var.aws_auth_roles
   )
 }
@@ -85,7 +96,11 @@ module "eks" { # tfsec:ignore:aws-ec2-no-public-egress-sgr tfsec:ignore:aws-eks-
   fargate_profile_defaults = var.fargate_profile_defaults
   node_security_group_tags = var.node_security_group_tags
 
-  tags = var.tags
+
+  tags = merge(
+    var.tags,
+    var.karpenter ? { "karpenter.sh/discovery" = var.cluster_name } : {}
+  )
 }
 
 # Add EKS to default kubeconfig and set context for it.
diff --git a/variables.tf b/variables.tf
index abbbd8d..12a7c60 100644
--- a/variables.tf
+++ b/variables.tf
@@ -160,18 +160,24 @@ variable "helm_verify" {
   type        = bool
 }
 
-variable "kubernetes_version" {
-  default     = "1.28"
-  description = "Kubernetes version to use for the EKS cluster."
-  type        = string
-}
-
 variable "iam_role_attach_cni_policy" {
   default     = true
   description = "Whether to attach CNI policy to EKS Node groups."
   type        = bool
 }
 
+variable "karpenter" {
+  default     = false
+  description = "Whether to use Karpenter with the EKS cluster."
+  type        = bool
+}
+
+variable "karpenter_version" {
+  default     = "0.32.3"
+  description = "Version of the Karpenter Helm chart to install on the EKS cluster."
+  type        = string
+}
+
 variable "kms_manage" {
   default     = false
   description = "Manage EKS KMS resource instead of the AWS module"
@@ -190,6 +196,12 @@ variable "kms_key_enable_default_policy" {
   default     = true
 }
 
+variable "kubernetes_version" {
+  default     = "1.28"
+  description = "Kubernetes version to use for the EKS cluster."
+  type        = string
+}
+
 variable "lb_controller_version" {
   default     = "1.6.1"
   description = "Version of the AWS Load Balancer Controller chart to install."
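
Note: helm_release.karpenter authenticates to the OCI registry through
data.aws_ecrpublic_authorization_token.current, which this patch does not
define. If the repository does not already contain it, a definition along
the following lines is needed. This is a sketch, not part of the patch:
the "virginia" provider alias is an assumption, required because ECR Public
authorization tokens can only be issued from us-east-1.

provider "aws" {
  alias  = "virginia"
  region = "us-east-1" # ECR Public is only served from us-east-1
}

# Token consumed by helm_release.karpenter to pull from oci://public.ecr.aws.
data "aws_ecrpublic_authorization_token" "current" {
  provider = aws.virginia
}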
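For reference, a minimal root-module invocation that opts in to Karpenter
might look like the sketch below. The module source path and tag values are
illustrative; cluster_name, kubernetes_version, karpenter, karpenter_version,
and tags are the inputs defined in variables.tf.

module "eks_cluster" {
  source = "../modules/eks" # illustrative path to this module

  cluster_name       = "demo"
  kubernetes_version = "1.28"

  karpenter         = true     # enables karpenter.tf and the aws-auth role mapping
  karpenter_version = "0.32.3" # Helm chart version, rendered as "v0.32.3"

  tags = {
    Environment = "dev"
  }
}

With karpenter = true, the cluster tags gain
karpenter.sh/discovery = <cluster_name>, which Karpenter's subnet and
security-group discovery selectors can then match.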