feat: added Cilium CNI.
This is a reimplementation of #722.
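
For reference, a minimal sketch of how a root configuration could enable the new add-on (variable names and defaults are the ones introduced in variables-extensions.tf below; per the precondition in modules/extensions/cilium.tf, the chart is only deployed on clusters created with cni_type = "flannel"):

    cni_type            = "flannel"
    cilium_install      = true
    cilium_namespace    = "kube-system"  # default
    cilium_helm_version = "1.14.4"       # default chart version
    cilium_reapply      = false          # set true to force a re-apply when nothing has changed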

Signed-off-by: Ali Mukadam <ali.mukadam@oracle.com>
hyder committed Nov 28, 2023
1 parent 3d96325 commit 9b683d8
Showing 7 changed files with 318 additions and 0 deletions.
9 changes: 9 additions & 0 deletions module-extensions.tf
@@ -24,6 +24,15 @@ module "extensions" {
cni_type = var.cni_type
pods_cidr = var.pods_cidr

# CNI: Cilium
cilium_install = var.cilium_install
cilium_reapply = var.cilium_reapply
cilium_namespace = var.cilium_namespace
cilium_helm_version = var.cilium_helm_version
cilium_helm_values = var.cilium_helm_values
cilium_helm_values_files = var.cilium_helm_values_files


# CNI: Multus
multus_install = var.multus_install
multus_namespace = var.multus_namespace
1 change: 1 addition & 0 deletions module-operator.tf
@@ -58,6 +58,7 @@ module "operator" {
availability_domain = coalesce(var.operator_availability_domain, lookup(local.ad_numbers_to_names, local.ad_numbers[0]))
cloud_init = var.operator_cloud_init
image_id = local.operator_image_id
install_cilium = var.cilium_install
install_helm = var.operator_install_helm
install_k9s = var.operator_install_k9s
install_kubectx = var.operator_install_kubectx
239 changes: 239 additions & 0 deletions modules/extensions/cilium.tf
@@ -0,0 +1,239 @@
# Copyright (c) 2023 Oracle Corporation and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl

locals {
cilium_helm_crds_file = join("/", [local.yaml_manifest_path, "cilium.crds.yaml"])
cilium_helm_manifest_file = join("/", [local.yaml_manifest_path, "cilium.manifest.yaml"])
cilium_helm_values_file = join("/", [local.yaml_manifest_path, "cilium.values.yaml"])
cilium_net_attach_def_file = join("/", [local.yaml_manifest_path, "cilium.net_attach_def.yaml"])
cilium_veth_config_map_file = join("/", [local.yaml_manifest_path, "cilium.cni_config_map.yaml"])

cilium_helm_crds = one(data.helm_template.cilium[*].crds)
cilium_helm_manifest = one(data.helm_template.cilium[*].manifest)

cilium_vxlan_cni = {
install = true
chainingMode = "none"
exclusive = true # !var.multus_install
}

# TODO Support Flannel w/ generic-veth & tunnel disabled
cilium_tunnel = "vxlan" # var.cni_type == "flannel" ? "disabled" : "vxlan"
cilium_flannel_cni = {
install = true
chainingMode = "generic-veth"
configMap = "cni-configuration"
customConf = var.cni_type == "flannel"
exclusive = !var.multus_install
}

cilium_helm_values = {
annotateK8sNode = true
cluster = { name = "oke-${var.state_id}" }
clustermesh = { useAPIServer = true }
cni = local.cilium_vxlan_cni
containerRuntime = { integration = "crio" }
installIptablesRules = true
installNoConntrackIptablesRules = false
ipam = { mode = "kubernetes" }
ipv4NativeRoutingCIDR = element(var.vcn_cidrs, 0)
kubeProxyReplacement = "disabled"
pmtuDiscovery = { enabled = true }
tunnel = local.cilium_tunnel

hubble = {
metrics = {
dashboards = { enabled = var.prometheus_install }
# serviceMonitor = { enabled = var.prometheus_enabled }
}
relay = { enabled = true }
ui = { enabled = true }
}

k8s = {
requireIPv4PodCIDR = true # wait for Kubernetes to provide the PodCIDR (ipam kubernetes)
enableIPv4Masquerade = true # var.cni_type != "flannel" # masquerade IPv4 traffic leaving the node from endpoints
}

# Prometheus metrics
metrics = {
dashboards = { enabled = var.prometheus_install }
# # serviceMonitor = { enabled = var.prometheus_enabled }
}

prometheus = {
enabled = var.prometheus_install
# serviceMonitor = { enabled = var.prometheus_enabled }
}

operator = {
prometheus = {
enabled = var.prometheus_install
# serviceMonitor = { enabled = var.prometheus_enabled }
}
}
}

cilium_net_attach_def_conf = {
cniVersion = "0.3.1"
name = "cilium"
plugins = [
{
cniVersion = "0.3.1"
name = "cilium"
type = "cilium-cni"
},
{
name = "cilium-sbr"
type = "sbr"
}
],
}

cilium_net_attach_def = {
apiVersion = "k8s.cni.cncf.io/v1"
kind = "NetworkAttachmentDefinition"
metadata = { name = "cilium" }
spec = { config = jsonencode(local.cilium_net_attach_def_conf) }
}

cilium_veth_conf = {
cniVersion = "0.3.1"
name = "cbr0"
"plugins" = [
{
type = "flannel"
delegate = {
hairpinMode = true
isDefaultGateway = true
}
},
{
type = "portmap"
capabilities = { portMappings = true }
},
{ type = "cilium-cni" },
]
}

cilium_veth_config_map = {
apiVersion = "v1"
kind = "ConfigMap"
metadata = {
name = "cni-configuration"
namespace = var.cilium_namespace
}
data = { "cni-config" = jsonencode(local.cilium_veth_conf) }
}

cilium_net_attach_def_yaml = yamlencode(local.cilium_net_attach_def)
cilium_veth_config_map_yaml = yamlencode(local.cilium_veth_config_map)
cilium_helm_values_yaml = yamlencode(local.cilium_helm_values)
}

data "helm_template" "cilium" {
count = var.cilium_install ? 1 : 0
chart = "cilium"
repository = "https://helm.cilium.io"
version = var.cilium_helm_version
kube_version = var.kubernetes_version

name = "cilium"
namespace = var.cilium_namespace
create_namespace = true
include_crds = true
skip_tests = true
values = concat(
[local.cilium_helm_values_yaml],
[for path in var.cilium_helm_values_files : file(path)],
)

lifecycle {
precondition {
condition = alltrue([for path in var.cilium_helm_values_files : fileexists(path)])
error_message = format("Missing Helm values files in configuration: %s",
jsonencode([for path in var.cilium_helm_values_files : path if !fileexists(path)])
)
}
}
}

resource "null_resource" "cilium" {
count = var.cilium_install ? 1 : 0
depends_on = [null_resource.prometheus]

triggers = {
helm_version = var.cilium_helm_version
crds_md5 = try(md5(join("\n", local.cilium_helm_crds)), null)
manifest_md5 = try(md5(local.cilium_helm_manifest), null)
reapply = var.cilium_reapply ? uuid() : null
}

connection {
bastion_host = var.bastion_host
bastion_user = var.bastion_user
bastion_private_key = var.ssh_private_key
host = var.operator_host
user = var.operator_user
private_key = var.ssh_private_key
timeout = "40m"
type = "ssh"
}

provisioner "remote-exec" {
inline = ["mkdir -p ${local.yaml_manifest_path}"]
}

provisioner "file" {
content = join("\n", local.cilium_helm_crds)
destination = local.cilium_helm_crds_file
}

provisioner "file" {
content = local.cilium_helm_manifest
destination = local.cilium_helm_manifest_file
}

provisioner "file" {
content = local.cilium_helm_values_yaml
destination = local.cilium_helm_values_file
}

provisioner "file" {
content = local.cilium_net_attach_def_yaml
destination = local.cilium_net_attach_def_file
}

provisioner "file" {
content = local.cilium_veth_config_map_yaml
destination = local.cilium_veth_config_map_file
}

provisioner "remote-exec" {
inline = [for c in compact([
# Create namespace if non-standard and missing
(contains(["kube-system", "default"], var.cilium_namespace) ? null
: format(local.kubectl_create_missing_ns, var.cilium_namespace)),

# Install CRDs first
format(local.kubectl_apply_server_ns_file, var.cilium_namespace, local.cilium_helm_crds_file),

# Install full manifest
format(local.kubectl_apply_ns_file, var.cilium_namespace, local.cilium_helm_manifest_file),

# Install Network Attachment Definition when Multus is enabled
# var.multus_install ? format(local.kubectl_apply_file, local.cilium_net_attach_def_file) : null,

# Install CNI ConfigMap for Flannel
# var.cni_type == "flannel" ? format(local.kubectl_apply_file, local.cilium_veth_config_map_file) : null,
]) : format(local.output_log, c, "cilium")
]
}

lifecycle {
precondition {
condition = var.cni_type == "flannel"
error_message = "Incompatible cni_type for installation - must be 'flannel'."
}
}
}
8 changes: 8 additions & 0 deletions modules/extensions/variables.tf
@@ -20,6 +20,14 @@ variable "vcn_cidrs" { type = list(string) }
variable "cni_type" { type = string }
variable "pods_cidr" { type = string }

# CNI: Cilium
variable "cilium_install" { type = bool }
variable "cilium_reapply" { type = bool }
variable "cilium_namespace" { type = string }
variable "cilium_helm_version" { type = string }
variable "cilium_helm_values" { type = map(string) }
variable "cilium_helm_values_files" { type = list(string) }

# CNI: Multus
variable "multus_install" { type = bool }
variable "multus_namespace" { type = string }
21 changes: 21 additions & 0 deletions modules/operator/cloudinit.tf
@@ -12,6 +12,8 @@ locals {
developer_EPEL = "${local.baserepo}_developer_EPEL"
olcne17 = "${local.baserepo}_olcne17"
developer_olcne = "${local.baserepo}_developer_olcne"
arch_amd = "amd64"
arch_arm = "aarch64"

}

@@ -154,6 +156,25 @@ data "cloudinit_config" "operator" {
}
}

# Optional cilium cli installation
dynamic "part" {
for_each = var.install_cilium ? [1] : []
content {
content_type = "text/cloud-config"
content = jsonencode({
runcmd = [
"CILIUM_CLI_VERSION=$(curl -s https://raw.githubusercontent.com/cilium/cilium-cli/master/stable.txt)",
"CLI_ARCH=${local.arch_amd}",
"if [ '$(uname -m)' = ${local.arch_arm} ]; then CLI_ARCH=${local.arch_arm}; fi",
"curl -L --fail --remote-name-all https://github.com/cilium/cilium-cli/releases/download/$CILIUM_CLI_VERSION/cilium-linux-$CLI_ARCH.tar.gz",
"tar xzvfC cilium-linux-$CLI_ARCH.tar.gz /usr/local/bin"
]
})
filename = "20-cilium.yml"
merge_type = local.default_cloud_init_merge_type
}
}

# Write user bashrc to filesystem
part {
content_type = "text/cloud-config"
1 change: 1 addition & 0 deletions modules/operator/variables.tf
@@ -14,6 +14,7 @@ variable "assign_dns" { type = bool }
variable "availability_domain" { type = string }
variable "cloud_init" { type = list(map(string)) }
variable "image_id" { type = string }
variable "install_cilium" { type = bool }
variable "install_helm" { type = bool }
variable "install_k9s" { type = bool }
variable "install_kubectx" { type = bool }
39 changes: 39 additions & 0 deletions variables-extensions.tf
@@ -1,6 +1,44 @@
# Copyright (c) 2017, 2023 Oracle Corporation and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl

# CNI: Cilium

variable "cilium_install" {
default = false
description = "Whether to deploy the Cilium Helm chart. May only be enabled when cni_type = 'flannel'. See https://docs.cilium.io. NOTE: Provided only as a convenience and not supported by or sourced from Oracle - use at your own risk."
type = bool
}

variable "cilium_reapply" {
default = false
description = "Whether to force reapply of the chart when no changes are detected, e.g. with state modified externally."
type = bool
}

variable "cilium_namespace" {
default = "kube-system"
description = "Kubernetes namespace for deployed resources."
type = string
}

variable "cilium_helm_version" {
default = "1.14.4"
description = "Version of the Helm chart to install. List available releases using `helm search repo [keyword] --versions`."
type = string
}

variable "cilium_helm_values" {
default = {}
description = "Map of individual Helm chart values. See https://registry.terraform.io/providers/hashicorp/helm/latest/docs/data-sources/template."
type = map(string)
}

variable "cilium_helm_values_files" {
default = []
description = "Paths to a local YAML files with Helm chart values (as with `helm install -f` which supports multiple). Generate with defaults using `helm show values [CHART] [flags]`."
type = list(string)
}

# CNI: Multus

variable "multus_install" {
@@ -328,3 +366,4 @@ variable "gatekeeper_helm_values_files" {
description = "Paths to a local YAML files with Helm chart values (as with `helm install -f` which supports multiple). Generate with defaults using `helm show values [CHART] [flags]`."
type = list(string)
}
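
A note for anyone supplying Helm overrides: data "helm_template" "cilium" renders the module's built-in values first and then appends each entry of cilium_helm_values_files, and, as with multiple `helm install -f` files, later entries take precedence for overlapping keys, so file-based overrides win over the module defaults. Every listed path must also exist at plan time or the fileexists precondition fails the plan. A hypothetical override could be wired up as:

    # cilium-overrides.yaml is a hypothetical local file, e.g. trimmed down from
    # the output of `helm show values` as suggested in the variable description;
    # it is merged after the module's built-in values, so its keys win.
    cilium_helm_values_files = [
      "./cilium-overrides.yaml",
    ]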
