Provision EKS worker nodes

Adding worker nodes to an EKS cluster.

Let's extend the previous code and add worker nodes to it, as shown below.

provider "aws" {
  region = "ap-south-1"
  profile = "terraform-operator"
}

data "aws_availability_zones" "available" {

}

module "vpc" {
  source  = "terraform-aws-modules/vpc/aws"
  version = "2.6.0"
  name = "vpc-eks-test"
  cidr = "10.0.0.0/16"
  azs = data.aws_availability_zones.available.names
  public_subnets = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"]
  enable_dns_support = true # needed later for Ingress
  enable_dns_hostnames = true
  tags = {
    "Name" = "vpc-eks-test"
  }
}

module "eks" {
  source  = "terraform-aws-modules/eks/aws"
  version = "7.0.0"
  cluster_name = "eks-test"
  permissions_boundary = ""
  subnets = module.vpc.public_subnets
  vpc_id = module.vpc.vpc_id

  ## Adding worker nodes
  worker_groups_launch_template = [
    {
      name                 = "worker-group-1"
      instance_type        = "t2.small"
      asg_desired_capacity = 2
      public_ip            = true
    },
    {
      name                 = "worker-group-2"
      instance_type        = "t2.small"
      asg_desired_capacity = 1
      public_ip            = true
    },
  ]
}

In the above Terraform code, the worker_groups_launch_template block adds two worker groups with a desired capacity of 2 and 1 instances respectively. However, just creating the EC2 instances is not enough; we also need to ensure that these instances actually join the cluster as worker nodes.
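With this version of the EKS module, the instances register as nodes once the module writes the aws-auth ConfigMap into the cluster, and for that it needs a configured kubernetes provider. Below is a minimal sketch of that wiring, assuming the module's default manage_aws_auth = true behaviour and the pre-2.0 Kubernetes provider syntax (load_config_file) that this module version works with; the data source label "cluster" is just an illustrative name.

# Connection details for the cluster that the eks module creates.
data "aws_eks_cluster" "cluster" {
  name = module.eks.cluster_id
}

# Short-lived authentication token for that cluster.
data "aws_eks_cluster_auth" "cluster" {
  name = module.eks.cluster_id
}

# The eks module uses this provider to write the aws-auth ConfigMap,
# which maps the worker IAM role so the instances can register as nodes.
provider "kubernetes" {
  host                   = data.aws_eks_cluster.cluster.endpoint
  cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority.0.data)
  token                  = data.aws_eks_cluster_auth.cluster.token
  load_config_file       = false
}

After terraform apply finishes, you can confirm the instances registered by pointing kubectl at the new cluster (aws eks update-kubeconfig --name eks-test --region ap-south-1) and running kubectl get nodes; all three nodes should reach the Ready state within a few minutes.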
