ConfigMap "aws-auth" not found



I launched an EKS cluster using a Terraform module.

My template looks like this:

module "eks" {
source          = "terraform-aws-modules/eks/aws"
version         = "17.20.0"
cluster_name    = "${var.cluster_name}"
cluster_version = var.cluster_version
subnets         = ["${var.public_subnet_1}", 
"${var.public_subnet_2}","${var.public_subnet_3}"]
vpc_id          = var.vpc_id
cluster_security_group_id = "${var.master_sg_id}"
worker_security_group_id =  "${var.master_sg_id}"
workers_additional_policies =[aws_iam_policy.siera_alb_ingress_controller_policy.arn]
workers_role_name = "${var.cluster_name}-${var.environment}-${var.aws_region}-worker-role"
map_roles = [
{
rolearn   = "arn:aws:iam::${var.account_no}:role/${var.cluster_name}-${var.environment}-${var.aws_region}-worker-role"
username  = "system:node:{{EC2PrivateDNSName}}"
groups    = ["system:bootstrappers","system:nodes"]
},
{
rolearn   = "arn:aws:sts::${var.account_no}:assumed-role/${var.assumed_role_1}"
username  = "admin"
groups    = ["system:masters","system:nodes","system:bootstrappers"]
},
{
rolearn  = "arn:aws:sts::${var.account_no}:assumed-role/${var.assumed_role_2}"
username  = "admin"
groups    = ["system:masters","system:nodes","system:bootstrappers"]
}
]
tags = {
Purpose = "${var.project}"
Environment = "${var.environment}"
}
worker_groups_launch_template = [
{
name                  = "${var.cluster_name}-lt"
key_name              = "${var.node_key}"
additional_userdata   = <<EOT
"echo dummy" 
EOT
instance_type         = "${var.node_size}"
asg_min_size          = 3
asg_desired_capacity  = 3
asg_max_size          = 5
autoscaling_enabled   = true
asg_force_delete      = true
public_ip             = true
enable_monitoring     = false
root_volume_size      = 80
suspended_processes   = ["AZRebalance"]
tags = [
{
"key"                 = "k8s.io/cluster-autoscaler/enabled"
"propagate_at_launch" = "false"
"value"               = "true"
},
{
"key"                 = "k8s.io/cluster-autoscaler/${var.cluster_name}"
"propagate_at_launch" = "false"
"value"               = "true"
}
]
}
] 
manage_aws_auth = false 
}

As you can see, I am trying to add the aws-auth ConfigMap using map_roles.

Running kubectl describe configmap -n kube-system aws-auth after the cluster comes up gives the following error:

Error from server (NotFound): configmaps "aws-auth" not found

What am I missing? Please help.

Terraform needs to connect to the cluster it just created and use the cluster's credentials to create the aws-auth ConfigMap. Add the following to the file that creates the EKS cluster. Note also that manage_aws_auth = false tells the module not to manage the ConfigMap at all; it has to be true (the default) for your map_roles entries to be applied.

data "aws_eks_cluster" "default" {
name = module.eks.cluster_id
}
data "aws_eks_cluster_auth" "default" {
name = module.eks.cluster_id
}
provider "kubernetes" {
host                   = data.aws_eks_cluster.default.endpoint
cluster_ca_certificate = base64decode(data.aws_eks_cluster.default.certificate_authority[0].data)
token                  = data.aws_eks_cluster_auth.default.token
}
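
Alternatively, if you really do want to keep manage_aws_auth = false and own the ConfigMap yourself, you can create it through that same provider. A minimal sketch, assuming the provider above is already configured; the role ARN and resource name here are illustrative, not from your config:

resource "kubernetes_config_map" "aws_auth" {
  metadata {
    name      = "aws-auth"
    namespace = "kube-system"
  }

  data = {
    # mapRoles is a YAML string; yamlencode keeps it in sync with the HCL list
    mapRoles = yamlencode([
      {
        rolearn  = "arn:aws:iam::123456789012:role/my-worker-role" # illustrative ARN
        username = "system:node:{{EC2PrivateDNSName}}"
        groups   = ["system:bootstrappers", "system:nodes"]
      }
    ])
  }
}

Either way, the ConfigMap is only created once Terraform can actually authenticate to the new cluster, which is what the provider block above gives you.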

From the HashiCorp GitHub repo:

Hey, I ran into the same issue; the problem was that the kubernetes provider was not connected to the freshly created cluster. I added this snippet to my main:

...
provider "kubernetes" {
  host                   = module.app_cluster.cluster_endpoint
  cluster_ca_certificate = base64decode(module.app_cluster.cluster_certificate_authority_data)

  exec {
    api_version = "client.authentication.k8s.io/v1alpha1"
    command     = "aws"
    # This requires the awscli to be installed locally where Terraform is executed
    args = ["eks", "get-token", "--cluster-name", module.app_cluster.cluster_id]
  }
}

module "app_cluster" {
  source  = "terraform-aws-modules/eks/aws"
  version = "~> 18.0"

  cluster_name    = "${terraform.workspace}-cluster"
  cluster_version = "1.21"

  vpc_id     = var.vpc
  subnet_ids = var.subnets
  ...
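
One caveat: client.authentication.k8s.io/v1alpha1 was removed from Kubernetes clients in 1.24, and newer releases of the aws CLI return v1beta1 tokens. If you hit an exec plugin API version error, bumping api_version should fix it; the rest of the block stays the same (same assumed module.app_cluster as above):

exec {
  # newer aws CLI releases emit v1beta1 ExecCredential objects
  api_version = "client.authentication.k8s.io/v1beta1"
  command     = "aws"
  args        = ["eks", "get-token", "--cluster-name", module.app_cluster.cluster_id]
}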

Hope that helps!
