# Grants cluster-admin access on the management EKS cluster to a dedicated
# "KubernetesAdmin" IAM role that only the Terraform caller's source role may assume.
# NOTE(review): module.eks and data.aws_caller_identity.current are assumed to be
# declared elsewhere in this module — confirm before applying in isolation.

data "aws_iam_session_context" "current" {
  # Resolves the IAM source role of an STS assumed role.
  # For non-role ARNs, this data source simply passes the ARN through as issuer_arn.
  # Ref https://github.com/terraform-aws-modules/terraform-aws-eks/issues/2327#issuecomment-1355581682
  # Ref https://github.com/hashicorp/terraform-provider-aws/issues/28381
  arn = data.aws_caller_identity.current.arn
}

# Trust policy: only the identity that runs this Terraform can assume the admin role.
data "aws_iam_policy_document" "assume_kubernetes_admin" {
  statement {
    actions = ["sts:AssumeRole"]
    principals {
      type        = "AWS"
      identifiers = [data.aws_iam_session_context.current.issuer_arn]
    }
  }
}

# Minimal permission needed to build a kubeconfig (aws eks update-kubeconfig).
data "aws_iam_policy_document" "describe_cluster" {
  statement {
    actions = [
      "eks:DescribeCluster"
    ]
    # NOTE(review): wildcard allows DescribeCluster on every cluster in every
    # account/region this role can reach; consider scoping to the target
    # cluster's ARN — confirm intent.
    resources = ["arn:aws:eks:*:*:cluster/*"]
  }
}

resource "aws_iam_role" "kubernetes_admin" {
  # NOTE(review): the name starts with a bare "-" — a template token (likely the
  # cluster/installation name) appears to have been stripped; restore the prefix.
  name               = "-KubernetesAdmin"
  assume_role_policy = data.aws_iam_policy_document.assume_kubernetes_admin.json
}

# Renamed from "dynamodb_access": the attached policy grants eks:DescribeCluster
# only — the old label was a copy-paste leftover and is referenced nowhere else.
resource "aws_iam_role_policy" "describe_cluster" {
  name   = "DescribeCluster"
  role   = aws_iam_role.kubernetes_admin.id
  policy = data.aws_iam_policy_document.describe_cluster.json
}

# Register the admin role as an EKS access entry on the management cluster.
resource "aws_eks_access_entry" "cluster_owner" {
  cluster_name  = module.eks.cluster_name
  principal_arn = aws_iam_role.kubernetes_admin.arn
  type          = "STANDARD"
}

# Attach the AWS-managed cluster-admin access policy, cluster-wide scope.
resource "aws_eks_access_policy_association" "cluster_owner" {
  cluster_name  = module.eks.cluster_name
  policy_arn    = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSClusterAdminPolicy"
  principal_arn = aws_iam_role.kubernetes_admin.arn

  access_scope {
    type = "cluster"
  }

  # Explicit ordering hint: the association references the role's ARN rather
  # than the access entry resource, so Terraform cannot infer this dependency.
  depends_on = [
    aws_eks_access_entry.cluster_owner
  ]
}
b/aws-github/terraform/aws/eks/main.tf index 68deb2395..fbc9f662f 100644 --- a/aws-github/terraform/aws/eks/main.tf +++ b/aws-github/terraform/aws/eks/main.tf @@ -106,40 +106,7 @@ module "eks" { } # Enable admin permissions for the cluster creator - enable_cluster_creator_admin_permissions = true - - access_entries = { - - "argocd_" = { - cluster_name = "" - principal_arn = "arn:aws:iam:::role/argocd-" - username = "arn:aws:iam:::role/argocd-" - policy_associations = { - view_deployments = { - policy_arn = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSViewPolicy" - access_scope = { - namespaces = ["default"] - type = "namespace" - } - } - } - } - - "atlantis_" = { - cluster_name = "" - principal_arn = "arn:aws:iam:::role/atlantis-" - username = "arn:aws:iam:::role/atlantis-" - policy_associations = { - view_deployments = { - policy_arn = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSViewPolicy" - access_scope = { - namespaces = ["default"] - type = "namespace" - } - } - } - } - } + enable_cluster_creator_admin_permissions = false tags = local.tags } @@ -616,7 +583,7 @@ EOT } resource "aws_iam_policy" "ssm_access_policy" { - name = "kubefirst-pro-api-ssm-access-${local.name}" + name = "kubefirst-pro-api-ssm-access-${local.name}" description = "Policy to allow SSM actions for kubefirst-pro-api" policy = jsonencode({ Version = "2012-10-17", diff --git a/aws-github/terraform/aws/modules/workload-cluster/cluster_permission.tf b/aws-github/terraform/aws/modules/workload-cluster/cluster_permission.tf new file mode 100644 index 000000000..eef5fe4cd --- /dev/null +++ b/aws-github/terraform/aws/modules/workload-cluster/cluster_permission.tf @@ -0,0 +1,25 @@ +data "aws_caller_identity" "this" {} + +locals { + role_name = data.aws_caller_identity.this.account_id == "" ? 
"-KubernetesAdmin" : "kubefirst-pro-api-"

  # NOTE(review): the condition on the line above compares account_id to "" —
  # account IDs are never empty, so a template token (e.g. the management
  # account id) appears to have been stripped; confirm and restore it.
  # NOTE(review): both role-name branches end in a bare "-"/"-" prefix form,
  # suggesting stripped name suffix tokens as well — confirm.

  # Single source of truth for the admin principal ARN; previously this
  # interpolation was duplicated verbatim in both resources below.
  admin_role_arn = "arn:aws:iam::${data.aws_caller_identity.this.account_id}:role/${local.role_name}"
}

# Register the admin role as an EKS access entry on the workload cluster.
resource "aws_eks_access_entry" "cluster_owner" {
  cluster_name  = module.eks.cluster_name
  principal_arn = local.admin_role_arn
  type          = "STANDARD"
}

# Attach the AWS-managed cluster-admin access policy, cluster-wide scope.
resource "aws_eks_access_policy_association" "cluster_owner" {
  cluster_name  = module.eks.cluster_name
  policy_arn    = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSClusterAdminPolicy"
  principal_arn = local.admin_role_arn

  access_scope {
    type = "cluster"
  }

  # Explicit ordering hint: the association references the role ARN string
  # rather than the access entry resource, so Terraform cannot infer this.
  depends_on = [
    aws_eks_access_entry.cluster_owner
  ]
}