32 changes: 28 additions & 4 deletions templates/Makefile
@@ -1,11 +1,12 @@
SHELL = /usr/bin/env bash
ENVIRONMENT ?= stage
PROJECT = <% .Name %>
ROLE ?= admin
export AWS_DEFAULT_REGION = <% index .Params `region` %>
export AWS_PAGER =
KUBE_CONTEXT := $(PROJECT)-$(ENVIRONMENT)-$(AWS_DEFAULT_REGION)

apply: apply-remote-state apply-secrets apply-env update-k8s-conf pre-k8s apply-k8s-utils post-apply-setup
apply: apply-remote-state apply-shared-remote-state apply-secrets apply-shared-env apply-env update-k8s-conf pre-k8s apply-k8s-utils post-apply-setup

apply-remote-state:
aws s3 ls $(PROJECT)-$(ENVIRONMENT)-terraform-state > /dev/null 2>&1 || ( \
@@ -14,13 +15,25 @@ apply-remote-state:
terraform apply -var "environment=$(ENVIRONMENT)" $(AUTO_APPROVE) && \
rm ./terraform.tfstate )

apply-shared-remote-state:
aws s3 ls $(PROJECT)-shared-terraform-state > /dev/null 2>&1 || ( \
cd terraform/bootstrap/remote-state && \
terraform init && \
terraform apply -var "environment=shared" $(AUTO_APPROVE) && \
rm ./terraform.tfstate )

apply-secrets:
aws iam list-access-keys --user-name $(PROJECT)-ci-user > /dev/null 2>&1 || ( \
cd terraform/bootstrap/secrets && \
terraform init && \
terraform apply $(AUTO_APPROVE) && \
rm ./terraform.tfstate )

apply-shared-env:
cd terraform/environments/shared; \
terraform init && \
terraform apply $(AUTO_APPROVE)

apply-env:
cd terraform/environments/$(ENVIRONMENT); \
terraform init && \
@@ -40,12 +53,12 @@ apply-k8s-utils:
terraform apply $(AUTO_APPROVE)

update-k8s-conf:
aws eks --region $(AWS_DEFAULT_REGION) update-kubeconfig --role "arn:aws:iam::<% index .Params `accountId` %>:role/$(PROJECT)-kubernetes-admin-$(ENVIRONMENT)" --name $(KUBE_CONTEXT) --alias $(KUBE_CONTEXT)
aws eks --region $(AWS_DEFAULT_REGION) update-kubeconfig --role "arn:aws:iam::<% index .Params `accountId` %>:role/$(PROJECT)-kubernetes-$(ROLE)-$(ENVIRONMENT)" --name $(KUBE_CONTEXT) --alias $(KUBE_CONTEXT)

post-apply-setup:
cd scripts && ENVIRONMENT=$(ENVIRONMENT) PROJECT=$(PROJECT) sh post-apply.sh

teardown: teardown-k8s-utils teardown-env teardown-secrets teardown-remote-state
teardown: teardown-k8s-utils teardown-env teardown-shared-env teardown-secrets teardown-remote-state teardown-shared-remote-state

teardown-remote-state:
@echo "Deleting remote state is not reversible, are you sure you want to delete the resources? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
@@ -54,6 +67,13 @@ teardown-remote-state:
# TODO: This doesn't work because bucket versioning is enabled; we would need to loop through all object versions and delete them manually
aws s3 rb s3://$(PROJECT)-$(ENVIRONMENT)-terraform-state --force

teardown-shared-remote-state:
@echo "Deleting shared remote state is not reversible, are you sure you want to delete the resources? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
aws dynamodb delete-table --region $(AWS_DEFAULT_REGION) --table-name $(PROJECT)-shared-terraform-state-locks
aws s3 rm s3://$(PROJECT)-shared-terraform-state --recursive
# TODO: This doesn't work because bucket versioning is enabled; we would need to loop through all object versions and delete them manually
aws s3 rb s3://$(PROJECT)-shared-terraform-state --force

teardown-secrets:
@echo "Deleting secrets is not reversible, are you sure you want to delete the secrets? [y/N]:" ; read ans ; [ $${ans:-N} == "y" ] || exit 1
aws secretsmanager list-secrets --region $(AWS_DEFAULT_REGION) --query "SecretList[?Tags[?Key=='project' && Value=='$(PROJECT)']].[Name] | [0][0]" | xargs aws secretsmanager delete-secret --region $(AWS_DEFAULT_REGION) --secret-id || echo "Secret already removed"
@@ -69,8 +89,12 @@ teardown-env:
cd terraform/environments/$(ENVIRONMENT) && \
terraform destroy

teardown-shared-env:
cd terraform/environments/shared && \
terraform destroy

teardown-k8s-utils:
cd kubernetes/terraform/environments/$(ENVIRONMENT) && \
terraform destroy

.PHONY: apply apply-remote-state apply-secrets apply-env apply-k8s-utils teardown-k8s-utils teardown-env teardown-secrets teardown-remote-state
.PHONY: apply apply-remote-state apply-secrets apply-env apply-k8s-utils teardown-k8s-utils teardown-env teardown-shared-env teardown-secrets teardown-remote-state teardown-shared-remote-state
20 changes: 20 additions & 0 deletions templates/terraform/README.md
@@ -102,6 +102,26 @@
make update-k8s-conf
```

If a user has a role other than admin (developer, operator, etc.), they can specify it as well:
```
ROLE=<role> make update-k8s-conf
```

## User Access

You may want to give members of your team access to the infrastructure.
Roles and their permissions are defined in `environments/<env>/user_access.tf`; these determine how much access a user in each role has to both AWS and Kubernetes.

1. Add users in `environments/shared/main.tf` and specify the role they should have in each environment (a sample entry is sketched after these steps), then run:
```
make apply-shared-env
```

2. To assign users to their roles, run the following once per environment:
```
ENVIRONMENT=<env> make apply-env
```
This should detect the newly created users and add them to the appropriate groups.
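
For reference, a user entry in `environments/shared/main.tf` follows the shape of the commented examples in that file (the name here is illustrative):
```
users = [
  {
    name = "dev1"
    roles = [
      { name = "developer", environments = ["stage", "prod"] }
    ]
  }
]
```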


## Upgrading an EKS Cluster
55 changes: 45 additions & 10 deletions templates/terraform/environments/prod/main.tf
@@ -9,9 +9,28 @@ terraform {
}
}

locals {
project = "<% .Name %>"
region = "<% index .Params `region` %>"
account_id = "<% index .Params `accountId` %>"
domain_name = "<% index .Params `productionHostRoot` %>"
}

provider "aws" {
region = "<% index .Params `region` %>"
allowed_account_ids = ["<% index .Params `accountId` %>"]
region = local.region
allowed_account_ids = [local.account_id]
}

# remote state of "shared"
data "terraform_remote_state" "shared" {
backend = "s3"
config = {
bucket = "${local.project}-shared-terraform-state"
key = "infrastructure/terraform/environments/shared/main"
region = local.region
encrypt = true
dynamodb_table = "${local.project}-shared-terraform-state-locks"
}
}

# Instantiate the production environment
@@ -20,9 +39,9 @@ module "prod" {
environment = "prod"

# Project configuration
project = "<% .Name %>"
region = "<% index .Params `region` %>"
allowed_account_ids = ["<% index .Params `accountId` %>"]
project = local.project
region = local.region
allowed_account_ids = [local.account_id]
random_seed = "<% index .Params `randomSeed` %>"

# ECR configuration
@@ -35,15 +54,15 @@ module "prod" {
eks_worker_asg_max_size = 4

# EKS-Optimized AMI for your region: https://docs.aws.amazon.com/eks/latest/userguide/eks-optimized-ami.html
# https://<% index .Params `region` %>.console.aws.amazon.com/systems-manager/parameters/%252Faws%252Fservice%252Feks%252Foptimized-ami%252F1.17%252Famazon-linux-2%252Frecommended%252Fimage_id/description?region=<% index .Params `region` %>
# https://${local.region}.console.aws.amazon.com/systems-manager/parameters/%252Faws%252Fservice%252Feks%252Foptimized-ami%252F1.17%252Famazon-linux-2%252Frecommended%252Fimage_id/description?region=${local.region}
eks_worker_ami = "<% index .Params `eksWorkerAMI` %>"

# Hosting configuration. Each domain will have a bucket created for it, but may have multiple aliases pointing to the same bucket.
hosted_domains = [
{ domain : "<% index .Params `productionHostRoot` %>", aliases : [] },
{ domain : "<% index .Params `productionFrontendSubdomain` %><% index .Params `productionHostRoot` %>", aliases : [] },
{ domain : local.domain_name, aliases : [] },
{ domain : "<% index .Params `productionFrontendSubdomain` %>${local.domain_name}", aliases : [] },
]
domain_name = "<% index .Params `productionHostRoot` %>"
domain_name = local.domain_name
cf_signed_downloads = <% if eq (index .Params `fileUploads`) "yes" %>true<% else %>false<% end %>

# DB configuration
@@ -61,5 +80,21 @@ module "prod" {
# See https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html

sendgrid_enabled = <% if eq (index .Params `sendgridApiKey`) "" %>false<% else %>true<% end %>
sendgrid_api_key_secret_name = "<% .Name %>-sendgrid-<% index .Params `randomSeed` %>"
sendgrid_api_key_secret_name = "${local.project}-sendgrid-<% index .Params `randomSeed` %>"

# Roles configuration
roles = [
{
name = "developer"
aws_policy = data.aws_iam_policy_document.developer_access.json
k8s_policies = local.k8s_developer_access
},
{
name = "operator"
aws_policy = data.aws_iam_policy_document.operator_access.json
k8s_policies = local.k8s_operator_access
}
]

user_role_mapping = data.terraform_remote_state.shared.outputs.user_role_mapping
}
101 changes: 101 additions & 0 deletions templates/terraform/environments/prod/user_access.tf
@@ -0,0 +1,101 @@
# define AWS policy documents for developer
data "aws_iam_policy_document" "developer_access" {
[Contributor review comment] At some point we will want to revisit these policies, add stuff like enforcing MFA, etc., but this should be fine for now.

# EKS
statement {
effect = "Allow"
actions = ["eks:ListClusters"]
resources = ["*"]
}
statement {
effect = "Allow"
actions = ["eks:DescribeCluster"]
resources = ["arn:aws:eks:${local.region}:${local.account_id}:cluster/${local.project}-stage*"]
}

# ECR
statement {
effect = "Allow"
actions = [
"ecr:DescribeImages",
"ecr:DescribeRepositories"
]
resources = ["*"]
}

# S3
statement {
effect = "Allow"
actions = ["s3:ListBucket"]
resources = ["arn:aws:s3:::*${local.domain_name}"]
}
statement {
effect = "Allow"
actions = ["s3:GetObject"]
resources = ["arn:aws:s3:::*${local.domain_name}/*"]
}
}

# define AWS policy documents for operator
data "aws_iam_policy_document" "operator_access" {
# IAM
statement {
effect = "Allow"
actions = [
"iam:ListRoles",
"sts:AssumeRole"
]
resources = ["arn:aws:iam::${local.account_id}:role/${local.project}-kubernetes-operator-stage"]
}

# EKS
statement {
effect = "Allow"
actions = ["eks:*"]
resources = ["arn:aws:eks:${local.region}:${local.account_id}:cluster/${local.project}-stage*"]
}

# ECR
statement {
effect = "Allow"
actions = ["ecr:*"]
resources = ["*"]
}

# S3
statement {
effect = "Allow"
actions = ["s3:*"]
resources = ["arn:aws:s3:::*${local.domain_name}"]
}
statement {
effect = "Allow"
actions = ["s3:*"]
resources = ["arn:aws:s3:::*${local.domain_name}/*"]
}
}

locals {
# define Kubernetes policy for developer
k8s_developer_access = [
{
# "kubectl exec" and "kubectl port-forward" require the "create" verb on the pod subresources
verbs = ["create"]
api_groups = [""]
resources = ["pods/exec", "pods/portforward"]
}, {
verbs = ["get", "list", "watch"]
# deployments live in the "apps" API group; the other resources are in the core group
api_groups = ["", "apps"]
resources = ["deployments", "configmaps", "pods", "pods/log", "services", "endpoints"]
}
]

# define Kubernetes policy for operator
k8s_operator_access = [
{
# "exec" is not a valid RBAC verb; pod exec is granted via "create" on the pods/exec subresource
verbs = ["create", "list", "get", "delete", "patch", "update"]
api_groups = ["", "apps"]
resources = ["deployments", "configmaps", "pods", "pods/exec", "secrets", "services", "endpoints"]
}
]
}
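
The environment module's internals are not shown in this diff, but these policy lists would typically be translated into Kubernetes RBAC rules. A minimal sketch, assuming the Terraform `kubernetes` provider and an illustrative role name:
```
# Sketch only: emits one RBAC rule per entry in local.k8s_developer_access.
resource "kubernetes_role" "developer" {
  metadata {
    name      = "${local.project}-developer"
    namespace = "default"
  }

  dynamic "rule" {
    for_each = local.k8s_developer_access
    content {
      api_groups = rule.value.api_groups
      resources  = rule.value.resources
      verbs      = rule.value.verbs
    }
  }
}
```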
63 changes: 63 additions & 0 deletions templates/terraform/environments/shared/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
terraform {
required_version = ">= 0.13"
backend "s3" {
bucket = "<% .Name %>-shared-terraform-state"
key = "infrastructure/terraform/environments/shared/main"
encrypt = true
region = "<% index .Params `region` %>"
dynamodb_table = "<% .Name %>-shared-terraform-state-locks"
}
}

locals {
project = "<% .Name %>"
region = "<% index .Params `region` %>"
account_id = "<% index .Params `accountId` %>"
}

provider "aws" {
region = local.region
allowed_account_ids = [local.account_id]
}

# Instantiate the environment
locals {
# Users configuration
users = [
# {
# name = "dev1"
# roles = [
# { name = "developer", environments = ["stage", "prod"] }
# ]
# }, {
# name = "devops1"
# roles = [
# { name = "developer", environments = ["stage", "prod"] },
# { name = "operator", environments = ["stage"] }
# ]
# }, {
# name = "operator1"
# roles = [
# { name = "operator", environments = ["stage", "prod"] }
# ]
# },
]
}

## Create users
resource "aws_iam_user" "access_user" {
count = length(local.users)
name = "${local.project}-${local.users[count.index].name}"

tags = {
for r in local.users[count.index].roles : "role:${r.name}" => join("/", r.environments)
}
}
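
For the commented `devops1` example above, the tags for-expression would resolve to roughly:
```
tags = {
  "role:developer" = "stage/prod"
  "role:operator"  = "stage"
}
```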

output "iam_users" {
value = aws_iam_user.access_user
}

output "user_role_mapping" {
value = local.users
}