diff --git a/.github/workflows/email_service.yaml b/.github/workflows/email_service.yaml
new file mode 100644
index 0000000..c0cace6
--- /dev/null
+++ b/.github/workflows/email_service.yaml
@@ -0,0 +1,185 @@
+name: "Email Service Infrastructure Change Management Pipeline with GitHub Actions"
+
+on:
+ pull_request:
+ paths:
+ - src/email_service/**
+ push:
+ branches:
+ - main
+ - dev
+ paths:
+ - src/email_service/**
+
+env:
+ TF_LOG: INFO
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ TF_DIR: ./src/email_service/terraform
+
+permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ actions: read
+ checks: write
+
+jobs:
+ preview:
+ name: "Deploy Preview Environment"
+ if: github.event_name == 'pull_request' && github.event.action != 'closed'
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+ - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Terraform Init
+ id: init
+ run: terraform init -backend-config="preview.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy (Cleanup)
+ id: destroy
+ run: terraform destroy -auto-approve -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Plan
+ id: plan
+ run: terraform plan -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Apply
+ id: apply
+ run: terraform apply -var-file='preview.tfvars' -auto-approve
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - uses: actions/github-script@v6
+ if: github.event_name == 'pull_request'
+ env:
+ APPLY: "terraform\n${{ steps.apply.outputs.stdout }}"
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { data: comments } = await github.rest.issues.listComments({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: context.issue.number,
+ });
+
+ const botComment = comments.find(comment => {
+ return comment.user.type === 'Bot' && comment.body.includes('Terraform Apply Results');
+ });
+
+ const output = `#### Terraform Apply Results 🌟
+ Preview environment has been deployed. You can now test your changes.
+
+ Show Actual Apply
+
+ \`\`\`\n
+ ${{ steps.apply.outputs.stdout }}
+ \`\`\`
+
+
+
+ *Pushed by: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`;
+
+ if (botComment) {
+ await github.rest.issues.updateComment({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ comment_id: botComment.id,
+ body: output,
+ });
+ } else {
+ await github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: output,
+ });
+ }
+
+ cleanup_preview:
+ name: "Cleanup Preview Environment"
+    if: (github.event_name == 'pull_request' && github.event.action == 'closed') || github.event_name == 'push'
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+      - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Terraform Init
+ run: terraform init -backend-config="preview.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy
+ run: terraform destroy -auto-approve -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+
+ deploy:
+ name: "Deploy to Target Environment"
+ if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev')
+ runs-on: ubuntu-latest
+ needs: [cleanup_preview]
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+      - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Set Environment Variables
+ run: |
+ if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
+ echo "ENVIRONMENT=prod" >> $GITHUB_ENV
+ echo "TF_VARS_FILE=prod.tfvars" >> $GITHUB_ENV
+ elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
+ echo "ENVIRONMENT=dev" >> $GITHUB_ENV
+ echo "TF_VARS_FILE=dev.tfvars" >> $GITHUB_ENV
+ else
+ echo "Unsupported branch: ${{ github.ref }}"
+ exit 1
+ fi
+
+ - name: Terraform Init
+ id: init
+ run: terraform init -backend-config="${{ env.ENVIRONMENT }}.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy (Cleanup)
+ id: destroy
+ run: terraform destroy -auto-approve -var-file='${{ env.TF_VARS_FILE }}'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Plan
+ id: plan
+ run: terraform plan -var-file='${{ env.TF_VARS_FILE }}' -out=tfplan
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Apply
+ id: apply
+ run: terraform apply -var-file='${{ env.TF_VARS_FILE }}' -auto-approve
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
diff --git a/.github/workflows/file_service.yaml b/.github/workflows/file_service.yaml
new file mode 100644
index 0000000..4b6b7f6
--- /dev/null
+++ b/.github/workflows/file_service.yaml
@@ -0,0 +1,185 @@
+name: "File Service Infrastructure Change Management Pipeline with GitHub Actions"
+
+on:
+ pull_request:
+ paths:
+ - src/file_service/**
+ push:
+ branches:
+ - main
+ - dev
+ paths:
+ - src/file_service/**
+
+env:
+ TF_LOG: INFO
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ TF_DIR: ./src/file_service/terraform
+
+permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ actions: read
+ checks: write
+
+jobs:
+ preview:
+ name: "Deploy Preview Environment"
+ if: github.event_name == 'pull_request' && github.event.action != 'closed'
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+ - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Terraform Init
+ id: init
+ run: terraform init -backend-config="preview.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy (Cleanup)
+ id: destroy
+ run: terraform destroy -auto-approve -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Plan
+ id: plan
+ run: terraform plan -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Apply
+ id: apply
+ run: terraform apply -var-file='preview.tfvars' -auto-approve
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - uses: actions/github-script@v6
+ if: github.event_name == 'pull_request'
+ env:
+ APPLY: "terraform\n${{ steps.apply.outputs.stdout }}"
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { data: comments } = await github.rest.issues.listComments({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: context.issue.number,
+ });
+
+ const botComment = comments.find(comment => {
+ return comment.user.type === 'Bot' && comment.body.includes('Terraform Apply Results');
+ });
+
+ const output = `#### Terraform Apply Results 🌟
+ Preview environment has been deployed. You can now test your changes.
+
+ Show Actual Apply
+
+ \`\`\`\n
+ ${{ steps.apply.outputs.stdout }}
+ \`\`\`
+
+
+
+ *Pushed by: @${{ github.actor }}, Action: \`${{ github.event_name }}\`*`;
+
+ if (botComment) {
+ await github.rest.issues.updateComment({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ comment_id: botComment.id,
+ body: output,
+ });
+ } else {
+ await github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: output,
+ });
+ }
+
+ cleanup_preview:
+ name: "Cleanup Preview Environment"
+    if: (github.event_name == 'pull_request' && github.event.action == 'closed') || github.event_name == 'push'
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+      - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Terraform Init
+ run: terraform init -backend-config="preview.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy
+ run: terraform destroy -auto-approve -var-file='preview.tfvars'
+ working-directory: ${{ env.TF_DIR }}
+
+ deploy:
+ name: "Deploy to Target Environment"
+ if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev')
+ runs-on: ubuntu-latest
+ needs: [cleanup_preview]
+ steps:
+ - name: "Print Debug Information"
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Event Action: ${{ github.event.action }}"
+ echo "Merged: ${{ github.event.pull_request.merged }}"
+      - uses: actions/checkout@v4
+ - uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.8.3
+
+ - name: Set Environment Variables
+ run: |
+ if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
+ echo "ENVIRONMENT=prod" >> $GITHUB_ENV
+ echo "TF_VARS_FILE=prod.tfvars" >> $GITHUB_ENV
+ elif [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
+ echo "ENVIRONMENT=dev" >> $GITHUB_ENV
+ echo "TF_VARS_FILE=dev.tfvars" >> $GITHUB_ENV
+ else
+ echo "Unsupported branch: ${{ github.ref }}"
+ exit 1
+ fi
+
+ - name: Terraform Init
+ id: init
+ run: terraform init -backend-config="${{ env.ENVIRONMENT }}.tfbackend" -reconfigure
+ working-directory: ${{ env.TF_DIR }}
+
+ - name: Terraform Destroy (Cleanup)
+ id: destroy
+ run: terraform destroy -auto-approve -var-file='${{ env.TF_VARS_FILE }}'
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Plan
+ id: plan
+ run: terraform plan -var-file='${{ env.TF_VARS_FILE }}' -out=tfplan
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
+
+ - name: Terraform Apply
+ id: apply
+ run: terraform apply -var-file='${{ env.TF_VARS_FILE }}' -auto-approve
+ working-directory: ${{ env.TF_DIR }}
+ continue-on-error: true
diff --git a/.github/workflows/slack_notification.yaml b/.github/workflows/slack_notification.yaml
new file mode 100644
index 0000000..03ce947
--- /dev/null
+++ b/.github/workflows/slack_notification.yaml
@@ -0,0 +1,26 @@
+name: Slack Deployment Notification
+
+on:
+ workflow_run:
+    workflows: ["Email Service Infrastructure Change Management Pipeline with GitHub Actions",
+                "File Service Infrastructure Change Management Pipeline with GitHub Actions"]
+ types:
+ - completed
+
+jobs:
+ notify:
+ runs-on: ubuntu-latest
+ if: >
+ github.event.workflow_run.conclusion == 'success' ||
+ github.event.workflow_run.conclusion == 'failure'
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+
+ - name: Slack Notification
+ uses: 8398a7/action-slack@v3
+ with:
+ status: ${{ github.event.workflow_run.conclusion }}
+ fields: repo,message,commit,author
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.gitignore b/.gitignore
index d0cb24c..dcb715e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -252,12 +252,6 @@ $RECYCLE.BIN/
crash.log
crash.*.log
-# Exclude all .tfvars files, which are likely to contain sensitive data, such as
-# password, private keys, and other secrets. These should not be part of version
-# control as they are data points which are potentially sensitive and subject
-# to change depending on the environment.
-*.tfvars
-*.tfvars.json
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
@@ -294,4 +288,7 @@ lambda_function.zip
tests/files/*
+# SAM CLI build directory
+src/**/.aws-sam/*
+
# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
\ No newline at end of file
diff --git a/events/event.json b/events/event.json
deleted file mode 100644
index a6197de..0000000
--- a/events/event.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "body": "{\"message\": \"hello world\"}",
- "resource": "/hello",
- "path": "/hello",
- "httpMethod": "GET",
- "isBase64Encoded": false,
- "queryStringParameters": {
- "foo": "bar"
- },
- "pathParameters": {
- "proxy": "/path/to/resource"
- },
- "stageVariables": {
- "baz": "qux"
- },
- "headers": {
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
- "Accept-Encoding": "gzip, deflate, sdch",
- "Accept-Language": "en-US,en;q=0.8",
- "Cache-Control": "max-age=0",
- "CloudFront-Forwarded-Proto": "https",
- "CloudFront-Is-Desktop-Viewer": "true",
- "CloudFront-Is-Mobile-Viewer": "false",
- "CloudFront-Is-SmartTV-Viewer": "false",
- "CloudFront-Is-Tablet-Viewer": "false",
- "CloudFront-Viewer-Country": "US",
- "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
- "Upgrade-Insecure-Requests": "1",
- "User-Agent": "Custom User Agent String",
- "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
- "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==",
- "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
- "X-Forwarded-Port": "443",
- "X-Forwarded-Proto": "https"
- },
- "requestContext": {
- "accountId": "123456789012",
- "resourceId": "123456",
- "stage": "prod",
- "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
- "requestTime": "09/Apr/2015:12:34:56 +0000",
- "requestTimeEpoch": 1428582896000,
- "identity": {
- "cognitoIdentityPoolId": null,
- "accountId": null,
- "cognitoIdentityId": null,
- "caller": null,
- "accessKey": null,
- "sourceIp": "127.0.0.1",
- "cognitoAuthenticationType": null,
- "cognitoAuthenticationProvider": null,
- "userArn": null,
- "userAgent": "Custom User Agent String",
- "user": null
- },
- "path": "/prod/hello",
- "resourcePath": "/hello",
- "httpMethod": "POST",
- "apiId": "1234567890",
- "protocol": "HTTP/1.1"
- }
-}
diff --git a/examples/terraform/acm/acm.tf b/examples/terraform/acm/acm.tf
new file mode 100644
index 0000000..6c588c3
--- /dev/null
+++ b/examples/terraform/acm/acm.tf
@@ -0,0 +1,46 @@
+provider "aws" {
+ region = "us-west-2"
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
+
+data "aws_route53_zone" "awseducate_systems" {
+ name = "awseducate.systems"
+ private_zone = false
+}
+
+resource "aws_route53_record" "file_service_record" {
+ for_each = {
+ for dvo in aws_acm_certificate.cert.domain_validation_options : dvo.domain_name => {
+ name = dvo.resource_record_name
+ record = dvo.resource_record_value
+ type = dvo.resource_record_type
+ }
+ }
+
+ allow_overwrite = true
+ name = each.value.name
+ records = [each.value.record]
+ ttl = 60
+ type = each.value.type
+ zone_id = data.aws_route53_zone.awseducate_systems.zone_id
+}
+
+
+resource "aws_acm_certificate" "cert" {
+ domain_name = "file-service.awseducate.systems"
+ validation_method = "DNS"
+
+ lifecycle {
+ create_before_destroy = true
+ }
+}
+
+resource "aws_acm_certificate_validation" "cert_validation" {
+ certificate_arn = aws_acm_certificate.cert.arn
+ validation_record_fqdns = [for record in aws_route53_record.file_service_record : record.fqdn]
+}
diff --git a/examples/terraform/backend/dev.tfbackend b/examples/terraform/backend/dev.tfbackend
new file mode 100644
index 0000000..95eaf6e
--- /dev/null
+++ b/examples/terraform/backend/dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "example_service/dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/examples/terraform/backend/local-dev.tfbackend b/examples/terraform/backend/local-dev.tfbackend
new file mode 100644
index 0000000..ddd8ec4
--- /dev/null
+++ b/examples/terraform/backend/local-dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "example_service/local-dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/examples/terraform/backend/preview.tfbackend b/examples/terraform/backend/preview.tfbackend
new file mode 100644
index 0000000..2fcb8e4
--- /dev/null
+++ b/examples/terraform/backend/preview.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "example_service/preview/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/examples/terraform/backend/prod.tfbackend b/examples/terraform/backend/prod.tfbackend
new file mode 100644
index 0000000..9f30cc1
--- /dev/null
+++ b/examples/terraform/backend/prod.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "example_service/prod/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/examples/terraform/vars/dev.tfvars b/examples/terraform/vars/dev.tfvars
new file mode 100644
index 0000000..09ea032
--- /dev/null
+++ b/examples/terraform/vars/dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "example_service"
+service_hyphen = "example-service"
+dynamodb_table = "email"
diff --git a/examples/terraform/vars/local-dev.tfvars b/examples/terraform/vars/local-dev.tfvars
new file mode 100644
index 0000000..b3fbfee
--- /dev/null
+++ b/examples/terraform/vars/local-dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "example_service"
+service_hyphen = "example-service"
+dynamodb_table = "email"
diff --git a/examples/terraform/vars/preview.tfvars b/examples/terraform/vars/preview.tfvars
new file mode 100644
index 0000000..fe3b089
--- /dev/null
+++ b/examples/terraform/vars/preview.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-1"
+environment = "preview"
+service_underscore = "example_service"
+service_hyphen = "example-service"
+dynamodb_table = "email"
diff --git a/examples/terraform/vars/prod.tfvars b/examples/terraform/vars/prod.tfvars
new file mode 100644
index 0000000..b593b75
--- /dev/null
+++ b/examples/terraform/vars/prod.tfvars
@@ -0,0 +1,5 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "example_service"
+service_hyphen = "example-service"
+dynamodb_table = "email"
diff --git a/examples/terraform/vars/variables.tf b/examples/terraform/vars/variables.tf
new file mode 100644
index 0000000..8247229
--- /dev/null
+++ b/examples/terraform/vars/variables.tf
@@ -0,0 +1,23 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+  description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name"
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+variable "domain_name" {
+ description = "Domain name, for example: example.com"
+ default = "awseducate.systems"
+}
+
+variable "dynamodb_table" {
+ description = "Current service's DynamoDB table name"
+}
diff --git a/poetry.lock b/poetry.lock
index 6d15325..c709279 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "boto3"
@@ -21,13 +21,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.122"
+version = "1.34.127"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.122-py3-none-any.whl", hash = "sha256:6d75df3af831b62f0c7baa109728d987e0a8d34bfadf0476eb32e2f29a079a36"},
- {file = "botocore-1.34.122.tar.gz", hash = "sha256:9374e16a36f1062c3e27816e8599b53eba99315dfac71cc84fc3aee3f5d3cbe3"},
+ {file = "botocore-1.34.127-py3-none-any.whl", hash = "sha256:e14fa28c8bb141de965e700f88b196d17c67a703c7f0f5c7e14f7dd1cf636011"},
+ {file = "botocore-1.34.127.tar.gz", hash = "sha256:a377871742c40603d559103f19acb7bc93cfaf285e68f21b81637ec396099877"},
]
[package.dependencies]
@@ -159,6 +159,17 @@ files = [
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
+[[package]]
+name = "et-xmlfile"
+version = "1.1.0"
+description = "An implementation of lxml.xmlfile for the standard library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
+ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+]
+
[[package]]
name = "google-api-core"
version = "2.19.0"
@@ -336,6 +347,20 @@ files = [
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
]
+[[package]]
+name = "openpyxl"
+version = "3.1.4"
+description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "openpyxl-3.1.4-py2.py3-none-any.whl", hash = "sha256:ec17f6483f2b8f7c88c57e5e5d3b0de0e3fb9ac70edc084d28e864f5b33bbefd"},
+ {file = "openpyxl-3.1.4.tar.gz", hash = "sha256:8d2c8adf5d20d6ce8f9bca381df86b534835e974ed0156dacefa76f68c1d69fb"},
+]
+
+[package.dependencies]
+et-xmlfile = "*"
+
[[package]]
name = "pandas"
version = "2.2.2"
@@ -344,6 +369,7 @@ optional = false
python-versions = ">=3.9"
files = [
{file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
+ {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
@@ -364,6 +390,7 @@ files = [
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
{file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
{file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
+ {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
@@ -506,13 +533,13 @@ files = [
[[package]]
name = "requests"
-version = "2.31.0"
+version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
- {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
@@ -623,4 +650,4 @@ zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.0"
python-versions = "~3.11"
-content-hash = "c19f19e37b21fafe99de70b3e3967feea700ba574d8da3fb9c0339d74d4471dc"
+content-hash = "b80303d26f74c473a4775d1a08d111cf40ac438603746e19806cacea998264d4"
diff --git a/pyproject.toml b/pyproject.toml
index 872e232..524a484 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,6 +12,9 @@ google-api-python-client = "^2.127.0"
pandas = "^2.2.2"
boto3 = "^1.34.122"
requests-toolbelt = "^1.0.0"
+botocore = "^1.34.127"
+requests = "^2.32.3"
+openpyxl = "^3.1.4"
[build-system]
diff --git a/samconfig.toml b/samconfig.toml
deleted file mode 100644
index 9d6a5c2..0000000
--- a/samconfig.toml
+++ /dev/null
@@ -1,31 +0,0 @@
-# More information about the configuration file can be found here:
-# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html
-version = 0.1
-
-[default]
-[default.global.parameters]
-stack_name = "aws-educate-tpet-backend"
-
-[default.build.parameters]
-cached = true
-parallel = true
-
-[default.validate.parameters]
-lint = true
-
-[default.deploy.parameters]
-capabilities = "CAPABILITY_IAM"
-confirm_changeset = true
-resolve_s3 = true
-
-[default.package.parameters]
-resolve_s3 = true
-
-[default.sync.parameters]
-watch = true
-
-[default.local_start_api.parameters]
-warm_containers = "EAGER"
-
-[default.local_start_lambda.parameters]
-warm_containers = "EAGER"
diff --git a/src/.aws-sam/build.toml b/src/.aws-sam/build.toml
deleted file mode 100644
index 59552c0..0000000
--- a/src/.aws-sam/build.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-# This file is auto generated by SAM CLI build command
-
-[function_build_definitions.ae5e1e2e-3121-420d-a24c-40092955faf0]
-packagetype = "Image"
-functions = ["EmailSenderFunction"]
-
-[function_build_definitions.ae5e1e2e-3121-420d-a24c-40092955faf0.metadata]
-Dockerfile = "Dockerfile"
-DockerContext = "/Users/rich/Desktop/aws-educate-tpet-backend/src/send-email"
-DockerTag = "python3.11"
-
-[layer_build_definitions]
diff --git a/src/email_service/send-email/lambda_function.py b/src/email_service/send-email/lambda_function.py
deleted file mode 100644
index 056de87..0000000
--- a/src/email_service/send-email/lambda_function.py
+++ /dev/null
@@ -1,284 +0,0 @@
-import datetime
-import io
-import json
-import logging
-import os
-import re
-import uuid
-
-import boto3
-import pandas as pd
-import requests
-from botocore.exceptions import ClientError
-from requests.exceptions import RequestException
-
-# Configure logging
-logger = logging.getLogger()
-logger.setLevel(logging.INFO)
-
-TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
-BUCKET_NAME = os.getenv("BUCKET_NAME")
-
-
-# Function to call API to get file information using the file ID
-def get_file_info(file_id):
- try:
- api_url = f"https://8um2zizr80.execute-api.ap-northeast-1.amazonaws.com/dev/files/{file_id}"
- response = requests.get(api_url)
- response.raise_for_status()
- return response.json()
- except RequestException as e:
- logger.error("Error in get_file_info: %s", e)
- raise
-
-
-# Function to retrieve the email template from an S3 bucket
-def get_template(template_file_s3_key):
- try:
- s3 = boto3.client("s3")
- request = s3.get_object(Bucket=BUCKET_NAME, Key=template_file_s3_key)
- template_content = request["Body"].read().decode("utf-8")
- return template_content
- except Exception as e:
- logger.error("Error in get_template: %s", e)
- raise
-
-
-# Function to read and parse spreadsheet data from S3
-def read_sheet_data_from_s3(spreadsheet_file_s3_key):
- try:
- s3 = boto3.client("s3")
- request = s3.get_object(Bucket=BUCKET_NAME, Key=spreadsheet_file_s3_key)
- xlsx_content = request["Body"].read()
- excel_data = pd.read_excel(io.BytesIO(xlsx_content), engine="openpyxl")
- rows = excel_data.to_dict(orient="records")
- if excel_data.empty:
- return [], 0
- return rows, excel_data.columns.tolist()
- except Exception as e:
- logger.error("Error in read excel from s3: %s", e)
- raise
-
-
-# Function to validate template placeholders against spreadsheet columns
-def validate_template(template_content, columns):
- try:
- placeholders = re.findall(
- r"{{(.*?)}}", template_content
- ) # placeholder format: {{column_name}}
- missing_columns = [
- placeholder for placeholder in placeholders if placeholder not in columns
- ]
- return missing_columns
- except Exception as e:
- logger.error(
- "Error in excel column, can't find the match placeholder in template: %s", e
- )
- raise
-
-
-# Function to send email using the SES client
-def send_email(ses_client, email_title, template_content, row, display_name):
- try:
- template_content = template_content.replace("\r", "")
- template_content = re.sub(r"\{\{(.*?)\}\}", r"{\1}", template_content)
- receiver_email = row.get("Email")
- if not receiver_email:
- logger.warning("Email address not found in row: %s", row)
- return "FAILED"
- try:
- # Ensure all values in row are strings
- formatted_row = {k: str(v) for k, v in row.items()}
- formatted_content = template_content.format(**formatted_row)
- source_email = "awseducate.cloudambassador@gmail.com"
- formatted_source_email = f"{display_name} <{source_email}>"
- ses_client.send_email(
- Source=formatted_source_email,
- Destination={"ToAddresses": [receiver_email]},
- Message={
- "Subject": {"Data": email_title},
- "Body": {"Html": {"Data": formatted_content}},
- },
- )
- _ = datetime.datetime.now() + datetime.timedelta(hours=8)
- formatted_send_time = _.strftime(TIME_FORMAT + "Z")
- logger.info(
- "Email sent to {row.get('Name', 'Unknown')} at %s", formatted_send_time
- )
- return formatted_send_time, "SUCCESS"
- except Exception as e:
- logger.error("Failed to send email to %s: %s", receiver_email, e)
- return "FAILED"
- except Exception as e:
- logger.error("Error in send_email: %s", e)
- raise
-
-
-# Function to save email sending records to DynamoDB
-def save_to_dynamodb(
- run_id,
- email_id,
- display_name,
- status,
- recipient_email,
- template_file_id,
- spreadsheet_file_id,
- created_at,
-):
- try:
- dynamodb = boto3.resource("dynamodb")
- table_name = os.environ.get("DYNAMODB_TABLE")
- table = dynamodb.Table(table_name)
- item = {
- "run_id": run_id,
- "email_id": email_id,
- "display_name": display_name,
- "status": status,
- "recipient_email": recipient_email,
- "template_file_id": template_file_id,
- "spreadsheet_file_id": spreadsheet_file_id,
- "created_at": created_at,
- }
- table.put_item(Item=item)
- except ClientError as e:
- logger.error("Error in save_to_dynamodb: %s", e)
- except Exception as e:
- logger.error("Error in save_to_dynamodb: %s", e)
- raise
-
-
-# Function to handle sending emails and saving results to DynamoDB
-def process_email(
- ses_client,
- email_title,
- template_content,
- row,
- display_name,
- run_id,
- template_file_id,
- spreadsheet_id,
-):
- email = str(row.get("Email", ""))
- if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
- logger.warning("Invalid email address provided: %s", email)
- return "FAILED", email
- send_time, status = send_email(
- ses_client, email_title, template_content, row, display_name
- )
- save_to_dynamodb(
- run_id,
- uuid.uuid4().hex,
- display_name,
- status,
- email,
- template_file_id,
- spreadsheet_id,
- send_time,
- )
- return status, email
-
-
-def lambda_handler(event, context):
- try:
- body = json.loads(event.get("body", "{}"))
- template_file_id = body.get("template_file_id")
- spreadsheet_id = body.get("spreadsheet_file_id")
- email_title = body.get("subject")
- display_name = body.get("display_name", "No Name Provided")
- run_id = body.get("run_id") if body.get("run_id") else uuid.uuid4().hex
-
- # Check for missing required parameters
- if not email_title:
- logger.error("Error: Missing required parameter: email_title (subject).")
- return {
- "statusCode": 400,
- "body": json.dumps(
- "Error: Missing required parameter: email_title (subject)."
- ),
- }
- if not template_file_id:
- logger.error("Error: Missing required parameter: template_file_id.")
- return {
- "statusCode": 400,
- "body": json.dumps(
- "Error: Missing required parameter: template_file_id."
- ),
- }
- if not spreadsheet_id:
- logger.error("Error: Missing required parameter: spreadsheet_file_id.")
- return {
- "statusCode": 400,
- "body": json.dumps(
- "Error: Missing required parameter: spreadsheet_file_id."
- ),
- }
-
- # Fetch and validate template content
- template_info = get_file_info(template_file_id)
- template_s3_key = template_info["s3_object_key"]
- template_content = get_template(template_s3_key)
-
- # Fetch and read spreadsheet data
- spreadsheet_info = get_file_info(spreadsheet_id)
- spreadsheet_s3_key = spreadsheet_info["s3_object_key"]
- data, columns = read_sheet_data_from_s3(spreadsheet_s3_key)
-
- # Validate template against spreadsheet columns
- missing_columns = validate_template(template_content, columns)
- if missing_columns:
- error_message = (
- "Template validation error: Missing required columns for placeholders: %s"
- % ", ".join(missing_columns)
- )
- logger.error(error_message)
- return {"statusCode": 400, "body": json.dumps(error_message)}
-
- # Send emails and save results to DynamoDB
- ses_client = boto3.client("ses", region_name="ap-northeast-1")
- failed_recipients = []
- success_recipients = []
- for row in data:
- status, email = process_email(
- ses_client,
- email_title,
- template_content,
- row,
- display_name,
- run_id,
- template_file_id,
- spreadsheet_id,
- )
- if status == "FAILED":
- failed_recipients.append(email)
- else:
- success_recipients.append(email)
-
- # Return final response
- if failed_recipients:
- response = {
- "status": "FAILED",
- "message": f"Failed to send {len(failed_recipients)} emails, successfully sent {len(success_recipients)} emails.",
- "failed_recipients": failed_recipients,
- "success_recipients": success_recipients,
- "request_id": run_id,
- "timestamp": datetime.datetime.now().strftime(TIME_FORMAT + "Z"),
- "sqs_message_id": uuid.uuid4().hex,
- }
- logger.info("Response: %s", response)
- return {"statusCode": 500, "body": json.dumps(response)}
-
- response = {
- "status": "SUCCESS",
- "message": f"All {len(success_recipients)} emails were sent successfully.",
- "request_id": run_id,
- "timestamp": datetime.datetime.now().strftime(TIME_FORMAT + "Z"),
- "sqs_message_id": uuid.uuid4().hex,
- }
- logger.info("Response: %s", response)
- return {"statusCode": 200, "body": json.dumps(response)}
- except Exception as e:
- logger.error("Internal server error: %s", e)
- return {
- "statusCode": 500,
- "body": json.dumps("Internal server error: Detailed error message: %s" % e),
- }
diff --git a/src/email_service/send-email/main.tf b/src/email_service/send-email/main.tf
deleted file mode 100644
index ed3e66a..0000000
--- a/src/email_service/send-email/main.tf
+++ /dev/null
@@ -1,241 +0,0 @@
-provider "aws" {
- region = "ap-northeast-1"
- profile = "my-profile"
-}
-
-variable "aws_region" {
- default = "ap-northeast-1"
-}
-
-resource "aws_dynamodb_table" "email" {
- name = "email"
- billing_mode = "PAY_PER_REQUEST"
- hash_key = "run_id"
- range_key = "email_id"
-
- attribute {
- name = "run_id"
- type = "S"
- }
-
- attribute {
- name = "email_id"
- type = "S"
- }
-
- attribute {
- name = "status"
- type = "S"
- }
-
- attribute {
- name = "created_at"
- type = "S"
- }
-
- local_secondary_index {
- name = "status_lsi"
- projection_type = "ALL"
- range_key = "status"
- }
-
- local_secondary_index {
- name = "create_at_lsi"
- projection_type = "ALL"
- range_key = "created_at"
- }
-
- tags = {
- Name = "email"
- Creator = "Richie"
- }
-}
-
-resource "aws_iam_role" "lambda_role" {
- name = "lambda_execution_role"
- assume_role_policy = jsonencode({
- "Version" : "2012-10-17",
- "Statement" : [
- {
- "Action" : "sts:AssumeRole",
- "Effect" : "Allow",
- "Principal" : {
- "Service" : "lambda.amazonaws.com"
- }
- }
- ]
- })
-
- tags = {
- Creator = "Richie"
- }
-}
-
-resource "aws_iam_policy" "ecr_policy" {
- name = "ECRPolicy"
- policy = jsonencode({
- "Version": "2012-10-17",
- "Statement": [
- {
- "Effect": "Allow",
- "Action": [
- "ecr:BatchCheckLayerAvailability",
- "ecr:CompleteLayerUpload",
- "ecr:GetDownloadUrlForLayer",
- "ecr:InitiateLayerUpload",
- "ecr:PutImage",
- "ecr:UploadLayerPart"
- ],
- "Resource": "arn:aws:ecr:ap-northeast-1:070576557102:repository/email-sender-repo"
- }
- ]
- })
-}
-
-resource "aws_iam_role_policy_attachment" "attach_ecr_policy" {
- role = aws_iam_role.lambda_role.name
- policy_arn = aws_iam_policy.ecr_policy.arn
-}
-
-resource "aws_iam_role_policy" "lambda_policy" {
- name = "lambda_policy"
- role = aws_iam_role.lambda_role.id
- policy = jsonencode({
- "Version" : "2012-10-17",
- "Statement" : [
- {
- "Effect" : "Allow",
- "Action" : [
- "logs:CreateLogGroup",
- "logs:CreateLogStream",
- "logs:PutLogEvents"
- ],
- "Resource" : "arn:aws:logs:*:*:*"
- },
- {
- "Effect" : "Allow",
- "Action" : [
- "s3:GetObject"
- ],
- "Resource" : "arn:aws:s3:::email-sender-excel/*"
- },
- {
- "Effect" : "Allow",
- "Action" : [
- "ses:SendEmail"
- ],
- "Resource" : "*"
- },
- {
- "Effect" : "Allow",
- "Action" : [
- "dynamodb:PutItem"
- ],
- "Resource" : "arn:aws:dynamodb:${var.aws_region}:*:table/email"
- }
- ]
- })
-}
-
-resource "aws_ecr_repository" "email_sender_repo" {
- name = "email-sender-repo"
-
- image_scanning_configuration {
- scan_on_push = true
- }
-
- tags = {
- Name = "email-sender-repo"
- Creator = "Richie"
- }
-}
-
-data "aws_ecr_authorization_token" "ecr" {}
-
-resource "null_resource" "docker_image" {
- provisioner "local-exec" {
- command = <"
+ ses_client.send_email(
+ Source=formatted_source_email,
+ Destination={"ToAddresses": [recipient_email]},
+ Message={
+ "Subject": {"Data": email_title},
+ "Body": {"Html": {"Data": formatted_content}},
+ },
+ )
+ sent_time = time_util.get_current_utc_time()
+ logger.info(
+ "Email sent to %s at %s",
+ row.get("Email", "Unknown"),
+ sent_time,
+ )
+ return sent_time, "SUCCESS"
+ except Exception as e:
+ logger.error("Failed to send email to %s: %s", recipient_email, e)
+ return None, "FAILED"
+ except Exception as e:
+ logger.error("Error in send_email: %s", e)
+ return None, "FAILED"
+
+
+def process_email(
+ ses_client,
+ email_title,
+ template_content,
+ row,
+ display_name,
+ run_id,
+ template_file_id,
+ spreadsheet_id,
+ email_id,
+):
+ recipient_email = str(row.get("Email", ""))
+ if not re.match(r"[^@]+@[^@]+\.[^@]+", recipient_email):
+ logger.warning("Invalid email address provided: %s", recipient_email)
+ return "FAILED", email_id
+
+ sent_time, status = send_email(
+ ses_client, email_title, template_content, row, display_name
+ )
+ updated_at = time_util.get_current_utc_time()
+ save_to_dynamodb(
+ run_id,
+ email_id,
+ display_name,
+ status,
+ recipient_email,
+ template_file_id,
+ spreadsheet_id,
+ time_util.get_current_utc_time(),
+ sent_at=sent_time,
+ updated_at=updated_at,
+ )
+ return status, email_id
diff --git a/src/email_service/send_email/sqs.py b/src/email_service/send_email/sqs.py
new file mode 100644
index 0000000..723f280
--- /dev/null
+++ b/src/email_service/send_email/sqs.py
@@ -0,0 +1,16 @@
+import logging
+import os
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+SQS_QUEUE_URL = os.getenv("SQS_QUEUE_URL")
+
+
+def delete_sqs_message(sqs_client, queue_url, receipt_handle):
+ try:
+ sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)
+ logger.info("Deleted message from SQS: %s", receipt_handle)
+ except Exception as e:
+ logger.error("Error deleting message from SQS: %s", e)
+ raise
diff --git a/src/email_service/send_email/template.yaml b/src/email_service/send_email/template.yaml
new file mode 100644
index 0000000..2f859ab
--- /dev/null
+++ b/src/email_service/send_email/template.yaml
@@ -0,0 +1,127 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: 'AWS::Serverless-2016-10-31'
+Description: AWS SAM template for sending emails
+
+Parameters:
+ AwsRegion:
+ Type: String
+ Default: "ap-northeast-1"
+ Description: AWS region
+
+Resources:
+ EmailTable:
+ Type: AWS::DynamoDB::Table
+ Properties:
+ TableName: email
+ BillingMode: PAY_PER_REQUEST
+ AttributeDefinitions:
+ - AttributeName: run_id
+ AttributeType: S
+ - AttributeName: email_id
+ AttributeType: S
+ - AttributeName: status
+ AttributeType: S
+ - AttributeName: created_at
+ AttributeType: S
+ KeySchema:
+ - AttributeName: run_id
+ KeyType: HASH
+ - AttributeName: email_id
+ KeyType: RANGE
+ LocalSecondaryIndexes:
+ - IndexName: status_lsi
+ KeySchema:
+ - AttributeName: run_id
+ KeyType: HASH
+ - AttributeName: status
+ KeyType: RANGE
+ Projection:
+ ProjectionType: ALL
+ - IndexName: created_at_lsi
+ KeySchema:
+ - AttributeName: run_id
+ KeyType: HASH
+ - AttributeName: created_at
+ KeyType: RANGE
+ Projection:
+ ProjectionType: ALL
+
+ EmailSenderFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ FunctionName: email-sender
+ Role: !GetAtt LambdaExecutionRole.Arn
+ PackageType: Image
+ CodeUri: .
+ Environment:
+ Variables:
+ BUCKET_NAME: "email-sender-excel"
+ TABLE_NAME: "email"
+ SQS_QUEUE_URL: !Ref SQSEmailQueue
+ Timeout: 30
+ Events:
+ SQSTrigger:
+ Type: SQS
+ Properties:
+ Queue: !GetAtt SQSEmailQueue.Arn
+ Metadata:
+ Dockerfile: Dockerfile
+ DockerContext: .
+ DockerTag: python3.11
+
+ SQSEmailQueue:
+ Type: AWS::SQS::Queue
+ Properties:
+ QueueName: email-queue
+ ReceiveMessageWaitTimeSeconds: 20
+
+ LambdaExecutionRole:
+ Type: AWS::IAM::Role
+ Properties:
+ RoleName: !Sub "lambda_execution_role-${AWS::AccountId}-${AWS::Region}"
+ AssumeRolePolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service: lambda.amazonaws.com
+ Action: sts:AssumeRole
+ Policies:
+ - PolicyName: !Sub "LambdaPolicy-${AWS::AccountId}-${AWS::Region}"
+ PolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: Allow
+ Action:
+ - logs:CreateLogGroup
+ - logs:CreateLogStream
+ - logs:PutLogEvents
+ Resource: "arn:aws:logs:*:*:*"
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ Resource: "arn:aws:s3:::email-sender-excel/*"
+ - Effect: Allow
+ Action:
+ - ses:SendEmail
+ Resource: "*"
+ - Effect: Allow
+ Action:
+ - dynamodb:PutItem
+ - dynamodb:UpdateItem
+ Resource: !Sub "arn:aws:dynamodb:${AwsRegion}:*:table/email"
+ - Effect: Allow
+ Action:
+ - sqs:SendMessage
+ - sqs:DeleteMessage
+ - sqs:GetQueueAttributes
+ - sqs:ReceiveMessage
+ Resource: !GetAtt SQSEmailQueue.Arn
+
+Outputs:
+ EmailSenderFunctionArn:
+ Description: ARN of the email sender function
+ Value: !GetAtt EmailSenderFunction.Arn
+ SQSQueueUrl:
+ Description: URL of the SQS Queue
+ Value: !Ref SQSEmailQueue
diff --git a/src/email_service/send_email/time_util.py b/src/email_service/send_email/time_util.py
new file mode 100644
index 0000000..c627c6e
--- /dev/null
+++ b/src/email_service/send_email/time_util.py
@@ -0,0 +1,66 @@
+import datetime
+
+TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+def get_current_utc_time() -> str:
+ """
+ Get the current UTC time and format it as ISO 8601.
+
+ :return: Current UTC time in ISO 8601 format.
+ """
+ return datetime.datetime.now(datetime.timezone.utc).strftime(TIME_FORMAT)
+
+
+def format_time_to_iso8601(dt: datetime.datetime) -> str:
+ """
+ Format a datetime object as ISO 8601.
+
+ :param dt: Datetime object.
+ :return: Formatted time as ISO 8601 string.
+ """
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=datetime.timezone.utc)
+ return dt.strftime(TIME_FORMAT)
+
+
+def parse_iso8601_to_datetime(iso8601_str: str) -> datetime.datetime:
+ """
+ Parse an ISO 8601 string to a datetime object.
+
+ :param iso8601_str: ISO 8601 formatted string.
+ :return: Datetime object.
+ """
+ return datetime.datetime.strptime(iso8601_str, TIME_FORMAT).replace(
+ tzinfo=datetime.timezone.utc
+ )
+
+
+def add_hours_to_time(iso8601_str: str, hours: int) -> str:
+ """
+ Add a specified number of hours to an ISO 8601 time string.
+
+ :param iso8601_str: ISO 8601 formatted string.
+ :param hours: Number of hours to add.
+ :return: New ISO 8601 formatted time string.
+ """
+ dt = parse_iso8601_to_datetime(iso8601_str)
+ new_dt = dt + datetime.timedelta(hours=hours)
+ return format_time_to_iso8601(new_dt)
+
+
+# Example usage
+if __name__ == "__main__":
+ current_time = get_current_utc_time()
+ print("Current UTC Time:", current_time)
+
+ formatted_time = format_time_to_iso8601(
+ datetime.datetime.now(datetime.timezone.utc)
+ )
+ print("Formatted Time:", formatted_time)
+
+ parsed_time = parse_iso8601_to_datetime("2024-07-11T12:00:00Z")
+ print("Parsed Time:", parsed_time)
+
+ new_time = add_hours_to_time("2024-07-11T12:00:00Z", 3)
+ print("New Time:", new_time)
diff --git a/src/email_service/terraform/.terraform.lock.hcl b/src/email_service/terraform/.terraform.lock.hcl
new file mode 100644
index 0000000..3d25ee8
--- /dev/null
+++ b/src/email_service/terraform/.terraform.lock.hcl
@@ -0,0 +1,126 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = ">= 4.22.0, >= 4.40.0, >= 5.32.0, >= 5.37.0, ~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/external" {
+ version = "2.3.3"
+ constraints = ">= 1.0.0"
+ hashes = [
+ "h1:/x65slrvO8YG5MKxE2DaU5udEbUxBu3BgEiO7EEM9bQ=",
+ "zh:03d81462f9578ec91ce8e26f887e34151eda0e100f57e9772dbea86363588239",
+ "zh:37ec2a20f6a3ec3a0fd95d3f3de26da6cb9534b30488bc45723e118a0911c0d8",
+ "zh:4eb5b119179539f2749ce9de0e1b9629d025990f062f4f4dddc161562bb89d37",
+ "zh:5a31bb58414f41bee5e09b939012df5b88654120b0238a89dfd6691ba197619a",
+ "zh:6221a05e52a6a2d4f520ffe7cbc741f4f6080e0855061b0ed54e8be4a84eb9b7",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:8bb068496b4679bef625e4710d9f3432e301c3a56602271f04e60eadf7f8a94c",
+ "zh:94742aa5378bab626ce34f79bcef6a373e4f86ea7a8b762e9f71270a899e0d00",
+ "zh:a485831b5a525cd8f40e8982fa37da40ff70b1ae092c8b755fcde123f0b1238d",
+ "zh:a647ff16d071eabcabd87ea8183eb90a775a0294ddd735d742075d62fff09193",
+ "zh:b74710c5954aaa3faf262c18d36a8c2407862d9f842c63e7fa92fa4de3d29df6",
+ "zh:fa73d83edc92af2e551857594c2232ba6a9e3603ad34b0a5940865202c08d8d7",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/local" {
+ version = "2.5.1"
+ constraints = ">= 1.0.0, ~> 2.5.1"
+ hashes = [
+ "h1:Np4kERf9SMrqUi7DJ1rK3soMK14k49nfgE7l/ipQ5xw=",
+ "zh:0af29ce2b7b5712319bf6424cb58d13b852bf9a777011a545fac99c7fdcdf561",
+ "zh:126063ea0d79dad1f68fa4e4d556793c0108ce278034f101d1dbbb2463924561",
+ "zh:196bfb49086f22fd4db46033e01655b0e5e036a5582d250412cc690fa7995de5",
+ "zh:37c92ec084d059d37d6cffdb683ccf68e3a5f8d2eb69dd73c8e43ad003ef8d24",
+ "zh:4269f01a98513651ad66763c16b268f4c2da76cc892ccfd54b401fff6cc11667",
+ "zh:51904350b9c728f963eef0c28f1d43e73d010333133eb7f30999a8fb6a0cc3d8",
+ "zh:73a66611359b83d0c3fcba2984610273f7954002febb8a57242bbb86d967b635",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:7ae387993a92bcc379063229b3cce8af7eaf082dd9306598fcd42352994d2de0",
+ "zh:9e0f365f807b088646db6e4a8d4b188129d9ebdbcf2568c8ab33bddd1b82c867",
+ "zh:b5263acbd8ae51c9cbffa79743fbcadcb7908057c87eb22fd9048268056efbc4",
+ "zh:dfcd88ac5f13c0d04e24be00b686d069b4879cc4add1b7b1a8ae545783d97520",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/null" {
+ version = "3.2.2"
+ constraints = ">= 2.0.0"
+ hashes = [
+ "h1:m467k2tZ9cdFFgHW7LPBK2GLPH43LC6wc3ppxr8yvoE=",
+ "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7",
+ "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a",
+ "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3",
+ "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606",
+ "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546",
+ "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539",
+ "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422",
+ "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae",
+ "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1",
+ "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/random" {
+ version = "3.6.2"
+ hashes = [
+ "h1:5lstwe/L8AZS/CP0lil2nPvmbbjAu8kCaU/ogSGNbxk=",
+ "zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec",
+ "zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53",
+ "zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114",
+ "zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad",
+ "zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b",
+ "zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916",
+ "zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150",
+ "zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544",
+ "zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7",
+ "zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af",
+ ]
+}
+
+provider "registry.terraform.io/kreuzwerker/docker" {
+ version = "3.0.2"
+ constraints = ">= 3.0.0, ~> 3.0.2"
+ hashes = [
+ "h1:DcRxJArfX6EiATluWeCBW7HoD6usz9fMoTK2U3dmyPk=",
+ "zh:15b0a2b2b563d8d40f62f83057d91acb02cd0096f207488d8b4298a59203d64f",
+ "zh:23d919de139f7cd5ebfd2ff1b94e6d9913f0977fcfc2ca02e1573be53e269f95",
+ "zh:38081b3fe317c7e9555b2aaad325ad3fa516a886d2dfa8605ae6a809c1072138",
+ "zh:4a9c5065b178082f79ad8160243369c185214d874ff5048556d48d3edd03c4da",
+ "zh:5438ef6afe057945f28bce43d76c4401254073de01a774760169ac1058830ac2",
+ "zh:60b7fadc287166e5c9873dfe53a7976d98244979e0ab66428ea0dea1ebf33e06",
+ "zh:61c5ec1cb94e4c4a4fb1e4a24576d5f39a955f09afb17dab982de62b70a9bdd1",
+ "zh:a38fe9016ace5f911ab00c88e64b156ebbbbfb72a51a44da3c13d442cd214710",
+ "zh:c2c4d2b1fd9ebb291c57f524b3bf9d0994ff3e815c0cd9c9bcb87166dc687005",
+ "zh:d567bb8ce483ab2cf0602e07eae57027a1a53994aba470fa76095912a505533d",
+ "zh:e83bf05ab6a19dd8c43547ce9a8a511f8c331a124d11ac64687c764ab9d5a792",
+ "zh:e90c934b5cd65516fbcc454c89a150bfa726e7cf1fe749790c7480bbeb19d387",
+ "zh:f05f167d2eaf913045d8e7b88c13757e3cf595dd5cd333057fdafc7c4b7fed62",
+ "zh:fcc9c1cea5ce85e8bcb593862e699a881bd36dffd29e2e367f82d15368659c3d",
+ ]
+}
diff --git a/src/email_service/terraform/api_gateway.tf b/src/email_service/terraform/api_gateway.tf
new file mode 100644
index 0000000..98374d9
--- /dev/null
+++ b/src/email_service/terraform/api_gateway.tf
@@ -0,0 +1,125 @@
+locals {
+ region = var.aws_region
+ custom_domain_name = "${var.environment}-${var.service_hyphen}-internal-api-tpet.awseducate.systems"
+ sub_domain_name = "${var.environment}-${var.service_hyphen}-internal-api-tpet"
+
+ tags = {
+ Service = var.service_underscore
+ }
+}
+
+# Find a certificate that is issued
+data "aws_acm_certificate" "issued" {
+ domain = "*.${var.domain_name}"
+ statuses = ["ISSUED"]
+}
+
+data "aws_route53_zone" "awseducate_systems" {
+ name = var.domain_name
+ private_zone = false
+}
+
+################################################################################
+# API Gateway Module
+################################################################################
+
+module "api_gateway" {
+ source = "terraform-aws-modules/apigateway-v2/aws"
+ version = "5.0.0"
+
+ description = "Email service api gateway to lambda container image"
+ name = "${var.environment}-${var.service_underscore}"
+ stage_name = var.environment
+
+
+ cors_configuration = {
+ allow_headers = ["content-type", "x-amz-date", "authorization", "x-api-key", "x-amz-security-token", "x-amz-user-agent"]
+ allow_methods = ["*"]
+ allow_origins = ["*"]
+ }
+
+ fail_on_warnings = false
+
+
+ # Custom Domain Name
+ domain_name = local.custom_domain_name
+ domain_name_certificate_arn = data.aws_acm_certificate.issued.arn
+ api_mapping_key = var.environment
+ create_domain_records = false
+ create_certificate = false
+ create_domain_name = true
+
+
+ # Routes & Integration(s)
+ routes = {
+ "POST /send-email" = {
+ detailed_metrics_enabled = true
+ throttling_rate_limit = 80
+ throttling_burst_limit = 40
+ integration = {
+ uri = module.validate_input_lambda.lambda_function_arn
+ type = "AWS_PROXY"
+ payload_format_version = "1.0"
+ timeout_milliseconds = 29000
+ }
+ }
+
+
+
+ "$default" = {
+ integration = {
+ uri = module.send_email_lambda.lambda_function_arn
+ }
+ }
+ }
+
+ # Stage
+ stage_access_log_settings = {
+ create_log_group = true
+ log_group_retention_in_days = 7
+ format = jsonencode({
+ context = {
+ domainName = "$context.domainName"
+ integrationErrorMessage = "$context.integrationErrorMessage"
+ protocol = "$context.protocol"
+ requestId = "$context.requestId"
+ requestTime = "$context.requestTime"
+ responseLength = "$context.responseLength"
+ routeKey = "$context.routeKey"
+ stage = "$context.stage"
+ status = "$context.status"
+ error = {
+ message = "$context.error.message"
+ responseType = "$context.error.responseType"
+ }
+ identity = {
+ sourceIP = "$context.identity.sourceIp"
+ }
+ integration = {
+ error = "$context.integration.error"
+ integrationStatus = "$context.integration.integrationStatus"
+ }
+ }
+ })
+ }
+
+ stage_default_route_settings = {
+ detailed_metrics_enabled = true
+ throttling_burst_limit = 100
+ throttling_rate_limit = 100
+ }
+
+ tags = local.tags
+}
+
+resource "aws_route53_record" "api_gateway_custom_domain_record" {
+ zone_id = data.aws_route53_zone.awseducate_systems.zone_id
+ name = local.custom_domain_name
+ type = "A"
+
+ alias {
+ name = module.api_gateway.domain_name_target_domain_name
+ zone_id = module.api_gateway.domain_name_hosted_zone_id
+ evaluate_target_health = false
+ }
+}
diff --git a/src/email_service/terraform/backend.tf b/src/email_service/terraform/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/src/email_service/terraform/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/src/email_service/terraform/dev.tfbackend b/src/email_service/terraform/dev.tfbackend
new file mode 100644
index 0000000..90cb063
--- /dev/null
+++ b/src/email_service/terraform/dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "email_service/dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/email_service/terraform/dev.tfvars b/src/email_service/terraform/dev.tfvars
new file mode 100644
index 0000000..615e893
--- /dev/null
+++ b/src/email_service/terraform/dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "email_service"
+service_hyphen = "email-service"
+dynamodb_table = "email"
diff --git a/src/email_service/terraform/dynamodb.tf b/src/email_service/terraform/dynamodb.tf
new file mode 100644
index 0000000..7c1670a
--- /dev/null
+++ b/src/email_service/terraform/dynamodb.tf
@@ -0,0 +1,45 @@
+resource "aws_dynamodb_table" "email" {
+ name = "email"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "run_id"
+ range_key = "email_id"
+
+ attribute {
+ name = "run_id"
+ type = "S"
+ }
+
+ attribute {
+ name = "email_id"
+ type = "S"
+ }
+
+ attribute {
+ name = "status"
+ type = "S"
+ }
+
+ attribute {
+ name = "created_at"
+ type = "S"
+ }
+
+ global_secondary_index {
+ name = "run_id-status-gsi"
+ hash_key = "run_id"
+ range_key = "status"
+ projection_type = "ALL"
+ }
+
+ global_secondary_index {
+ name = "run_id-created_at-gsi"
+ hash_key = "run_id"
+ range_key = "created_at"
+ projection_type = "ALL"
+ }
+
+ tags = {
+ Name = "email"
+ Creator = "Richie"
+ }
+}
diff --git a/src/email_service/terraform/lambda.tf b/src/email_service/terraform/lambda.tf
new file mode 100644
index 0000000..f239cda
--- /dev/null
+++ b/src/email_service/terraform/lambda.tf
@@ -0,0 +1,281 @@
+data "aws_ecr_authorization_token" "token" {
+}
+
+data "aws_caller_identity" "this" {}
+
+resource "random_string" "this" {
+ length = 4
+ special = false
+ lower = true
+ upper = false
+}
+
+locals {
+ source_path = "${path.module}/.."
+ validate_input_function_name_and_ecr_repo_name = "${var.environment}-${var.service_underscore}-validate_input-${random_string.this.result}"
+ send_email_function_name_and_ecr_repo_name = "${var.environment}-${var.service_underscore}-send_email-${random_string.this.result}"
+ path_include = ["**"]
+ path_exclude = ["**/__pycache__/**"]
+ files_include = setunion([for f in local.path_include : fileset(local.source_path, f)]...)
+ files_exclude = setunion([for f in local.path_exclude : fileset(local.source_path, f)]...)
+ files = sort(setsubtract(local.files_include, local.files_exclude))
+ dir_sha = sha1(join("", [for f in local.files : filesha1("${local.source_path}/${f}")]))
+}
+
+provider "docker" {
+ registry_auth {
+ address = format("%v.dkr.ecr.%v.amazonaws.com", data.aws_caller_identity.this.account_id, var.aws_region)
+ username = data.aws_ecr_authorization_token.token.user_name
+ password = data.aws_ecr_authorization_token.token.password
+ }
+}
+
+####################################
+####################################
+####################################
+# POST /send-email #################
+####################################
+####################################
+####################################
+
+module "validate_input_lambda" {
+ source = "terraform-aws-modules/lambda/aws"
+ version = "7.7.0"
+
+ function_name = local.validate_input_function_name_and_ecr_repo_name # Remember to change
+ description = "AWS Educate TPET ${var.service_hyphen} in ${var.environment}: POST /send-email" # Remember to change
+ create_package = false
+ timeout = 30
+
+ ##################
+ # Container Image
+ ##################
+ package_type = "Image"
+ architectures = ["x86_64"] # or ["arm64"]
+ image_uri = module.validate_input_docker_image.image_uri # Remember to change
+
+ publish = true # Whether to publish creation/change as new Lambda Function Version.
+
+
+ environment_variables = {
+ "ENVIRONMENT" = var.environment,
+ "SERVICE" = var.service_underscore
+ "BUCKET_NAME" = "${var.environment}-aws-educate-tpet-storage"
+ "SQS_QUEUE_URL" = module.send_email_sqs.queue_url
+ }
+
+ allowed_triggers = {
+ AllowExecutionFromAPIGateway = {
+ service = "apigateway"
+ source_arn = "${module.api_gateway.api_execution_arn}/*/*"
+ }
+ }
+
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Service" = var.service_underscore
+ }
+ ######################
+ # Additional policies
+ ######################
+
+ attach_policy_statements = true
+ policy_statements = {
+ s3_crud = {
+ effect = "Allow",
+ actions = [
+ "s3:ListBucket",
+ "s3:GetBucketLocation",
+ "s3:CreateBucket",
+ "s3:DeleteBucket",
+ "s3:PutObject",
+ "s3:GetObject",
+ "s3:DeleteObject",
+ "s3:ListBucketMultipartUploads",
+ "s3:ListMultipartUploadParts",
+ "s3:AbortMultipartUpload"
+ ],
+ resources = [
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage",
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage/*"
+ ]
+ },
+ sqs_send_message = {
+ effect = "Allow",
+ actions = [
+ "sqs:SendMessage"
+ ],
+ resources = [
+ "arn:aws:sqs:${var.aws_region}:${data.aws_caller_identity.this.account_id}:${module.send_email_sqs.queue_name}"
+ ]
+ }
+ }
+}
+
+module "validate_input_docker_image" {
+ source = "terraform-aws-modules/lambda/aws//modules/docker-build"
+ version = "7.7.0"
+
+ create_ecr_repo = true
+ keep_remotely = true
+ use_image_tag = false
+ image_tag_mutability = "MUTABLE"
+ ecr_repo = local.validate_input_function_name_and_ecr_repo_name # Remember to change
+ ecr_repo_lifecycle_policy = jsonencode({
+ "rules" : [
+ {
+ "rulePriority" : 1,
+ "description" : "Keep only the last 10 images",
+ "selection" : {
+ "tagStatus" : "any",
+ "countType" : "imageCountMoreThan",
+ "countNumber" : 10
+ },
+ "action" : {
+ "type" : "expire"
+ }
+ }
+ ]
+ })
+
+ # docker_file_path = "${local.source_path}/path/to/Dockerfile" # set `docker_file_path` If your Dockerfile is not in `source_path`
+ source_path = "${local.source_path}/validate_input/" # Remember to change
+ triggers = {
+ dir_sha = local.dir_sha
+ }
+
+}
+
+module "send_email_lambda" {
+ source = "terraform-aws-modules/lambda/aws"
+ version = "7.7.0"
+
+ function_name = local.send_email_function_name_and_ecr_repo_name
+ description = "AWS Educate TPET ${var.service_hyphen} in ${var.environment}: POST /send-email"
+ event_source_mapping = {
+ sqs = {
+ event_source_arn = module.send_email_sqs.queue_arn
+ function_response_types = ["ReportBatchItemFailures"] # Setting to ["ReportBatchItemFailures"] means that when the Lambda function processes a batch of SQS messages, it can report which messages failed to process.
+ scaling_config = {
+ # The `maximum_concurrency` parameter limits the number of concurrent Lambda instances that can process messages from the SQS queue.
+ # Setting `maximum_concurrency = 5` means that up to 5 Lambda instances can run simultaneously, each processing different messages from the SQS queue.
+ # It ensures that multiple messages can be processed in parallel, increasing throughput, but each message is still processed only once by a single Lambda instance.
+ maximum_concurrency = 20
+ }
+ }
+ }
+ create_package = false
+ timeout = 300
+
+ ##################
+ # Container Image
+ ##################
+ package_type = "Image"
+ architectures = ["x86_64"] # or ["arm64"]
+ image_uri = module.docker_image.image_uri
+
+ publish = true # Whether to publish creation/change as new Lambda Function Version.
+
+
+ environment_variables = {
+ "ENVIRONMENT" = var.environment,
+ "SERVICE" = var.service_underscore
+ "DYNAMODB_TABLE" = var.dynamodb_table
+ "BUCKET_NAME" = "${var.environment}-aws-educate-tpet-storage"
+ "SQS_QUEUE_URL" = module.send_email_sqs.queue_url
+ }
+
+ allowed_triggers = {
+ allow_execution_from_sqs = {
+ principal = "sqs.amazonaws.com"
+ source_arn = module.send_email_sqs.queue_arn
+ }
+ }
+
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Service" = var.service_underscore
+
+ }
+ ######################
+ # Additional policies
+ ######################
+
+ attach_policy_statements = true
+ policy_statements = {
+ dynamodb_crud = {
+ effect = "Allow",
+ actions = [
+ "dynamodb:BatchGetItem",
+ "dynamodb:BatchWriteItem",
+ "dynamodb:DeleteItem",
+ "dynamodb:GetItem",
+ "dynamodb:PutItem",
+ "dynamodb:Query",
+ "dynamodb:Scan",
+ "dynamodb:UpdateItem"
+ ],
+ resources = [
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}",
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}/index/*"
+ ]
+ },
+ ses_send_email = {
+ effect = "Allow",
+ actions = [
+ "ses:SendEmail",
+ "ses:SendRawEmail"
+ ],
+ resources = [
+ "arn:aws:ses:ap-northeast-1:${data.aws_caller_identity.this.account_id}:identity/awseducate.cloudambassador@gmail.com"
+ ]
+ },
+ sqs_receive_message = {
+ effect = "Allow",
+ actions = [
+ "sqs:ReceiveMessage",
+ "sqs:DeleteMessage",
+ "sqs:GetQueueAttributes"
+ ],
+ resources = [
+ "arn:aws:sqs:${var.aws_region}:${data.aws_caller_identity.this.account_id}:${module.send_email_sqs.queue_name}"
+ ]
+ }
+ }
+}
+
+module "docker_image" {
+ source = "terraform-aws-modules/lambda/aws//modules/docker-build"
+ version = "7.7.0"
+
+ create_ecr_repo = true
+ keep_remotely = true
+ use_image_tag = false
+ image_tag_mutability = "MUTABLE"
+ ecr_repo = local.send_email_function_name_and_ecr_repo_name
+ ecr_repo_lifecycle_policy = jsonencode({
+ "rules" : [
+ {
+ "rulePriority" : 1,
+ "description" : "Keep only the last 10 images",
+ "selection" : {
+ "tagStatus" : "any",
+ "countType" : "imageCountMoreThan",
+ "countNumber" : 10
+ },
+ "action" : {
+ "type" : "expire"
+ }
+ }
+ ]
+ })
+
+ # docker_file_path = "${local.source_path}/path/to/Dockerfile" # set `docker_file_path` If your Dockerfile is not in `source_path`
+ source_path = "${local.source_path}/send_email/" # Remember to change
+ triggers = {
+ dir_sha = local.dir_sha
+ }
+
+}
diff --git a/src/email_service/terraform/local-dev.tfbackend b/src/email_service/terraform/local-dev.tfbackend
new file mode 100644
index 0000000..fa25a75
--- /dev/null
+++ b/src/email_service/terraform/local-dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "email_service/local-dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/email_service/terraform/local-dev.tfvars b/src/email_service/terraform/local-dev.tfvars
new file mode 100644
index 0000000..1c9db65
--- /dev/null
+++ b/src/email_service/terraform/local-dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "email_service"
+service_hyphen = "email-service"
+dynamodb_table = "email"
diff --git a/src/email_service/terraform/preview.tfbackend b/src/email_service/terraform/preview.tfbackend
new file mode 100644
index 0000000..ec842d7
--- /dev/null
+++ b/src/email_service/terraform/preview.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "email_service/preview/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/email_service/terraform/preview.tfvars b/src/email_service/terraform/preview.tfvars
new file mode 100644
index 0000000..2859424
--- /dev/null
+++ b/src/email_service/terraform/preview.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-1"
+environment = "preview"
+service_underscore = "email_service"
+service_hyphen = "email-service"
+dynamodb_table = "email"
diff --git a/src/email_service/terraform/prod.tfbackend b/src/email_service/terraform/prod.tfbackend
new file mode 100644
index 0000000..30caa19
--- /dev/null
+++ b/src/email_service/terraform/prod.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "email_service/prod/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/email_service/terraform/prod.tfvars b/src/email_service/terraform/prod.tfvars
new file mode 100644
index 0000000..6377a9c
--- /dev/null
+++ b/src/email_service/terraform/prod.tfvars
@@ -0,0 +1,5 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "email_service"
+service_hyphen = "email-service"
+dynamodb_table = "email"
diff --git a/src/email_service/terraform/provider.tf b/src/email_service/terraform/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/src/email_service/terraform/provider.tf
@@ -0,0 +1,10 @@
# AWS provider for the email service stack; the deployment region comes from
# the per-environment tfvars file (local-dev/preview/prod).
provider "aws" {
  region = var.aws_region
  # Tags stamped onto every resource this provider creates, so resources can
  # be traced back to Terraform, the environment, and the project.
  default_tags {
    tags = {
      "Terraform"   = "true",
      "Environment" = var.environment,
      "Project"     = "AWS Educate TPET"
    }
  }
}
diff --git a/src/email_service/terraform/sqs.tf b/src/email_service/terraform/sqs.tf
new file mode 100644
index 0000000..246bd70
--- /dev/null
+++ b/src/email_service/terraform/sqs.tf
@@ -0,0 +1,14 @@
# SQS queue that buffers validated email requests for the send-email Lambda,
# with a dead-letter queue for messages that fail processing.
module "send_email_sqs" {
  source  = "terraform-aws-modules/sqs/aws"
  version = "4.2.0"

  name = "${var.environment}-send-email-sqs"
  # Seconds; keep this larger than the consuming Lambda's timeout so an
  # in-flight message is not redelivered while it is still being processed.
  visibility_timeout_seconds = 320

  # Dead letter queue
  create_dlq = true
  redrive_policy = {
    # A single failed receive moves the message straight to the DLQ —
    # no automatic retries before dead-lettering.
    maxReceiveCount = 1
  }
}
diff --git a/src/email_service/terraform/variables.tf b/src/email_service/terraform/variables.tf
new file mode 100644
index 0000000..8247229
--- /dev/null
+++ b/src/email_service/terraform/variables.tf
@@ -0,0 +1,23 @@
# Input variables for the email service stack. All values are plain strings
# supplied via the per-environment *.tfvars files.

variable "aws_region" {
  description = "AWS region to deploy the service into"
  type        = string
}

variable "environment" {
  description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
  type        = string
}

variable "service_underscore" {
  description = "Current service name"
  type        = string
}

variable "service_hyphen" {
  description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
  type        = string
}

variable "domain_name" {
  description = "Domain name, for example: example.com"
  type        = string
  default     = "awseducate.systems"
}

variable "dynamodb_table" {
  description = "Current service's DynamoDB table name"
  type        = string
}
diff --git a/src/email_service/terraform/versions.tf b/src/email_service/terraform/versions.tf
new file mode 100644
index 0000000..ca481d8
--- /dev/null
+++ b/src/email_service/terraform/versions.tf
@@ -0,0 +1,19 @@
# Pins the Terraform core version and provider versions for this stack.
terraform {
  required_version = "~> 1.8.0"
  required_providers {
    # AWS provider — all cloud resources in this stack.
    aws = {
      source  = "hashicorp/aws"
      version = "~> 5.54.0"
    }

    # hashicorp/local — local file/hash helpers (usage not visible here).
    local = {
      source  = "hashicorp/local"
      version = "~> 2.5.1"
    }

    # kreuzwerker/docker — builds/pushes the Lambda container images
    # consumed by the ECR-backed Lambda modules.
    docker = {
      source  = "kreuzwerker/docker"
      version = "~> 3.0.2"
    }
  }
}
diff --git a/src/email_service/send-email/Dockerfile b/src/email_service/validate_input/Dockerfile
similarity index 66%
rename from src/email_service/send-email/Dockerfile
rename to src/email_service/validate_input/Dockerfile
index 80ddbf2..68b05bb 100644
--- a/src/email_service/send-email/Dockerfile
+++ b/src/email_service/validate_input/Dockerfile
@@ -1,12 +1,11 @@
FROM public.ecr.aws/lambda/python:3.11
-# 安裝依賴項
+# Install dependencies
COPY requirements.txt /var/task/
RUN pip install -r /var/task/requirements.txt
-# 複製函數代碼
+# Copy function code
COPY lambda_function.py /var/task/
-COPY .env /var/task/
-# 設定運行 Lambda 函數的命令
+# Set the command to run the Lambda function
CMD ["lambda_function.lambda_handler"]
diff --git a/src/email_service/validate_input/lambda_function.py b/src/email_service/validate_input/lambda_function.py
new file mode 100644
index 0000000..b0fb995
--- /dev/null
+++ b/src/email_service/validate_input/lambda_function.py
@@ -0,0 +1,187 @@
+import io
+import json
+import logging
+import os
+import re
+import uuid
+
+import boto3
+import pandas as pd
+import requests
+from requests.exceptions import RequestException
+
+# Set up logging
+logger = logging.getLogger()
+logger.setLevel(logging.INFO)
+
+# Constants
+BUCKET_NAME = os.getenv("BUCKET_NAME")
+ENVIRONMENT = os.getenv("ENVIRONMENT")
+FILE_SERVICE_API_BASE_URL = f"https://{ENVIRONMENT}-file-service-internal-api-tpet.awseducate.systems/{ENVIRONMENT}"
+SQS_QUEUE_URL = os.getenv("SQS_QUEUE_URL")
+
+
+# Initialize AWS SQS client
+sqs_client = boto3.client("sqs")
+
+
def get_file_info(file_id):
    """
    Fetch a file's metadata record from the file service API.

    :param file_id: ID of the file to look up
    :return: Parsed JSON payload describing the file
    :raises requests.exceptions.Timeout: if the call exceeds 25 seconds
    :raises RequestException: for any other HTTP/network failure
    """
    try:
        api_response = requests.get(
            url=f"{FILE_SERVICE_API_BASE_URL}/files/{file_id}", timeout=25
        )
        api_response.raise_for_status()
        return api_response.json()
    except requests.exceptions.Timeout:
        logger.error("Request timed out for file_id: %s", file_id)
        raise
    except RequestException as e:
        logger.error("Error in get_file_info: %s", e)
        raise
+
+
def get_template(template_file_s3_key):
    """
    Download the email template body from S3 and decode it as UTF-8.

    :param template_file_s3_key: S3 key of the template file
    :return: Decoded content of the template file
    :raises Exception: re-raises any S3/decoding failure after logging it
    """
    try:
        s3_client = boto3.client("s3")
        s3_object = s3_client.get_object(Bucket=BUCKET_NAME, Key=template_file_s3_key)
        return s3_object["Body"].read().decode("utf-8")
    except Exception as e:
        logger.error("Error in get_template: %s", e)
        raise
+
+
def read_sheet_data_from_s3(spreadsheet_file_s3_key):
    """
    Read an Excel sheet from S3 into a list of row dictionaries.

    :param spreadsheet_file_s3_key: S3 key of the spreadsheet (.xlsx) file
    :return: Tuple of (list of row dicts, list of column names). An empty
        sheet yields ([], [header columns]) — both elements are always lists.
    :raises Exception: re-raises any S3/parse failure after logging it
    """
    try:
        s3 = boto3.client("s3")
        request = s3.get_object(Bucket=BUCKET_NAME, Key=spreadsheet_file_s3_key)
        xlsx_content = request["Body"].read()
        excel_data = pd.read_excel(io.BytesIO(xlsx_content), engine="openpyxl")
        # Bug fix: the previous empty-sheet special case returned ([], 0);
        # a non-list second element breaks the `placeholder not in columns`
        # membership test in validate_template (TypeError -> 500 response).
        # An empty DataFrame naturally produces [] rows and a (possibly
        # empty) column list, so no special case is needed.
        rows = excel_data.to_dict(orient="records")
        return rows, excel_data.columns.tolist()
    except Exception as e:
        logger.error("Error in read excel from s3: %s", e)
        raise
+
+
def validate_template(template_content, columns):
    """
    Check that every {{placeholder}} in the template has a matching column.

    :param template_content: Raw template text containing {{...}} markers
    :param columns: Column names available in the spreadsheet
    :return: Placeholders with no corresponding column (empty list if valid)
    :raises Exception: re-raises any unexpected failure after logging it
    """
    try:
        found = re.findall(r"{{(.*?)}}", template_content)
        return [name for name in found if name not in columns]
    except Exception as e:
        logger.error("Error in validate_template: %s", e)
        raise
+
+
def lambda_handler(event, context):
    """
    Validate an email-send request and enqueue it on SQS for processing.

    Expects an API Gateway proxy event whose ``body`` is a JSON string with:
      - subject (required): email subject line
      - template_file_id (required): file-service ID of the template file
      - spreadsheet_file_id (required): file-service ID of the recipient sheet
      - display_name (optional): defaults to "No Name Provided"
      - run_id (optional): caller-supplied run identifier; a random hex UUID
        is generated when absent

    :param event: Lambda event object (API Gateway proxy format)
    :param context: Lambda context object (unused)
    :return: Dictionary containing status code and JSON body —
        202 when the message was queued, 400 on validation failure,
        500 on unexpected errors
    """
    try:
        # Parse input from the event body
        body = json.loads(event.get("body", "{}"))
        template_file_id = body.get("template_file_id")
        spreadsheet_id = body.get("spreadsheet_file_id")
        email_title = body.get("subject")
        display_name = body.get("display_name", "No Name Provided")
        # Reuse the caller's run_id when provided so retries stay traceable.
        run_id = body.get("run_id") if body.get("run_id") else uuid.uuid4().hex

        # Validate required inputs
        if not email_title:
            return {"statusCode": 400, "body": json.dumps("Missing email title")}
        if not template_file_id:
            return {"statusCode": 400, "body": json.dumps("Missing template file ID")}
        if not spreadsheet_id:
            return {
                "statusCode": 400,
                "body": json.dumps("Missing spreadsheet file ID"),
            }

        # Get template file information and content
        template_info = get_file_info(template_file_id)
        template_s3_key = template_info["s3_object_key"]
        template_content = get_template(template_s3_key)

        # Get spreadsheet file information and columns
        spreadsheet_info = get_file_info(spreadsheet_id)
        spreadsheet_s3_key = spreadsheet_info["s3_object_key"]
        _, columns = read_sheet_data_from_s3(spreadsheet_s3_key)

        # Validate template placeholders against spreadsheet columns
        missing_columns = validate_template(template_content, columns)
        if missing_columns:
            error_message = "Missing required columns for placeholders: %s" % ", ".join(
                missing_columns
            )
            return {"statusCode": 400, "body": json.dumps(error_message)}

        # Prepare message for SQS
        message_body = {
            "run_id": run_id,
            "template_file_id": template_file_id,
            "spreadsheet_file_id": spreadsheet_id,
            "email_title": email_title,
            "display_name": display_name,
        }

        # Send message to SQS
        sqs_client.send_message(
            QueueUrl=SQS_QUEUE_URL, MessageBody=json.dumps(message_body)
        )
        logger.info("Message sent to SQS: %s", message_body)

        # Prepare success response
        response = {
            "status": "SUCCESS",
            "message": "Input message accepted for processing",
            "run_id": run_id,
            "template_file_id": template_file_id,
            "spreadsheet_file_id": spreadsheet_id,
            "email_title": email_title,
            "display_name": display_name,
        }

        # 202 Accepted: validation passed; the actual send happens
        # asynchronously downstream of the queue.
        return {"statusCode": 202, "body": json.dumps(response)}

    except Exception as e:
        # Handle exceptions and return error response
        response = {
            "status": "FAILED",
            "message": "Internal server error",
            "error": str(e),
        }
        logger.error("Internal server error: %s", e)
        return {"statusCode": 500, "body": json.dumps(response)}
diff --git a/src/email_service/send-email/requirements.txt b/src/email_service/validate_input/requirements.txt
similarity index 100%
rename from src/email_service/send-email/requirements.txt
rename to src/email_service/validate_input/requirements.txt
diff --git a/src/email_service/validate_input/samconfig.toml b/src/email_service/validate_input/samconfig.toml
new file mode 100644
index 0000000..72e5023
--- /dev/null
+++ b/src/email_service/validate_input/samconfig.toml
@@ -0,0 +1,9 @@
+version = 0.1
+[default.deploy.parameters]
+stack_name = "validate-input"
+resolve_s3 = true
+s3_prefix = "validate-input"
+region = "us-east-1"
+capabilities = "CAPABILITY_NAMED_IAM"
+parameter_overrides = "AwsRegion=\"us-east-1\""
+image_repositories = ["ValidateInputFunction=070576557102.dkr.ecr.us-east-1.amazonaws.com/validateinput0ae3ea30/validateinputfunction28c7d7d1repo"]
diff --git a/src/email_service/validate_input/template.yaml b/src/email_service/validate_input/template.yaml
new file mode 100644
index 0000000..760ca29
--- /dev/null
+++ b/src/email_service/validate_input/template.yaml
@@ -0,0 +1,80 @@
AWSTemplateFormatVersion: '2010-09-09'
Transform: 'AWS::Serverless-2016-10-31'
Description: AWS SAM template for email input validation

Parameters:
  AwsRegion:
    Type: String
    Default: "us-east-1"
    Description: AWS region

Resources:
  # Container-image Lambda that validates the request and enqueues it on SQS.
  ValidateInputFunction:
    Type: AWS::Serverless::Function
    Properties:
      FunctionName: validate-input
      Role: !GetAtt LambdaExecutionRole.Arn
      PackageType: Image
      CodeUri: .
      Environment:
        Variables:
          BUCKET_NAME: "email-sender-excel"
          # Bug fix: the function code builds the file-service URL from
          # ENVIRONMENT; without it the URL contains the string "None".
          ENVIRONMENT: "dev"
          SQS_QUEUE_URL: !Sub "https://sqs.${AwsRegion}.amazonaws.com/070576557102/email-queue"
      Timeout: 20
      Events:
        ValidateInputApi:
          Type: Api
          Properties:
            Path: /send-email
            Method: post
            RestApiId: !Ref ValidateInputApi
    Metadata:
      Dockerfile: Dockerfile
      DockerContext: .
      DockerTag: python3.11

  ValidateInputApi:
    Type: AWS::Serverless::Api
    Properties:
      Name: ValidateInputAPI
      StageName: dev
      EndpointConfiguration: REGIONAL

  # Execution role: CloudWatch logging, read-only access to the spreadsheet
  # bucket, and permission to enqueue messages on the email queue.
  LambdaExecutionRole:
    Type: AWS::IAM::Role
    Properties:
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          - Effect: Allow
            Principal:
              Service: lambda.amazonaws.com
            Action: sts:AssumeRole
      RoleName: !Sub "LambdaExecutionRole-${AWS::StackName}-${AWS::Region}-${AWS::AccountId}"
      Policies:
        - PolicyName: !Sub "LambdaPolicy-${AWS::StackName}-${AWS::Region}-${AWS::AccountId}"
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              - Effect: Allow
                Action:
                  - logs:CreateLogGroup
                  - logs:CreateLogStream
                  - logs:PutLogEvents
                Resource: "arn:aws:logs:*:*:*"
              - Effect: Allow
                Action:
                  - s3:GetObject
                Resource: "arn:aws:s3:::email-sender-excel/*"
              - Effect: Allow
                Action:
                  - sqs:SendMessage
                Resource: !Sub "arn:aws:sqs:${AwsRegion}:070576557102:email-queue"

Outputs:
  ApiGatewayUrl:
    Description: URL of the API Gateway for validating input
    Value: !Sub "https://${ValidateInputApi}.execute-api.${AwsRegion}.amazonaws.com/dev/send-email"
  SQSQueueUrl:
    Description: URL of the SQS Queue
    # Bug fix: this output previously emitted the queue ARN, not its URL.
    Value: !Sub "https://sqs.${AwsRegion}.amazonaws.com/070576557102/email-queue"
diff --git a/src/file_service/get_file/Dockerfile b/src/file_service/get_file/Dockerfile
new file mode 100644
index 0000000..68b05bb
--- /dev/null
+++ b/src/file_service/get_file/Dockerfile
@@ -0,0 +1,11 @@
# Lambda container image built on the AWS-provided Python 3.11 base image,
# which supplies the Lambda runtime interface client.
FROM public.ecr.aws/lambda/python:3.11

# Install dependencies
COPY requirements.txt /var/task/
RUN pip install -r /var/task/requirements.txt

# Copy function code
COPY lambda_function.py /var/task/

# Set the command to run the Lambda function (module.handler)
CMD ["lambda_function.lambda_handler"]
diff --git a/src/file_service/get_file/get_file_by_id_function.py b/src/file_service/get_file/get_file_by_id_function.py
deleted file mode 100644
index e745e81..0000000
--- a/src/file_service/get_file/get_file_by_id_function.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import json
-
-import boto3
-
-dynamodb = boto3.resource("dynamodb")
-
-
-def lambda_handler(event, context):
- file_id = event["pathParameters"]["file_id"]
-
- table = dynamodb.Table("Files")
- response = table.get_item(Key={"file_id": file_id})
-
- if "Item" not in response:
- return {"statusCode": 404, "body": json.dumps({"message": "File not found"})}
-
- file_item = response["Item"]
-
- result = {
- "file_id": file_item["file_id"],
- "s3_object_key": file_item["s3_object_key"],
- "created_at": file_item["created_at"],
- "updated_at": file_item["updated_at"],
- "file_url": file_item["file_url"],
- "file_name": file_item["file_name"],
- "file_extension": file_item["file_extension"],
- "file_size": file_item["file_size"],
- "uploader_id": file_item["uploader_id"],
- }
-
- return {"statusCode": 200, "body": json.dumps(result)}
diff --git a/src/file_service/get_file/get_file_by_id.py b/src/file_service/get_file/lambda_function.py
similarity index 94%
rename from src/file_service/get_file/get_file_by_id.py
rename to src/file_service/get_file/lambda_function.py
index 8371133..d2b8c3a 100644
--- a/src/file_service/get_file/get_file_by_id.py
+++ b/src/file_service/get_file/lambda_function.py
@@ -5,7 +5,7 @@
import boto3
dynamodb = boto3.resource("dynamodb")
-table = dynamodb.Table(os.getenv("TABLE_NAME"))
+table = dynamodb.Table(os.getenv("DYNAMODB_TABLE"))
class DecimalEncoder(json.JSONEncoder):
@@ -17,7 +17,7 @@ def default(self, o):
def lambda_handler(event, context):
file_id = event["pathParameters"]["file_id"]
-
+ print("test")
response = table.get_item(Key={"file_id": file_id})
if "Item" not in response:
diff --git a/__init__.py b/src/file_service/get_file/requirements.txt
similarity index 100%
rename from __init__.py
rename to src/file_service/get_file/requirements.txt
diff --git a/src/file_service/list_files/Dockerfile b/src/file_service/list_files/Dockerfile
new file mode 100644
index 0000000..68b05bb
--- /dev/null
+++ b/src/file_service/list_files/Dockerfile
@@ -0,0 +1,11 @@
# Lambda container image built on the AWS-provided Python 3.11 base image,
# which supplies the Lambda runtime interface client.
FROM public.ecr.aws/lambda/python:3.11

# Install dependencies
COPY requirements.txt /var/task/
RUN pip install -r /var/task/requirements.txt

# Copy function code
COPY lambda_function.py /var/task/

# Set the command to run the Lambda function (module.handler)
CMD ["lambda_function.lambda_handler"]
diff --git a/src/file_service/list_files/list_files_function.py b/src/file_service/list_files/lambda_function.py
similarity index 86%
rename from src/file_service/list_files/list_files_function.py
rename to src/file_service/list_files/lambda_function.py
index 6fe1f40..7527b4b 100644
--- a/src/file_service/list_files/list_files_function.py
+++ b/src/file_service/list_files/lambda_function.py
@@ -5,6 +5,7 @@
from decimal import Decimal
import boto3
+import botocore.exceptions
from boto3.dynamodb.conditions import Key
# Configure logging
@@ -12,7 +13,7 @@
logger.setLevel(logging.INFO)
dynamodb = boto3.resource("dynamodb")
-table = dynamodb.Table(os.getenv("TABLE_NAME"))
+table = dynamodb.Table(os.getenv("DYNAMODB_TABLE"))
class DecimalEncoder(json.JSONEncoder):
@@ -81,9 +82,13 @@ def lambda_handler(event, context):
try:
response = table.query(**query_kwargs)
logger.info("Query successful")
- except dynamodb.meta.client.exceptions.ValidationException as e:
- logger.error("Query failed: %s", str(e))
- return {"statusCode": 400, "body": json.dumps({"error": str(e)})}
+ except botocore.exceptions.ClientError as e:
+ error_code = e.response["Error"]["Code"]
+ if error_code == "ValidationException":
+ logger.error("Query failed: %s", str(e))
+ return {"statusCode": 400, "body": json.dumps({"error": str(e)})}
+ else:
+ raise e
else:
scan_kwargs = {
"Limit": limit,
@@ -112,9 +117,13 @@ def lambda_handler(event, context):
try:
response = table.scan(**scan_kwargs)
logger.info("Scan successful")
- except dynamodb.meta.client.exceptions.ValidationException as e:
- logger.error("Scan failed: %s", str(e))
- return {"statusCode": 400, "body": json.dumps({"error": str(e)})}
+ except botocore.exceptions.ClientError as e:
+ error_code = e.response["Error"]["Code"]
+ if error_code == "ValidationException":
+ logger.error("Scan failed: %s", str(e))
+ return {"statusCode": 400, "body": json.dumps({"error": str(e)})}
+ else:
+ raise e
files = response.get("Items", [])
last_evaluated_key = response.get("LastEvaluatedKey")
diff --git a/src/file_service/list_files/requirements.txt b/src/file_service/list_files/requirements.txt
new file mode 100644
index 0000000..1db657b
--- /dev/null
+++ b/src/file_service/list_files/requirements.txt
@@ -0,0 +1 @@
+boto3
\ No newline at end of file
diff --git a/src/file_service/terraform/.terraform.lock.hcl b/src/file_service/terraform/.terraform.lock.hcl
new file mode 100644
index 0000000..3d25ee8
--- /dev/null
+++ b/src/file_service/terraform/.terraform.lock.hcl
@@ -0,0 +1,126 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = ">= 4.22.0, >= 4.40.0, >= 5.32.0, >= 5.37.0, ~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/external" {
+ version = "2.3.3"
+ constraints = ">= 1.0.0"
+ hashes = [
+ "h1:/x65slrvO8YG5MKxE2DaU5udEbUxBu3BgEiO7EEM9bQ=",
+ "zh:03d81462f9578ec91ce8e26f887e34151eda0e100f57e9772dbea86363588239",
+ "zh:37ec2a20f6a3ec3a0fd95d3f3de26da6cb9534b30488bc45723e118a0911c0d8",
+ "zh:4eb5b119179539f2749ce9de0e1b9629d025990f062f4f4dddc161562bb89d37",
+ "zh:5a31bb58414f41bee5e09b939012df5b88654120b0238a89dfd6691ba197619a",
+ "zh:6221a05e52a6a2d4f520ffe7cbc741f4f6080e0855061b0ed54e8be4a84eb9b7",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:8bb068496b4679bef625e4710d9f3432e301c3a56602271f04e60eadf7f8a94c",
+ "zh:94742aa5378bab626ce34f79bcef6a373e4f86ea7a8b762e9f71270a899e0d00",
+ "zh:a485831b5a525cd8f40e8982fa37da40ff70b1ae092c8b755fcde123f0b1238d",
+ "zh:a647ff16d071eabcabd87ea8183eb90a775a0294ddd735d742075d62fff09193",
+ "zh:b74710c5954aaa3faf262c18d36a8c2407862d9f842c63e7fa92fa4de3d29df6",
+ "zh:fa73d83edc92af2e551857594c2232ba6a9e3603ad34b0a5940865202c08d8d7",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/local" {
+ version = "2.5.1"
+ constraints = ">= 1.0.0, ~> 2.5.1"
+ hashes = [
+ "h1:Np4kERf9SMrqUi7DJ1rK3soMK14k49nfgE7l/ipQ5xw=",
+ "zh:0af29ce2b7b5712319bf6424cb58d13b852bf9a777011a545fac99c7fdcdf561",
+ "zh:126063ea0d79dad1f68fa4e4d556793c0108ce278034f101d1dbbb2463924561",
+ "zh:196bfb49086f22fd4db46033e01655b0e5e036a5582d250412cc690fa7995de5",
+ "zh:37c92ec084d059d37d6cffdb683ccf68e3a5f8d2eb69dd73c8e43ad003ef8d24",
+ "zh:4269f01a98513651ad66763c16b268f4c2da76cc892ccfd54b401fff6cc11667",
+ "zh:51904350b9c728f963eef0c28f1d43e73d010333133eb7f30999a8fb6a0cc3d8",
+ "zh:73a66611359b83d0c3fcba2984610273f7954002febb8a57242bbb86d967b635",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:7ae387993a92bcc379063229b3cce8af7eaf082dd9306598fcd42352994d2de0",
+ "zh:9e0f365f807b088646db6e4a8d4b188129d9ebdbcf2568c8ab33bddd1b82c867",
+ "zh:b5263acbd8ae51c9cbffa79743fbcadcb7908057c87eb22fd9048268056efbc4",
+ "zh:dfcd88ac5f13c0d04e24be00b686d069b4879cc4add1b7b1a8ae545783d97520",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/null" {
+ version = "3.2.2"
+ constraints = ">= 2.0.0"
+ hashes = [
+ "h1:m467k2tZ9cdFFgHW7LPBK2GLPH43LC6wc3ppxr8yvoE=",
+ "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7",
+ "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a",
+ "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3",
+ "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606",
+ "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546",
+ "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539",
+ "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422",
+ "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae",
+ "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1",
+ "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/random" {
+ version = "3.6.2"
+ hashes = [
+ "h1:5lstwe/L8AZS/CP0lil2nPvmbbjAu8kCaU/ogSGNbxk=",
+ "zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec",
+ "zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53",
+ "zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114",
+ "zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad",
+ "zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b",
+ "zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916",
+ "zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150",
+ "zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544",
+ "zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7",
+ "zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af",
+ ]
+}
+
+provider "registry.terraform.io/kreuzwerker/docker" {
+ version = "3.0.2"
+ constraints = ">= 3.0.0, ~> 3.0.2"
+ hashes = [
+ "h1:DcRxJArfX6EiATluWeCBW7HoD6usz9fMoTK2U3dmyPk=",
+ "zh:15b0a2b2b563d8d40f62f83057d91acb02cd0096f207488d8b4298a59203d64f",
+ "zh:23d919de139f7cd5ebfd2ff1b94e6d9913f0977fcfc2ca02e1573be53e269f95",
+ "zh:38081b3fe317c7e9555b2aaad325ad3fa516a886d2dfa8605ae6a809c1072138",
+ "zh:4a9c5065b178082f79ad8160243369c185214d874ff5048556d48d3edd03c4da",
+ "zh:5438ef6afe057945f28bce43d76c4401254073de01a774760169ac1058830ac2",
+ "zh:60b7fadc287166e5c9873dfe53a7976d98244979e0ab66428ea0dea1ebf33e06",
+ "zh:61c5ec1cb94e4c4a4fb1e4a24576d5f39a955f09afb17dab982de62b70a9bdd1",
+ "zh:a38fe9016ace5f911ab00c88e64b156ebbbbfb72a51a44da3c13d442cd214710",
+ "zh:c2c4d2b1fd9ebb291c57f524b3bf9d0994ff3e815c0cd9c9bcb87166dc687005",
+ "zh:d567bb8ce483ab2cf0602e07eae57027a1a53994aba470fa76095912a505533d",
+ "zh:e83bf05ab6a19dd8c43547ce9a8a511f8c331a124d11ac64687c764ab9d5a792",
+ "zh:e90c934b5cd65516fbcc454c89a150bfa726e7cf1fe749790c7480bbeb19d387",
+ "zh:f05f167d2eaf913045d8e7b88c13757e3cf595dd5cd333057fdafc7c4b7fed62",
+ "zh:fcc9c1cea5ce85e8bcb593862e699a881bd36dffd29e2e367f82d15368659c3d",
+ ]
+}
diff --git a/src/file_service/terraform/api_gateway.tf b/src/file_service/terraform/api_gateway.tf
index c064dc6..e8eb599 100644
--- a/src/file_service/terraform/api_gateway.tf
+++ b/src/file_service/terraform/api_gateway.tf
@@ -1,125 +1,150 @@
-resource "aws_api_gateway_rest_api" "files_api" {
- name = "Files API"
- description = "API for listing files with pagination and filtering"
-}
+locals {
+ region = var.aws_region
+ custom_domain_name = "${var.environment}-${var.service_hyphen}-internal-api-tpet.awseducate.systems"
+ sub_domain_name = "${var.environment}-${var.service_hyphen}-internal-api-tpet"
-resource "aws_api_gateway_resource" "files_resource" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- parent_id = aws_api_gateway_rest_api.files_api.root_resource_id
- path_part = "files"
+ tags = {
+ Service = var.service_underscore
+ }
}
-# GET Method
-resource "aws_api_gateway_method" "list_files_method" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = "GET"
- authorization = "NONE"
+# Find a certificate that is issued
+data "aws_acm_certificate" "issued" {
+ domain = "*.${var.domain_name}"
+ statuses = ["ISSUED"]
}
-resource "aws_api_gateway_integration" "list_files_integration" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- type = "AWS_PROXY"
- integration_http_method = "POST"
- uri = aws_lambda_function.list_files.invoke_arn
+data "aws_route53_zone" "awseducate_systems" {
+ name = var.domain_name
+ private_zone = false
}
-resource "aws_api_gateway_method_response" "list_files_response_200" {
-
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- status_code = "200"
+################################################################################
+# API Gateway Module
+################################################################################
- # CORS Settings
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = true,
- "method.response.header.Access-Control-Allow-Headers" = true,
- "method.response.header.Access-Control-Allow-Methods" = true
- }
-}
+module "api_gateway" {
+ source = "terraform-aws-modules/apigateway-v2/aws"
+ version = "5.0.0"
-resource "aws_api_gateway_integration_response" "list_files_integration_response_200" {
- depends_on = [aws_api_gateway_integration.list_files_integration]
+ description = "File service api gateway to lambda container image"
+ name = "${var.environment}-${var.service_underscore}"
+ stage_name = var.environment
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- status_code = "200"
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = "'${var.cors_allow_origin}'",
- "method.response.header.Access-Control-Allow-Headers" = "'*'",
- "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
+ cors_configuration = {
+ allow_headers = ["content-type", "x-amz-date", "authorization", "x-api-key", "x-amz-security-token", "x-amz-user-agent"]
+ allow_methods = ["*"]
+ allow_origins = ["*"]
}
-}
-
-# OPTIONS Method
-resource "aws_api_gateway_method" "options_method" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = "OPTIONS"
- authorization = "NONE"
-}
-resource "aws_api_gateway_integration" "options_integration" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- type = "MOCK"
- request_templates = {
- "application/json" = "{\"statusCode\": 200}"
+ fail_on_warnings = false
+
+
+ # Custom Domain Name
+ domain_name = local.custom_domain_name
+ domain_name_certificate_arn = data.aws_acm_certificate.issued.arn
+ api_mapping_key = var.environment
+ create_domain_records = false
+ create_certificate = false
+ create_domain_name = true
+
+
+ # Routes & Integration(s)
+ routes = {
+ "POST /upload-multiple-file" = {
+ detailed_metrics_enabled = true
+ throttling_rate_limit = 80
+ throttling_burst_limit = 40
+ integration = {
+ uri = module.upload_multiple_file_lambda.lambda_function_arn # Remember to change
+ type = "AWS_PROXY"
+ payload_format_version = "1.0"
+ timeout_milliseconds = 29000
+ }
+ }
+
+ "GET /files" = {
+ detailed_metrics_enabled = true
+ throttling_rate_limit = 80
+ throttling_burst_limit = 40
+ integration = {
+ uri = module.list_files_lambda.lambda_function_arn # Remember to change
+ type = "AWS_PROXY"
+ payload_format_version = "1.0"
+ timeout_milliseconds = 29000
+ }
+ }
+
+ "GET /files/{file_id}" = {
+ detailed_metrics_enabled = true
+ throttling_rate_limit = 80
+ throttling_burst_limit = 40
+ integration = {
+ uri = module.get_file_lambda.lambda_function_arn # Remember to change
+ type = "AWS_PROXY"
+ payload_format_version = "1.0"
+ timeout_milliseconds = 29000
+ }
+ }
+
+
+
+ "$default" = {
+ integration = {
+ uri = module.get_file_lambda.lambda_function_arn
+ passthrough_behavior = "WHEN_NO_MATCH"
+ }
+ }
}
-}
-
-resource "aws_api_gateway_method_response" "options_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- status_code = "200"
- response_parameters = {
- "method.response.header.Access-Control-Allow-Headers" = true,
- "method.response.header.Access-Control-Allow-Methods" = true,
- "method.response.header.Access-Control-Allow-Origin" = true
+ # Stage
+ stage_access_log_settings = {
+ create_log_group = true
+ log_group_retention_in_days = 7
+ format = jsonencode({
+ context = {
+ domainName = "$context.domainName"
+ integrationErrorMessage = "$context.integrationErrorMessage"
+ protocol = "$context.protocol"
+ requestId = "$context.requestId"
+ requestTime = "$context.requestTime"
+ responseLength = "$context.responseLength"
+ routeKey = "$context.routeKey"
+ stage = "$context.stage"
+ status = "$context.status"
+ error = {
+ message = "$context.error.message"
+ responseType = "$context.error.responseType"
+ }
+ identity = {
+ sourceIP = "$context.identity.sourceIp"
+ }
+ integration = {
+ error = "$context.integration.error"
+ integrationStatus = "$context.integration.integrationStatus"
+ }
+ }
+ })
}
- response_models = {
- "application/json" = "Empty"
+ stage_default_route_settings = {
+ detailed_metrics_enabled = true
+ throttling_burst_limit = 100
+ throttling_rate_limit = 100
}
-}
-
-resource "aws_api_gateway_integration_response" "options_integration_response" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- status_code = aws_api_gateway_method_response.options_response_200.status_code
- response_parameters = {
- "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
- "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'",
- "method.response.header.Access-Control-Allow-Origin" = "'${var.cors_allow_origin}'"
- }
+ tags = local.tags
}
-resource "aws_api_gateway_deployment" "files_api_deployment" {
- depends_on = [
- aws_api_gateway_integration.list_files_integration,
- aws_api_gateway_method_response.list_files_response_200,
- aws_api_gateway_integration_response.list_files_integration_response_200,
- aws_api_gateway_method.options_method,
- aws_api_gateway_method_response.options_response_200,
- aws_api_gateway_integration.options_integration,
- aws_api_gateway_integration_response.options_integration_response
- ]
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- stage_name = var.environment
+resource "aws_route53_record" "api_gateway_custom_domain_record" {
+ zone_id = data.aws_route53_zone.awseducate_systems.zone_id
+ name = local.custom_domain_name
+ type = "A"
- triggers = {
- redeployment = timestamp()
+ alias {
+ name = module.api_gateway.domain_name_target_domain_name
+ zone_id = module.api_gateway.domain_name_hosted_zone_id
+ evaluate_target_health = false
}
}
-
-
diff --git a/src/file_service/terraform/backend.tf b/src/file_service/terraform/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/src/file_service/terraform/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/src/file_service/terraform/backup_terraform/api_gateway.tf b/src/file_service/terraform/backup_terraform/api_gateway.tf
new file mode 100644
index 0000000..c064dc6
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/api_gateway.tf
@@ -0,0 +1,125 @@
+resource "aws_api_gateway_rest_api" "files_api" {
+ name = "Files API"
+ description = "API for listing files with pagination and filtering"
+}
+
+resource "aws_api_gateway_resource" "files_resource" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ parent_id = aws_api_gateway_rest_api.files_api.root_resource_id
+ path_part = "files"
+}
+
+# GET Method
+resource "aws_api_gateway_method" "list_files_method" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = "GET"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_integration" "list_files_integration" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.list_files_method.http_method
+ type = "AWS_PROXY"
+ integration_http_method = "POST"
+ uri = aws_lambda_function.list_files.invoke_arn
+}
+
+resource "aws_api_gateway_method_response" "list_files_response_200" {
+
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.list_files_method.http_method
+ status_code = "200"
+
+ # CORS Settings
+ response_parameters = {
+ "method.response.header.Access-Control-Allow-Origin" = true,
+ "method.response.header.Access-Control-Allow-Headers" = true,
+ "method.response.header.Access-Control-Allow-Methods" = true
+ }
+}
+
+resource "aws_api_gateway_integration_response" "list_files_integration_response_200" {
+ depends_on = [aws_api_gateway_integration.list_files_integration]
+
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.list_files_method.http_method
+ status_code = "200"
+
+ response_parameters = {
+ "method.response.header.Access-Control-Allow-Origin" = "'${var.cors_allow_origin}'",
+ "method.response.header.Access-Control-Allow-Headers" = "'*'",
+ "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
+ }
+}
+
+# OPTIONS Method
+resource "aws_api_gateway_method" "options_method" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = "OPTIONS"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_integration" "options_integration" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.options_method.http_method
+ type = "MOCK"
+ request_templates = {
+ "application/json" = "{\"statusCode\": 200}"
+ }
+}
+
+resource "aws_api_gateway_method_response" "options_response_200" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.options_method.http_method
+ status_code = "200"
+
+ response_parameters = {
+ "method.response.header.Access-Control-Allow-Headers" = true,
+ "method.response.header.Access-Control-Allow-Methods" = true,
+ "method.response.header.Access-Control-Allow-Origin" = true
+ }
+
+ response_models = {
+ "application/json" = "Empty"
+ }
+}
+
+resource "aws_api_gateway_integration_response" "options_integration_response" {
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ resource_id = aws_api_gateway_resource.files_resource.id
+ http_method = aws_api_gateway_method.options_method.http_method
+ status_code = aws_api_gateway_method_response.options_response_200.status_code
+
+ response_parameters = {
+ "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
+ "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'",
+ "method.response.header.Access-Control-Allow-Origin" = "'${var.cors_allow_origin}'"
+ }
+}
+
+resource "aws_api_gateway_deployment" "files_api_deployment" {
+ depends_on = [
+ aws_api_gateway_integration.list_files_integration,
+ aws_api_gateway_method_response.list_files_response_200,
+ aws_api_gateway_integration_response.list_files_integration_response_200,
+ aws_api_gateway_method.options_method,
+ aws_api_gateway_method_response.options_response_200,
+ aws_api_gateway_integration.options_integration,
+ aws_api_gateway_integration_response.options_integration_response
+ ]
+ rest_api_id = aws_api_gateway_rest_api.files_api.id
+ stage_name = var.environment
+
+ triggers = {
+ redeployment = timestamp()
+ }
+}
+
+
diff --git a/src/file_service/terraform/iam.tf b/src/file_service/terraform/backup_terraform/iam.tf
similarity index 76%
rename from src/file_service/terraform/iam.tf
rename to src/file_service/terraform/backup_terraform/iam.tf
index d62777a..e108add 100644
--- a/src/file_service/terraform/iam.tf
+++ b/src/file_service/terraform/backup_terraform/iam.tf
@@ -1,5 +1,5 @@
-resource "aws_iam_role" "lambda_exec_role" {
- name = "lambda_exec_role"
+resource "aws_iam_role" "list_files_lambda_exec_role" {
+ name = "list_files_lambda_exec_role"
assume_role_policy = jsonencode({
Version = "2012-10-17"
Statement = [{
@@ -16,7 +16,4 @@ resource "aws_iam_role" "lambda_exec_role" {
"arn:aws:iam::aws:policy/AmazonDynamoDBReadOnlyAccess"
]
- lifecycle {
- prevent_destroy = true
- }
}
diff --git a/src/file_service/terraform/backup_terraform/lambda.tf b/src/file_service/terraform/backup_terraform/lambda.tf
new file mode 100644
index 0000000..bbe8f2a
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/lambda.tf
@@ -0,0 +1,28 @@
+data "archive_file" "lambda_zip" {
+ type = "zip"
+ source_dir = "${path.module}/../list_files"
+ output_path = "${path.module}/list_files_function.zip"
+}
+
+resource "aws_lambda_function" "list_files" {
+ filename = data.archive_file.lambda_zip.output_path
+ function_name = "list_files"
+ role = aws_iam_role.list_files_lambda_exec_role.arn
+ handler = "list_files_function.lambda_handler"
+ source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
+ runtime = "python3.11"
+
+ environment {
+ variables = {
+ TABLE_NAME = "file"
+ }
+ }
+}
+
+resource "aws_lambda_permission" "api_gateway_permission" {
+ statement_id = "AllowAPIGatewayInvoke"
+ action = "lambda:InvokeFunction"
+ function_name = aws_lambda_function.list_files.function_name
+ principal = "apigateway.amazonaws.com"
+ source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
+}
diff --git a/src/file_service/terraform/backup_terraform/list_files_function.zip b/src/file_service/terraform/backup_terraform/list_files_function.zip
new file mode 100644
index 0000000..812f4a1
Binary files /dev/null and b/src/file_service/terraform/backup_terraform/list_files_function.zip differ
diff --git a/src/file_service/terraform/backup_terraform/main.tf b/src/file_service/terraform/backup_terraform/main.tf
new file mode 100644
index 0000000..64ae3a1
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/main.tf
@@ -0,0 +1,254 @@
+# provider "aws" {
+# region = "ap-northeast-1"
+# }
+
+# data "aws_region" "current" {}
+
+# resource "aws_iam_role" "lambda_exec_role" {
+# name = "lambda_exec_role"
+# assume_role_policy = jsonencode({
+# Version = "2012-10-17"
+# Statement = [{
+# Action = "sts:AssumeRole"
+# Effect = "Allow"
+# Principal = {
+# Service = "lambda.amazonaws.com"
+# }
+# }]
+# })
+
+# managed_policy_arns = [
+# "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
+# "arn:aws:iam::aws:policy/AmazonDynamoDBReadOnlyAccess"
+# ]
+# }
+
+# data "archive_file" "lambda_zip" {
+# type = "zip"
+# source_dir = "${path.module}/../list_files"
+# output_path = "${path.module}/list_files_function.zip"
+# }
+
+# data "archive_file" "get_file_lambda_zip" {
+# type = "zip"
+# source_dir = "${path.module}/../get_file"
+# output_path = "${path.module}/get_file_function.zip"
+# }
+
+# resource "aws_lambda_function" "list_files" {
+# filename = data.archive_file.lambda_zip.output_path
+# function_name = "list_files"
+# role = aws_iam_role.lambda_exec_role.arn
+# handler = "list_files_function.lambda_handler"
+# source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
+# runtime = "python3.11"
+
+# environment {
+# variables = {
+# TABLE_NAME = "file"
+# }
+# }
+# }
+
+# resource "aws_lambda_function" "get_file" {
+# filename = data.archive_file.get_file_lambda_zip.output_path
+# function_name = "get_file_by_id"
+# role = aws_iam_role.lambda_exec_role.arn
+# handler = "get_file_by_id.lambda_handler"
+# source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
+# runtime = "python3.11"
+
+# environment {
+# variables = {
+# TABLE_NAME = "file"
+# }
+# }
+# }
+
+# resource "aws_api_gateway_rest_api" "files_api" {
+# name = "Files API"
+# description = "API for listing files with pagination and filtering"
+# }
+
+# resource "aws_api_gateway_resource" "files_resource" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# parent_id = aws_api_gateway_rest_api.files_api.root_resource_id
+# path_part = "files"
+# }
+
+# resource "aws_api_gateway_resource" "file_resource" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# parent_id = aws_api_gateway_resource.files_resource.id
+# path_part = "{file_id}"
+# }
+
+# # GET Method for listing files
+# resource "aws_api_gateway_method" "list_files_method" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = "GET"
+# authorization = "NONE"
+# }
+
+# resource "aws_api_gateway_integration" "list_files_integration" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.list_files_method.http_method
+# type = "AWS_PROXY"
+# integration_http_method = "POST"
+# uri = aws_lambda_function.list_files.invoke_arn
+# }
+
+# resource "aws_api_gateway_method_response" "list_files_response_200" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.list_files_method.http_method
+# status_code = "200"
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Origin" = true,
+# "method.response.header.Access-Control-Allow-Headers" = true,
+# "method.response.header.Access-Control-Allow-Methods" = true
+# }
+# }
+
+# resource "aws_api_gateway_integration_response" "list_files_integration_response_200" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.list_files_method.http_method
+# status_code = "200"
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Origin" = "'*'",
+# "method.response.header.Access-Control-Allow-Headers" = "'*'",
+# "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
+# }
+# }
+
+# # GET Method for getting a specific file
+# resource "aws_api_gateway_method" "get_file_method" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.file_resource.id
+# http_method = "GET"
+# authorization = "NONE"
+# }
+
+# resource "aws_api_gateway_integration" "get_file_integration" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.file_resource.id
+# http_method = aws_api_gateway_method.get_file_method.http_method
+# type = "AWS_PROXY"
+# integration_http_method = "POST"
+# uri = aws_lambda_function.get_file.invoke_arn
+# }
+
+# resource "aws_api_gateway_method_response" "get_file_response_200" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.file_resource.id
+# http_method = aws_api_gateway_method.get_file_method.http_method
+# status_code = "200"
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Origin" = true,
+# "method.response.header.Access-Control-Allow-Headers" = true,
+# "method.response.header.Access-Control-Allow-Methods" = true
+# }
+# }
+
+# resource "aws_api_gateway_integration_response" "get_file_integration_response_200" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.file_resource.id
+# http_method = aws_api_gateway_method.get_file_method.http_method
+# status_code = "200"
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Origin" = "'*'",
+# "method.response.header.Access-Control-Allow-Headers" = "'*'",
+# "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
+# }
+# }
+
+# # OPTIONS Method
+# resource "aws_api_gateway_method" "options_method" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = "OPTIONS"
+# authorization = "NONE"
+# }
+
+# resource "aws_api_gateway_integration" "options_integration" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.options_method.http_method
+# type = "MOCK"
+# request_templates = {
+# "application/json" = "{\"statusCode\": 200}"
+# }
+# }
+
+# resource "aws_api_gateway_method_response" "options_response_200" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.options_method.http_method
+# status_code = "200"
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Headers" = true,
+# "method.response.header.Access-Control-Allow-Methods" = true,
+# "method.response.header.Access-Control-Allow-Origin" = true
+# }
+
+# response_models = {
+# "application/json" = "Empty"
+# }
+# }
+
+# resource "aws_api_gateway_integration_response" "options_integration_response" {
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# resource_id = aws_api_gateway_resource.files_resource.id
+# http_method = aws_api_gateway_method.options_method.http_method
+# status_code = aws_api_gateway_method_response.options_response_200.status_code
+
+# response_parameters = {
+# "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
+# "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'",
+# "method.response.header.Access-Control-Allow-Origin" = "'*'"
+# }
+# }
+
+# resource "aws_api_gateway_deployment" "files_api_deployment" {
+# depends_on = [
+# aws_api_gateway_integration.list_files_integration,
+# aws_api_gateway_method_response.list_files_response_200,
+# aws_api_gateway_integration_response.list_files_integration_response_200,
+# aws_api_gateway_integration.get_file_integration,
+# aws_api_gateway_method_response.get_file_response_200,
+# aws_api_gateway_integration_response.get_file_integration_response_200,
+# aws_api_gateway_method.options_method,
+# aws_api_gateway_method_response.options_response_200,
+# aws_api_gateway_integration.options_integration,
+# aws_api_gateway_integration_response.options_integration_response
+# ]
+# rest_api_id = aws_api_gateway_rest_api.files_api.id
+# stage_name = "dev"
+# }
+
+# resource "aws_lambda_permission" "api_gateway_permission" {
+# statement_id = "AllowAPIGatewayInvoke"
+# action = "lambda:InvokeFunction"
+# function_name = aws_lambda_function.list_files.function_name
+# principal = "apigateway.amazonaws.com"
+# source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
+# }
+
+# resource "aws_lambda_permission" "api_gateway_permission_get_file" {
+# statement_id = "AllowAPIGatewayInvokeGetFile"
+# action = "lambda:InvokeFunction"
+# function_name = aws_lambda_function.get_file.function_name
+# principal = "apigateway.amazonaws.com"
+# source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
+# }
+
+# output "api_url" {
+# value = "https://${aws_api_gateway_rest_api.files_api.id}.execute-api.${data.aws_region.current.name}.amazonaws.com/dev/files"
+# }
diff --git a/src/file_service/terraform/backup_terraform/output.tf b/src/file_service/terraform/backup_terraform/output.tf
new file mode 100644
index 0000000..0980c0a
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/output.tf
@@ -0,0 +1,9 @@
+output "api_url" {
+ description = "The URL of the API Gateway"
+ value = "https://${aws_api_gateway_rest_api.files_api.id}.execute-api.${data.aws_region.current.name}.amazonaws.com/dev/files"
+}
+
+output "api_gateway_domain_name" {
+ description = "The domain name of the API Gateway"
+ value = aws_api_gateway_rest_api.files_api.execution_arn
+}
diff --git a/src/file_service/terraform/backup_terraform/provider.tf b/src/file_service/terraform/backup_terraform/provider.tf
new file mode 100644
index 0000000..cd141e5
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/provider.tf
@@ -0,0 +1,13 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ Environment = var.environment
+ Terraform = "true"
+ Service = "file-service"
+ }
+
+ }
+}
+
+data "aws_region" "current" {}
diff --git a/src/file_service/upload_file/upload_file.py b/src/file_service/terraform/backup_terraform/upload_file.py
similarity index 100%
rename from src/file_service/upload_file/upload_file.py
rename to src/file_service/terraform/backup_terraform/upload_file.py
diff --git a/src/file_service/terraform/backup_terraform/variables.tf b/src/file_service/terraform/backup_terraform/variables.tf
new file mode 100644
index 0000000..3789c25
--- /dev/null
+++ b/src/file_service/terraform/backup_terraform/variables.tf
@@ -0,0 +1,18 @@
+variable "cors_allow_origin" {
+ description = "The origin that is allowed to access the API"
+ type = string
+ default = "*"
+}
+
+variable "environment" {
+ description = "The environment to deploy the service to"
+ type = string
+ default = "dev"
+}
+
+variable "aws_region" {
+ description = "The AWS region to deploy the service to"
+ type = string
+ default = "us-east-1"
+}
+
diff --git a/src/file_service/terraform/dev.tfbackend b/src/file_service/terraform/dev.tfbackend
new file mode 100644
index 0000000..50becf8
--- /dev/null
+++ b/src/file_service/terraform/dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "file_service/dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/file_service/terraform/dev.tfvars b/src/file_service/terraform/dev.tfvars
new file mode 100644
index 0000000..7c2ae60
--- /dev/null
+++ b/src/file_service/terraform/dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "file_service"
+service_hyphen = "file-service"
+dynamodb_table = "file"
diff --git a/src/file_service/terraform/dynamodb.tf b/src/file_service/terraform/dynamodb.tf
new file mode 100644
index 0000000..68ed4fc
--- /dev/null
+++ b/src/file_service/terraform/dynamodb.tf
@@ -0,0 +1,31 @@
+resource "aws_dynamodb_table" "file" {
+ name = "file"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "file_id"
+
+ attribute {
+ name = "file_id"
+ type = "S"
+ }
+
+ attribute {
+ name = "file_extension"
+ type = "S"
+ }
+
+ attribute {
+ name = "created_at"
+ type = "S"
+ }
+
+ global_secondary_index {
+ name = "file_extension-created_at-gsi"
+ hash_key = "file_extension"
+ range_key = "created_at"
+ projection_type = "ALL"
+ }
+
+ tags = {
+ Name = "file"
+ }
+}
diff --git a/src/file_service/terraform/lambda.tf b/src/file_service/terraform/lambda.tf
index 27e63d2..5471632 100644
--- a/src/file_service/terraform/lambda.tf
+++ b/src/file_service/terraform/lambda.tf
@@ -1,28 +1,406 @@
-data "archive_file" "lambda_zip" {
- type = "zip"
- source_dir = "${path.module}/../list_files"
- output_path = "${path.module}/list_files_function.zip"
-}
-
-resource "aws_lambda_function" "list_files" {
- filename = data.archive_file.lambda_zip.output_path
- function_name = "list_files"
- role = aws_iam_role.lambda_exec_role.arn
- handler = "list_files_function.lambda_handler"
- source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
- runtime = "python3.11"
-
- environment {
- variables = {
- TABLE_NAME = "file"
+data "aws_ecr_authorization_token" "token" {
+}
+
+data "aws_caller_identity" "this" {}
+
+resource "random_string" "this" {
+ length = 4
+ special = false
+ lower = true
+ upper = false
+}
+
+locals {
+ source_path = "${path.module}/.."
+ upload_multiple_file_function_name_and_ecr_repo_name = "${var.environment}-${var.service_underscore}-upload_multiple_file-${random_string.this.result}"
+ list_files_function_name_and_ecr_repo_name = "${var.environment}-${var.service_underscore}-list_files-${random_string.this.result}"
+ get_file_function_name_and_ecr_repo_name = "${var.environment}-${var.service_underscore}-get_file-${random_string.this.result}"
+ path_include = ["**"]
+ path_exclude = ["**/__pycache__/**"]
+ files_include = setunion([for f in local.path_include : fileset(local.source_path, f)]...)
+ files_exclude = setunion([for f in local.path_exclude : fileset(local.source_path, f)]...)
+ files = sort(setsubtract(local.files_include, local.files_exclude))
+ dir_sha = sha1(join("", [for f in local.files : filesha1("${local.source_path}/${f}")]))
+}
+
+provider "docker" {
+ registry_auth {
+ address = format("%v.dkr.ecr.%v.amazonaws.com", data.aws_caller_identity.this.account_id, var.aws_region)
+ username = data.aws_ecr_authorization_token.token.user_name
+ password = data.aws_ecr_authorization_token.token.password
+ }
+}
+####################################
+####################################
+####################################
+# POST /upload-multiple-file #######
+####################################
+####################################
+####################################
+
+module "upload_multiple_file_lambda" {
+ source = "terraform-aws-modules/lambda/aws"
+ version = "7.7.0"
+
+ function_name = local.upload_multiple_file_function_name_and_ecr_repo_name
+ description = "AWS Educate TPET ${var.service_hyphen} in ${var.environment}: POST /upload-multiple-file"
+ create_package = false
+ timeout = 300
+
+ ##################
+ # Container Image
+ ##################
+ package_type = "Image"
+ architectures = ["x86_64"] # or ["arm64"]
+ image_uri = module.upload_multiple_file_docker_image.image_uri
+
+ publish = true # Whether to publish creation/change as new Lambda Function Version.
+
+
+ environment_variables = {
+ "ENVIRONMENT" = var.environment,
+ "SERVICE" = var.service_underscore
+ "DYNAMODB_TABLE" = var.dynamodb_table
+ "BUCKET_NAME" = "${var.environment}-aws-educate-tpet-storage"
+ }
+
+ allowed_triggers = {
+ AllowExecutionFromAPIGateway = {
+ service = "apigateway"
+ source_arn = "${module.api_gateway.api_execution_arn}/*/*"
+ }
+ }
+
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Service" = var.service_underscore
+ }
+ ######################
+ # Additional policies
+ ######################
+
+ attach_policy_statements = true
+ policy_statements = {
+ dynamodb_crud = {
+ effect = "Allow",
+ actions = [
+ "dynamodb:BatchGetItem",
+ "dynamodb:BatchWriteItem",
+ "dynamodb:DeleteItem",
+ "dynamodb:GetItem",
+ "dynamodb:PutItem",
+ "dynamodb:Query",
+ "dynamodb:Scan",
+ "dynamodb:UpdateItem"
+ ],
+ resources = [
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}"
+ ]
+ },
+ s3_crud = {
+ effect = "Allow",
+ actions = [
+ "s3:ListBucket",
+ "s3:GetBucketLocation",
+ "s3:CreateBucket",
+ "s3:DeleteBucket",
+ "s3:PutObject",
+ "s3:GetObject",
+ "s3:DeleteObject",
+ "s3:ListBucketMultipartUploads",
+ "s3:ListMultipartUploadParts",
+ "s3:AbortMultipartUpload"
+ ],
+ resources = [
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage",
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage/*"
+ ]
+ }
+ }
+}
+
+module "upload_multiple_file_docker_image" {
+ source = "terraform-aws-modules/lambda/aws//modules/docker-build"
+ version = "7.7.0"
+
+ create_ecr_repo = true
+ keep_remotely = true
+ use_image_tag = false
+ image_tag_mutability = "MUTABLE"
+ ecr_repo = local.upload_multiple_file_function_name_and_ecr_repo_name
+ ecr_repo_lifecycle_policy = jsonencode({
+ "rules" : [
+ {
+ "rulePriority" : 1,
+ "description" : "Keep only the last 10 images",
+ "selection" : {
+ "tagStatus" : "any",
+ "countType" : "imageCountMoreThan",
+ "countNumber" : 10
+ },
+ "action" : {
+ "type" : "expire"
+ }
+ }
+ ]
+ })
+
+ # docker_file_path = "${local.source_path}/path/to/Dockerfile" # set `docker_file_path` If your Dockerfile is not in `source_path`
+ source_path = "${local.source_path}/upload_multiple_file/" # Remember to change
+ triggers = {
+ dir_sha = local.dir_sha
+ }
+
+}
+
+
+####################################
+####################################
+####################################
+# GET /files #######################
+####################################
+####################################
+####################################
+
+module "list_files_lambda" {
+ source = "terraform-aws-modules/lambda/aws"
+ version = "7.7.0"
+
+ function_name = local.list_files_function_name_and_ecr_repo_name # Remember to change
+ description = "AWS Educate TPET ${var.service_hyphen} in ${var.environment}: GET /files" # Remember to change
+ create_package = false
+ timeout = 30
+
+ ##################
+ # Container Image
+ ##################
+ package_type = "Image"
+ architectures = ["x86_64"] # or ["arm64"]
+ image_uri = module.list_files_docker_image.image_uri # Remember to change
+
+ publish = true # Whether to publish creation/change as new Lambda Function Version.
+
+
+ environment_variables = {
+ "ENVIRONMENT" = var.environment,
+ "SERVICE" = var.service_underscore
+ "DYNAMODB_TABLE" = var.dynamodb_table
+ "BUCKET_NAME" = "${var.environment}-aws-educate-tpet-storage"
+ }
+
+ allowed_triggers = {
+ AllowExecutionFromAPIGateway = {
+ service = "apigateway"
+ source_arn = "${module.api_gateway.api_execution_arn}/*/*"
+ }
+ }
+
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Service" = var.service_underscore
+ }
+ ######################
+ # Additional policies
+ ######################
+
+ attach_policy_statements = true
+ policy_statements = {
+ dynamodb_crud = {
+ effect = "Allow",
+ actions = [
+ "dynamodb:BatchGetItem",
+ "dynamodb:BatchWriteItem",
+ "dynamodb:DeleteItem",
+ "dynamodb:GetItem",
+ "dynamodb:PutItem",
+ "dynamodb:Query",
+ "dynamodb:Scan",
+ "dynamodb:UpdateItem"
+ ],
+ resources = [
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}",
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}/index/*"
+ ]
+ },
+ s3_crud = {
+ effect = "Allow",
+ actions = [
+ "s3:ListBucket",
+ "s3:GetBucketLocation",
+ "s3:CreateBucket",
+ "s3:DeleteBucket",
+ "s3:PutObject",
+ "s3:GetObject",
+ "s3:DeleteObject",
+ "s3:ListBucketMultipartUploads",
+ "s3:ListMultipartUploadParts",
+ "s3:AbortMultipartUpload"
+ ],
+ resources = [
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage",
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage/*"
+ ]
}
}
}
-resource "aws_lambda_permission" "api_gateway_permission" {
- statement_id = "AllowAPIGatewayInvoke"
- action = "lambda:InvokeFunction"
- function_name = aws_lambda_function.list_files.function_name
- principal = "apigateway.amazonaws.com"
- source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
+module "list_files_docker_image" {
+ source = "terraform-aws-modules/lambda/aws//modules/docker-build"
+ version = "7.7.0"
+
+ create_ecr_repo = true
+ keep_remotely = true
+ use_image_tag = false
+ image_tag_mutability = "MUTABLE"
+ ecr_repo = local.list_files_function_name_and_ecr_repo_name # Remember to change
+ ecr_repo_lifecycle_policy = jsonencode({
+ "rules" : [
+ {
+ "rulePriority" : 1,
+ "description" : "Keep only the last 10 images",
+ "selection" : {
+ "tagStatus" : "any",
+ "countType" : "imageCountMoreThan",
+ "countNumber" : 10
+ },
+ "action" : {
+ "type" : "expire"
+ }
+ }
+ ]
+ })
+
+ # docker_file_path = "${local.source_path}/path/to/Dockerfile" # set `docker_file_path` If your Dockerfile is not in `source_path`
+ source_path = "${local.source_path}/list_files/" # Remember to change
+ triggers = {
+ dir_sha = local.dir_sha
+ }
+
+}
+
+
+####################################
+####################################
+####################################
+# GET /files/{file_id} #############
+####################################
+####################################
+####################################
+
+module "get_file_lambda" {
+ source = "terraform-aws-modules/lambda/aws"
+ version = "7.7.0"
+
+ function_name = local.get_file_function_name_and_ecr_repo_name # Remember to change
+ description = "AWS Educate TPET ${var.service_hyphen} in ${var.environment}: GET /files/{file_id}" # Remember to change
+ create_package = false
+ timeout = 30
+
+ ##################
+ # Container Image
+ ##################
+ package_type = "Image"
+ architectures = ["x86_64"] # or ["arm64"]
+ image_uri = module.get_file_docker_image.image_uri # Remember to change
+
+ publish = true # Whether to publish creation/change as new Lambda Function Version.
+
+
+ environment_variables = {
+ "ENVIRONMENT" = var.environment,
+ "SERVICE" = var.service_underscore
+ "DYNAMODB_TABLE" = var.dynamodb_table
+ "BUCKET_NAME" = "${var.environment}-aws-educate-tpet-storage"
+ }
+
+ allowed_triggers = {
+ AllowExecutionFromAPIGateway = {
+ service = "apigateway"
+ source_arn = "${module.api_gateway.api_execution_arn}/*/*"
+ }
+ }
+
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Service" = var.service_underscore
+ }
+ ######################
+ # Additional policies
+ ######################
+
+ attach_policy_statements = true
+ policy_statements = {
+ dynamodb_crud = {
+ effect = "Allow",
+ actions = [
+ "dynamodb:BatchGetItem",
+ "dynamodb:BatchWriteItem",
+ "dynamodb:DeleteItem",
+ "dynamodb:GetItem",
+ "dynamodb:PutItem",
+ "dynamodb:Query",
+ "dynamodb:Scan",
+ "dynamodb:UpdateItem"
+ ],
+ resources = [
+ "arn:aws:dynamodb:${var.aws_region}:${data.aws_caller_identity.this.account_id}:table/${var.dynamodb_table}"
+ ]
+ },
+ s3_crud = {
+ effect = "Allow",
+ actions = [
+ "s3:ListBucket",
+ "s3:GetBucketLocation",
+ "s3:CreateBucket",
+ "s3:DeleteBucket",
+ "s3:PutObject",
+ "s3:GetObject",
+ "s3:DeleteObject",
+ "s3:ListBucketMultipartUploads",
+ "s3:ListMultipartUploadParts",
+ "s3:AbortMultipartUpload"
+ ],
+ resources = [
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage",
+ "arn:aws:s3:::${var.environment}-aws-educate-tpet-storage/*"
+ ]
+ }
+ }
+}
+
+module "get_file_docker_image" {
+ source = "terraform-aws-modules/lambda/aws//modules/docker-build"
+ version = "7.7.0"
+
+ create_ecr_repo = true
+ keep_remotely = true
+ use_image_tag = false
+ image_tag_mutability = "MUTABLE"
+ ecr_repo = local.get_file_function_name_and_ecr_repo_name
+ ecr_repo_lifecycle_policy = jsonencode({
+ "rules" : [
+ {
+ "rulePriority" : 1,
+ "description" : "Keep only the last 10 images",
+ "selection" : {
+ "tagStatus" : "any",
+ "countType" : "imageCountMoreThan",
+ "countNumber" : 10
+ },
+ "action" : {
+ "type" : "expire"
+ }
+ }
+ ]
+ })
+
+ # docker_file_path = "${local.source_path}/path/to/Dockerfile" # set `docker_file_path` If your Dockerfile is not in `source_path`
+ source_path = "${local.source_path}/get_file/" # Remember to change
+ triggers = {
+ dir_sha = local.dir_sha
+ }
+
}
diff --git a/src/file_service/terraform/local-dev.tfbackend b/src/file_service/terraform/local-dev.tfbackend
new file mode 100644
index 0000000..c78498c
--- /dev/null
+++ b/src/file_service/terraform/local-dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "file_service/local-dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/file_service/terraform/local-dev.tfvars b/src/file_service/terraform/local-dev.tfvars
new file mode 100644
index 0000000..efa14f4
--- /dev/null
+++ b/src/file_service/terraform/local-dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "file_service"
+service_hyphen = "file-service"
+dynamodb_table = "file"
diff --git a/src/file_service/terraform/main.tf b/src/file_service/terraform/main.tf
deleted file mode 100644
index 749846d..0000000
--- a/src/file_service/terraform/main.tf
+++ /dev/null
@@ -1,254 +0,0 @@
-provider "aws" {
- region = "ap-northeast-1"
-}
-
-data "aws_region" "current" {}
-
-resource "aws_iam_role" "lambda_exec_role" {
- name = "lambda_exec_role"
- assume_role_policy = jsonencode({
- Version = "2012-10-17"
- Statement = [{
- Action = "sts:AssumeRole"
- Effect = "Allow"
- Principal = {
- Service = "lambda.amazonaws.com"
- }
- }]
- })
-
- managed_policy_arns = [
- "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
- "arn:aws:iam::aws:policy/AmazonDynamoDBReadOnlyAccess"
- ]
-}
-
-data "archive_file" "lambda_zip" {
- type = "zip"
- source_dir = "${path.module}/../list_files"
- output_path = "${path.module}/list_files_function.zip"
-}
-
-data "archive_file" "get_file_lambda_zip" {
- type = "zip"
- source_dir = "${path.module}/../get_file"
- output_path = "${path.module}/get_file_function.zip"
-}
-
-resource "aws_lambda_function" "list_files" {
- filename = data.archive_file.lambda_zip.output_path
- function_name = "list_files"
- role = aws_iam_role.lambda_exec_role.arn
- handler = "list_files_function.lambda_handler"
- source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
- runtime = "python3.11"
-
- environment {
- variables = {
- TABLE_NAME = "file"
- }
- }
-}
-
-resource "aws_lambda_function" "get_file" {
- filename = data.archive_file.get_file_lambda_zip.output_path
- function_name = "get_file_by_id"
- role = aws_iam_role.lambda_exec_role.arn
- handler = "get_file_by_id.lambda_handler"
- source_code_hash = filebase64sha256(data.archive_file.lambda_zip.output_path)
- runtime = "python3.11"
-
- environment {
- variables = {
- TABLE_NAME = "file"
- }
- }
-}
-
-resource "aws_api_gateway_rest_api" "files_api" {
- name = "Files API"
- description = "API for listing files with pagination and filtering"
-}
-
-resource "aws_api_gateway_resource" "files_resource" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- parent_id = aws_api_gateway_rest_api.files_api.root_resource_id
- path_part = "files"
-}
-
-resource "aws_api_gateway_resource" "file_resource" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- parent_id = aws_api_gateway_resource.files_resource.id
- path_part = "{file_id}"
-}
-
-# GET Method for listing files
-resource "aws_api_gateway_method" "list_files_method" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = "GET"
- authorization = "NONE"
-}
-
-resource "aws_api_gateway_integration" "list_files_integration" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- type = "AWS_PROXY"
- integration_http_method = "POST"
- uri = aws_lambda_function.list_files.invoke_arn
-}
-
-resource "aws_api_gateway_method_response" "list_files_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- status_code = "200"
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = true,
- "method.response.header.Access-Control-Allow-Headers" = true,
- "method.response.header.Access-Control-Allow-Methods" = true
- }
-}
-
-resource "aws_api_gateway_integration_response" "list_files_integration_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.list_files_method.http_method
- status_code = "200"
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = "'*'",
- "method.response.header.Access-Control-Allow-Headers" = "'*'",
- "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
- }
-}
-
-# GET Method for getting a specific file
-resource "aws_api_gateway_method" "get_file_method" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.file_resource.id
- http_method = "GET"
- authorization = "NONE"
-}
-
-resource "aws_api_gateway_integration" "get_file_integration" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.file_resource.id
- http_method = aws_api_gateway_method.get_file_method.http_method
- type = "AWS_PROXY"
- integration_http_method = "POST"
- uri = aws_lambda_function.get_file.invoke_arn
-}
-
-resource "aws_api_gateway_method_response" "get_file_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.file_resource.id
- http_method = aws_api_gateway_method.get_file_method.http_method
- status_code = "200"
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = true,
- "method.response.header.Access-Control-Allow-Headers" = true,
- "method.response.header.Access-Control-Allow-Methods" = true
- }
-}
-
-resource "aws_api_gateway_integration_response" "get_file_integration_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.file_resource.id
- http_method = aws_api_gateway_method.get_file_method.http_method
- status_code = "200"
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Origin" = "'*'",
- "method.response.header.Access-Control-Allow-Headers" = "'*'",
- "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'"
- }
-}
-
-# OPTIONS Method
-resource "aws_api_gateway_method" "options_method" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = "OPTIONS"
- authorization = "NONE"
-}
-
-resource "aws_api_gateway_integration" "options_integration" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- type = "MOCK"
- request_templates = {
- "application/json" = "{\"statusCode\": 200}"
- }
-}
-
-resource "aws_api_gateway_method_response" "options_response_200" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- status_code = "200"
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Headers" = true,
- "method.response.header.Access-Control-Allow-Methods" = true,
- "method.response.header.Access-Control-Allow-Origin" = true
- }
-
- response_models = {
- "application/json" = "Empty"
- }
-}
-
-resource "aws_api_gateway_integration_response" "options_integration_response" {
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- resource_id = aws_api_gateway_resource.files_resource.id
- http_method = aws_api_gateway_method.options_method.http_method
- status_code = aws_api_gateway_method_response.options_response_200.status_code
-
- response_parameters = {
- "method.response.header.Access-Control-Allow-Headers" = "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'",
- "method.response.header.Access-Control-Allow-Methods" = "'GET,OPTIONS,POST,PUT'",
- "method.response.header.Access-Control-Allow-Origin" = "'*'"
- }
-}
-
-resource "aws_api_gateway_deployment" "files_api_deployment" {
- depends_on = [
- aws_api_gateway_integration.list_files_integration,
- aws_api_gateway_method_response.list_files_response_200,
- aws_api_gateway_integration_response.list_files_integration_response_200,
- aws_api_gateway_integration.get_file_integration,
- aws_api_gateway_method_response.get_file_response_200,
- aws_api_gateway_integration_response.get_file_integration_response_200,
- aws_api_gateway_method.options_method,
- aws_api_gateway_method_response.options_response_200,
- aws_api_gateway_integration.options_integration,
- aws_api_gateway_integration_response.options_integration_response
- ]
- rest_api_id = aws_api_gateway_rest_api.files_api.id
- stage_name = "dev"
-}
-
-resource "aws_lambda_permission" "api_gateway_permission" {
- statement_id = "AllowAPIGatewayInvoke"
- action = "lambda:InvokeFunction"
- function_name = aws_lambda_function.list_files.function_name
- principal = "apigateway.amazonaws.com"
- source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
-}
-
-resource "aws_lambda_permission" "api_gateway_permission_get_file" {
- statement_id = "AllowAPIGatewayInvokeGetFile"
- action = "lambda:InvokeFunction"
- function_name = aws_lambda_function.get_file.function_name
- principal = "apigateway.amazonaws.com"
- source_arn = "${aws_api_gateway_rest_api.files_api.execution_arn}/*/*"
-}
-
-output "api_url" {
- value = "https://${aws_api_gateway_rest_api.files_api.id}.execute-api.${data.aws_region.current.name}.amazonaws.com/dev/files"
-}
diff --git a/src/file_service/terraform/output.tf b/src/file_service/terraform/output.tf
deleted file mode 100644
index d60c555..0000000
--- a/src/file_service/terraform/output.tf
+++ /dev/null
@@ -1,3 +0,0 @@
-output "api_url" {
- value = "https://${aws_api_gateway_rest_api.files_api.id}.execute-api.${data.aws_region.current.name}.amazonaws.com/dev/files"
-}
diff --git a/src/file_service/terraform/preview.tfbackend b/src/file_service/terraform/preview.tfbackend
new file mode 100644
index 0000000..4d449a7
--- /dev/null
+++ b/src/file_service/terraform/preview.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "file_service/preview/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/file_service/terraform/preview.tfvars b/src/file_service/terraform/preview.tfvars
new file mode 100644
index 0000000..bb4a6b9
--- /dev/null
+++ b/src/file_service/terraform/preview.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-1"
+environment = "preview"
+service_underscore = "file_service"
+service_hyphen = "file-service"
+dynamodb_table = "file"
diff --git a/src/file_service/terraform/prod.tfbackend b/src/file_service/terraform/prod.tfbackend
new file mode 100644
index 0000000..3b79c4b
--- /dev/null
+++ b/src/file_service/terraform/prod.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "file_service/prod/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/src/file_service/terraform/prod.tfvars b/src/file_service/terraform/prod.tfvars
new file mode 100644
index 0000000..e7741cf
--- /dev/null
+++ b/src/file_service/terraform/prod.tfvars
@@ -0,0 +1,5 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "file_service"
+service_hyphen = "file-service"
+dynamodb_table = "file"
diff --git a/src/file_service/terraform/provider.tf b/src/file_service/terraform/provider.tf
index cd141e5..5ac480b 100644
--- a/src/file_service/terraform/provider.tf
+++ b/src/file_service/terraform/provider.tf
@@ -2,12 +2,9 @@ provider "aws" {
region = var.aws_region
default_tags {
tags = {
- Environment = var.environment
- Terraform = "true"
- Service = "file-service"
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
}
-
}
}
-
-data "aws_region" "current" {}
diff --git a/src/file_service/terraform/variables.tf b/src/file_service/terraform/variables.tf
index 3789c25..8247229 100644
--- a/src/file_service/terraform/variables.tf
+++ b/src/file_service/terraform/variables.tf
@@ -1,18 +1,23 @@
-variable "cors_allow_origin" {
- description = "The origin that is allowed to access the API"
- type = string
- default = "*"
+variable "aws_region" {
+ description = "aws region"
}
variable "environment" {
- description = "The environment to deploy the service to"
- type = string
- default = "dev"
+ description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
}
-variable "aws_region" {
- description = "The AWS region to deploy the service to"
- type = string
- default = "us-east-1"
+variable "service_underscore" {
+ description = "Current service name, using underscores. For example: demo_service."
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+variable "domain_name" {
+ description = "Domain name, for example: example.com"
+ default = "awseducate.systems"
}
+variable "dynamodb_table" {
+ description = "Current service's DynamoDB table name"
+}
diff --git a/src/file_service/terraform/variables_dev.tfvars b/src/file_service/terraform/variables_dev.tfvars
deleted file mode 100644
index ec63c3d..0000000
--- a/src/file_service/terraform/variables_dev.tfvars
+++ /dev/null
@@ -1,2 +0,0 @@
-aws_region = "us-east-1"
-environment = "dev"
diff --git a/src/file_service/terraform/variables_poc.tfvars b/src/file_service/terraform/variables_poc.tfvars
deleted file mode 100644
index fcc4e9d..0000000
--- a/src/file_service/terraform/variables_poc.tfvars
+++ /dev/null
@@ -1,2 +0,0 @@
-aws_region = "us-west-2"
-environment = "poc"
diff --git a/src/file_service/terraform/variables_prod.tfvars b/src/file_service/terraform/variables_prod.tfvars
deleted file mode 100644
index 2e10248..0000000
--- a/src/file_service/terraform/variables_prod.tfvars
+++ /dev/null
@@ -1,3 +0,0 @@
-aws_region = "ap-northeast-1"
-environment = "prod"
-cors_allow_origin = "https://tpet.awseducate.systems"
diff --git a/src/file_service/terraform/versions.tf b/src/file_service/terraform/versions.tf
new file mode 100644
index 0000000..ca481d8
--- /dev/null
+++ b/src/file_service/terraform/versions.tf
@@ -0,0 +1,19 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+
+ local = {
+ source = "hashicorp/local"
+ version = "~> 2.5.1"
+ }
+
+ docker = {
+ source = "kreuzwerker/docker"
+ version = "~> 3.0.2"
+ }
+ }
+}
diff --git a/src/file_service/upload_multiple_file/Dockerfile b/src/file_service/upload_multiple_file/Dockerfile
new file mode 100644
index 0000000..68b05bb
--- /dev/null
+++ b/src/file_service/upload_multiple_file/Dockerfile
@@ -0,0 +1,11 @@
+FROM public.ecr.aws/lambda/python:3.11
+
+# Install dependencies
+COPY requirements.txt /var/task/
+RUN pip install -r /var/task/requirements.txt
+
+# Copy function code
+COPY lambda_function.py /var/task/
+
+# Set the command to run the Lambda function
+CMD ["lambda_function.lambda_handler"]
diff --git a/src/file_service/upload_file/upload_multiple_file.py b/src/file_service/upload_multiple_file/lambda_function.py
similarity index 98%
rename from src/file_service/upload_file/upload_multiple_file.py
rename to src/file_service/upload_multiple_file/lambda_function.py
index c516278..a4c9782 100644
--- a/src/file_service/upload_file/upload_multiple_file.py
+++ b/src/file_service/upload_multiple_file/lambda_function.py
@@ -18,7 +18,7 @@
s3_client = boto3.client("s3")
dynamodb = boto3.client("dynamodb")
BUCKET_NAME = os.environ["BUCKET_NAME"]
-TABLE_NAME = os.environ["TABLE_NAME"]
+DYNAMODB_TABLE = os.environ["DYNAMODB_TABLE"]
S3_BASE_URL = f"https://{BUCKET_NAME}.s3.amazonaws.com/"
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
@@ -79,7 +79,7 @@ def lambda_handler(event, context):
print(f"Storing file metadata in DynamoDB: {file_id}")
dynamodb.put_item(
- TableName=TABLE_NAME,
+ TableName=DYNAMODB_TABLE,
Item={
"file_id": {"S": file_id},
"s3_object_key": {"S": unique_file_name},
diff --git a/src/file_service/upload_multiple_file/requirements.txt b/src/file_service/upload_multiple_file/requirements.txt
new file mode 100644
index 0000000..469ace6
--- /dev/null
+++ b/src/file_service/upload_multiple_file/requirements.txt
@@ -0,0 +1,3 @@
+boto3
+botocore
+requests-toolbelt
diff --git a/template.yaml b/template.yaml
deleted file mode 100644
index bcb1b26..0000000
--- a/template.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-AWSTemplateFormatVersion: "2010-09-09"
-Transform: AWS::Serverless-2016-10-31
-Description: >
- aws-educate-tpet-backend
-
- Sample SAM Template for aws-educate-tpet-backend
-
-# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
-Globals:
- Function:
- Timeout: 3
-
-Resources:
- HelloWorldFunction:
- Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
- Properties:
- CodeUri: src/
- Handler: app.lambda_handler
- Runtime: python3.11
- Architectures:
- - x86_64
- Events:
- HelloWorld:
- Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api
- Properties:
- Path: /hello
- Method: get
-
-Outputs:
- # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function
- # Find out more about other implicit resources you can reference within SAM
- # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api
- HelloWorldApi:
- Description: "API Gateway endpoint URL for Prod stage for Hello World function"
- Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/"
- HelloWorldFunction:
- Description: "Hello World Lambda Function ARN"
- Value: !GetAtt HelloWorldFunction.Arn
- HelloWorldFunctionIamRole:
- Description: "Implicit IAM Role created for Hello World function"
- Value: !GetAtt HelloWorldFunctionRole.Arn
diff --git a/terraform/acm_shared_cert/.terraform.lock.hcl b/terraform/acm_shared_cert/.terraform.lock.hcl
new file mode 100644
index 0000000..23e58a3
--- /dev/null
+++ b/terraform/acm_shared_cert/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = ">= 4.40.0, ~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
diff --git a/terraform/acm_shared_cert/acm.tf b/terraform/acm_shared_cert/acm.tf
new file mode 100644
index 0000000..2100b19
--- /dev/null
+++ b/terraform/acm_shared_cert/acm.tf
@@ -0,0 +1,22 @@
+data "aws_route53_zone" "awseducate_systems" {
+ name = var.domain_name
+ private_zone = false
+}
+
+
+module "acm" {
+ source = "terraform-aws-modules/acm/aws"
+ version = "~> 5.0.0"
+
+ domain_name = "*.${var.domain_name}"
+ zone_id = data.aws_route53_zone.awseducate_systems.zone_id
+
+ validation_method = "DNS"
+
+ subject_alternative_names = [
+ "*.${var.domain_name}",
+ var.domain_name,
+ ]
+
+ wait_for_validation = true
+}
diff --git a/terraform/acm_shared_cert/backend.tf b/terraform/acm_shared_cert/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/terraform/acm_shared_cert/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/terraform/acm_shared_cert/dev.tfbackend b/terraform/acm_shared_cert/dev.tfbackend
new file mode 100644
index 0000000..bcb40d9
--- /dev/null
+++ b/terraform/acm_shared_cert/dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "acm_shared_cert/dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/acm_shared_cert/dev.tfvars b/terraform/acm_shared_cert/dev.tfvars
new file mode 100644
index 0000000..1f5289b
--- /dev/null
+++ b/terraform/acm_shared_cert/dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "acm_shared_cert"
+service_hyphen = "acm-shared-cert"
+domain_name = "awseducate.systems"
diff --git a/terraform/acm_shared_cert/local-dev.tfbackend b/terraform/acm_shared_cert/local-dev.tfbackend
new file mode 100644
index 0000000..389acc5
--- /dev/null
+++ b/terraform/acm_shared_cert/local-dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "acm_shared_cert/local-dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/acm_shared_cert/local-dev.tfvars b/terraform/acm_shared_cert/local-dev.tfvars
new file mode 100644
index 0000000..6d0f17a
--- /dev/null
+++ b/terraform/acm_shared_cert/local-dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "acm_shared_cert"
+service_hyphen = "acm-shared-cert"
+domain_name = "awseducate.systems"
diff --git a/terraform/acm_shared_cert/preview.tfbackend b/terraform/acm_shared_cert/preview.tfbackend
new file mode 100644
index 0000000..efe174e
--- /dev/null
+++ b/terraform/acm_shared_cert/preview.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "acm_shared_cert/preview/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/acm_shared_cert/preview.tfvars b/terraform/acm_shared_cert/preview.tfvars
new file mode 100644
index 0000000..0093636
--- /dev/null
+++ b/terraform/acm_shared_cert/preview.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-1"
+environment = "preview"
+service_underscore = "acm_shared_cert"
+service_hyphen = "acm-shared-cert"
+domain_name = "awseducate.systems"
diff --git a/terraform/acm_shared_cert/prod.tfbackend b/terraform/acm_shared_cert/prod.tfbackend
new file mode 100644
index 0000000..8c197f1
--- /dev/null
+++ b/terraform/acm_shared_cert/prod.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "acm_shared_cert/prod/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/acm_shared_cert/prod.tfvars b/terraform/acm_shared_cert/prod.tfvars
new file mode 100644
index 0000000..ae1df69
--- /dev/null
+++ b/terraform/acm_shared_cert/prod.tfvars
@@ -0,0 +1,5 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "acm_shared_cert"
+service_hyphen = "acm-shared-cert"
+domain_name = "awseducate.systems"
diff --git a/terraform/acm_shared_cert/provider.tf b/terraform/acm_shared_cert/provider.tf
new file mode 100644
index 0000000..45a99ec
--- /dev/null
+++ b/terraform/acm_shared_cert/provider.tf
@@ -0,0 +1,11 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ "Service" = "acm_shared_cert"
+ }
+ }
+}
diff --git a/terraform/acm_shared_cert/variables.tf b/terraform/acm_shared_cert/variables.tf
new file mode 100644
index 0000000..510ea9a
--- /dev/null
+++ b/terraform/acm_shared_cert/variables.tf
@@ -0,0 +1,19 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name, using underscores. For example: demo_service."
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+variable "domain_name" {
+ description = "Domain name, for example: example.com"
+ default = "awseducate.systems"
+}
diff --git a/terraform/acm_shared_cert/versions.tf b/terraform/acm_shared_cert/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/acm_shared_cert/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/terraform/api_gateway_custom_domain/dev.tfvars b/terraform/api_gateway_custom_domain/dev.tfvars
new file mode 100644
index 0000000..7d1f0df
--- /dev/null
+++ b/terraform/api_gateway_custom_domain/dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "api_gateway_custom_domain"
+service_hyphen = "api-gateway-custom-domain"
+domain_name = "awseducate.systems"
diff --git a/terraform/api_gateway_custom_domain/local-dev.tfvars b/terraform/api_gateway_custom_domain/local-dev.tfvars
new file mode 100644
index 0000000..93d12c0
--- /dev/null
+++ b/terraform/api_gateway_custom_domain/local-dev.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "api_gateway_custom_domain"
+service_hyphen = "api-gateway-custom-domain"
+domain_name = "awseducate.systems"
diff --git a/terraform/api_gateway_custom_domain/preview.tfvars b/terraform/api_gateway_custom_domain/preview.tfvars
new file mode 100644
index 0000000..536cc64
--- /dev/null
+++ b/terraform/api_gateway_custom_domain/preview.tfvars
@@ -0,0 +1,5 @@
+aws_region = "us-west-1"
+environment = "preview"
+service_underscore = "api_gateway_custom_domain"
+service_hyphen = "api-gateway-custom-domain"
+domain_name = "awseducate.systems"
diff --git a/terraform/api_gateway_custom_domain/prod.tfvars b/terraform/api_gateway_custom_domain/prod.tfvars
new file mode 100644
index 0000000..6c99c9c
--- /dev/null
+++ b/terraform/api_gateway_custom_domain/prod.tfvars
@@ -0,0 +1,5 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "api_gateway_custom_domain"
+service_hyphen = "api-gateway-custom-domain"
+domain_name = "awseducate.systems"
diff --git a/terraform/dev/.terraform.lock.hcl b/terraform/dev/.terraform.lock.hcl
new file mode 100644
index 0000000..2d477d7
--- /dev/null
+++ b/terraform/dev/.terraform.lock.hcl
@@ -0,0 +1,44 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/archive" {
+ version = "2.4.2"
+ hashes = [
+ "h1:sCJKfmlbxo4w+AxizeJ3i1rPEfoVarCajyJuIODVPTg=",
+ "zh:08faed7c9f42d82bc3d406d0d9d4971e2d1c2d34eae268ad211b8aca57b7f758",
+ "zh:3564112ed2d097d7e0672378044a69b06642c326f6f1584d81c7cdd32ebf3a08",
+ "zh:53cd9afd223c15828c1916e68cb728d2be1cbccb9545568d6c2b122d0bac5102",
+ "zh:5ae4e41e3a1ce9d40b6458218a85bbde44f21723943982bca4a3b8bb7c103670",
+ "zh:5b65499218b315b96e95c5d3463ea6d7c66245b59461217c99eaa1611891cd2c",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:7f45b35a8330bebd184c2545a41782ff58240ed6ba947274d9881dd5da44b02e",
+ "zh:87e67891033214e55cfead1391d68e6a3bf37993b7607753237e82aa3250bb71",
+ "zh:de3590d14037ad81fc5cedf7cfa44614a92452d7b39676289b704a962050bc5e",
+ "zh:e7e6f2ea567f2dbb3baa81c6203be69f9cd6aeeb01204fd93e3cf181e099b610",
+ "zh:fd24d03c89a7702628c2e5a3c732c0dede56fa75a08da4a1efe17b5f881c88e2",
+ "zh:febf4b7b5f3ff2adff0573ef6361f09b6638105111644bdebc0e4f575373935f",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.53.0"
+ constraints = ">= 5.12.0, >= 5.37.0"
+ hashes = [
+ "h1:YCupEeam12IXAPo9j2wvnfJTqFFuaHjyzTgSj3GlOeg=",
+ "zh:2adad39412111d19a5195474d6b95577fc25ccf06d88a90019bee0efba33a1e3",
+ "zh:51226453a14f95b0d1163cfecafc9cf1a92ce5f66e42e6b4065d83a813836a2c",
+ "zh:62450fadb56db9c18d50bb8b7728a3d009be608d7ee0d4fe95c85ccb521dff83",
+ "zh:6f3ad977a9cc4800847c136690b1c0a0fd8437705062163d29dc4e9429598950",
+ "zh:71ca0a16b735b8d34b7127dd7d1e1e5d1eaac9c9f792e08abde291b5beb947d5",
+ "zh:7ae9cf4838eea80288305be0a3e69b39ffff86ede7b4319be421f06d32d04fb6",
+ "zh:93abc2db5ad995cfee014eb7446abc7caedc427e141d375a11993e6e199076b5",
+ "zh:9560b3424d97da804e98ee86b474b7370afefa09baf350cae7f33afb3f1aa209",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:9eb57a9b649c217ac4eeb27af2a1935c18bd9bc8fb1be07434e7de74729eff46",
+ "zh:b5f32dcbe71ea22c2090eeeaec9af3e098d7b8c3e4491f34ffdfdc6f1c1abf81",
+ "zh:c9fbd5417f266c773055178e87bb4091df7f0542b72bf5ad0a4ae27045a2b7ca",
+ "zh:d518b3c52c8a9f79769dbe1b3683d25b4cdc8bfc77a3b3cd9c85f74e6c7383e1",
+ "zh:db741be21f32404bb87d73d25b1b7fd9b813b00aeb20a130ed8806d44dc26680",
+ "zh:ed1a8bb4d08653d87265ae534d6fc33bbdabae1608692a1ee364fce03548d36c",
+ ]
+}
diff --git a/src/__init__.py b/terraform/dev/backend.tf
similarity index 100%
rename from src/__init__.py
rename to terraform/dev/backend.tf
diff --git a/terraform/dev/backend_setting/dynamodb.tf b/terraform/dev/backend_setting/dynamodb.tf
new file mode 100644
index 0000000..3f5de03
--- /dev/null
+++ b/terraform/dev/backend_setting/dynamodb.tf
@@ -0,0 +1,14 @@
+resource "aws_dynamodb_table" "state_locks" {
+ name = "terraform-locks"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "LockID"
+
+ attribute {
+ name = "LockID"
+ type = "S"
+ }
+
+ tags = {
+ Terraform = true
+ }
+}
diff --git a/terraform/dev/backend_setting/output.tf b/terraform/dev/backend_setting/output.tf
new file mode 100644
index 0000000..f2fa6d6
--- /dev/null
+++ b/terraform/dev/backend_setting/output.tf
@@ -0,0 +1,7 @@
+output "bucket_name" {
+ value = aws_s3_bucket.state.bucket
+}
+
+output "dynamodb_table_name" {
+ value = aws_dynamodb_table.state_locks.name
+}
diff --git a/terraform/dev/backend_setting/s3.tf b/terraform/dev/backend_setting/s3.tf
new file mode 100644
index 0000000..f0f6e4e
--- /dev/null
+++ b/terraform/dev/backend_setting/s3.tf
@@ -0,0 +1,33 @@
+resource "aws_s3_bucket" "state" {
+ bucket_prefix = "terraform-state-"
+
+ tags = {
+ Terraform = "true"
+ }
+}
+
+resource "aws_s3_bucket_versioning" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ versioning_configuration {
+ status = "Enabled"
+ }
+}
+
+resource "aws_s3_bucket_server_side_encryption_configuration" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ rule {
+ apply_server_side_encryption_by_default {
+ sse_algorithm = "AES256"
+ }
+ }
+}
+
+resource "aws_s3_bucket_ownership_controls" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ rule {
+ object_ownership = "BucketOwnerPreferred"
+ }
+}
diff --git a/terraform/dev/cloudfront.tf b/terraform/dev/cloudfront.tf
new file mode 100644
index 0000000..5039409
--- /dev/null
+++ b/terraform/dev/cloudfront.tf
@@ -0,0 +1,55 @@
+module "cloudfront" {
+ source = "terraform-aws-modules/cloudfront/aws"
+ version = "3.4.0"
+
+ aliases = [var.domain_name]
+
+ comment = "CloudFront distribution for multiple API Gateways"
+ enabled = true
+ is_ipv6_enabled = true
+ price_class = "PriceClass_All"
+ retain_on_delete = false
+ wait_for_deployment = false
+
+
+ origin = {
+ for o in var.api_gateway_origins : o.domain_name => {
+ domain_name = o.domain_name
+ custom_origin_config = {
+ http_port = 80
+ https_port = 443
+ origin_protocol_policy = "https-only"
+ origin_ssl_protocols = ["TLSv1.2"]
+ }
+ }
+ }
+
+ default_cache_behavior = {
+ target_origin_id = var.api_gateway_origins[0].domain_name
+ viewer_protocol_policy = "redirect-to-https"
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+ compress = true
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+
+ ordered_cache_behavior = [
+ for o in var.api_gateway_origins :
+ {
+ path_pattern = o.path_pattern
+ target_origin_id = o.domain_name
+ viewer_protocol_policy = "redirect-to-https"
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+ compress = true
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+ ]
+
+ viewer_certificate = {
+ acm_certificate_arn = var.acm_certificate_arn
+ ssl_support_method = "sni-only"
+ }
+}
diff --git a/terraform/dev/main.tf b/terraform/dev/main.tf
new file mode 100644
index 0000000..63d6835
--- /dev/null
+++ b/terraform/dev/main.tf
@@ -0,0 +1,8 @@
+module "list_files" {
+ source = "./../../src/file_service/terraform"
+ aws_region = var.aws_region
+ environment = var.environment
+}
+
+
+
diff --git a/terraform/dev/output.tf b/terraform/dev/output.tf
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/terraform/dev/output.tf
@@ -0,0 +1 @@
+
diff --git a/terraform/dev/provider.tf b/terraform/dev/provider.tf
new file mode 100644
index 0000000..e9b7251
--- /dev/null
+++ b/terraform/dev/provider.tf
@@ -0,0 +1,6 @@
+provider "aws" {
+ region = var.aws_region
+}
+
+
+
diff --git a/terraform/dev/route53.tf b/terraform/dev/route53.tf
new file mode 100644
index 0000000..ad0dbe1
--- /dev/null
+++ b/terraform/dev/route53.tf
@@ -0,0 +1,17 @@
+module "records" {
+ source = "terraform-aws-modules/route53/aws//modules/records"
+ version = "3.1.0"
+
+ zone_id = var.zone_id
+
+ records = [
+ {
+ name = "api.tpet"
+ type = "A"
+ alias = {
+ name = module.cloudfront.cloudfront_distribution_domain_name
+ zone_id = module.cloudfront.cloudfront_distribution_hosted_zone_id
+ }
+ }
+ ]
+}
diff --git a/terraform/dev/terraform.tf b/terraform/dev/terraform.tf
new file mode 100644
index 0000000..a1f678e
--- /dev/null
+++ b/terraform/dev/terraform.tf
@@ -0,0 +1,9 @@
+terraform {
+ backend "s3" {
+ bucket = "terraform-state-20240610123048790400000001"
+ key = "dev/terraform.tfstate"
+ region = "ap-northeast-1"
+ dynamodb_table = "terraform-locks"
+ encrypt = true
+ }
+}
diff --git a/terraform/dev/variables.tf b/terraform/dev/variables.tf
new file mode 100644
index 0000000..b46d275
--- /dev/null
+++ b/terraform/dev/variables.tf
@@ -0,0 +1,69 @@
+variable "domain_name" {
+ description = "The custom domain name for CloudFront"
+ type = string
+ default = "api.tpet.awseducate.systems"
+}
+
+
+variable "acm_certificate_arn" {
+ description = "The ARN of the ACM certificate for the custom domain"
+ type = string
+ default = "arn:aws:acm:us-east-1:070576557102:certificate/6ef7979c-596b-42fd-a6ed-fceccc2efc0b"
+}
+
+variable "zone_id" {
+ description = "The Route 53 Hosted Zone ID for the domain"
+ type = string
+ default = "Z00402303DMA4KDX72AUO"
+}
+
+variable "api_gateway_origins" {
+ description = "List of API Gateway domain names and their corresponding path patterns"
+ type = list(object({
+ domain_name = string
+ path_pattern = string
+ }))
+ default = [
+ {
+ # Campaign Service
+ domain_name = "pihjp3tc7f.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/campaigns*"
+ },
+ {
+ # File Service - List files & Get file by ID
+ domain_name = "8um2zizr80.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/files*"
+ },
+ {
+ # File Service - Upload file
+ domain_name = "ssckvgoo10.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/upload-file*"
+ },
+ {
+ # File Service - Upload multiple files
+ domain_name = "sojek1stci.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/upload-multiple-file*"
+ },
+
+ {
+ # Email Service - Send Email
+ domain_name = "diyf4tafbl.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/send-email*"
+ }
+
+ ]
+}
+
+
+variable "aws_region" {
+ description = "The AWS region to deploy the service to"
+ type = string
+ default = "us-east-1"
+}
+
+variable "environment" {
+ description = "The environment to deploy the service to"
+ type = string
+ default = "dev"
+
+}
diff --git a/terraform/modules/public_s3/main.tf b/terraform/modules/public_s3/main.tf
new file mode 100644
index 0000000..aad0b30
--- /dev/null
+++ b/terraform/modules/public_s3/main.tf
@@ -0,0 +1,32 @@
+resource "aws_s3_bucket" "aws_educate_tpet_storage" {
+ bucket = "${var.environment}-aws-educate-tpet-storage"
+
+ tags = {
+ Name = "${var.environment}-aws-educate-tpet-storage"
+ Environment = var.environment
+ }
+}
+
+resource "aws_s3_bucket_public_access_block" "public_access_block" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ block_public_acls = false
+ block_public_policy = false
+ ignore_public_acls = false
+ restrict_public_buckets = false
+}
+resource "aws_s3_bucket_policy" "aws_educate_tpet_storage_policy" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = "*",
+ Action = "s3:GetObject",
+ Resource = "${aws_s3_bucket.aws_educate_tpet_storage.arn}/*"
+ }
+ ]
+ })
+}
diff --git a/terraform/modules/public_s3/outputs.tf b/terraform/modules/public_s3/outputs.tf
new file mode 100644
index 0000000..66e4916
--- /dev/null
+++ b/terraform/modules/public_s3/outputs.tf
@@ -0,0 +1,9 @@
+output "bucket_name" {
+ description = "The name of the S3 bucket"
+ value = aws_s3_bucket.aws_educate_tpet_storage.bucket
+}
+
+output "bucket_arn" {
+ description = "The ARN of the S3 bucket"
+ value = aws_s3_bucket.aws_educate_tpet_storage.arn
+}
diff --git a/terraform/modules/public_s3/provider.tf b/terraform/modules/public_s3/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/terraform/modules/public_s3/provider.tf
@@ -0,0 +1,10 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
diff --git a/terraform/modules/public_s3/variables.tf b/terraform/modules/public_s3/variables.tf
new file mode 100644
index 0000000..7ba6512
--- /dev/null
+++ b/terraform/modules/public_s3/variables.tf
@@ -0,0 +1,11 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environtment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "bucket_name" {
+ description = "The name of the S3 bucket"
+}
diff --git a/terraform/modules/public_s3/versions.tf b/terraform/modules/public_s3/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/modules/public_s3/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/terraform/poc/.terraform.lock.hcl b/terraform/poc/.terraform.lock.hcl
new file mode 100644
index 0000000..f70412e
--- /dev/null
+++ b/terraform/poc/.terraform.lock.hcl
@@ -0,0 +1,43 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/archive" {
+ version = "2.4.2"
+ hashes = [
+ "h1:sCJKfmlbxo4w+AxizeJ3i1rPEfoVarCajyJuIODVPTg=",
+ "zh:08faed7c9f42d82bc3d406d0d9d4971e2d1c2d34eae268ad211b8aca57b7f758",
+ "zh:3564112ed2d097d7e0672378044a69b06642c326f6f1584d81c7cdd32ebf3a08",
+ "zh:53cd9afd223c15828c1916e68cb728d2be1cbccb9545568d6c2b122d0bac5102",
+ "zh:5ae4e41e3a1ce9d40b6458218a85bbde44f21723943982bca4a3b8bb7c103670",
+ "zh:5b65499218b315b96e95c5d3463ea6d7c66245b59461217c99eaa1611891cd2c",
+ "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+ "zh:7f45b35a8330bebd184c2545a41782ff58240ed6ba947274d9881dd5da44b02e",
+ "zh:87e67891033214e55cfead1391d68e6a3bf37993b7607753237e82aa3250bb71",
+ "zh:de3590d14037ad81fc5cedf7cfa44614a92452d7b39676289b704a962050bc5e",
+ "zh:e7e6f2ea567f2dbb3baa81c6203be69f9cd6aeeb01204fd93e3cf181e099b610",
+ "zh:fd24d03c89a7702628c2e5a3c732c0dede56fa75a08da4a1efe17b5f881c88e2",
+ "zh:febf4b7b5f3ff2adff0573ef6361f09b6638105111644bdebc0e4f575373935f",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.53.0"
+ hashes = [
+ "h1:YCupEeam12IXAPo9j2wvnfJTqFFuaHjyzTgSj3GlOeg=",
+ "zh:2adad39412111d19a5195474d6b95577fc25ccf06d88a90019bee0efba33a1e3",
+ "zh:51226453a14f95b0d1163cfecafc9cf1a92ce5f66e42e6b4065d83a813836a2c",
+ "zh:62450fadb56db9c18d50bb8b7728a3d009be608d7ee0d4fe95c85ccb521dff83",
+ "zh:6f3ad977a9cc4800847c136690b1c0a0fd8437705062163d29dc4e9429598950",
+ "zh:71ca0a16b735b8d34b7127dd7d1e1e5d1eaac9c9f792e08abde291b5beb947d5",
+ "zh:7ae9cf4838eea80288305be0a3e69b39ffff86ede7b4319be421f06d32d04fb6",
+ "zh:93abc2db5ad995cfee014eb7446abc7caedc427e141d375a11993e6e199076b5",
+ "zh:9560b3424d97da804e98ee86b474b7370afefa09baf350cae7f33afb3f1aa209",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:9eb57a9b649c217ac4eeb27af2a1935c18bd9bc8fb1be07434e7de74729eff46",
+ "zh:b5f32dcbe71ea22c2090eeeaec9af3e098d7b8c3e4491f34ffdfdc6f1c1abf81",
+ "zh:c9fbd5417f266c773055178e87bb4091df7f0542b72bf5ad0a4ae27045a2b7ca",
+ "zh:d518b3c52c8a9f79769dbe1b3683d25b4cdc8bfc77a3b3cd9c85f74e6c7383e1",
+ "zh:db741be21f32404bb87d73d25b1b7fd9b813b00aeb20a130ed8806d44dc26680",
+ "zh:ed1a8bb4d08653d87265ae534d6fc33bbdabae1608692a1ee364fce03548d36c",
+ ]
+}
diff --git a/terraform/poc/backend_setting/.terraform.lock.hcl b/terraform/poc/backend_setting/.terraform.lock.hcl
new file mode 100644
index 0000000..dba1e54
--- /dev/null
+++ b/terraform/poc/backend_setting/.terraform.lock.hcl
@@ -0,0 +1,24 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.53.0"
+ hashes = [
+ "h1:YCupEeam12IXAPo9j2wvnfJTqFFuaHjyzTgSj3GlOeg=",
+ "zh:2adad39412111d19a5195474d6b95577fc25ccf06d88a90019bee0efba33a1e3",
+ "zh:51226453a14f95b0d1163cfecafc9cf1a92ce5f66e42e6b4065d83a813836a2c",
+ "zh:62450fadb56db9c18d50bb8b7728a3d009be608d7ee0d4fe95c85ccb521dff83",
+ "zh:6f3ad977a9cc4800847c136690b1c0a0fd8437705062163d29dc4e9429598950",
+ "zh:71ca0a16b735b8d34b7127dd7d1e1e5d1eaac9c9f792e08abde291b5beb947d5",
+ "zh:7ae9cf4838eea80288305be0a3e69b39ffff86ede7b4319be421f06d32d04fb6",
+ "zh:93abc2db5ad995cfee014eb7446abc7caedc427e141d375a11993e6e199076b5",
+ "zh:9560b3424d97da804e98ee86b474b7370afefa09baf350cae7f33afb3f1aa209",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:9eb57a9b649c217ac4eeb27af2a1935c18bd9bc8fb1be07434e7de74729eff46",
+ "zh:b5f32dcbe71ea22c2090eeeaec9af3e098d7b8c3e4491f34ffdfdc6f1c1abf81",
+ "zh:c9fbd5417f266c773055178e87bb4091df7f0542b72bf5ad0a4ae27045a2b7ca",
+ "zh:d518b3c52c8a9f79769dbe1b3683d25b4cdc8bfc77a3b3cd9c85f74e6c7383e1",
+ "zh:db741be21f32404bb87d73d25b1b7fd9b813b00aeb20a130ed8806d44dc26680",
+ "zh:ed1a8bb4d08653d87265ae534d6fc33bbdabae1608692a1ee364fce03548d36c",
+ ]
+}
diff --git a/terraform/poc/backend_setting/dynamodb.tf b/terraform/poc/backend_setting/dynamodb.tf
new file mode 100644
index 0000000..3f5de03
--- /dev/null
+++ b/terraform/poc/backend_setting/dynamodb.tf
@@ -0,0 +1,14 @@
+resource "aws_dynamodb_table" "state_locks" {
+ name = "terraform-locks"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "LockID"
+
+ attribute {
+ name = "LockID"
+ type = "S"
+ }
+
+ tags = {
+ Terraform = true
+ }
+}
diff --git a/terraform/poc/backend_setting/output.tf b/terraform/poc/backend_setting/output.tf
new file mode 100644
index 0000000..f2fa6d6
--- /dev/null
+++ b/terraform/poc/backend_setting/output.tf
@@ -0,0 +1,7 @@
+output "bucket_name" {
+ value = aws_s3_bucket.state.bucket
+}
+
+output "dynamodb_table_name" {
+ value = aws_dynamodb_table.state_locks.name
+}
diff --git a/terraform/poc/backend_setting/s3.tf b/terraform/poc/backend_setting/s3.tf
new file mode 100644
index 0000000..f0f6e4e
--- /dev/null
+++ b/terraform/poc/backend_setting/s3.tf
@@ -0,0 +1,33 @@
+resource "aws_s3_bucket" "state" {
+ bucket_prefix = "terraform-state-"
+
+ tags = {
+ Terraform = "true"
+ }
+}
+
+resource "aws_s3_bucket_versioning" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ versioning_configuration {
+ status = "Enabled"
+ }
+}
+
+resource "aws_s3_bucket_server_side_encryption_configuration" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ rule {
+ apply_server_side_encryption_by_default {
+ sse_algorithm = "AES256"
+ }
+ }
+}
+
+resource "aws_s3_bucket_ownership_controls" "state" {
+ bucket = aws_s3_bucket.state.bucket
+
+ rule {
+ object_ownership = "BucketOwnerPreferred"
+ }
+}
diff --git a/terraform/poc/cloudfront.tf b/terraform/poc/cloudfront.tf
new file mode 100644
index 0000000..5039409
--- /dev/null
+++ b/terraform/poc/cloudfront.tf
@@ -0,0 +1,55 @@
+module "cloudfront" {
+ source = "terraform-aws-modules/cloudfront/aws"
+ version = "3.4.0"
+
+ aliases = [var.domain_name]
+
+ comment = "CloudFront distribution for multiple API Gateways"
+ enabled = true
+ is_ipv6_enabled = true
+ price_class = "PriceClass_All"
+ retain_on_delete = false
+ wait_for_deployment = false
+
+
+ origin = {
+ for o in var.api_gateway_origins : o.domain_name => {
+ domain_name = o.domain_name
+ custom_origin_config = {
+ http_port = 80
+ https_port = 443
+ origin_protocol_policy = "https-only"
+ origin_ssl_protocols = ["TLSv1.2"]
+ }
+ }
+ }
+
+ default_cache_behavior = {
+ target_origin_id = var.api_gateway_origins[0].domain_name
+ viewer_protocol_policy = "redirect-to-https"
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+ compress = true
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+
+ ordered_cache_behavior = [
+ for o in var.api_gateway_origins :
+ {
+ path_pattern = o.path_pattern
+ target_origin_id = o.domain_name
+ viewer_protocol_policy = "redirect-to-https"
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+ compress = true
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+ ]
+
+ viewer_certificate = {
+ acm_certificate_arn = var.acm_certificate_arn
+ ssl_support_method = "sni-only"
+ }
+}
diff --git a/terraform/poc/main.tf b/terraform/poc/main.tf
new file mode 100644
index 0000000..63d6835
--- /dev/null
+++ b/terraform/poc/main.tf
@@ -0,0 +1,8 @@
+module "list_files" {
+ source = "./../../src/file_service/terraform"
+ aws_region = var.aws_region
+ environment = var.environment
+}
+
+
+
diff --git a/terraform/poc/output.tf b/terraform/poc/output.tf
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/terraform/poc/output.tf
@@ -0,0 +1 @@
+
diff --git a/terraform/poc/provider.tf b/terraform/poc/provider.tf
new file mode 100644
index 0000000..e9b7251
--- /dev/null
+++ b/terraform/poc/provider.tf
@@ -0,0 +1,6 @@
+provider "aws" {
+ region = var.aws_region
+}
+
+
+
diff --git a/terraform/poc/route53.tf b/terraform/poc/route53.tf
new file mode 100644
index 0000000..ad0dbe1
--- /dev/null
+++ b/terraform/poc/route53.tf
@@ -0,0 +1,17 @@
+module "records" {
+ source = "terraform-aws-modules/route53/aws//modules/records"
+ version = "3.1.0"
+
+ zone_id = var.zone_id
+
+ records = [
+ {
+ name = "api.tpet"
+ type = "A"
+ alias = {
+ name = module.cloudfront.cloudfront_distribution_domain_name
+ zone_id = module.cloudfront.cloudfront_distribution_hosted_zone_id
+ }
+ }
+ ]
+}
diff --git a/terraform/poc/terraform.tf b/terraform/poc/terraform.tf
new file mode 100644
index 0000000..0cd713a
--- /dev/null
+++ b/terraform/poc/terraform.tf
@@ -0,0 +1,9 @@
+terraform {
+ backend "s3" {
+ bucket = "terraform-state-20240610123048790400000001"
+ key = "poc/terraform.tfstate"
+ region = "ap-northeast-1"
+ dynamodb_table = "terraform-locks"
+ encrypt = true
+ }
+}
diff --git a/terraform/poc/variables.tf b/terraform/poc/variables.tf
new file mode 100644
index 0000000..b46d275
--- /dev/null
+++ b/terraform/poc/variables.tf
@@ -0,0 +1,69 @@
+variable "domain_name" {
+ description = "The custom domain name for CloudFront"
+ type = string
+ default = "api.tpet.awseducate.systems"
+}
+
+
+variable "acm_certificate_arn" {
+ description = "The ARN of the ACM certificate for the custom domain"
+ type = string
+ default = "arn:aws:acm:us-east-1:070576557102:certificate/6ef7979c-596b-42fd-a6ed-fceccc2efc0b"
+}
+
+variable "zone_id" {
+ description = "The Route 53 Hosted Zone ID for the domain"
+ type = string
+ default = "Z00402303DMA4KDX72AUO"
+}
+
+variable "api_gateway_origins" {
+ description = "List of API Gateway domain names and their corresponding path patterns"
+ type = list(object({
+ domain_name = string
+ path_pattern = string
+ }))
+ default = [
+ {
+ # Campaign Service
+ domain_name = "pihjp3tc7f.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/campaigns*"
+ },
+ {
+ # File Service - List files & Get file by ID
+ domain_name = "8um2zizr80.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/files*"
+ },
+ {
+ # File Service - Upload file
+ domain_name = "ssckvgoo10.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/upload-file*"
+ },
+ {
+ # File Service - Upload multiple files
+ domain_name = "sojek1stci.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/upload-multiple-file*"
+ },
+
+ {
+ # Email Service - Send Email
+ domain_name = "diyf4tafbl.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/send-email*"
+ }
+
+ ]
+}
+
+
+variable "aws_region" {
+ description = "The AWS region to deploy the service to"
+ type = string
+ default = "us-east-1"
+}
+
+variable "environment" {
+ description = "The environment to deploy the service to"
+ type = string
+ default = "dev"
+
+}
diff --git a/tests/__init__.py b/terraform/prod/backend.tf
similarity index 100%
rename from tests/__init__.py
rename to terraform/prod/backend.tf
diff --git a/terraform/prod/backend_setting/dynamodb.tf b/terraform/prod/backend_setting/dynamodb.tf
new file mode 100644
index 0000000..3f5de03
--- /dev/null
+++ b/terraform/prod/backend_setting/dynamodb.tf
@@ -0,0 +1,14 @@
+resource "aws_dynamodb_table" "state_locks" {
+ name = "terraform-locks"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "LockID"
+
+ attribute {
+ name = "LockID"
+ type = "S"
+ }
+
+ tags = {
+ Terraform = true
+ }
+}
diff --git a/terraform/prod/backend_setting/output.tf b/terraform/prod/backend_setting/output.tf
new file mode 100644
index 0000000..f2fa6d6
--- /dev/null
+++ b/terraform/prod/backend_setting/output.tf
@@ -0,0 +1,7 @@
+output "bucket_name" {
+ value = aws_s3_bucket.state.bucket
+}
+
+output "dynamodb_table_name" {
+ value = aws_dynamodb_table.state_locks.name
+}
diff --git a/terraform/prod/backend_setting/s3.tf b/terraform/prod/backend_setting/s3.tf
new file mode 100644
index 0000000..e72227c
--- /dev/null
+++ b/terraform/prod/backend_setting/s3.tf
@@ -0,0 +1,33 @@
+resource "aws_s3_bucket" "cloudfront_logging" {
+ bucket = "aws-educate-tpet-cloudfront-logs"
+
+ tags = {
+ Name = "cloudfront-logs"
+ Terraform = "true"
+ }
+}
+
+resource "aws_s3_bucket_policy" "cloudfront_logging_policy" {
+ bucket = aws_s3_bucket.cloudfront_logging.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = {
+ Service = "cloudfront.amazonaws.com"
+ },
+ Action = "s3:PutObject",
+ Resource = "${aws_s3_bucket.cloudfront_logging.arn}/*",
+ Condition = {
+ StringEquals = {
+ "AWS:SourceArn" = "arn:aws:cloudfront::${data.aws_caller_identity.current.account_id}:distribution/*"
+ }
+ }
+ }
+ ]
+ })
+}
+
+data "aws_caller_identity" "current" {}
diff --git a/terraform/prod/cloudfront.tf b/terraform/prod/cloudfront.tf
new file mode 100644
index 0000000..1f88cef
--- /dev/null
+++ b/terraform/prod/cloudfront.tf
@@ -0,0 +1,88 @@
+resource "aws_cloudfront_distribution" "api_distribution" {
+ enabled = true
+ is_ipv6_enabled = true
+ comment = "CloudFront distribution for multiple API Gateways"
+ default_root_object = ""
+
+ aliases = [var.domain_name]
+
+ viewer_certificate {
+ acm_certificate_arn = var.acm_certificate_arn
+ ssl_support_method = "sni-only"
+ minimum_protocol_version = "TLSv1.2_2021"
+ }
+
+ restrictions {
+ geo_restriction {
+ restriction_type = "none"
+ }
+ }
+
+ dynamic "origin" {
+ for_each = var.api_gateway_origins
+ content {
+ domain_name = origin.value.domain_name
+ origin_id = origin.value.domain_name
+
+ custom_origin_config {
+ http_port = 80
+ https_port = 443
+ origin_protocol_policy = "https-only"
+ origin_ssl_protocols = ["TLSv1.2"]
+ }
+ }
+ }
+
+ dynamic "ordered_cache_behavior" {
+ for_each = var.api_gateway_origins
+ content {
+ path_pattern = ordered_cache_behavior.value.path_pattern
+ target_origin_id = ordered_cache_behavior.value.domain_name
+ viewer_protocol_policy = "redirect-to-https"
+
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+
+ forwarded_values {
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+
+ min_ttl = 0
+ default_ttl = 3600
+ max_ttl = 86400
+
+ lambda_function_association {
+ event_type = "origin-response"
+ lambda_arn = var.simple_cors_lambda_arn
+ }
+ }
+ }
+
+ default_cache_behavior {
+ target_origin_id = var.api_gateway_origins[0].domain_name
+ viewer_protocol_policy = "redirect-to-https"
+
+ allowed_methods = ["GET", "HEAD", "OPTIONS", "PUT", "POST", "PATCH", "DELETE"]
+ cached_methods = ["GET", "HEAD"]
+
+ forwarded_values {
+ cookies {
+ forward = "none"
+ }
+ query_string = true
+ headers = ["Origin", "Access-Control-Request-Headers", "Access-Control-Request-Method"]
+ }
+
+ min_ttl = 0
+ default_ttl = 3600
+ max_ttl = 86400
+
+ }
+
+ logging_config {
+ include_cookies = false
+ bucket = "aws-educate-tpet-cloudfront-logs.s3.amazonaws.com"
+ prefix = "cloudfront/"
+ }
+}
diff --git a/terraform/prod/main.tf b/terraform/prod/main.tf
new file mode 100644
index 0000000..0f3c43e
--- /dev/null
+++ b/terraform/prod/main.tf
@@ -0,0 +1,8 @@
+module "list_files" {
+ source = "./../../src/file_service/terraform"
+ aws_region = var.aws_region
+ environment = var.environment
+}
+
+
+
diff --git a/terraform/prod/output.tf b/terraform/prod/output.tf
new file mode 100644
index 0000000..f2fa6d6
--- /dev/null
+++ b/terraform/prod/output.tf
@@ -0,0 +1,7 @@
+# FIXME: these resources live in backend_setting, not this root module
+# output "bucket_name" {
+#   value = aws_s3_bucket.state.bucket
+# }
+# output "dynamodb_table_name" {
+#   value = aws_dynamodb_table.state_locks.name
+# }
diff --git a/terraform/prod/provider.tf b/terraform/prod/provider.tf
new file mode 100644
index 0000000..e9b7251
--- /dev/null
+++ b/terraform/prod/provider.tf
@@ -0,0 +1,6 @@
+provider "aws" {
+ region = var.aws_region
+}
+
+
+
diff --git a/terraform/prod/route53.tf b/terraform/prod/route53.tf
new file mode 100644
index 0000000..6015717
--- /dev/null
+++ b/terraform/prod/route53.tf
@@ -0,0 +1,11 @@
+resource "aws_route53_record" "api_alias" {
+ zone_id = var.zone_id
+ name = var.domain_name
+ type = "A"
+
+ alias {
+ name = aws_cloudfront_distribution.api_distribution.domain_name
+ zone_id = aws_cloudfront_distribution.api_distribution.hosted_zone_id
+ evaluate_target_health = false
+ }
+}
diff --git a/terraform/prod/terraform.tf b/terraform/prod/terraform.tf
new file mode 100644
index 0000000..2cc4414
--- /dev/null
+++ b/terraform/prod/terraform.tf
@@ -0,0 +1,9 @@
+terraform {
+ backend "s3" {
+ bucket = "terraform-state-20240610095057872800000001"
+ key = "prod/terraform.tfstate"
+ region = "ap-northeast-1"
+ dynamodb_table = "terraform-locks"
+ encrypt = true
+ }
+}
diff --git a/terraform/prod/variables.tf b/terraform/prod/variables.tf
new file mode 100644
index 0000000..d69b07a
--- /dev/null
+++ b/terraform/prod/variables.tf
@@ -0,0 +1,52 @@
+variable "domain_name" {
+ description = "The custom domain name for CloudFront"
+ type = string
+ default = "api.tpet.awseducate.systems"
+}
+
+
+variable "acm_certificate_arn" {
+ description = "The ARN of the ACM certificate for the custom domain"
+ type = string
+ default = "arn:aws:acm:us-east-1:070576557102:certificate/6ef7979c-596b-42fd-a6ed-fceccc2efc0b"
+}
+
+variable "zone_id" {
+ description = "The Route 53 Hosted Zone ID for the domain"
+ type = string
+ default = "Z00402303DMA4KDX72AUO"
+}
+
+variable "api_gateway_origins" {
+ description = "List of API Gateway domain names and their corresponding path patterns"
+ type = list(object({
+ domain_name = string
+ path_pattern = string
+ }))
+ default = [
+ {
+ # Campaign Service
+ domain_name = "pihjp3tc7f.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/campaigns*"
+ },
+ {
+ # Files Service
+ domain_name = "8um2zizr80.execute-api.ap-northeast-1.amazonaws.com"
+ path_pattern = "/dev/files*"
+ }
+ ]
+}
+
+
+variable "aws_region" {
+ description = "The AWS region to deploy the service to"
+ type = string
+ default = "us-east-1"
+}
+
+variable "environment" {
+ description = "The environment to deploy the service to"
+ type = string
+ default = "dev"
+
+}
diff --git a/terraform/storage/s3/dev/.terraform.lock.hcl b/terraform/storage/s3/dev/.terraform.lock.hcl
new file mode 100644
index 0000000..2850dd3
--- /dev/null
+++ b/terraform/storage/s3/dev/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = "~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
diff --git a/terraform/storage/s3/dev/backend.tf b/terraform/storage/s3/dev/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/terraform/storage/s3/dev/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/terraform/storage/s3/dev/dev.tfbackend b/terraform/storage/s3/dev/dev.tfbackend
new file mode 100644
index 0000000..4e13b20
--- /dev/null
+++ b/terraform/storage/s3/dev/dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "storage/s3/dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/storage/s3/dev/dev.tfvars b/terraform/storage/s3/dev/dev.tfvars
new file mode 100644
index 0000000..086fd42
--- /dev/null
+++ b/terraform/storage/s3/dev/dev.tfvars
@@ -0,0 +1,4 @@
+aws_region = "us-east-1"
+environment = "dev"
+service_underscore = "common_storage"
+service_hyphen = "common-storage"
diff --git a/terraform/storage/s3/dev/provider.tf b/terraform/storage/s3/dev/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/terraform/storage/s3/dev/provider.tf
@@ -0,0 +1,10 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
diff --git a/terraform/storage/s3/dev/s3.tf b/terraform/storage/s3/dev/s3.tf
new file mode 100644
index 0000000..3f62d37
--- /dev/null
+++ b/terraform/storage/s3/dev/s3.tf
@@ -0,0 +1,32 @@
+resource "aws_s3_bucket" "aws_educate_tpet_storage" {
+ bucket = "${var.environment}-aws-educate-tpet-storage"
+
+ tags = {
+ Name = "${var.environment}-aws-educate-tpet-storage"
+ Environment = var.environment
+ }
+}
+
+resource "aws_s3_bucket_public_access_block" "public_access_block" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.bucket
+
+ block_public_acls = false
+ block_public_policy = false
+ ignore_public_acls = false
+ restrict_public_buckets = false
+}
+resource "aws_s3_bucket_policy" "aws_educate_tpet_storage_policy" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = "*",
+ Action = "s3:GetObject",
+ Resource = "${aws_s3_bucket.aws_educate_tpet_storage.arn}/*"
+ }
+ ]
+ })
+}
diff --git a/terraform/storage/s3/dev/variables.tf b/terraform/storage/s3/dev/variables.tf
new file mode 100644
index 0000000..74e866d
--- /dev/null
+++ b/terraform/storage/s3/dev/variables.tf
@@ -0,0 +1,16 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environtment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name"
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+
diff --git a/terraform/storage/s3/dev/versions.tf b/terraform/storage/s3/dev/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/storage/s3/dev/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/terraform/storage/s3/local-dev/.terraform.lock.hcl b/terraform/storage/s3/local-dev/.terraform.lock.hcl
new file mode 100644
index 0000000..2850dd3
--- /dev/null
+++ b/terraform/storage/s3/local-dev/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = "~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
diff --git a/terraform/storage/s3/local-dev/backend.tf b/terraform/storage/s3/local-dev/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/terraform/storage/s3/local-dev/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/terraform/storage/s3/local-dev/local-dev.tfbackend b/terraform/storage/s3/local-dev/local-dev.tfbackend
new file mode 100644
index 0000000..5d9049f
--- /dev/null
+++ b/terraform/storage/s3/local-dev/local-dev.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "storage/s3/local-dev/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/storage/s3/local-dev/local-dev.tfvars b/terraform/storage/s3/local-dev/local-dev.tfvars
new file mode 100644
index 0000000..f1c9a3c
--- /dev/null
+++ b/terraform/storage/s3/local-dev/local-dev.tfvars
@@ -0,0 +1,4 @@
+aws_region = "us-west-2"
+environment = "local-dev"
+service_underscore = "common_storage"
+service_hyphen = "common-storage"
diff --git a/terraform/storage/s3/local-dev/provider.tf b/terraform/storage/s3/local-dev/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/terraform/storage/s3/local-dev/provider.tf
@@ -0,0 +1,10 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
diff --git a/terraform/storage/s3/local-dev/s3.tf b/terraform/storage/s3/local-dev/s3.tf
new file mode 100644
index 0000000..3f62d37
--- /dev/null
+++ b/terraform/storage/s3/local-dev/s3.tf
@@ -0,0 +1,32 @@
+resource "aws_s3_bucket" "aws_educate_tpet_storage" {
+ bucket = "${var.environment}-aws-educate-tpet-storage"
+
+ tags = {
+ Name = "${var.environment}-aws-educate-tpet-storage"
+ Environment = var.environment
+ }
+}
+
+resource "aws_s3_bucket_public_access_block" "public_access_block" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.bucket
+
+ block_public_acls = false
+ block_public_policy = false
+ ignore_public_acls = false
+ restrict_public_buckets = false
+}
+resource "aws_s3_bucket_policy" "aws_educate_tpet_storage_policy" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = "*",
+ Action = "s3:GetObject",
+ Resource = "${aws_s3_bucket.aws_educate_tpet_storage.arn}/*"
+ }
+ ]
+ })
+}
diff --git a/terraform/storage/s3/local-dev/variables.tf b/terraform/storage/s3/local-dev/variables.tf
new file mode 100644
index 0000000..74e866d
--- /dev/null
+++ b/terraform/storage/s3/local-dev/variables.tf
@@ -0,0 +1,16 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name"
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+
diff --git a/terraform/storage/s3/local-dev/versions.tf b/terraform/storage/s3/local-dev/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/storage/s3/local-dev/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/terraform/storage/s3/preview/.terraform.lock.hcl b/terraform/storage/s3/preview/.terraform.lock.hcl
new file mode 100644
index 0000000..2850dd3
--- /dev/null
+++ b/terraform/storage/s3/preview/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = "~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
diff --git a/terraform/storage/s3/preview/backend.tf b/terraform/storage/s3/preview/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/terraform/storage/s3/preview/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/terraform/storage/s3/preview/preview.tfbackend b/terraform/storage/s3/preview/preview.tfbackend
new file mode 100644
index 0000000..bc37c5b
--- /dev/null
+++ b/terraform/storage/s3/preview/preview.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "storage/s3/preview/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/storage/s3/preview/preview.tfvars b/terraform/storage/s3/preview/preview.tfvars
new file mode 100644
index 0000000..43cbccc
--- /dev/null
+++ b/terraform/storage/s3/preview/preview.tfvars
@@ -0,0 +1,4 @@
+aws_region = "us-west-1" # NOTE(review): other non-prod envs use us-west-2 — confirm us-west-1 is intentional
+environment = "preview"
+service_underscore = "common_storage"
+service_hyphen = "common-storage"
diff --git a/terraform/storage/s3/preview/provider.tf b/terraform/storage/s3/preview/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/terraform/storage/s3/preview/provider.tf
@@ -0,0 +1,10 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
diff --git a/terraform/storage/s3/preview/s3.tf b/terraform/storage/s3/preview/s3.tf
new file mode 100644
index 0000000..3f62d37
--- /dev/null
+++ b/terraform/storage/s3/preview/s3.tf
@@ -0,0 +1,32 @@
+resource "aws_s3_bucket" "aws_educate_tpet_storage" {
+ bucket = "${var.environment}-aws-educate-tpet-storage"
+
+ tags = {
+ Name = "${var.environment}-aws-educate-tpet-storage"
+ Environment = var.environment
+ }
+}
+
+resource "aws_s3_bucket_public_access_block" "public_access_block" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.bucket
+
+ block_public_acls = false
+ block_public_policy = false
+ ignore_public_acls = false
+ restrict_public_buckets = false
+}
+resource "aws_s3_bucket_policy" "aws_educate_tpet_storage_policy" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = "*",
+ Action = "s3:GetObject",
+ Resource = "${aws_s3_bucket.aws_educate_tpet_storage.arn}/*"
+ }
+ ]
+ })
+}
diff --git a/terraform/storage/s3/preview/variables.tf b/terraform/storage/s3/preview/variables.tf
new file mode 100644
index 0000000..74e866d
--- /dev/null
+++ b/terraform/storage/s3/preview/variables.tf
@@ -0,0 +1,16 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name"
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+
diff --git a/terraform/storage/s3/preview/versions.tf b/terraform/storage/s3/preview/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/storage/s3/preview/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/terraform/storage/s3/prod/.terraform.lock.hcl b/terraform/storage/s3/prod/.terraform.lock.hcl
new file mode 100644
index 0000000..2850dd3
--- /dev/null
+++ b/terraform/storage/s3/prod/.terraform.lock.hcl
@@ -0,0 +1,25 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/aws" {
+ version = "5.54.1"
+ constraints = "~> 5.54.0"
+ hashes = [
+ "h1:h6AA+TgBpDNQXFcLi4xKYiDbn94Dfhz7lt8Q8x8CEI8=",
+ "zh:37c09b9a0a0a2f7854fe52c6adb15f71593810b458a8283ed71d68036af7ba3a",
+ "zh:42fe11d87723d4e43b9c6224ae6bacdcb53faee8abc58f0fc625a161d1f71cb1",
+ "zh:57c6dfc46f28c9c2737559bd84acbc05aeae90431e731bb72a0024028a2d2412",
+ "zh:5ba9665a4ca0e182effd75575b19a4d47383ec02662024b9fe26f78286c36619",
+ "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+ "zh:b55980be0237644123a02a30b56d4cc03863ef29036c47d6e8ab5429ab45adf5",
+ "zh:b81e7664f10855a3a6fc234a18b4c4f1456273126a40c41516f2061696fb9870",
+ "zh:bd09736ffafd92af104c3c34b5add138ae8db4402eb687863ce472ca7e5ff2e2",
+ "zh:cc2eb1c62fba2a11d1f239e650cc2ae94bcab01c907384dcf2e213a6ee1bd5b2",
+ "zh:e5dc40205d9cf6f353c0ca532ae29afc6c83928bc9bcca47d74b640d3bb5a38c",
+ "zh:ebf1acdcd13f10db1b9c85050ddaadc70ab269c47c5a240753362446442d8371",
+ "zh:f2fc28a4ad94af5e6144a7309286505e3eb7a94d9dc106722b506c372ff7f591",
+ "zh:f49445e8435944df122aa89853260a2716ba8b73d6a6a70cae1661554926d5a2",
+ "zh:fc3b5046e60ae7cab20715be23de8436eb12736136fd6d0f0cc1549ebda6cc73",
+ "zh:fdb98a53500e245a3b5bec077b994da6959dba8fc4eb7534528658d820e06bd5",
+ ]
+}
diff --git a/terraform/storage/s3/prod/backend.tf b/terraform/storage/s3/prod/backend.tf
new file mode 100644
index 0000000..12c0dbe
--- /dev/null
+++ b/terraform/storage/s3/prod/backend.tf
@@ -0,0 +1,3 @@
+terraform {
+ backend "s3" {}
+}
diff --git a/terraform/storage/s3/prod/prod.tfbackend b/terraform/storage/s3/prod/prod.tfbackend
new file mode 100644
index 0000000..cc1f96f
--- /dev/null
+++ b/terraform/storage/s3/prod/prod.tfbackend
@@ -0,0 +1,4 @@
+bucket = "terraform-state-20240618152116874600000001"
+region = "us-west-2"
+key = "storage/s3/prod/terraform.tfstate"
+dynamodb_table = "terraform-locks"
\ No newline at end of file
diff --git a/terraform/storage/s3/prod/prod.tfvars b/terraform/storage/s3/prod/prod.tfvars
new file mode 100644
index 0000000..31e446b
--- /dev/null
+++ b/terraform/storage/s3/prod/prod.tfvars
@@ -0,0 +1,4 @@
+aws_region = "ap-northeast-1"
+environment = "prod"
+service_underscore = "common_storage"
+service_hyphen = "common-storage"
diff --git a/terraform/storage/s3/prod/provider.tf b/terraform/storage/s3/prod/provider.tf
new file mode 100644
index 0000000..5ac480b
--- /dev/null
+++ b/terraform/storage/s3/prod/provider.tf
@@ -0,0 +1,10 @@
+provider "aws" {
+ region = var.aws_region
+ default_tags {
+ tags = {
+ "Terraform" = "true",
+ "Environment" = var.environment,
+ "Project" = "AWS Educate TPET"
+ }
+ }
+}
diff --git a/terraform/storage/s3/prod/s3.tf b/terraform/storage/s3/prod/s3.tf
new file mode 100644
index 0000000..3f62d37
--- /dev/null
+++ b/terraform/storage/s3/prod/s3.tf
@@ -0,0 +1,32 @@
+resource "aws_s3_bucket" "aws_educate_tpet_storage" {
+ bucket = "${var.environment}-aws-educate-tpet-storage"
+
+ tags = {
+ Name = "${var.environment}-aws-educate-tpet-storage"
+ Environment = var.environment
+ }
+}
+
+resource "aws_s3_bucket_public_access_block" "public_access_block" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.bucket
+
+ block_public_acls = false
+ block_public_policy = false
+ ignore_public_acls = false
+ restrict_public_buckets = false
+}
+resource "aws_s3_bucket_policy" "aws_educate_tpet_storage_policy" {
+ bucket = aws_s3_bucket.aws_educate_tpet_storage.id
+
+ policy = jsonencode({
+ Version = "2012-10-17",
+ Statement = [
+ {
+ Effect = "Allow",
+ Principal = "*",
+ Action = "s3:GetObject",
+ Resource = "${aws_s3_bucket.aws_educate_tpet_storage.arn}/*"
+ }
+ ]
+ })
+}
diff --git a/terraform/storage/s3/prod/variables.tf b/terraform/storage/s3/prod/variables.tf
new file mode 100644
index 0000000..74e866d
--- /dev/null
+++ b/terraform/storage/s3/prod/variables.tf
@@ -0,0 +1,16 @@
+variable "aws_region" {
+ description = "aws region"
+}
+
+variable "environment" {
+ description = "Current environment: prod(ap-northeast-1)/dev(us-east-1)/local-dev(us-west-2), default dev(us-east-1)"
+}
+
+variable "service_underscore" {
+ description = "Current service name"
+}
+
+variable "service_hyphen" {
+ description = "This variable contains the current service name, but with hyphens instead of underscores. For example: demo-service."
+}
+
diff --git a/terraform/storage/s3/prod/versions.tf b/terraform/storage/s3/prod/versions.tf
new file mode 100644
index 0000000..ef78c8e
--- /dev/null
+++ b/terraform/storage/s3/prod/versions.tf
@@ -0,0 +1,9 @@
+terraform {
+ required_version = "~> 1.8.0"
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 5.54.0"
+ }
+ }
+}
diff --git a/test.txt b/test.txt
deleted file mode 100644
index 9daeafb..0000000
--- a/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-test
diff --git a/tests/requirements.txt b/tests/requirements.txt
deleted file mode 100644
index b9cf27a..0000000
--- a/tests/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest
-boto3
-requests