diff --git a/.devcontainer/Dockerfile.github b/.devcontainer/Dockerfile.github
index b1f32b1..bdea419 100644
--- a/.devcontainer/Dockerfile.github
+++ b/.devcontainer/Dockerfile.github
@@ -17,6 +17,7 @@ ENV EXTRA_CORS_ALLOWED_ORIGINS='*'
ENV DISABLE_CUSTOM_CORS_APIGATEWAY=1
ENV LOCALSTACK_APPINSPECTOR_ENABLE=1
ENV LOCALSTACK_APPINSPECTOR_DEV_ENABLE=1
+ENV LOCALSTACK_DEBUG=1
# Workshop token URL — set by organizer before each event.
# The setup script fetches the actual token from this endpoint.
diff --git a/01-serverless-app/lambdas/order_handler/handler.py b/01-serverless-app/lambdas/order_handler/handler.py
index 185e5ba..9522a5a 100644
--- a/01-serverless-app/lambdas/order_handler/handler.py
+++ b/01-serverless-app/lambdas/order_handler/handler.py
@@ -1,13 +1,22 @@
import json
import os
import uuid
+from datetime import datetime, timezone
+from decimal import Decimal
import boto3
-dynamodb = boto3.resource("dynamodb", endpoint_url=os.environ.get("AWS_ENDPOINT_URL"))
-sqs = boto3.client("sqs", endpoint_url=os.environ.get("AWS_ENDPOINT_URL"))
+dynamodb = boto3.resource("dynamodb")
+sqs = boto3.client("sqs")
+
+TABLE_NAME = os.environ["ORDERS_TABLE"]
+PRODUCTS_TABLE = os.environ["PRODUCTS_TABLE"]
+QUEUE_URL = os.environ["ORDERS_QUEUE_URL"]
+DLQ_URL = os.environ.get("ORDERS_DLQ_URL", "")
+
+class DecimalEncoder(json.JSONEncoder):
+ def default(self, o):
+ return int(o) if isinstance(o, Decimal) else super().default(o)
-TABLE_NAME = os.environ["ORDERS_TABLE"]
-QUEUE_URL = os.environ["ORDERS_QUEUE_URL"]
CORS_HEADERS = {
"Access-Control-Allow-Origin": "*",
@@ -18,10 +27,17 @@
def handler(event, context):
method = event.get("httpMethod", "")
+ path = event.get("path", "")
if method == "OPTIONS":
return {"statusCode": 200, "headers": CORS_HEADERS, "body": ""}
+ if method == "POST" and path.endswith("/replay"):
+ return replay_dlq()
+
+ if method == "GET" and "/products" in path:
+ return list_products()
+
if method == "GET":
return list_orders()
@@ -31,6 +47,29 @@ def handler(event, context):
return {"statusCode": 405, "headers": CORS_HEADERS, "body": "Method Not Allowed"}
+def replay_dlq():
+ resp = sqs.receive_message(QueueUrl=DLQ_URL, MaxNumberOfMessages=10)
+ messages = resp.get("Messages", [])
+ for msg in messages:
+ sqs.send_message(QueueUrl=QUEUE_URL, MessageBody=msg["Body"])
+ sqs.delete_message(QueueUrl=DLQ_URL, ReceiptHandle=msg["ReceiptHandle"])
+ return {
+ "statusCode": 200,
+ "headers": {**CORS_HEADERS, "Content-Type": "application/json"},
+ "body": json.dumps({"replayed": len(messages)}),
+ }
+
+
+def list_products():
+ table = dynamodb.Table(PRODUCTS_TABLE)
+ items = sorted(table.scan().get("Items", []), key=lambda x: x.get("name", ""))
+ return {
+ "statusCode": 200,
+ "headers": {**CORS_HEADERS, "Content-Type": "application/json"},
+ "body": json.dumps(items, cls=DecimalEncoder),
+ }
+
+
def list_orders():
table = dynamodb.Table(TABLE_NAME)
result = table.scan()
@@ -38,19 +77,20 @@ def list_orders():
return {
"statusCode": 200,
"headers": {**CORS_HEADERS, "Content-Type": "application/json"},
- "body": json.dumps(items),
+ "body": json.dumps(items, cls=DecimalEncoder),
}
def create_order(event):
body = json.loads(event.get("body") or "{}")
- order_id = str(uuid.uuid4())
+ order_id = uuid.uuid4().hex[:12]
order = {
"order_id": order_id,
"item": body.get("item", "unknown"),
"quantity": int(body.get("quantity", 1)),
"status": "pending",
+ "created_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
}
table = dynamodb.Table(TABLE_NAME)
diff --git a/01-serverless-app/lambdas/order_processor/handler.py b/01-serverless-app/lambdas/order_processor/handler.py
index a24a942..88d60af 100644
--- a/01-serverless-app/lambdas/order_processor/handler.py
+++ b/01-serverless-app/lambdas/order_processor/handler.py
@@ -1,38 +1,101 @@
import json
import os
+import time
+import uuid
+from datetime import datetime, timezone
import boto3
-dynamodb = boto3.resource("dynamodb", endpoint_url=os.environ.get("AWS_ENDPOINT_URL"))
-s3 = boto3.client("s3", endpoint_url=os.environ.get("AWS_ENDPOINT_URL"))
+dynamodb = boto3.resource("dynamodb")
+s3 = boto3.client("s3")
+sfn = boto3.client("stepfunctions")
TABLE_NAME = os.environ["ORDERS_TABLE"]
RECEIPTS_BUCKET = os.environ["RECEIPTS_BUCKET"]
+STATE_MACHINE_ARN = os.environ["STATE_MACHINE_ARN"]
+
+TERMINAL_STATUSES = {"fulfilled", "failed"}
+
+
+def now():
+ return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+
+def set_status(order_id, status):
+ ts_key = {
+ "validating": "validating_at",
+ "payment_processing": "payment_at",
+ "fulfilled": "fulfilled_at",
+ "failed": "failed_at",
+ }.get(status)
+
+ expression = "SET #s = :s"
+ names = {"#s": "status"}
+ values = {":s": status}
+
+ if ts_key:
+ expression += ", #ts = :ts"
+ names["#ts"] = ts_key
+ values[":ts"] = now()
+
+ dynamodb.Table(TABLE_NAME).update_item(
+ Key={"order_id": order_id},
+ UpdateExpression=expression,
+ ExpressionAttributeNames=names,
+ ExpressionAttributeValues=values,
+ )
def handler(event, context):
- for record in event["Records"]:
- order = json.loads(record["body"])
- order_id = order["order_id"]
-
- # Update order status in DynamoDB
- table = dynamodb.Table(TABLE_NAME)
- table.update_item(
- Key={"order_id": order_id},
- UpdateExpression="SET #s = :s",
- ExpressionAttributeNames={"#s": "status"},
- ExpressionAttributeValues={":s": "processed"},
- )
-
- # Store receipt in S3
- receipt = {
- "order_id": order_id,
- "item": order.get("item"),
- "quantity": order.get("quantity"),
- "status": "processed",
- }
- s3.put_object(
- Bucket=RECEIPTS_BUCKET,
- Key=f"receipts/{order_id}.json",
- Body=json.dumps(receipt),
- ContentType="application/json",
- )
+ # Triggered by SQS: start a state machine execution per order
+ if "Records" in event:
+ for record in event["Records"]:
+ order = json.loads(record["body"])
+ set_status(order["order_id"], "validating") # fails fast if DDB is faulted → SQS retry → DLQ
+ sfn.start_execution(
+ stateMachineArn=STATE_MACHINE_ARN,
+ name=f"order-{order['order_id']}-{uuid.uuid4().hex[:8]}",
+ input=json.dumps({"order": order}),
+ )
+ return
+
+ # Invoked by Step Functions
+ step = event["step"]
+ order = event["order"]
+
+ if step == "validate": return validate(order)
+ if step == "process_payment": return process_payment(order)
+ if step == "fulfill": return fulfill(order)
+ if step == "handle_failure": return handle_failure(order)
+
+ raise ValueError(f"Unknown step: {step}")
+
+
+def validate(order):
+ time.sleep(2)
+ set_status(order["order_id"], "validating")
+ return order
+
+
+def process_payment(order):
+ time.sleep(3)
+ set_status(order["order_id"], "payment_processing")
+ return order
+
+
+def fulfill(order):
+ time.sleep(2)
+ set_status(order["order_id"], "fulfilled")
+ receipt = {k: order[k] for k in ("order_id", "item", "quantity")}
+ receipt["status"] = "fulfilled"
+ s3.put_object(
+ Bucket=RECEIPTS_BUCKET,
+ Key=f"receipts/{order['order_id']}.json",
+ Body=json.dumps(receipt),
+ ContentType="application/json",
+ )
+ return order
+
+
+def handle_failure(order):
+ set_status(order["order_id"], "failed")
+ return order
diff --git a/01-serverless-app/terraform/main.tf b/01-serverless-app/terraform/main.tf
index 0638c27..1757116 100644
--- a/01-serverless-app/terraform/main.tf
+++ b/01-serverless-app/terraform/main.tf
@@ -14,18 +14,44 @@ provider "aws" {
skip_credentials_validation = true
skip_metadata_api_check = true
skip_requesting_account_id = true
+}
+
+# ── DynamoDB ──────────────────────────────────────────────────────────────────
- endpoints {
- apigateway = "http://localhost:4566"
- dynamodb = "http://localhost:4566"
- iam = "http://localhost:4566"
- lambda = "http://localhost:4566"
- s3 = "http://localhost:4566"
- sqs = "http://localhost:4566"
+resource "aws_dynamodb_table" "products" {
+ name = "products"
+ billing_mode = "PAY_PER_REQUEST"
+ hash_key = "product_id"
+
+ attribute {
+ name = "product_id"
+ type = "S"
}
}
-# ── DynamoDB ──────────────────────────────────────────────────────────────────
+locals {
+ products = [
+ { product_id = "ls-tshirt", name = "LocalStack T-Shirt", description = "Classic logo tee", price = "24.99" },
+ { product_id = "ls-hoodie", name = "LocalStack Hoodie", description = "Warm & cloud-native", price = "49.99" },
+ { product_id = "ls-cap", name = "LocalStack Cap", description = "Keep the sun off your stack", price = "19.99" },
+ { product_id = "ls-mug", name = "LocalStack Mug", description = "Fill it with local coffee", price = "14.99" },
+ { product_id = "ls-stickers", name = "Sticker Pack", description = "10 cloud-native stickers", price = "4.99" },
+ { product_id = "ls-socks", name = "LocalStack Socks", description = "Deploy faster on your feet", price = "9.99" },
+ ]
+}
+
+resource "aws_dynamodb_table_item" "products" {
+ for_each = { for p in local.products : p.product_id => p }
+ table_name = aws_dynamodb_table.products.name
+ hash_key = aws_dynamodb_table.products.hash_key
+
+ item = jsonencode({
+ product_id = { S = each.value.product_id }
+ name = { S = each.value.name }
+ description = { S = each.value.description }
+ price = { N = each.value.price }
+ })
+}
resource "aws_dynamodb_table" "orders" {
name = "orders"
@@ -81,9 +107,9 @@ resource "aws_iam_role_policy" "lambda_policy" {
Version = "2012-10-17"
Statement = [
{
- Effect = "Allow"
- Action = ["dynamodb:PutItem", "dynamodb:UpdateItem", "dynamodb:GetItem", "dynamodb:Scan"]
- Resource = aws_dynamodb_table.orders.arn
+ Effect = "Allow"
+ Action = ["dynamodb:PutItem", "dynamodb:UpdateItem", "dynamodb:GetItem", "dynamodb:Scan"]
+ Resource = [aws_dynamodb_table.orders.arn, aws_dynamodb_table.products.arn]
},
{
Effect = "Allow"
@@ -94,11 +120,42 @@ resource "aws_iam_role_policy" "lambda_policy" {
Effect = "Allow"
Action = ["s3:PutObject", "s3:GetObject"]
Resource = "${aws_s3_bucket.receipts.arn}/*"
+ },
+ {
+ Effect = "Allow"
+ Action = "states:StartExecution"
+ Resource = local.state_machine_arn
}
]
})
}
+resource "aws_iam_role" "sfn_exec" {
+ name = "sfn-exec-role"
+
+ assume_role_policy = jsonencode({
+ Version = "2012-10-17"
+ Statement = [{
+ Action = "sts:AssumeRole"
+ Effect = "Allow"
+ Principal = { Service = "states.amazonaws.com" }
+ }]
+ })
+}
+
+resource "aws_iam_role_policy" "sfn_policy" {
+ role = aws_iam_role.sfn_exec.id
+
+ policy = jsonencode({
+ Version = "2012-10-17"
+ Statement = [{
+ Effect = "Allow"
+ Action = "lambda:InvokeFunction"
+ Resource = aws_lambda_function.order_processor.arn
+ }]
+ })
+}
+
# ── Lambda: order_handler ─────────────────────────────────────────────────────
data "archive_file" "order_handler" {
@@ -117,9 +174,10 @@ resource "aws_lambda_function" "order_handler" {
environment {
variables = {
- ORDERS_TABLE = aws_dynamodb_table.orders.name
- ORDERS_QUEUE_URL = aws_sqs_queue.orders.url
- AWS_ENDPOINT_URL = "http://localhost:4566"
+ ORDERS_TABLE = aws_dynamodb_table.orders.name
+ PRODUCTS_TABLE = aws_dynamodb_table.products.name
+ ORDERS_QUEUE_URL = aws_sqs_queue.orders.url
+ ORDERS_DLQ_URL = aws_sqs_queue.orders_dlq.url
}
}
}
@@ -139,16 +197,83 @@ resource "aws_lambda_function" "order_processor" {
runtime = "python3.12"
filename = data.archive_file.order_processor.output_path
source_code_hash = data.archive_file.order_processor.output_base64sha256
+ timeout = 30
environment {
variables = {
- ORDERS_TABLE = aws_dynamodb_table.orders.name
- RECEIPTS_BUCKET = aws_s3_bucket.receipts.bucket
- AWS_ENDPOINT_URL = "http://localhost:4566"
+ ORDERS_TABLE = aws_dynamodb_table.orders.name
+ RECEIPTS_BUCKET = aws_s3_bucket.receipts.bucket
+ STATE_MACHINE_ARN = local.state_machine_arn
}
}
}
+# ── Step Functions ────────────────────────────────────────────────────────────
+
+resource "aws_sfn_state_machine" "order_processing" {
+ name = "order-processing"
+ role_arn = aws_iam_role.sfn_exec.arn
+
+ definition = jsonencode({
+ StartAt = "ValidateOrder"
+ States = {
+ ValidateOrder = {
+ Type = "Task"
+ Resource = aws_lambda_function.order_processor.arn
+ Parameters = {
+ "step" = "validate"
+ "order.$" = "$.order"
+ }
+ ResultPath = "$.order"
+ Catch = [{ ErrorEquals = ["States.ALL"], Next = "HandleFailure", ResultPath = "$.error" }]
+ Next = "WaitForPayment"
+ }
+ WaitForPayment = {
+ Type = "Wait"
+ Seconds = 3
+ Next = "ProcessPayment"
+ }
+ ProcessPayment = {
+ Type = "Task"
+ Resource = aws_lambda_function.order_processor.arn
+ Parameters = {
+ "step" = "process_payment"
+ "order.$" = "$.order"
+ }
+ ResultPath = "$.order"
+ Catch = [{ ErrorEquals = ["States.ALL"], Next = "HandleFailure", ResultPath = "$.error" }]
+ Next = "WaitForFulfillment"
+ }
+ WaitForFulfillment = {
+ Type = "Wait"
+ Seconds = 3
+ Next = "FulfillOrder"
+ }
+ FulfillOrder = {
+ Type = "Task"
+ Resource = aws_lambda_function.order_processor.arn
+ Parameters = {
+ "step" = "fulfill"
+ "order.$" = "$.order"
+ }
+ ResultPath = "$.order"
+ Catch = [{ ErrorEquals = ["States.ALL"], Next = "HandleFailure", ResultPath = "$.error" }]
+ End = true
+ }
+ HandleFailure = {
+ Type = "Task"
+ Resource = aws_lambda_function.order_processor.arn
+ Parameters = {
+ "step" = "handle_failure"
+ "order.$" = "$.order"
+ }
+ ResultPath = "$.order"
+ End = true
+ }
+ }
+ })
+}
+
resource "aws_lambda_event_source_mapping" "sqs_to_processor" {
event_source_arn = aws_sqs_queue.orders.arn
function_name = aws_lambda_function.order_processor.arn
@@ -159,6 +284,9 @@ resource "aws_lambda_event_source_mapping" "sqs_to_processor" {
resource "aws_api_gateway_rest_api" "orders_api" {
name = "orders-api"
+ tags = {
+ "_custom_id_" = "workshop"
+ }
}
resource "aws_api_gateway_resource" "orders" {
@@ -215,6 +343,82 @@ resource "aws_api_gateway_integration" "options_order_handler" {
uri = aws_lambda_function.order_handler.invoke_arn
}
+resource "aws_api_gateway_resource" "products" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ parent_id = aws_api_gateway_rest_api.orders_api.root_resource_id
+ path_part = "products"
+}
+
+resource "aws_api_gateway_method" "get_products" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.products.id
+ http_method = "GET"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_method" "options_products" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.products.id
+ http_method = "OPTIONS"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_integration" "get_products_handler" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.products.id
+ http_method = aws_api_gateway_method.get_products.http_method
+ integration_http_method = "POST"
+ type = "AWS_PROXY"
+ uri = aws_lambda_function.order_handler.invoke_arn
+}
+
+resource "aws_api_gateway_integration" "options_products_handler" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.products.id
+ http_method = aws_api_gateway_method.options_products.http_method
+ integration_http_method = "POST"
+ type = "AWS_PROXY"
+ uri = aws_lambda_function.order_handler.invoke_arn
+}
+
+resource "aws_api_gateway_resource" "orders_replay" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ parent_id = aws_api_gateway_resource.orders.id
+ path_part = "replay"
+}
+
+resource "aws_api_gateway_method" "post_replay" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.orders_replay.id
+ http_method = "POST"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_method" "options_replay" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.orders_replay.id
+ http_method = "OPTIONS"
+ authorization = "NONE"
+}
+
+resource "aws_api_gateway_integration" "post_replay_handler" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.orders_replay.id
+ http_method = aws_api_gateway_method.post_replay.http_method
+ integration_http_method = "POST"
+ type = "AWS_PROXY"
+ uri = aws_lambda_function.order_handler.invoke_arn
+}
+
+resource "aws_api_gateway_integration" "options_replay_handler" {
+ rest_api_id = aws_api_gateway_rest_api.orders_api.id
+ resource_id = aws_api_gateway_resource.orders_replay.id
+ http_method = aws_api_gateway_method.options_replay.http_method
+ integration_http_method = "POST"
+ type = "AWS_PROXY"
+ uri = aws_lambda_function.order_handler.invoke_arn
+}
+
resource "aws_lambda_permission" "apigw" {
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.order_handler.function_name
@@ -226,17 +430,38 @@ resource "aws_api_gateway_deployment" "orders_api" {
rest_api_id = aws_api_gateway_rest_api.orders_api.id
stage_name = "local"
+ triggers = {
+ redeployment = sha1(jsonencode([
+ aws_api_gateway_integration.post_order_handler.id,
+ aws_api_gateway_integration.get_orders_handler.id,
+ aws_api_gateway_integration.options_order_handler.id,
+ aws_api_gateway_integration.post_replay_handler.id,
+ aws_api_gateway_integration.options_replay_handler.id,
+ aws_api_gateway_integration.get_products_handler.id,
+ aws_api_gateway_integration.options_products_handler.id,
+ ]))
+ }
+
depends_on = [
aws_api_gateway_integration.post_order_handler,
aws_api_gateway_integration.get_orders_handler,
aws_api_gateway_integration.options_order_handler,
+ aws_api_gateway_integration.post_replay_handler,
+ aws_api_gateway_integration.options_replay_handler,
+ aws_api_gateway_integration.get_products_handler,
+ aws_api_gateway_integration.options_products_handler,
]
}
# ── S3 Website ────────────────────────────────────────────────────────────────
+data "aws_caller_identity" "current" {}
+data "aws_region" "current" {}
+
locals {
- api_endpoint = "http://localhost:4566/restapis/${aws_api_gateway_rest_api.orders_api.id}/local/_user_request_"
+ api_id = "workshop"
+ api_endpoint = "http://localhost:4566/restapis/${local.api_id}/local/_user_request_"
+ state_machine_arn = "arn:aws:states:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:stateMachine:order-processing"
}
resource "aws_s3_bucket" "website" {
@@ -273,8 +498,9 @@ resource "aws_s3_bucket_policy" "website" {
resource "aws_s3_object" "index_html" {
bucket = aws_s3_bucket.website.id
key = "index.html"
- content = templatefile("${path.module}/../website/index.html.tpl", { api_endpoint = local.api_endpoint })
+ source = "${path.module}/../website/index.html"
content_type = "text/html"
+ etag = filemd5("${path.module}/../website/index.html")
}
output "api_endpoint" {
diff --git a/01-serverless-app/website/index.html b/01-serverless-app/website/index.html
new file mode 100644
index 0000000..6b21e27
--- /dev/null
+++ b/01-serverless-app/website/index.html
@@ -0,0 +1,729 @@
+
+
+
+
+
+ Order Processing — LocalStack Workshop
+
+
+
+
+
+
+ Order Processing Pipeline
+ LocalStack Workshop
+
+
+
+
+
+
+ Navigation
+
+ 📦 Orders
+
+
+ 🛍️ Products
+
+
+ ℹ️ About
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Order ID
+ Item
+ Qty
+ Created
+ Status
+ Pipeline
+
+
+
+ Loading…
+
+
+
+
+
+
+
+
+
Products
+
+
+
+ Name
+ Description
+ Price
+
+
+
+ Loading…
+
+
+
+
+
+
+
+
+
What is this?
+
+ A fully local serverless order-processing pipeline running on
+ LocalStack — an AWS cloud emulator that lets you develop
+ and test cloud-native applications on your laptop without touching a
+ real AWS account. Every service runs inside a single Docker container.
+
+
+flowchart LR
+ Browser -->|POST /orders| APIGW[API Gateway]
+ APIGW --> OH[Lambda\norder-handler]
+ OH -->|put_item| DDB[(DynamoDB)]
+ OH -->|send_message| SQS[SQS Queue]
+ SQS -->|on failure x3| DLQ[SQS DLQ]
+ SQS --> OP[Lambda\norder-processor]
+ OP -->|start_execution| SFN[Step Functions]
+ SFN --> V[Validate]
+ SFN --> P[Payment]
+ SFN --> F[Fulfill]
+ V -->|update status| DDB
+ P -->|update status| DDB
+ F -->|update status| DDB
+ F -->|put_object| S3[(S3 Receipts)]
+
+
+
+
+
Architecture
+
+
+
API Layer
+
+ API Gateway custom ID workshop — exposes POST /orders, GET /orders, GET /products, POST /orders/replay
+ Lambda order-handler validates input, persists to DynamoDB, enqueues on SQS
+
+
+
+
Processing
+
+ SQS orders-queue decouples ingestion from processing; orders-dlq catches failures after 3 retries
+ Lambda order-processor consumes SQS and starts a Step Functions execution per order
+ Step Functions orchestrates: Validate → Wait(3s) → Payment → Wait(3s) → Fulfill, with error catch on every step
+
+
+
+
Storage
+
+ DynamoDB orders table — tracks order status and per-step timestamps (validating_at, payment_at, …)
+ S3 order-receipts bucket — stores a receipt JSON at receipts/<order_id>.json on fulfillment
+
+
+
+
+
+
+
Chaos Engineering
+
+
+
What we inject
+
Toggle Chaos Mode in the sidebar to inject a
+ ProvisionedThroughputExceededException
+ on every DynamoDB UpdateItem call via
+ the LocalStack Chaos API (POST /_localstack/chaos/faults).
+
+
+
What breaks
+
+ The order-processor Lambda tries to update order status before starting the state machine
+ DynamoDB throws → Lambda fails → SQS retries the message 3×
+ After 3 failures the message is routed to the DLQ
+ Order stays stuck in pending indefinitely
+
+
+
+
Recovery
+
+ Disable chaos mode to restore normal DynamoDB behaviour
+ Replay the DLQ (POST /orders/replay, or make replay-dlq) to re-queue stuck messages — orders resume processing through the full pipeline
+
+
+
+
+
+
+
Try it out
+
+
+
Happy path
+
+ Go to Orders and place an order
+ Watch the status badge and pipeline bar update in real time
+ Expand the row to see per-step timestamps
+
+
+
+
Chaos scenario
+
+ Enable Chaos Mode in the sidebar
+ Place one or more orders — they pile up in pending
+ Disable chaos — orders automatically recover and fulfill
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/01-serverless-app/website/index.html.tpl b/01-serverless-app/website/index.html.tpl
deleted file mode 100644
index 6bb8463..0000000
--- a/01-serverless-app/website/index.html.tpl
+++ /dev/null
@@ -1,212 +0,0 @@
-
-
-
-
-
- Order Processing — LocalStack Workshop
-
-
-
-
-
- Order Processing Pipeline
- LocalStack Workshop
-
-
-
- API endpoint: ${api_endpoint}
-
-
-
-
-
-
-
-
- Order ID
- Item
- Qty
- Status
-
-
-
- Loading…
-
-
-
-
-
-
-
-
diff --git a/04-chaos-engineering/faults/ddb-throttle-localstack.json b/04-chaos-engineering/faults/ddb-throttle-localstack.json
index 997b394..24cbf51 100644
--- a/04-chaos-engineering/faults/ddb-throttle-localstack.json
+++ b/04-chaos-engineering/faults/ddb-throttle-localstack.json
@@ -2,7 +2,9 @@
{
"service": "dynamodb",
"operation": "UpdateItem",
- "error": "ProvisionedThroughputExceededException",
- "rate": 1.0
+ "error": {
+ "code": "ProvisionedThroughputExceededException",
+ "statusCode": 400
+ }
}
]
diff --git a/04-chaos-engineering/scripts/replay_dlq.py b/04-chaos-engineering/scripts/replay_dlq.py
index 9566c3e..37c59dd 100644
--- a/04-chaos-engineering/scripts/replay_dlq.py
+++ b/04-chaos-engineering/scripts/replay_dlq.py
@@ -15,7 +15,12 @@
aws_secret_access_key="test",
)
-data = json.load(sys.stdin)
+raw = sys.stdin.read().strip()
+if not raw:
+ print("DLQ is empty.")
+ sys.exit(0)
+
+data = json.loads(raw)
messages = data.get("Messages", [])
if not messages:
diff --git a/Makefile b/Makefile
index 7289729..cf14524 100644
--- a/Makefile
+++ b/Makefile
@@ -28,7 +28,8 @@ init: ## Initialise Terraform (only needed once)
cd $(TERRAFORM_DIR) && tflocal init
deploy: ## Deploy the full app to LocalStack via Terraform
- cd $(TERRAFORM_DIR) && tflocal init && tflocal apply -auto-approve
+ @[ -d $(TERRAFORM_DIR)/.terraform ] || (cd $(TERRAFORM_DIR) && tflocal init)
+ cd $(TERRAFORM_DIR) && tflocal apply -auto-approve
destroy: ## Tear down all deployed resources
cd $(TERRAFORM_DIR) && tflocal destroy -auto-approve
@@ -63,7 +64,8 @@ inject-fault: ## Inject DynamoDB throttling fault (breaks order_processor)
-d @04-chaos-engineering/faults/ddb-throttle-localstack.json | python3 -m json.tool
remove-fault: ## Remove all active fault injections
- curl -s -X DELETE http://localhost:4566/_localstack/chaos/faults
+ curl -s -X POST http://localhost:4566/_localstack/chaos/faults \
+ -H "Content-Type: application/json" -d '[]'
replay-dlq: ## Replay messages from the DLQ back to the main queue
awslocal sqs receive-message \