2 changes: 2 additions & 0 deletions README.md
@@ -71,6 +71,8 @@ function name unique per region, for example by setting
| attach\_dead\_letter\_config | Set this to true if using the dead_letter_config variable | string | `"false"` | no |
| attach\_policy | Set this to true if using the policy variable | string | `"false"` | no |
| attach\_vpc\_config | Set this to true if using the vpc_config variable | string | `"false"` | no |
| build\_command | The command that creates the Lambda package zip file | string | `"python build.py '$filename' '$runtime' '$source'"` | no |
| build\_paths | The files or directories used by the build command; changes to these trigger new Lambda package builds | list | `<list>` | no |
| dead\_letter\_config | Dead letter configuration for the Lambda function | map | `<map>` | no |
| description | Description of what your Lambda function does | string | `"Managed by Terraform"` | no |
| enable\_cloudwatch\_logs | Set this to false to disable logging your Lambda output to CloudWatch Logs | string | `"true"` | no |
21 changes: 15 additions & 6 deletions archive.tf
@@ -1,11 +1,18 @@
locals {
module_relpath = "${substr(path.module, length(path.cwd) + 1, -1)}"
}

# Generates a filename for the zip archive based on the contents of the files
# in source_path. The filename will change when the source code changes.
data "external" "archive" {
program = ["python", "${path.module}/hash.py"]

query = {
runtime = "${var.runtime}"
source_path = "${var.source_path}"
build_command = "${var.build_command}"
build_paths = "${jsonencode(var.build_paths)}"
module_relpath = "${local.module_relpath}"
runtime = "${var.runtime}"
source_path = "${var.source_path}"
}
}

@@ -16,7 +23,8 @@ resource "null_resource" "archive" {
}

provisioner "local-exec" {
command = "${lookup(data.external.archive.result, "build_command")}"
command = "${lookup(data.external.archive.result, "build_command")}"
working_dir = "${path.module}"
}
}

@@ -29,8 +37,9 @@ data "external" "built" {
program = ["python", "${path.module}/built.py"]

query = {
build_command = "${lookup(data.external.archive.result, "build_command")}"
filename_old = "${lookup(null_resource.archive.triggers, "filename")}"
filename_new = "${lookup(data.external.archive.result, "filename")}"
build_command = "${lookup(data.external.archive.result, "build_command")}"
filename_old = "${lookup(null_resource.archive.triggers, "filename")}"
filename_new = "${lookup(data.external.archive.result, "filename")}"
module_relpath = "${local.module_relpath}"
}
}
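
For orientation, here is a rough sketch of the JSON exchanged with hash.py through the external data source above. The query keys mirror the query block in archive.tf, and archive.tf reads build_command and filename back out of the result; all concrete values below are assumptions for illustration, not real module output.

import json

# Query that Terraform sends to hash.py on stdin (keys from archive.tf,
# values invented for this sketch).
query = {
    "build_command": "python build.py '$filename' '$runtime' '$source'",
    "build_paths": json.dumps(["build.py"]),  # passed as a JSON-encoded string
    "module_relpath": ".terraform/modules/lambda",
    "runtime": "python3.7",
    "source_path": "lambda/src",
}

# Result that archive.tf expects back: at least the substituted build
# command and the content-addressed zip filename.
result = {
    "build_command": "python build.py 'builds/<sha256>.zip' 'python3.7' 'lambda/src'",
    "filename": "builds/<sha256>.zip",
}

print(json.dumps(query, indent=2))
print(json.dumps(result, indent=2))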
11 changes: 4 additions & 7 deletions build.py
@@ -1,8 +1,6 @@
# Builds a zip file from the source_dir or source_file.
# Installs dependencies with pip automatically.

import base64
import json
import os
import shutil
import subprocess
@@ -105,11 +103,10 @@ def create_zip_file(source_dir, target_file):
root_dir=source_dir,
)

json_payload = bytes.decode(base64.b64decode(sys.argv[1]))
query = json.loads(json_payload)
filename = query['filename']
runtime = query['runtime']
source_path = query['source_path']

filename = sys.argv[1]
runtime = sys.argv[2]
source_path = sys.argv[3]

absolute_filename = os.path.abspath(filename)

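
With this change, build.py takes three plain positional arguments instead of a base64-encoded JSON payload. A hypothetical direct invocation, with made-up paths, might look like:

import subprocess

# Equivalent to what the substituted build_command runs; all values are
# illustrative, not taken from a real build.
subprocess.check_call([
    "python", "build.py",
    "builds/0123abcd.zip",  # sys.argv[1]: filename of the zip to create
    "python3.7",            # sys.argv[2]: runtime
    "lambda/src",           # sys.argv[3]: source_path to package
])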
1 change: 1 addition & 0 deletions builds/.gitignore
@@ -0,0 +1 @@
*.zip
5 changes: 3 additions & 2 deletions built.py
@@ -12,6 +12,7 @@
build_command = query['build_command']
filename_old = query['filename_old']
filename_new = query['filename_new']
module_relpath = query['module_relpath']

# If the old filename (from the Terraform state) matches the new filename
# (from hash.py) then the source code has not changed and thus the zip file
@@ -29,10 +30,10 @@
# console) then it is possible that Terraform will try to upload
# the missing file. I don't know how to tell if Terraform is going
# to try to upload the file or not, so always ensure the file exists.
subprocess.check_output(build_command, shell=True)
subprocess.check_output(build_command, shell=True, cwd=module_relpath)

# Output the filename to Terraform.
json.dump({
'filename': filename_new,
'filename': module_relpath + '/' + filename_new,
}, sys.stdout, indent=2)
sys.stdout.write('\n')
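
The filename returned to Terraform is now prefixed with module_relpath, presumably so the path resolves from the root module's working directory rather than from inside this module. A sketch of the output shape, with an assumed module path and hash:

import json
import sys

# Illustrative output only; the module path and hash are invented.
json.dump({
    "filename": ".terraform/modules/lambda/builds/0123abcd.zip",
}, sys.stdout, indent=2)
sys.stdout.write("\n")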
109 changes: 46 additions & 63 deletions hash.py
@@ -3,20 +3,14 @@
#
# Outputs a filename and a command to run if the archive needs to be built.

import base64
import datetime
import errno
import hashlib
import json
import os
import re
import sys


FILENAME_PREFIX = 'terraform-aws-lambda-'
FILENAME_PATTERN = re.compile(r'^' + FILENAME_PREFIX + r'[0-9a-f]{64}\.zip$')


def abort(message):
"""
Exits with an error message.
@@ -36,24 +30,21 @@ def delete_old_archives():
now = datetime.datetime.now()
delete_older_than = now - datetime.timedelta(days=7)

top = '.terraform'
if os.path.isdir(top):
for name in os.listdir(top):
if FILENAME_PATTERN.match(name):
path = os.path.join(top, name)
try:
file_modified = datetime.datetime.fromtimestamp(
os.path.getmtime(path)
)
if file_modified < delete_older_than:
os.remove(path)
except OSError as error:
if error.errno == errno.ENOENT:
# Ignore "not found" errors as they are probably race
# conditions between multiple usages of this module.
pass
else:
raise
for name in os.listdir('builds'):
if name.endswith('.zip'):
path = os.path.join('builds', name)
try:
file_modified = datetime.datetime.fromtimestamp(
os.path.getmtime(path)
)
if file_modified < delete_older_than:
os.remove(path)
except OSError as error:
if error.errno == errno.ENOENT:
# Ignore "not found" errors as they are probably race
# conditions between multiple usages of this module.
pass
else:
raise


def list_files(top_path):
Expand All @@ -72,22 +63,23 @@ def list_files(top_path):
return results


def generate_content_hash(source_path):
def generate_content_hash(source_paths):
"""
Generate a content hash of the source path.
Generate a content hash of the source paths.

"""

sha256 = hashlib.sha256()

if os.path.isdir(source_path):
source_dir = source_path
for source_file in list_files(source_dir):
for source_path in source_paths:
if os.path.isdir(source_path):
source_dir = source_path
for source_file in list_files(source_dir):
update_hash(sha256, source_dir, source_file)
else:
source_dir = os.path.dirname(source_path)
source_file = source_path
update_hash(sha256, source_dir, source_file)
else:
source_dir = os.path.dirname(source_path)
source_file = source_path
update_hash(sha256, source_dir, source_file)

return sha256

@@ -109,51 +101,42 @@ def update_hash(hash_obj, file_root, file_path):
hash_obj.update(data)



current_dir = os.path.dirname(__file__)

# Parse the query.
if len(sys.argv) > 1 and sys.argv[1] == '--test':
query = {
'runtime': 'python3.6',
'source_path': os.path.join(current_dir, 'tests', 'python3-pip', 'lambda'),
}
else:
query = json.load(sys.stdin)
query = json.load(sys.stdin)
build_command = query['build_command']
build_paths = json.loads(query['build_paths'])
module_relpath = query['module_relpath']
runtime = query['runtime']
source_path = query['source_path']

# Validate the query.
if not source_path:
abort('source_path must be set.')

# Change working directory to the module path
# so references to build.py will work.
os.chdir(module_relpath)

# Generate a hash based on file names and content. Also use the
# runtime value and content of build.py because they can have an
# effect on the resulting archive.
content_hash = generate_content_hash(source_path)
# runtime value, build command, and content of the build paths
# because they can have an effect on the resulting archive.
content_hash = generate_content_hash([source_path] + build_paths)
content_hash.update(runtime.encode())
with open(os.path.join(current_dir, 'build.py'), 'rb') as build_script_file:
content_hash.update(build_script_file.read())
content_hash.update(build_command.encode())

# Generate a unique filename based on the hash.
filename = '.terraform/{prefix}{content_hash}.zip'.format(
prefix=FILENAME_PREFIX,
filename = 'builds/{content_hash}.zip'.format(
content_hash=content_hash.hexdigest(),
)

# Determine the command to run if Terraform wants to build a new archive.
build_command = "python {build_script} {build_data}".format(
build_script=os.path.join(current_dir, 'build.py'),
build_data=bytes.decode(base64.b64encode(str.encode(
json.dumps({
'filename': filename,
'source_path': source_path,
'runtime': runtime,
})
)
),
)
)
# Replace variables in the build command with calculated values.
replacements = {
'$filename': filename,
'$runtime': runtime,
'$source': source_path,
}
for old, new in replacements.items():
build_command = build_command.replace(old, new)

# Delete previous archives.
delete_old_archives()
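
The placeholder substitution above is the contract behind the '$filename', '$runtime' and '$source' tokens documented for build_command. A worked example with assumed values:

# Mirrors the replacements loop in hash.py; the concrete values are assumptions.
build_command = "python build.py '$filename' '$runtime' '$source'"
replacements = {
    "$filename": "builds/0123abcd.zip",
    "$runtime": "python3.7",
    "$source": "lambda/src",
}
for old, new in replacements.items():
    build_command = build_command.replace(old, new)

print(build_command)
# python build.py 'builds/0123abcd.zip' 'python3.7' 'lambda/src'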
1 change: 1 addition & 0 deletions tests/.tool-versions
@@ -0,0 +1 @@
terraform 0.11.11
39 changes: 39 additions & 0 deletions tests/build-command/lambda/build.sh
@@ -0,0 +1,39 @@
#!/bin/bash
#
# Compiles a Python package into a zip deployable on AWS Lambda.
#
# - Builds Python dependencies into the package, using a Docker image to
# correctly build native extensions
# - Able to be used with the terraform-aws-lambda module
#
# Dependencies:
#
# - Docker
#
# Usage:
#
# $ ./build.sh <output-zip-filename> <runtime> <source-path>

set -euo pipefail

# Read variables from command line arguments
FILENAME=$1
RUNTIME=$2
SOURCE_PATH=$3

# Convert to absolute paths
SOURCE_DIR=$(cd "$SOURCE_PATH" && pwd)
ZIP_DIR=$(cd "$(dirname "$FILENAME")" && pwd)
ZIP_NAME=$(basename "$FILENAME")

# Install dependencies, using a Docker image to correctly build native extensions
docker run --rm -t -v "$SOURCE_DIR:/src" -v "$ZIP_DIR:/out" lambci/lambda:build-$RUNTIME sh -c "
cp -r /src /build &&
cd /build &&
pip install --progress-bar off -r requirements.txt -t . &&
chmod -R 755 . &&
zip -r /out/$ZIP_NAME * &&
chown \$(stat -c '%u:%g' /out) /out/$ZIP_NAME
"

echo "Created $FILENAME from $SOURCE_PATH"
6 changes: 6 additions & 0 deletions tests/build-command/lambda/src/main.py
@@ -0,0 +1,6 @@
def lambda_handler(event, context):
print('importing numpy package')
import numpy as np
print('checking numpy works correctly')
assert np.array_equal(np.array([1, 2]) + 3, np.array([4, 5]))
return 'test passed'
3 changes: 3 additions & 0 deletions tests/build-command/lambda/src/requirements.txt
@@ -0,0 +1,3 @@
# numpy has native extensions, so it needs a custom build script to
# install correctly if your host OS differs from the Lambda OS
numpy
23 changes: 23 additions & 0 deletions tests/build-command/main.tf
@@ -0,0 +1,23 @@
terraform {
backend "local" {
path = "terraform.tfstate"
}
}

provider "aws" {
region = "eu-west-1"
}

module "lambda" {
source = "../../"

function_name = "terraform-aws-lambda-test-build-command"
description = "Test custom build command functionality in terraform-aws-lambda"
handler = "main.lambda_handler"
runtime = "python3.7"

source_path = "${path.module}/lambda/src"

build_command = "${path.module}/lambda/build.sh '$filename' '$runtime' '$source'"
build_paths = ["${path.module}/lambda/build.sh"]
}
12 changes: 12 additions & 0 deletions variables.tf
@@ -36,6 +36,18 @@ variable "source_path" {
type = "string"
}

variable "build_command" {
description = "The command that creates the Lambda package zip file"
type = "string"
default = "python build.py '$filename' '$runtime' '$source'"
}

variable "build_paths" {
description = "The files or directories used by the build command, to trigger new Lambda package builds whenever build scripts change"
type = "list"
default = ["build.py"]
}

variable "description" {
description = "Description of what your Lambda function does"
type = "string"