From 01cf641b21fd76ab69a608af1ad20c9a1deb6a23 Mon Sep 17 00:00:00 2001
From: Aaron
Date: Sun, 24 Sep 2017 17:51:30 -0500
Subject: [PATCH 1/3] Add function to deploy using S3 as source

---
 README.rst               | 16 +++++--
 aws_lambda/__init__.py   |  2 +-
 aws_lambda/aws_lambda.py | 98 ++++++++++++++++++++++++++++++++--------
 scripts/lambda           |  7 +++
 4 files changed, 99 insertions(+), 24 deletions(-)

diff --git a/README.rst b/README.rst
index 6e851dc..8444944 100644
--- a/README.rst
+++ b/README.rst
@@ -167,14 +167,20 @@ This would create environment variables in the lambda instance upon deploy. If y
 Uploading to S3
 ===============
-You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the `upload` command to send the bundle to an S3 bucket of your choosing.
-Before doing this, you will need to set the following variables in `config.yaml`:
-```
+You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing.
+Before doing this, you will need to set the following variables in ``config.yaml`:
+
+.. code:: yaml
+
 role: basic_s3_upload
 bucket_name: 'example-bucket'
 s3_key_prefix: 'path/to/file/'
-```
-Your role must have `s3:PutObject` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute `lambda upload` to initiate the transfer.
+
+Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer.
+
+Deploying via S3
+================
+You can also use S3 as the source for your Lambda deployments by issuing ``lambda deploy_s3`` with the same variables/AWS permissions you would set for the ``upload`` command.
 
 Development
 ===========
diff --git a/aws_lambda/__init__.py b/aws_lambda/__init__.py
index 23cf95e..e15bdee 100755
--- a/aws_lambda/__init__.py
+++ b/aws_lambda/__init__.py
@@ -4,7 +4,7 @@
 __email__ = 'nficano@gmail.com'
 __version__ = '1.0.1'
 
-from .aws_lambda import deploy, invoke, init, build, upload, cleanup_old_versions
+from .aws_lambda import deploy, deploy_s3, invoke, init, build, upload, cleanup_old_versions
 
 # Set default logging handler to avoid "No handler found" warnings.
 import logging
diff --git a/aws_lambda/aws_lambda.py b/aws_lambda/aws_lambda.py
index 709574f..c0ee933 100755
--- a/aws_lambda/aws_lambda.py
+++ b/aws_lambda/aws_lambda.py
@@ -95,6 +95,33 @@ def deploy(src, requirements=False, local_package=None):
     else:
         create_function(cfg, path_to_zip_file)
 
+def deploy_s3(src, requirements=False, local_package=None):
+    """Deploys a new function to AWS Lambda using S3 as the code source.
+
+    :param str src:
+        The path to your Lambda-ready project (the folder must contain a
+        valid config.yaml and a handler module, e.g. service.py).
+    :param str local_package:
+        The path to a local package which should be included in the deploy
+        as well (and/or is not available on PyPI).
+    """
+    # Load and parse the config file.
+    path_to_config_file = os.path.join(src, 'config.yaml')
+    cfg = read(path_to_config_file, loader=yaml.load)
+
+    # Copy all the pip dependencies required to run your code into a
+    # temporary folder, then add the handler file to the root of that
+    # directory. Zip the contents of the folder into a single file and
+    # output it to the dist directory.
+    path_to_zip_file = build(src, requirements, local_package)
+
+    use_s3 = True
+    s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
+    if function_exists(cfg, cfg.get('function_name')):
+        update_function(cfg, path_to_zip_file, use_s3, s3_file)
+    else:
+        create_function(cfg, path_to_zip_file, use_s3, s3_file)
+
 def upload(src, requirements=False, local_package=None):
     """Uploads a new function to AWS S3.
 
@@ -363,7 +390,7 @@ def get_client(client, aws_access_key_id, aws_secret_access_key, region=None):
     )
 
 
-def create_function(cfg, path_to_zip_file):
+def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
     """Register and upload a function to AWS Lambda."""
     print('Creating your new Lambda function')
@@ -378,21 +405,41 @@
                         cfg.get('region'))
 
     # Do we prefer development variable over config?
+    buck_name = (
+        os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
+    )
     func_name = (
         os.environ.get('LAMBDA_FUNCTION_NAME') or cfg.get('function_name')
     )
     print('Creating lambda function with name: {}'.format(func_name))
-    kwargs = {
-        'FunctionName': func_name,
-        'Runtime': cfg.get('runtime', 'python2.7'),
-        'Role': role,
-        'Handler': cfg.get('handler'),
-        'Code': {'ZipFile': byte_stream},
-        'Description': cfg.get('description'),
-        'Timeout': cfg.get('timeout', 15),
-        'MemorySize': cfg.get('memory_size', 512),
-        'Publish': True
-    }
+
+    if use_s3:
+        kwargs = {
+            'FunctionName': func_name,
+            'Runtime': cfg.get('runtime', 'python2.7'),
+            'Role': role,
+            'Handler': cfg.get('handler'),
+            'Code': {
+                'S3Bucket': buck_name,
+                'S3Key': s3_file
+            },
+            'Description': cfg.get('description'),
+            'Timeout': cfg.get('timeout', 15),
+            'MemorySize': cfg.get('memory_size', 512),
+            'Publish': True
+        }
+    else:
+        kwargs = {
+            'FunctionName': func_name,
+            'Runtime': cfg.get('runtime', 'python2.7'),
+            'Role': role,
+            'Handler': cfg.get('handler'),
+            'Code': {'ZipFile': byte_stream},
+            'Description': cfg.get('description'),
+            'Timeout': cfg.get('timeout', 15),
+            'MemorySize': cfg.get('memory_size', 512),
+            'Publish': True
+        }
 
     if 'environment_variables' in cfg:
         kwargs.update(
@@ -408,7 +455,7 @@
     client.create_function(**kwargs)
 
 
-def update_function(cfg, path_to_zip_file):
+def update_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
     """Updates the code of an existing Lambda function"""
     print('Updating your Lambda function')
@@ -422,12 +469,25 @@
     client = get_client('lambda', aws_access_key_id, aws_secret_access_key,
                         cfg.get('region'))
 
-    client.update_function_code(
-        FunctionName=cfg.get('function_name'),
-        ZipFile=byte_stream,
-        Publish=True
+    # Do we prefer development variable over config?
+    buck_name = (
+        os.environ.get('S3_BUCKET_NAME') or cfg.get('bucket_name')
     )
+
+    if use_s3:
+        client.update_function_code(
+            FunctionName=cfg.get('function_name'),
+            S3Bucket=buck_name,
+            S3Key=s3_file,
+            Publish=True
+        )
+    else:
+        client.update_function_code(
+            FunctionName=cfg.get('function_name'),
+            ZipFile=byte_stream,
+            Publish=True
+        )
+
     kwargs = {
         'FunctionName': cfg.get('function_name'),
         'Role': role,
@@ -454,7 +514,7 @@
     client.update_function_configuration(**kwargs)
 
 
-def upload_s3(cfg, path_to_zip_file):
+def upload_s3(cfg, path_to_zip_file, use_s3=False):
     """Upload a function to AWS S3."""
     print('Uploading your new Lambda function')
@@ -487,6 +547,8 @@
     client.put_object(**kwargs)
     print('Finished uploading {} to S3 bucket {}'.format(func_name, buck_name))
 
+    if use_s3:
+        return filename
 
 def function_exists(cfg, function_name):
     """Check whether a function exists or not"""
diff --git a/scripts/lambda b/scripts/lambda
index 257a52b..f95c939 100755
--- a/scripts/lambda
+++ b/scripts/lambda
@@ -52,6 +52,12 @@ def deploy(use_requirements, local_package):
 def upload(use_requirements, local_package):
     aws_lambda.upload(CURRENT_DIR, use_requirements, local_package)
 
+@click.command(help="Deploy your lambda via S3.")
+@click.option('--use-requirements', default=False, is_flag=True, help='Install all packages defined in requirements.txt')
+@click.option('--local-package', default=None, help='Install local package as well.', type=click.Path(), multiple=True)
+def deploy_s3(use_requirements, local_package):
+    aws_lambda.deploy_s3(CURRENT_DIR, use_requirements, local_package)
+
 @click.command(help="Delete old versions of your functions")
 @click.option("--keep-last", type=int, prompt="Please enter the number of recent versions to keep")
 def cleanup(keep_last):
@@ -62,6 +68,7 @@ if __name__ == '__main__':
     cli.add_command(invoke)
     cli.add_command(deploy)
     cli.add_command(upload)
+    cli.add_command(deploy_s3)
     cli.add_command(build)
     cli.add_command(cleanup)
     cli()

From 8740957ca03174e6b356a96c3b75709d2229e6e9 Mon Sep 17 00:00:00 2001
From: Aaron
Date: Sun, 24 Sep 2017 17:54:51 -0500
Subject: [PATCH 2/3] Fix typo in README

---
 README.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.rst b/README.rst
index 8444944..c5e111b 100644
--- a/README.rst
+++ b/README.rst
@@ -168,7 +168,7 @@ This would create environment variables in the lambda instance upon deploy. If y
 Uploading to S3
 ===============
 You may find that you do not need the toolkit to fully deploy your Lambda or that your code bundle is too large to upload via the API. You can use the ``upload`` command to send the bundle to an S3 bucket of your choosing.
-Before doing this, you will need to set the following variables in ``config.yaml`:
+Before doing this, you will need to set the following variables in ``config.yaml``:
 
 .. code:: yaml
 

From c7fccf9a42ab739bcf6b72effaeca21731b913a8 Mon Sep 17 00:00:00 2001
From: Aaron
Date: Sun, 24 Sep 2017 17:56:24 -0500
Subject: [PATCH 3/3] Fix reST formatting in README

---
 README.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.rst b/README.rst
index c5e111b..2560d75 100644
--- a/README.rst
+++ b/README.rst
@@ -172,9 +172,9 @@ Before doing this, you will need to set the following variables in ``config.yaml
 
 .. code:: yaml
 
-role: basic_s3_upload
-bucket_name: 'example-bucket'
-s3_key_prefix: 'path/to/file/'
+   role: basic_s3_upload
+   bucket_name: 'example-bucket'
+   s3_key_prefix: 'path/to/file/'
 
 Your role must have ``s3:PutObject`` permission on the bucket/key that you specify for the upload to work properly. Once you have that set, you can execute ``lambda upload`` to initiate the transfer.
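
For reference, a minimal sketch of what the S3-backed path in this series boils down to at the boto3 level, assuming the bundle has already been uploaded to the configured bucket. The bucket, key, region, and function names below are placeholders standing in for the values ``config.yaml`` and ``upload_s3`` would supply; they are not taken from the patches themselves.

.. code:: python

    import boto3

    client = boto3.client('lambda', region_name='us-east-1')

    # Placeholders: in the patches these come from config.yaml
    # ('bucket_name', 's3_key_prefix') and the uploaded bundle's name.
    bucket = 'example-bucket'
    key = 'path/to/file/function.zip'

    # update_function_code accepts S3Bucket/S3Key in place of ZipFile,
    # which is what lets deploy_s3 skip the direct API upload.
    client.update_function_code(
        FunctionName='example-function',
        S3Bucket=bucket,
        S3Key=key,
        Publish=True,
    )

``create_function`` takes the same pair nested under ``Code={'S3Bucket': ..., 'S3Key': ...}``, which is the shape the first patch uses when the function does not yet exist.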