Add credstash implementation to obtain db password #201

Open · wants to merge 12 commits into master
21 changes: 21 additions & 0 deletions common.js
@@ -12,6 +12,8 @@ var async = require('async');
var uuid = require('uuid');
require('./constants');
var pjson = require('./package.json');
const Credstash = require('credstash');
const credstash = new Credstash();

// function which creates a string representation of now suitable for use in S3
// paths
@@ -805,4 +807,23 @@ exports.reprocessFile = function (dynamoDB, s3, region, file, callback) {
}
}
});
}

/**
 * Get a value from credstash for the given key
 *
 * @param {string} key The credstash key to look up
 *
 * @return {Promise} a promise that resolves with the secret value, usable with async/await
*/
exports.credstashValue = function(key) {
return new Promise((resolve, reject) => {
credstash.get(key, (e, secret) => {
if (e) {
reject(e);
} else {
resolve(secret);
}
});
});
}
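
For context, a minimal sketch of how this helper could be consumed from async code elsewhere in the loader, assuming credstash and its KMS key are already configured; the key name 'redshift-db-password' is purely hypothetical:

const common = require('./common');

async function fetchDbPassword() {
    // 'redshift-db-password' is a hypothetical credstash key, used only for illustration
    const secret = await common.credstashValue('redshift-db-password');
    return secret;
}
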
103 changes: 51 additions & 52 deletions deploy.yaml
@@ -1,19 +1,19 @@
#This is deploy.yaml V 0.9
#This file will set up most of what is needed to configure the Lambda Redshift database
#loader
#It will not create the KMS key required for encrypting the database password
#The user will need to create that key and assign it the alias "LambdaRedshiftLoaderKey"
#The config script will use the key by its alias
#
#Parameters and notes
#
#AvailabilityZone - Note that the stack will create in the current region by default, and
# the list of availability zones available will be created based on the
# region in which the user is creating the CloudFormation stack
#SecurityGroup - sets up the networking. Will require SSH and default Redshift port
# access.
#KeyName - the access key for SSH access to the driver EC2 instance.
#SubnetId - The subnet within your selected availability zone to use for the
# driver EC2 instance. Note that CloudFormation will *not* cross-check
# at runtime to verify that the subnet and availability zone match.

@@ -56,9 +56,9 @@ Mappings:
HVM64: "ami-0ad42f4f66f6c1cc9"
sa-east-1:
HVM64: "ami-05145e0b28ad8e0b2"

Parameters:
AvailabilityZone:
Description: The availability zone in which to set up the driver and trigger
Type: AWS::EC2::AvailabilityZone::Name
Default: us-east-1a
@@ -71,31 +71,31 @@ Parameters:
SubnetId:
Description: "The existing Subnet for the EC2 driver instance"
Type: AWS::EC2::Subnet::Id

Resources:
EC2LambdaSetupRole:
Type: "AWS::IAM::Role"
Properties:
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
-
Effect: "Allow"
Principal:
Service:
- "ec2.amazonaws.com"
Action:
- "sts:AssumeRole"
Path: "/"
Policies:
-
PolicyName: "EC2-lambda-setup"
PolicyDocument:
Version: "2012-10-17"
Statement:
-
Effect: "Allow"
Action:
- "dynamodb:CreateTable"
- "dynamodb:PutItem"
- "dynamodb:ListTables"
@@ -115,7 +115,7 @@ Resources:
- "lambda:GetFunctionConfiguration"
- "lambda:DeleteFunction"
- "lambda:GetPolicy"
- "s3:*"
- "s3:*"
- "kms:Encrypt"
- "kms:Decrypt"
- "kms:ReEncrypt*"
@@ -125,20 +125,20 @@

MyInstanceProfile:
Type: AWS::IAM::InstanceProfile
Properties:
Path: '/'
Roles:
- !Ref EC2LambdaSetupRole
InstanceProfileName: ec2-lambda-setup-instance-profile

MyEC2Instance:
Type: AWS::EC2::Instance
Properties:
AvailabilityZone: !Ref AvailabilityZone
IamInstanceProfile: 'ec2-lambda-setup-instance-profile'
BlockDeviceMappings:
- DeviceName: "/dev/xvda"
Ebs:
DeleteOnTermination: "true"
VolumeSize: "10"
ImageId: !FindInMap
@@ -147,7 +147,7 @@ Resources:
- HVM64
InstanceType: "t2.micro"
KeyName: !Ref KeyName
SecurityGroupIds:
- !Ref SecurityGroup
SubnetId: !Ref SubnetId
UserData:
@@ -158,7 +158,7 @@ Resources:
yum install git -y
sudo -u ec2-user git clone https://github.com/awslabs/aws-lambda-redshift-loader/ /home/ec2-user/aws-lambda-redshift-loader
sudo -u ec2-user git clone https://github.com/creationix/nvm.git /home/ec2-user/.nvm
chmod 755 /home/ec2-user/.nvm/nvm.sh
sudo -u ec2-user [ -s "/home/ec2-user/.nvm/nvm.sh" ] && \. "/home/ec2-user/.nvm/nvm.sh"
sudo -u ec2-user echo 'export NVM_DIR="$HOME/.nvm"' >> /home/ec2-user/.bashrc
sudo -u ec2-user echo '[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"' >> /home/ec2-user/.bashrc
@@ -172,35 +172,35 @@ Resources:
sudo chown ec2-user:ec2-user /usr/bin/setup.sh
sudo chmod +x /usr/bin/setup.sh
sudo -u ec2-user setup.sh
Tags:
- Key: "Name"
Value: "CF Created Instance"
- Key: "Purpose"
Value: "Lambda Trigger Setup"

EC2LambdaTriggerRole:
Type: "AWS::IAM::Role"
Properties:
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
-
Effect: "Allow"
Principal:
Service:
- lambda.amazonaws.com
Action:
- sts:AssumeRole
Path: "/"
Policies:
-
PolicyName: "EC2-lambda-trigger-policy"
PolicyDocument:
Version: "2012-10-17"
Statement:
-
Effect: "Allow"
Action:
- "dynamodb:DeleteItem"
- "dynamodb:DescribeTable"
- "dynamodb:GetItem"
@@ -232,7 +232,6 @@ Resources:
- Arn
Code:
S3Bucket: !Sub awslabs-code-${AWS::Region}
S3Key: 'LambdaRedshiftLoader/AWSLambdaRedshiftLoader-2.7.0.zip'
S3Key: 'LambdaRedshiftLoader/AWSLambdaRedshiftLoader-2.7.1.zip'
Runtime: nodejs8.10


Binary file added dist/AWSLambdaRedshiftLoader-2.7.1.zip
16 changes: 11 additions & 5 deletions index.js
@@ -10,13 +10,14 @@
var debug = process.env['DEBUG'] !== undefined
var pjson = require('./package.json');
var region = process.env['AWS_REGION'];
const awsXRay = require('aws-xray-sdk');

if (!region || region === null || region === "") {
region = "us-east-1";
console.log("AWS Lambda Redshift Database Loader using default region " + region);
}

var aws = require('aws-sdk');
const aws = awsXRay.captureAWS(require('aws-sdk'));
aws.config.update({
region: region
});
@@ -47,9 +48,10 @@ kmsCrypto.setRegion(region);
var common = require('./common');
var async = require('async');
var uuid = require('uuid');
const {Client} = require('pg');
const {Client} = awsXRay.capturePostgres(require('pg'));
const maxRetryMS = 200;


// empty import/invocation of the keepalive fix for node-postgres module
require('pg-ka-fix')();

@@ -1068,7 +1070,9 @@ exports.handler = function (event, context) {
}

// add the cluster password
encryptedItems[passwordKeyMapEntry] = new Buffer(clusterInfo.connectPassword.S, 'base64');
if (clusterInfo.connectPassword) {
encryptedItems[passwordKeyMapEntry] = new Buffer(clusterInfo.connectPassword.S, 'base64');
}

// add the master encryption key to the list of items to be decrypted,
// if there is one
@@ -1077,7 +1081,7 @@
}

// decrypt the encrypted items
kmsCrypto.decryptMap(encryptedItems, function (err, decryptedConfigItems) {
kmsCrypto.decryptMap(encryptedItems, async function (err, decryptedConfigItems) {
if (err) {
callback(err, {
status: ERROR,
@@ -1184,8 +1188,10 @@
console.log(copyCommand);
}

var password = clusterInfo.credstashPassKey && clusterInfo.credstashPassKey.S ? await common.credstashValue(clusterInfo.credstashPassKey.S) : encodeURIComponent(decryptedConfigItems[passwordKeyMapEntry].toString());

// build the connection string
var dbString = 'postgres://' + clusterInfo.connectUser.S + ":" + encodeURIComponent(decryptedConfigItems[passwordKeyMapEntry].toString()) + "@" + clusterInfo.clusterEndpoint.S + ":"
var dbString = 'postgres://' + clusterInfo.connectUser.S + ":" + password + "@" + clusterInfo.clusterEndpoint.S + ":"
+ clusterInfo.clusterPort.N;
if (clusterInfo.clusterDB) {
dbString = dbString + '/' + clusterInfo.clusterDB.S;
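
For reference, the password-selection logic introduced in the last hunk can be read as the sketch below; the resolvePassword wrapper is illustrative only and is not part of the diff:

// Illustrative restatement of the new logic in index.js (not part of the PR).
// If the cluster config carries a credstashPassKey, the password is fetched from
// credstash; otherwise the KMS-decrypted password from DynamoDB is used, as before.
async function resolvePassword(clusterInfo, decryptedConfigItems, passwordKeyMapEntry) {
    if (clusterInfo.credstashPassKey && clusterInfo.credstashPassKey.S) {
        // plaintext secret fetched via the new common.credstashValue helper
        return common.credstashValue(clusterInfo.credstashPassKey.S);
    }
    // existing path: KMS-decrypted password, URI-encoded for the connection string
    return encodeURIComponent(decryptedConfigItems[passwordKeyMapEntry].toString());
}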