Commit: Cleanup

Tuuleh committed Oct 18, 2018
1 parent f4d9290 commit 32ee119
Showing 18 changed files with 73 additions and 90 deletions.
3 changes: 2 additions & 1 deletion .env.test
@@ -14,4 +14,5 @@ export AK_MYSQL_HOST="iamahost",
export AK_MYSQL_USER="user",
export AK_MYSQL_PWD="pwd",
export AK_MYSQL_DB="mysli"
export MEMBER_SERVICES_EMAIL=info@example.com
export MEMBER_SERVICES_EMAIL=tuuli@sumofus.org
export LOCAL_TMP=true
1 change: 1 addition & 0 deletions .gitignore
@@ -15,6 +15,7 @@ coverage

# dotenv environment variables file
.env
.env.development
secrets.sh
.secrets

@@ -3,6 +3,5 @@ const AKMockData = {
};

export function AKSubjectAccessData(email) {
console.log('Im in the ak subject access data mock function');
return Promise.resolve(AKMockData);
}
1 change: 0 additions & 1 deletion lib/clients/actionkit/resources/akSubjectAccessData.js
@@ -5,7 +5,6 @@ import { subjectAccessQueryParser } from './subjectAccessQueries/subjectAccessQu
import { json2csv } from 'json-2-csv';

export function AKSubjectAccessData(email) {
console.log('UH OH, ACTUALLY CONNECTING TO MYSQL');
AKMysqlClient.connect();

return (
3 changes: 2 additions & 1 deletion lib/dynamodb/operationsLogger.js
@@ -69,7 +69,7 @@ export class OperationsLogger {
const names = { '#s': 'status', ...reduce(keys, namesReducer, {}) };
const values = reduce(keys, valuesReducer, {});

return {
const updateObject = {
TableName: this.tableName,
Key: {
id: record.id,
@@ -79,6 +79,7 @@
ExpressionAttributeNames: names,
ExpressionAttributeValues: values,
};
return updateObject;
}

dynamodbPutParams(logData: LogData) {
5 changes: 0 additions & 5 deletions lib/util/__mocks__/sendSAREmail.js

This file was deleted.

3 changes: 0 additions & 3 deletions lib/util/__mocks__/shipToS3.js

This file was deleted.

4 changes: 0 additions & 4 deletions lib/util/__mocks__/zipCSVFiles.js

This file was deleted.

14 changes: 5 additions & 9 deletions lib/util/processSubjectAccessRequest.js
@@ -40,14 +40,9 @@ export function SARconstructor(
sendEmail = sendEmail
) {
return function(data, processor, email) {
console.log(
'PROCESS SUBJECT ACCESS REQUEST (FO REAL NO MOCK)...',
data,
processor
);
return new Promise(function(resolve, reject) {
const tmpDir = `${__dirname}/tmp`;
console.log('TMPDIR:', tmpDir);
// Lambda only allows you to write to /tmp. On your local environment you probably don't want to write to /tmp.
const tmpDir = process.env.LOCAL_TMP ? `${__dirname}/tmp` : '/tmp';
fs.ensureDirSync(`${tmpDir}/csv`);

_forOwn(data, function(val, key) {
@@ -74,15 +69,16 @@
})
.then(function(_) {
// Makes sense to do cleanup in case the lambda environment gets reused for multiple invocations:
return fs.remove(tmpDir);
return fs.emptyDir(tmpDir);
})
.then(function(_) {
resolve(
`Subject Access Data for ${processor} successfully sent for ${
email
}`
);
});
})
.catch(reject);
});
};
}
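For context, here is a minimal sketch of the temp-directory pattern this hunk introduces, pulled out of the surrounding promise chain (it assumes the fs-extra import already present in the file and the LOCAL_TMP flag added to .env.test):

import fs from 'fs-extra';

// Lambda may only write under /tmp, and a warm container can be reused,
// so the working directory is emptied between invocations rather than removed.
const tmpDir = process.env.LOCAL_TMP ? `${__dirname}/tmp` : '/tmp';
fs.ensureDirSync(`${tmpDir}/csv`); // create the csv working directory if missing
// ... write the per-processor CSVs, zip them, ship the archive to S3 ...
fs.emptyDir(tmpDir).then(() => {
  // tmpDir itself is kept so the next (possibly warm) invocation can reuse it
});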
38 changes: 17 additions & 21 deletions lib/util/shipToS3.js
@@ -6,26 +6,22 @@ import fs from 'fs';
// Sends a file to a specified bucket on S3. Returns a promise that resolves with a temporary signed URL to the object
// that is valid for 10 minutes.
export function shipToS3(file, bucket) {
const s3 = new AWS.S3();

const signedUrlExpireSeconds = 60 * 10; // 10 minutes
const key = path.parse(file).base;
const readStream = fs.createReadStream(file);

const params = {
Bucket: bucket, // 'subjectAccessRequests'
Key: key, // e.g. tuuli@sumofus.org-champaign.zip
Body: readStream,
};

return s3
.upload(params)
.promise()
.then(function(_) {
return s3.getSignedUrl('getObject', {
Bucket: bucket,
Key: key,
Expires: signedUrlExpireSeconds,
return new Promise(function(resolve, reject) {
const readStream = fs.createReadStream(file);
const key = path.parse(file).base;
resolve({ Body: readStream, Key: key, Bucket: bucket });
}).then(function(params) {
const signedUrlExpireSeconds = 60 * 10; // 10 minutes
const s3 = new AWS.S3();
return s3
.upload(params)
.promise()
.then(function(_) {
return s3.getSignedUrl('getObject', {
Bucket: params.Bucket,
Key: params.Key,
Expires: signedUrlExpireSeconds,
});
});
});
});
}
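A hedged usage sketch of the rewritten shipToS3 — the file path and bucket name below are illustrative, not taken from this commit:

import { shipToS3 } from './shipToS3';

// Uploads the archive and resolves with a signed GET URL that expires after 10 minutes.
shipToS3('/tmp/member@example.com-champaign.zip', 'subject-access-requests')
  .then(signedUrl => console.log('Temporary download link:', signedUrl))
  .catch(err => console.error('Upload or signing failed:', err));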
13 changes: 8 additions & 5 deletions lib/util/zipCSVFiles.js
@@ -7,9 +7,14 @@ import { moveSync } from 'fs-extra';
// Takes a directory and a name of a zip file, zips the directory into filename.zip, moves the resulting zipfile into
// the directory, and returns a promise that resolves to the complete path of the resulting zipfile.
export function zipCSVFiles(dir, filename) {
console.log('In the real zip csv function :(');
return new Promise(function(resolve, reject) {
var output = fs.createWriteStream(filename);
// PROBLEM: On Lambda, I can only create files in /tmp. Archiver can only deal with a single level path passed to
// the directory argument when zipping a directory. I have to create a zipfile in /tmp and then I have to zip the
// contents of /tmp, which leads to an archive that includes both the /csv subdirectory, and the zipfile itself.
// I resolved this earlier by creating the zip file elsewhere and moving it to /tmp, but that throws an error on
// Lambda since you can only write to /tmp. This current solution includes the .zip in the archive.
const zipPath = `${dir}/${filename}`;
var output = fs.createWriteStream(zipPath);
var archive = archiver('zip', {
zlib: { level: 9 },
});
@@ -31,9 +36,7 @@
archive.finalize();

output.on('close', function() {
const destPath = `${dir}/${filename}`;
moveSync(filename, destPath, { overwrite: true });
resolve(destPath);
resolve(zipPath);
});
});
}
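Given the constraint described in the comment above, a small usage sketch (the directory and archive names are illustrative):

import { zipCSVFiles } from './zipCSVFiles';

// Zips everything under /tmp (including the csv/ subdirectory and, per the
// comment above, an entry for the in-progress zipfile) and resolves with the full path.
zipCSVFiles('/tmp', 'member@example.com-actionkit.zip')
  .then(zipPath => console.log('Archive written to', zipPath));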
11 changes: 3 additions & 8 deletions members-service/akSubjectAccessData.js
@@ -44,12 +44,6 @@ export const handlerFunc = (
return logger
.updateStatus(record, { actionkit: 'SUCCESS' })
.then(dynamodbSuccess => {
console.log(
'SUCCESSFUL SUBJECT ACCESS REQUEST EVENT - call callback with: ',
success,
' callback: ',
callback
);
return callback(null, success);
})
.catch(dynamodbError => {
@@ -60,11 +54,12 @@
return logger
.updateStatus(record, { actionkit: 'FAILURE' })
.then(dynamodbSuccess => {
return callback(err);
return callback(null, err);
})
.catch(dynamodbError => {
// Wow, nothing is going right today. The request failed AND DynamoDB didn't update the record.
return callback(dynamodbError);
// return a success response because we don't want a week of retries.
return callback(null, dynamodbError);
});
});
};
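The callback changes here (and in champaignSubjectAccessData.js below) lean on the Node.js Lambda convention that callback(err) marks the invocation as failed; for a stream-triggered function that means the same batch is retried until the records expire. A minimal sketch of the distinction, using a deliberately generic handler rather than the one in this commit:

exports.handler = (event, context, callback) => {
  // callback(new Error('boom')); // invocation fails; a DynamoDB stream trigger would keep retrying the batch
  callback(null, { ok: false });  // invocation succeeds; the failure is reported in the payload and logs instead
};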
12 changes: 3 additions & 9 deletions members-service/champaignSubjectAccessData.js
@@ -47,28 +47,22 @@ export const handlerFunc = (
return logger
.updateStatus(record, { champaign: 'SUCCESS' })
.then(dynamodbSuccess => {
console.log(
'SUCCESSFUL SUBJECT ACCESS REQUEST EVENT - call callback with: ',
success,
' callback: ',
callback
);
return callback(null, success);
})
.catch(dynamodbError => {
return callback(dynamodbError);
});
})
.catch(err => {
console.log('FAILURE!', err);
return logger
.updateStatus(record, { champaign: 'FAILURE' })
.then(dynamodbSuccess => {
return callback(err);
return callback(null, err);
})
.catch(dynamodbError => {
// Wow, nothing is going right today. The request failed AND DynamoDB didn't update the record.
return callback(dynamodbError);
// return a success response because we don't want a week of retries.
return callback(null, dynamodbError);
});
});
};
37 changes: 17 additions & 20 deletions members-service/triggerSubjectAccessRequest.js
@@ -3,7 +3,7 @@ import { OperationsLogger } from '../lib/dynamodb/operationsLogger';
import { DocumentClient } from 'aws-sdk/clients/dynamodb';
import { validateRequest } from '../lib/request-validator';
import { SUBJECT_ACCESS_REQUEST_SCHEMA } from './request-schemas';
import { response, badRequest, ok } from '../lib/lambda-utils/responses';
import { response, badRequest } from '../lib/lambda-utils/responses';

import log from '../lib/logger';

@@ -20,25 +20,22 @@ export const handlerFunc = (event: any, context: any, callback: any) => {
...payload,
};

return validateRequest(SUBJECT_ACCESS_REQUEST_SCHEMA, parameters).then(
params => {
logger
.log({
event: 'MEMBER:SUBJECT_ACCESS_REQUEST',
data: {
email: parameters.email,
},
status: { actionkit: 'PENDING', champaign: 'PENDING' },
})
.then(
result => callback(null, response(result)),
error => callback(null, response(error))
);
},
errors => {
callback(null, badRequest({ cors: true, body: errors }));
}
);
return validateRequest(SUBJECT_ACCESS_REQUEST_SCHEMA, parameters)
.then(params => {
return logger.log({
event: 'MEMBER:SUBJECT_ACCESS_REQUEST',
data: {
email: parameters.email,
},
status: { actionkit: 'PENDING', champaign: 'PENDING' },
});
})
.then(res => {
return callback(null, response({ cors: true, body: res }));
})
.catch(err => {
return callback(null, badRequest({ cors: true, body: err }));
});
};

export const handler = log(handlerFunc);
3 changes: 3 additions & 0 deletions members-service/triggerSubjectAccessRequest.test.js
@@ -2,6 +2,9 @@ import { handlerFunc as handler } from './triggerSubjectAccessRequest';
import { DocumentClient } from 'aws-sdk/clients/dynamodb';
import { OperationsLogger } from '../lib/dynamodb/operationsLogger';

jest
.spyOn(DocumentClient.prototype, 'put')
.mockImplementation(opts => ({ promise: () => Promise.resolve(opts) }));
jest.spyOn(OperationsLogger.prototype, 'log');

describe('triggerSubjectAccessRequest handler', function() {
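The new spy stubs DocumentClient#put with an object exposing a promise(), so OperationsLogger.log resolves without touching DynamoDB. A hypothetical test body using it — the event shape and assertions are illustrative and assume the payload satisfies SUBJECT_ACCESS_REQUEST_SCHEMA:

it('logs a PENDING operation for a valid request', done => {
  const event = { body: JSON.stringify({ email: 'member@example.com' }) };
  handler(event, {}, (err, res) => {
    expect(DocumentClient.prototype.put).toHaveBeenCalled();
    expect(OperationsLogger.prototype.log).toHaveBeenCalled();
    done(err);
  });
});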
4 changes: 2 additions & 2 deletions members-service/updateMember.js
@@ -39,8 +39,8 @@ export const handlerFunc = (event: any, context: any, callback: any) => {
return result;
})
.then(
result => callback(null, response(result)),
error => callback(null, response(error))
result => callback(null, response({ cors: true, body: result })),
error => callback(null, response({ cors: true, body: error }))
);
},
errors => callback(null, badRequest({ cors: true, body: errors }))
4 changes: 4 additions & 0 deletions serverless.yml
@@ -22,6 +22,10 @@ provider:
- dynamodb:ListStreams
Resource: "arn:aws:dynamodb:us-east-1:*:table/${self:provider.environment.DB_LOG_TABLE}"
Effect: Allow
- Action:
- s3:PutObject
Resource: "arn:aws:s3:::champaign/*"
Effect: Allow

resources:
Resources:
6 changes: 6 additions & 0 deletions settings/production.yml
@@ -14,3 +14,9 @@ environment:
UNSUBSCRIBE_PAGE_NAME: ${ssm:/api-services/production/UNSUBSCRIBE_PAGE_NAME}
BRAINTREE_MERCHANT_CURRENCIES: ${ssm:/api-services/production/BRAINTREE_MERCHANT_CURRENCIES}
COGNITO_POOL_ARN: ${ssm:/api-services/production/COGNITO_POOL_ARN}
AK_MYSQL_HOST: ${ssm:/api-services/production/AK_MYSQL_HOST}
AK_MYSQL_USER: ${ssm:/api-services/production/AK_MYSQL_USER}
AK_MYSQL_PWD: ${ssm:/api-services/production/AK_MYSQL_PWD}
AK_MYSQL_DB: ${ssm:/api-services/production/AK_MYSQL_DB}
MEMBER_SERVICES_EMAIL: ${ssm:/api-services/production/MEMBER_SERVICES_EMAIL}
