updated S3 set up #4629

Merged · 3 commits · Mar 22, 2023
48 changes: 33 additions & 15 deletions api/files/s3.js
```diff
@@ -1,34 +1,52 @@
-import { S3Client as awsS3Client } from '@aws-sdk/client-s3';
+import { S3 as awsS3 } from '@aws-sdk/client-s3';
 
-export const getFile = async (id, { S3Client = awsS3Client } = {}) => {
+const config = {
+  apiVersion: '2006-03-01'
+};
+
+if (process.env.AWS_REGION) {
+  config.region = process.env.AWS_REGION;
+}
+
+export const getFile = async (id, { S3 = awsS3 } = {}) => {
   if (!process.env.FILE_S3_BUCKET) {
     return Promise.reject(new Error('No S3 bucket specified'));
   }
   if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
     return Promise.reject(new Error('No AWS credentials specified'));
   }
-  return new S3Client({ apiVersion: '2006-03-01' })
-    .getObject({
+
+  const client = new S3(config);
+
+  const s3ResponseStream = (
+    await client.getObject({
      Bucket: process.env.FILE_S3_BUCKET,
      Key: id
    })
-    .promise()
-    .then(data => data.Body);
+  ).Body;
+  const chunks = [];
+
+  for await (const chunk of s3ResponseStream) {
+    chunks.push(chunk);
+  }
+
+  return Buffer.concat(chunks);
 };
 
-export const putFile = async (id, buffer, { S3Client = awsS3Client } = {}) => {
+export const putFile = async (id, buffer, { S3 = awsS3 } = {}) => {
   if (!process.env.FILE_S3_BUCKET) {
     return Promise.reject(new Error('No S3 bucket specified'));
   }
   if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
     return Promise.reject(new Error('No AWS credentials specified'));
   }
-  return new S3Client({ apiVersion: '2006-03-01' })
-    .putObject({
-      Body: buffer,
-      Bucket: process.env.FILE_S3_BUCKET,
-      ContentType: 'binary',
-      Key: id
-    })
-    .promise();
+
+  const client = new S3(config);
+
+  return client.putObject({
+    Body: buffer,
+    Bucket: process.env.FILE_S3_BUCKET,
+    ContentType: 'binary',
+    Key: id
+  });
 };
```
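
Context for this change: AWS SDK v3 clients return promises directly, and `getObject` resolves with a `Body` that is a readable stream in Node.js rather than a buffered payload, which is why the new `getFile` drains the stream into a `Buffer`. A minimal standalone sketch of the same pattern, with placeholder bucket and key values that are not taken from this repo:

```js
import { S3 } from '@aws-sdk/client-s3';

// In SDK v3 the client methods return promises directly; the v2-style
// .promise() call is gone.
const client = new S3({ apiVersion: '2006-03-01', region: 'us-east-1' });

// In Node.js, the response Body is an async-iterable stream, so it can be
// drained with for await and reassembled into a single Buffer.
const streamToBuffer = async stream => {
  const chunks = [];
  for await (const chunk of stream) {
    chunks.push(chunk);
  }
  return Buffer.concat(chunks);
};

const { Body } = await client.getObject({
  Bucket: 'example-bucket', // placeholder
  Key: 'example-key' // placeholder
});
const file = await streamToBuffer(Body);
```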
46 changes: 26 additions & 20 deletions api/files/s3.test.js
```diff
@@ -5,12 +5,11 @@ import { getFile, putFile } from './s3.js';
 const sandbox = createSandbox();
 
 tap.test('AWS S3 file storage module', async tests => {
-  const S3Client = sandbox.stub();
+  const S3 = sandbox.stub();
   const s3Proto = {
     getObject: sandbox.stub(),
     putObject: sandbox.stub()
   };
-  const s3Promise = sandbox.stub();
 
   tests.beforeEach(async () => {
     process.env.AWS_ACCESS_KEY_ID = 'aws access key id';
@@ -20,9 +19,15 @@ tap.test('AWS S3 file storage module', async tests => {
     sandbox.resetBehavior();
     sandbox.resetHistory();
 
-    S3Client.returns(s3Proto);
-    s3Proto.getObject.returns({ promise: s3Promise });
-    s3Proto.putObject.returns({ promise: s3Promise });
+    S3.returns(s3Proto);
+    s3Proto.getObject.resolves({
+      Body: [
+        Promise.resolve('chunk'),
+        Promise.resolve('of'),
+        Promise.resolve('file')
+      ]
+    });
+    s3Proto.putObject.resolves({ success: true });
   });
 
   tests.test(
@@ -59,9 +64,9 @@ tap.test('AWS S3 file storage module', async tests => {
     getTests.test(
       'rejects if there is an error reading the file',
       async test => {
-        s3Promise.rejects();
-        test.rejects(() => getFile('file id', { S3Client }));
-        test.ok(S3Client.calledWith({ apiVersion: '2006-03-01' }));
+        s3Proto.getObject.rejects();
+        test.rejects(() => getFile('file id', { S3 }));
+        test.ok(S3.calledWith({ apiVersion: '2006-03-01' }));
         test.ok(
           s3Proto.getObject.calledWith({
             Bucket: 's3 bucket path',
@@ -72,10 +77,13 @@ tap.test('AWS S3 file storage module', async tests => {
     );
 
     getTests.test('returns data if it can read the file', async test => {
-      s3Promise.resolves({ Body: 'this is the data' });
-      const data = await getFile('file id', { S3Client });
-      test.equal(data, 'this is the data');
-      test.ok(S3Client.calledWith({ apiVersion: '2006-03-01' }));
+      const buffer = Buffer('this is the data');
+      s3Proto.getObject.resolves({
+        Body: [Promise.resolve(buffer)]
+      });
+      const data = await getFile('file id', { S3 });
+      test.equal(data.toString(), buffer.toString());
+      test.ok(S3.calledWith({ apiVersion: '2006-03-01' }));
       test.ok(
         s3Proto.getObject.calledWith({
           Bucket: 's3 bucket path',
@@ -89,11 +97,9 @@ tap.test('AWS S3 file storage module', async tests => {
     putTests.test(
       'rejects if there is an error writing the file',
       async test => {
-        s3Promise.rejects();
-        test.rejects(() =>
-          putFile('file id', 'file buffer data', { S3Client })
-        );
-        test.ok(S3Client.calledWith({ apiVersion: '2006-03-01' }));
+        s3Proto.putObject.rejects();
+        test.rejects(() => putFile('file id', 'file buffer data', { S3 }));
+        test.ok(S3.calledWith({ apiVersion: '2006-03-01' }));
         test.ok(
           s3Proto.putObject.calledWith({
             Body: 'file buffer data',
@@ -106,9 +112,9 @@ tap.test('AWS S3 file storage module', async tests => {
     );
 
     putTests.test('resolves if it can write the file', async test => {
-      s3Promise.resolves();
-      test.resolves(() => putFile('file id', 'file buffer data', { S3Client }));
-      test.ok(S3Client.calledWith({ apiVersion: '2006-03-01' }));
+      s3Proto.putObject.resolves();
+      test.resolves(() => putFile('file id', 'file buffer data', { S3 }));
+      test.ok(S3.calledWith({ apiVersion: '2006-03-01' }));
       test.ok(
         s3Proto.putObject.calledWith({
           Body: 'file buffer data',
```
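
One subtlety in these stubs: `for await...of` works over any iterable and awaits each element it yields, so a plain array of promises is a serviceable stand-in for the async-iterable `Body` stream. A quick standalone illustration of that behavior:

```js
// A plain array of promises mimics the streamed Body from getObject.
const fakeBody = [
  Promise.resolve('chunk'),
  Promise.resolve('of'),
  Promise.resolve('file')
];

const chunks = [];
// for await unwraps each promise before pushing, just as it would await
// each chunk yielded by a real readable stream.
for await (const chunk of fakeBody) {
  chunks.push(chunk);
}
console.log(chunks.join(' ')); // 'chunk of file'
```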
5 changes: 5 additions & 0 deletions docker-compose.yml
```diff
@@ -82,6 +82,11 @@ services:
       - DEV_DB_NAME=${DEV_DB_NAME:-hitech_apd}
       - TEST_DB_NAME=${TEST_DB_NAME:-hitech_apd_test}
       - LD_API_KEY=${LD_API_KEY}
+      - FILE_STORE=${FILE_STORE}
+      - FILE_S3_BUCKET=${FILE_S3_BUCKET}
+      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
+      - AWS_REGION=${AWS_REGION}
+      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
     volumes:
       - type: bind
         source: ./api
```

**Contributor Author**, commenting on `FILE_STORE=${FILE_STORE}`: these variables let us use S3 locally if we set them in our env
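
The review comment above implies that `FILE_STORE` selects between the S3-backed store and a local one, but the selection logic itself isn't part of this diff. As a purely hypothetical sketch of how such a switch is typically consumed (the `'s3'` value and the `./local.js` module are assumptions, not confirmed by this PR):

```js
// Hypothetical store selector; the real wiring in this repo may differ.
import { getFile as getS3File, putFile as putS3File } from './s3.js';
import { getFile as getLocalFile, putFile as putLocalFile } from './local.js'; // assumed module

// Assumed convention: FILE_STORE === 's3' opts into the S3-backed store.
const useS3 = process.env.FILE_STORE === 's3';

export const getFile = useS3 ? getS3File : getLocalFile;
export const putFile = useS3 ? putS3File : putLocalFile;
```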