Task5 #4

Merged · 10 commits · Nov 17, 2020

Changes from all commits:
9 changes: 6 additions & 3 deletions package.json
@@ -1,20 +1,23 @@
 {
   "name": "nodejs-aws-be",
-  "version": "1.0.4",
+  "version": "1.0.5",
   "license": "MIT",
   "private": true,
   "workspaces": [
     "packages/*",
     "terraform"
   ],
   "devDependencies": {
     "prettier": "^2.1.2",
     "serverless": "^2.8.0"
   },
   "scripts": {
     "tf": "yarn workspace terraform run terraform",
+    "start:product-service": "yarn workspace product-service sls offline",
+    "start:import-service": "yarn workspace import-service sls offline",
+    "deploy:tf": "yarn tf apply -auto-approve",
-    "start": "yarn workspace product-service sls offline",
-    "deploy": "yarn workspace product-service sls deploy",
+    "deploy:product-service": "yarn workspace product-service sls deploy",
+    "deploy:import-service": "yarn workspace import-service sls deploy",
     "create:sls": "yarn sls create --template-path=$(pwd)/scripts/template-aws-nodejs"
   }
 }
4 changes: 4 additions & 0 deletions packages/import-service/.env
@@ -0,0 +1,4 @@
IMPORT_S3_BUCKET=nodejs-aws-task5-csv
IMPORT_S3_REGION=us-east-2
IMPORT_S3_PARSED_PREFIX=parsed/
IMPORT_S3_UPLOAD_PREFIX=uploaded/

Review thread on packages/import-service/.env:

Reviewer: Even if these constants are safe to expose, it's better not to commit a .env file to the repo.

Owner (author): These files follow different rules:

"Note: .env, .env.development, and .env.production files should be included in your repository as they define defaults. .env*.local should be added to .gitignore, as those files are intended to be ignored. .env.local is where secrets can be stored."

https://www.serverless.com/plugins/serverless-dotenv-plugin

So I'm storing confidential data in .env.*.local files.

Reviewer: Ok
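With serverless-dotenv-plugin in the plugin list, these keys are injected into the Lambda environment, so the handlers read them straight off process.env. A minimal sketch of the pattern the handlers below rely on:

// Values are injected from .env by serverless-dotenv-plugin (offline and at deploy time).
const { IMPORT_S3_BUCKET, IMPORT_S3_UPLOAD_PREFIX } = process.env;
console.log(`uploads land in s3://${IMPORT_S3_BUCKET}/${IMPORT_S3_UPLOAD_PREFIX}`);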
9 changes: 9 additions & 0 deletions packages/import-service/.gitignore
@@ -0,0 +1,9 @@
# package directories
node_modules
jspm_packages

# Serverless directories
.serverless

# Webpack directories
.webpack
12 changes: 12 additions & 0 deletions packages/import-service/babel.config.json
@@ -0,0 +1,12 @@
{
  "presets": [
    [
      "@babel/preset-env",
      {
        "targets": {
          "node": "12"
        }
      }
    ]
  ]
}
30 changes: 30 additions & 0 deletions packages/import-service/package.json
@@ -0,0 +1,30 @@
{
  "name": "import-service",
  "version": "1.0.0",
  "description": "Serverless webpack example using ecma script",
  "scripts": {
    "test": "jest"
  },
  "devDependencies": {
    "@babel/core": "^7.11.1",
    "@babel/preset-env": "^7.11.0",
    "@types/jest": "^26.0.15",
    "babel-jest": "^26.6.1",
    "babel-loader": "^8.1.0",
    "babel-plugin-transform-runtime": "^6.23.0",
    "babel-polyfill": "^6.23.0",
    "babel-preset-env": "^1.6.0",
    "jest": "^26.6.1",
    "serverless-dotenv-plugin": "^3.1.0",
    "serverless-offline": "^6.8.0",
    "serverless-webpack": "^5.3.1",
    "webpack": "^4.35.2"
  },
  "author": "The serverless webpack authors (https://github.com/elastic-coders/serverless-webpack)",
  "license": "MIT",
  "dependencies": {
    "aws-sdk": "^2.792.0",
    "csv-parser": "^2.3.3",
    "http-errors": "^1.8.0"
  }
}
62 changes: 62 additions & 0 deletions packages/import-service/serverless.yml
@@ -0,0 +1,62 @@
service:
  name: import-service
# app and org for use with dashboard.serverless.com
#app: your-app-name
#org: your-org-name
frameworkVersion: '2'

plugins:
  - serverless-dotenv-plugin
  - serverless-webpack
  - serverless-offline

custom:
  dotenv:
    required:
      file: true
  serverless-offline:
    httpPort: 4000
    lambdaPort: 4002
    websocketPort: 4001
  webpack:
    packager: "yarn"

provider:
  name: aws
  runtime: nodejs12.x
  region: us-east-2
  # stage: dev

  iamRoleStatements:
    - Effect: "Allow"
      Action: "s3:ListBucket"
      Resource:
        - "arn:aws:s3:::${env:IMPORT_S3_BUCKET}"
    - Effect: "Allow"
      Action: "s3:*"
      Resource:
        - "arn:aws:s3:::${env:IMPORT_S3_BUCKET}/*"

functions:
  importProductsFile:
    handler: src/handlers/importProductsFile.handler
    events:
      - http:
          method: get
          path: import
          cors: true
          request:
            parameters:
              querystrings:
                name: true
                type: true

  importFileParser:
    handler: src/handlers/importFileParser.handler
    events:
      - s3:
          bucket: "${env:IMPORT_S3_BUCKET}"
          event: "s3:ObjectCreated:*"
          rules:
            - prefix: "${env:IMPORT_S3_UPLOAD_PREFIX}"
          existing: true
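For reference, the s3 event above hands the handler records in the standard S3 notification shape. A trimmed, illustrative example of what importFileParser receives (values are made up):

const event = {
  Records: [
    {
      eventName: "ObjectCreated:Put",
      s3: {
        bucket: { name: "nodejs-aws-task5-csv" },
        object: { key: "uploaded/products.csv", size: 1024 },
      },
    },
  ],
};
// The handler below only relies on record.s3.object.key.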
76 changes: 76 additions & 0 deletions packages/import-service/src/handlers/importFileParser.js
@@ -0,0 +1,76 @@
import AWS from "aws-sdk";
import csvParser from "csv-parser";
import { Transform, pipeline as _pipeline } from "stream";
import { promisify } from "util";
import middy from "@middy/core";
import middyRequestLogger from "middy-request-logger";

const pipeline = promisify(_pipeline);

const {
  IMPORT_S3_BUCKET,
  IMPORT_S3_UPLOAD_PREFIX,
  IMPORT_S3_REGION,
  IMPORT_S3_PARSED_PREFIX,
} = process.env;

export const handler = middy(importFileParser).use([middyRequestLogger()]);

export async function importFileParser(event, context, callback) {
  const s3 = new AWS.S3({ region: IMPORT_S3_REGION });

  const tasks = event.Records.map(async (record) => {
    const srcKey = record.s3.object.key;
    const destKey = srcKey.replace(
      IMPORT_S3_UPLOAD_PREFIX,
      IMPORT_S3_PARSED_PREFIX
    );

    // parse CSV
    const uploadedObject = s3.getObject({
      Bucket: IMPORT_S3_BUCKET,
      Key: srcKey,
    });
    await pipeline(
      uploadedObject.createReadStream(),
      csvParser(),
      streamTap(console.log)
    );

    // move the object from uploaded/ to parsed/
    await s3
      .copyObject({
        Bucket: IMPORT_S3_BUCKET,
        CopySource: IMPORT_S3_BUCKET + "/" + srcKey,
        Key: destKey,
      })
      .promise();
    await s3
      .deleteObject({
        Bucket: IMPORT_S3_BUCKET,
        Key: srcKey,
      })
      .promise();
  });

  const results = await Promise.allSettled(tasks);
  const success = results.filter(({ status }) => status === "fulfilled");
  console.log(
    `${success.length} of ${results.length} files were copied successfully`
  );
}

export function streamTap(fn) {
  return new Transform({
    objectMode: true,
    transform: (data, encoding, done) => {
      try {
        fn({ data, encoding });
      } catch (err) {
        done(err);
        return;
      }
      done(null, data);
    },
  });
}
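streamTap is a pass-through observer: fn sees each row, the data flows on unchanged. A standalone sanity check, independent of AWS (a minimal sketch; the Writable sink is only there so pipeline has a destination, and Readable.from needs Node >= 12.3):

import { Readable, Writable, pipeline as _pipeline } from "stream";
import { promisify } from "util";
import { streamTap } from "./importFileParser";

const pipeline = promisify(_pipeline);

// Discarding sink; we only care about the tap's side effect.
const sink = new Writable({
  objectMode: true,
  write(_row, _enc, done) {
    done();
  },
});

pipeline(
  Readable.from([{ id: 1 }, { id: 2 }]),
  streamTap(({ data }) => console.log("saw:", data)),
  sink
).catch(console.error);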
51 changes: 51 additions & 0 deletions packages/import-service/src/handlers/importProductsFile.js
@@ -0,0 +1,51 @@
import AWS from "aws-sdk";
import httpError from "http-errors";
import middy from "@middy/core";
import middyHttpCors from "@middy/http-cors";
import middyErrorHandler from "middy-error-handler";
import middyRequestLogger from "middy-request-logger";

const {
  IMPORT_S3_BUCKET,
  IMPORT_S3_UPLOAD_PREFIX,
  IMPORT_S3_REGION,
} = process.env;
const ALLOWED_CONTENT_TYPES = [
  "text/csv",
  "application/vnd.ms-excel",
  "text/x-csv",
];

export const handler = middy(importProductsFile).use([
  middyErrorHandler(),
  middyRequestLogger(),
  middyHttpCors(),
]);

export async function importProductsFile(event) {
  const { name: fileName, type: fileType } = event.queryStringParameters;

  if (!fileName) {
    throw new httpError.BadRequest(`'name' should not be empty`);
  }
  if (
    !fileType ||
    !ALLOWED_CONTENT_TYPES.find((allowedType) => fileType.includes(allowedType))
  ) {
    throw new httpError.BadRequest(`Unsupported file type '${fileType}'`);
  }

  const s3 = new AWS.S3({ region: IMPORT_S3_REGION });
  const uploadPath = IMPORT_S3_UPLOAD_PREFIX + fileName;
  const url = await s3.getSignedUrlPromise("putObject", {
    Bucket: IMPORT_S3_BUCKET,
    Key: uploadPath,
    Expires: 60,
    ContentType: fileType,
  });

  return {
    statusCode: 200,
    body: JSON.stringify(url),
  };
}
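End to end, a browser client would call GET /import for a presigned URL and then PUT the file straight to S3. A sketch of that flow under assumed names (API_BASE is hypothetical; substitute the URL printed by sls deploy):

const API_BASE = "https://example.execute-api.us-east-2.amazonaws.com/dev";

async function uploadCsv(file) {
  // 1. Ask import-service for a presigned PUT URL.
  const resp = await fetch(
    `${API_BASE}/import?name=${encodeURIComponent(file.name)}` +
      `&type=${encodeURIComponent(file.type)}`
  );
  const signedUrl = await resp.json(); // the handler returns JSON.stringify(url)

  // 2. Upload directly to S3; Content-Type must match what the URL was signed with.
  await fetch(signedUrl, {
    method: "PUT",
    headers: { "Content-Type": file.type },
    body: file,
  });
}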
26 changes: 26 additions & 0 deletions packages/import-service/src/handlers/importProductsFile.spec.js
@@ -0,0 +1,26 @@
import { handler } from "./importProductsFile";
import AWS from "aws-sdk";

// aws-sdk-mock package does not support getSignedUrlPromise,
// so let's use jest
jest.mock("aws-sdk");
jest.mock("middy-request-logger", () => () => ({
  before: (handler, next) => next(),
}));

beforeEach(() => {
  jest
    .spyOn(AWS.S3.prototype, "getSignedUrlPromise")
    .mockResolvedValue("http://aws");
});

test("importProductsFile", async () => {
  const event = {
    queryStringParameters: { name: "products.csv", type: "text/csv" },
  };

  const resp = await handler(event);

  expect(resp.statusCode).toBe(200);
  expect(resp.body).toBe(JSON.stringify("http://aws"));
});
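A companion negative case could hit the validation branch directly on the unwrapped function, bypassing the middy wrapper (a sketch; the expected message comes from the handler above):

import { importProductsFile } from "./importProductsFile";

test("importProductsFile rejects an empty name", async () => {
  const event = { queryStringParameters: { name: "", type: "text/csv" } };

  // The unwrapped handler throws http-errors' BadRequest before touching S3.
  await expect(importProductsFile(event)).rejects.toThrow(
    "'name' should not be empty"
  );
});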
28 changes: 28 additions & 0 deletions packages/import-service/webpack.config.js
@@ -0,0 +1,28 @@
const path = require("path");
const slsw = require("serverless-webpack");

module.exports = {
  entry: slsw.lib.entries,
  mode: slsw.lib.webpack.isLocal ? "development" : "production",
  target: "node",
  output: {
    libraryTarget: "commonjs",
    filename: "[name].js",
    path: path.join(__dirname, ".webpack"),
  },
  module: {
    rules: [
      {
        test: /\.js$/, // transpile .js files with babel-loader
        enforce: "pre",
        exclude: /node_modules/, // skip everything in node_modules
        include: __dirname,
        use: [
          {
            loader: "babel-loader",
          },
        ],
      },
    ],
  },
};
14 changes: 14 additions & 0 deletions terraform/main.tf
@@ -72,3 +72,17 @@ resource "aws_security_group" "pg" {
    cidr_blocks = ["0.0.0.0/0"]
  }
}

// task 5
resource "aws_s3_bucket" "task5_csv" {
  bucket = "nodejs-aws-task5-csv"
  acl    = "private"

  cors_rule {
    allowed_headers = ["*"]
    allowed_methods = ["PUT"]
    allowed_origins = ["*"]
    expose_headers  = ["ETag"]
    max_age_seconds = 3000
  }
}