diff --git a/.gitignore b/.gitignore index c970419..d65a3fa 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ node_modules .env .nyc_output coverage/ +docker/api.env diff --git a/ReadMe.md b/ReadMe.md index 6c2b802..223d4f2 100644 --- a/ReadMe.md +++ b/ReadMe.md @@ -5,7 +5,6 @@ - nodejs https://nodejs.org/en/ (v8) - Kafka - Informix -- Postgres - Docker, Docker Compose ## Configuration @@ -25,9 +24,15 @@ The following parameters can be set in config files or in env variables: - UPDATE_PROJECT_TOPIC: update project Kafka topic, default value is 'project.action.update' - DELETE_PROJECT_TOPIC: delete project member Kafka topic, default value is 'project.action.delete' - INFORMIX: Informix database configuration parameters, refer `config/default.js` for more information -- POSTGRES: Postgres database configuration parameters, refer `config/default.js` for more information +- AUTH0_URL: AUTH0 URL, used to get M2M token +- AUTH0_PROXY_SERVER_URL: AUTH0 proxy server URL, used to get M2M token +- AUTH0_AUDIENCE: AUTH0 audience, used to get M2M token +- TOKEN_CACHE_TIME: AUTH0 token cache time, used to get M2M token +- AUTH0_CLIENT_ID: AUTH0 client id, used to get M2M token +- AUTH0_CLIENT_SECRET: AUTH0 client secret, used to get M2M token +- PROJECTS_API: the topcoder projects API -generally, we only need to update INFORMIX_HOST, KAFKA_URL and POSTGRES_URL via environment variables, see INFORMIX_HOST, KAFKA_URL and POSTGRES_URL parameter in docker/sample.api.env +generally, we only need to update INFORMIX_HOST, KAFKA_URL, PROJECTS_API and M2M-related configuration via environment variables, see the parameters in docker/sample.api.env There is a `/health` endpoint that checks for the health of the app. This sets up an expressjs server and listens on the environment variable `PORT`. It's not part of the configuration file and needs to be passed as an environment variable @@ -68,17 +73,6 @@ We will use Topcoder Informix database setup on Docker. Go to `docker-ifx` folder and run `docker-compose up` -## Postgres database setup - -- Checkout tc-project-service `v5-upgrade` branch -```bash -git clone https://github.com/topcoder-platform/tc-project-service.git -git checkout v5-upgrade -``` -- Modify `dbConfig.masterUrl` in `config/default.json` -- Run command `npm install` to install dependencies -- Run command `npm run sync:db` to create tables on Postgres database - ## Local deployment - Given the fact that the library used to access Informix DB depends on Informix Client SDK. We will run the application on Docker using a base image with Informix Client SDK installed and properly configured. @@ -88,15 +82,15 @@ For deployment, please refer to next section 'Local Deployment with Docker' To run the Legacy Project Processor using docker, follow the steps below -1. Make sure that Kafka, Postgres and Informix are running as per instructions above. +1. Make sure that Kafka, Project Service and Informix are running as per instructions above. 2. Go to `docker` folder -3. Rename the file `sample.api.env` to `api.env` And properly update the IP addresses to match your environment for the variables : KAFKA_URL, INFORMIX_HOST and POSTGRES_URL( make sure to use IP address instead of hostname ( i.e localhost will not work)).Here is an example: +3. 
Rename the file `sample.api.env` to `api.env`, and properly update the M2M-related configuration and the IP addresses to match your environment for the variables KAFKA_URL, INFORMIX_HOST and PROJECTS_API (make sure to use an IP address instead of a hostname, i.e. localhost will not work). Here is an example: ``` KAFKA_URL=192.168.31.8:9092 INFORMIX_HOST=192.168.31.8 -POSTGRES_URL=postgres://postgres:password@192.168.31.8:5432/postgres +PROJECTS_API=192.168.31.8:8001/v5 ``` 4. Once that is done, go to run the following command ``` docker-compose up ``` ## Running e2e tests You need to run `docker-compose build` if modify source files. -Make sure run `docker-compose up` in `docker` folder once to make sure application will install dependencies and run successfully with Kafka, Postgres and Informix. +Make sure to run `docker-compose up` in the `docker` folder once, to verify that the application installs its dependencies and runs successfully with Kafka and Informix. To run e2e tests Modify `docker/docker-compose.yml` with `command: run test`(uncomment it) and run `docker-compose up` in `docker` folder diff --git a/Verification.md b/Verification.md index 9a0ce0a..e5de17f 100644 --- a/Verification.md +++ b/Verification.md @@ -18,30 +18,30 @@ npm run test-data 1. start kafka-console-producer to write messages to `project.action.create` topic: `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` 2. write message: - `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1000, "name": "Develop website", "description": "Test

This is description

", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 132458 } }` + `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1, "name": "Develop website", "description": "Test

This is description

", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 132458 } }` 3. check the app console to verify message has been properly handled. 4. Again, write another message(directProjectId is provided at this time): - `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "name": "

Test Project

", "description": "Test

This is description

", "directProjectId": 500, "billingAccountId": null, "type": "Web", "createdBy": 132458 } }` + `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "name": "

Test Project

", "description": "Test

This is description

", "directProjectId": 500, "billingAccountId": null, "type": "Web", "createdBy": 132458 } }` 5. check the app console to verify message has been properly handled. 6. Try to write an invalid message: - `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "name": "

Test Project

", "description": "Test

This is description

", "directProjectId": 500, "billingAccountId": 100, "type": "Web", "createdBy": 132458 } }` + `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "name": "

Test Project

", "description": "Test

This is description

", "directProjectId": 500, "billingAccountId": 100, "type": "Web", "createdBy": 132458 } }` 7. You will see error message in the app console. 8. start kafka-console-producer to write messages to `project.action.update` topic: `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.update` 9. write message: - `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "directProjectId": 500, "billingAccountId": 70015984, "updatedBy": 132458 } }` + `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "directProjectId": 500, "billingAccountId": 70015984, "updatedBy": 132458 } }` 10. check the app console to verify message has been properly handled. 11. Try to write an invalid message: - `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "directProjectId": 500, "billingAccountId": 1, "updatedBy": 132458 } }` + `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "directProjectId": 500, "billingAccountId": 1, "updatedBy": 132458 } }` 12. You will see error message in the app console. 13. start kafka-console-producer to write messages to `project.action.update` topic: `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create` 14. write messages: -`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 132457, "role": "copilot", "createdBy": 132458 } }` +`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 132457, "role": "copilot", "createdBy": 132458 } }` -`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124835, "role": "manager", "createdBy": 132458 } }` +`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124835, "role": "manager", "createdBy": 132458 } }` -`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124836, "role": "account_manager", "createdBy": 132458 } }` +`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124836, "role": "account_manager", "createdBy": 132458 } }` 15. check the app console to verify messages has been properly handled. 16. Repeat step 14 again. @@ -49,11 +49,11 @@ npm run test-data 18. 
start kafka-console-producer to write messages to `project.action.update` topic: `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.delete` 19. write messages: -`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 132457, "role": "copilot", "deletedBy": 132458 } }` +`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 132457, "role": "copilot", "deletedBy": 132458 } }` -`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124835, "role": "manager", "deletedBy": 132458 } }` +`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124835, "role": "manager", "deletedBy": 132458 } }` -`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124836, "role": "account_manager", "deletedBy": 132458 } }` +`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124836, "role": "account_manager", "deletedBy": 132458 } }` 20. check the app console to verify messages has been properly handled. 21. Repeat step 14 again. @@ -77,20 +77,29 @@ select * from projects; ## E2E tests coverage - 103 passing (3m) +``` code + 103 passing (2m) - File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s - ----------------------|----------|----------|----------|----------|------------------ - All files | 98.23 | 91.98 | 100 | 98.21 | - config | 100 | 89.74 | 100 | 100 | - default.js | 100 | 89.74 | 100 | 100 | 8,25,36 - test.js | 100 | 100 | 100 | 100 | - src | 98.57 | 85 | 100 | 98.51 | - app.js | 98.41 | 85 | 100 | 98.39 | 85 - bootstrap.js | 100 | 100 | 100 | 100 | - constants.js | 100 | 100 | 100 | 100 | - src/common | 92.59 | 70.83 | 100 | 92.59 | - helper.js | 100 | 100 | 100 | 100 | - logger.js | 90.63 | 65 | 100 | 90.63 |32,55,60,84,98,118 - src/services | 99.67 | 99.04 | 100 | 99.66 | - ProcessorService.js | 99.67 | 99.04 | 100 | 99.66 | 875 + + > legacy-project-processor@1.0.0 cover:report /legacy-project-processor + > nyc report --reporter=html --reporter=text + + ----------------------|----------|----------|----------|----------|-------------------| + File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s | + ----------------------|----------|----------|----------|----------|-------------------| + All files | 96.75 | 91.01 | 96.72 | 96.72 | | + config | 100 | 93.75 | 100 | 100 | | + default.js | 100 | 93.75 | 100 | 100 | 8,25 | + test.js | 100 | 100 | 100 | 100 | | + src | 90 | 75 | 71.43 | 89.55 | | + app.js | 88.89 | 75 | 60 | 88.71 |... 
87,88,89,90,92 | + bootstrap.js | 100 | 100 | 100 | 100 | | + constants.js | 100 | 100 | 100 | 100 | | + src/common | 92.5 | 70.83 | 100 | 92.5 | | + helper.js | 100 | 100 | 100 | 100 | | + logger.js | 90.63 | 65 | 100 | 90.63 |32,55,60,84,98,118 | + src/services | 99.35 | 98.04 | 100 | 99.35 | | + ProcessorService.js | 99.33 | 98.04 | 100 | 99.33 | 712,882 | + ProjectService.js | 100 | 100 | 100 | 100 | | + ----------------------|----------|----------|----------|----------|-------------------| +``` diff --git a/config/default.js b/config/default.js index 88ce905..1e64183 100644 --- a/config/default.js +++ b/config/default.js @@ -31,12 +31,13 @@ module.exports = { POOL_MAX_SIZE: parseInt(process.env.IFX_POOL_MAX_SIZE) || 10 // use connection pool in processor, the pool size }, - // postgres database configuration - POSTGRES: { - URL: process.env.POSTGRES_URL || 'postgres://coder:mysecretpassword@dockerhost:5432/projectsdb', // url - MAX_POOL_SIZE: parseInt(process.env.POSTGRES_MAX_POOL_SIZE) || 50, // max pool size - MIN_POOL_SIZE: parseInt(process.env.POSTGRES_MIN_POOL_SIZE) || 4, // min pool size - IDLE_TIME_OUT: parseInt(process.env.POSTGRES_IDLE_TIME_OUT) || 1000, // idle time - PROJECT_TABLE_NAME: 'projects' // project table name - } + // used to get M2M token + AUTH0_URL: process.env.AUTH0_URL, + AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL, + AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE, + TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME, + AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID, + AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET, + + PROJECTS_API: process.env.PROJECTS_API || 'http://localhost:8001/v5' } diff --git a/docker/api.env b/docker/api.env deleted file mode 100644 index 45722ee..0000000 --- a/docker/api.env +++ /dev/null @@ -1,3 +0,0 @@ -KAFKA_URL=127.0.0.1:9092 -INFORMIX_HOST=127.0.0.1 -POSTGRES_URL=postgres://coder:mysecretpassword@localhost:5432/projectsdb \ No newline at end of file diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 8f486e1..eb5a588 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -6,8 +6,8 @@ services: build: context: ../ dockerfile: docker/Dockerfile - # env_file: - # - api.env - # command: run start + env_file: + - api.env + command: run start # command: run test - command: run test:cov + # command: run test:cov diff --git a/docker/sample.api.env b/docker/sample.api.env index de66eff..8cb4f0d 100644 --- a/docker/sample.api.env +++ b/docker/sample.api.env @@ -1,3 +1,8 @@ -KAFKA_URL= -INFORMIX_HOST= -POSTGRES_URL= +KAFKA_URL=host.docker.internal:9092 +INFORMIX_HOST=host.docker.internal +PROJECTS_API=host.docker.internal:8001/v5 +AUTH0_CLIENT_ID= +AUTH0_CLIENT_SECRET= +AUTH0_URL= +AUTH0_AUDIENCE= +AUTH0_PROXY_SERVER_URL= diff --git a/package.json b/package.json index 9312896..b440d78 100644 --- a/package.json +++ b/package.json @@ -9,21 +9,24 @@ "lint:fix": "standard --fix", "init-db": "node scripts/init-db.js", "test-data": "node scripts/test-data.js", - "test": "mocha test/helper.test.js && mocha test/processor.test.js --timeout 20000 --exit", + "test": "mocha test/helper.test.js && mocha --require test/prepare.js test/processor.test.js --timeout 20000 --exit", "helper:test": "nyc --silent mocha test/helper.test.js --exit", - "processor:test": "nyc --silent --no-clean mocha test/processor.test.js --timeout 20000 --exit", + "processor:test": "nyc --silent --no-clean mocha --require test/prepare.js test/processor.test.js --timeout 20000 --exit", "cover:report": "nyc report 
--reporter=html --reporter=text", "test:cov": "npm run helper:test && npm run processor:test && npm run cover:report" }, "author": "TCSCODER", "license": "none", "devDependencies": { - "should": "^13.2.3", "mocha": "^6.1.4", + "mocha-prepare": "^0.1.0", + "nock": "^11.7.0", "nyc": "^14.1.1", - "superagent": "^5.1.0", + "q": "^1.5.1", + "should": "^13.2.3", + "sinon": "^7.3.2", "standard": "^12.0.1", - "sinon": "^7.3.2" + "superagent": "^5.1.2" }, "dependencies": { "@hapi/joi": "^15.1.0", @@ -34,10 +37,7 @@ "ifxnjs": "^8.0.1", "lodash": "^4.17.11", "no-kafka": "^3.4.3", - "pg": "^7.11.0", - "pg-hstore": "^2.3.3", - "q": "^1.5.1", - "sequelize": "^5.9.0", + "tc-core-library-js": "github:appirio-tech/tc-core-library-js#v2.6.3", "topcoder-healthcheck-dropin": "^1.0.3", "winston": "^3.2.1" }, diff --git a/scripts/init-db.js b/scripts/init-db.js index 33beeac..8576015 100644 --- a/scripts/init-db.js +++ b/scripts/init-db.js @@ -3,11 +3,10 @@ */ require('../src/bootstrap') -const { getPostgresConnection, getInformixConnection } = require('../src/common/helper') +const { getInformixConnection } = require('../src/common/helper') const logger = require('../src/common/logger') async function initDB () { - await getPostgresConnection().query(`delete from projects`) const connection = await getInformixConnection() try { await connection.queryAsync(`delete from tcs_catalog:direct_project_metadata_audit`) diff --git a/scripts/test-data.js b/scripts/test-data.js index 1842f2c..258e1b6 100644 --- a/scripts/test-data.js +++ b/scripts/test-data.js @@ -3,13 +3,10 @@ */ require('../src/bootstrap') -const { getPostgresConnection, getInformixConnection } = require('../src/common/helper') +const { getInformixConnection } = require('../src/common/helper') const logger = require('../src/common/logger') async function insertData () { - await getPostgresConnection().query(`delete from projects`) - await getPostgresConnection().query(`insert into projects(id, name, description, terms, type, status, "createdBy", "updatedBy", version, "lastActivityAt", "lastActivityUserId") values(1000, 'name-1', 'description-1', '{1}', 'test', 'draft', 8547899, 8547899, '1.0', now(), '8547899')`) - await getPostgresConnection().query(`insert into projects(id, "directProjectId", name, description, terms, type, status, "createdBy", "updatedBy", version, "lastActivityAt", "lastActivityUserId") values(1001, 500, 'name-2', 'description-2', '{1}', 'test', 'draft', 8547899, 8547899, '1.0', now(), '8547899')`) const connection = await getInformixConnection() try { await connection.queryAsync(`delete from tcs_catalog:copilot_profile`) diff --git a/src/common/helper.js b/src/common/helper.js index 29a64e1..2504649 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -2,20 +2,11 @@ * Contains generic helper methods */ -require('pg').defaults.parseInt8 = true - -const Sequelize = require('sequelize') const config = require('config') const ifxnjs = require('ifxnjs') +const tcCoreLibAuth = require('tc-core-library-js').auth -const sequelize = new Sequelize(config.get('POSTGRES.URL'), { - logging: false, - pool: { - max: config.POSTGRES.MAX_POOL_SIZE, - min: config.POSTGRES.MIN_POOL_SIZE, - idle: config.POSTGRES.IDLE_TIME_OUT - } -}) +const m2m = tcCoreLibAuth.m2m(config) const Pool = ifxnjs.Pool const pool = Promise.promisifyAll(new Pool()) @@ -39,14 +30,6 @@ async function getInformixConnection () { return Promise.promisifyAll(conn) } -/** - * Get Postgres connection using the configured parameters - * @return {Object} Sequelize object - 
*/ -function getPostgresConnection () { - return sequelize -} - /** * Get Kafka options * @return {Object} the Kafka options @@ -59,8 +42,28 @@ function getKafkaOptions () { return options } +/** + * Get machine to machine token. + * @returns {Promise} promise which resolves to the m2m token + */ +async function getM2MToken () { + return m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET) +} + +/** + * Do nothing (delay) asynchronous + * + * @param {Number} ms time in milliseconds + * + * @returns {Promise} timeoutID + */ +async function sleep (ms) { + return new Promise((resolve) => setTimeout(resolve, ms)) +} + module.exports = { - getPostgresConnection, getInformixConnection, - getKafkaOptions + getKafkaOptions, + getM2MToken, + sleep } diff --git a/src/constants.js b/src/constants.js index 83d0713..5d34d68 100644 --- a/src/constants.js +++ b/src/constants.js @@ -24,5 +24,10 @@ module.exports = { ACCOUNT_MANAGER_ROLE: 'account_manager', PROJECT_RESOURCE: 'project', MEMBER_RESOURCE: 'project.member', - BUGR_CONTEST_TYPE_ID: 900001 + BUGR_CONTEST_TYPE_ID: 900001, + /** + * Time in milliseconds which we have to wait before update project using + * Project Service API + */ + SLEEP_MS_BEFORE_UPDATE_PROJECT_BY_API: 3000 } diff --git a/src/services/ProcessorService.js b/src/services/ProcessorService.js index 5f6bf35..acb2785 100644 --- a/src/services/ProcessorService.js +++ b/src/services/ProcessorService.js @@ -3,10 +3,10 @@ */ const _ = require('lodash') -const config = require('config') const joi = require('@hapi/joi') const logger = require('../common/logger') const helper = require('../common/helper') +const projectService = require('./ProjectService') const Entities = require('html-entities').AllHtmlEntities const entities = new Entities() const { @@ -19,7 +19,8 @@ const { MANAGER_ROLE, ACCOUNT_MANAGER_ROLE, MANAGER_METADATA_KEY, - ACCOUNT_MANAGER_METADATA_KEY + ACCOUNT_MANAGER_METADATA_KEY, + SLEEP_MS_BEFORE_UPDATE_PROJECT_BY_API } = require('../constants') /** @@ -465,10 +466,11 @@ async function processCreate (message) { await associateBillingAccountToProject(connection, directProjectId, billingAccountId) } - // update projects.directProjectId in Postgres - await helper.getPostgresConnection().query(`update ${config.POSTGRES.PROJECT_TABLE_NAME} set "directProjectId" = ${directProjectId} where id = ${message.payload.id}`) + // update projects.directProjectId + await helper.sleep(SLEEP_MS_BEFORE_UPDATE_PROJECT_BY_API) + await projectService.updateProject(message.payload.id, { directProjectId }) - // commit the transaction after successfully update projects.directProjectId in Postgres + // commit the transaction after successfully update projects.directProjectId await connection.commitTransactionAsync() } catch (e) { await connection.rollbackTransactionAsync() @@ -498,19 +500,14 @@ processCreate.schema = { } /** - * Get direct project id from postgres database + * Get direct project id from projects api * @param {Object} connection the Informix connection * @param {String} id the project id * @returns {String} the direct project id */ async function getDirectProjectId (connection, id) { - // retrieve projects.directProjectId from Postgres - const res = await helper.getPostgresConnection().query(`select "directProjectId" from ${config.POSTGRES.PROJECT_TABLE_NAME} where id = ${id}`) - if (res[0].length > 0) { - return res[0][0].directProjectId - } else { - throw new Error(`No project with id: ${id} exist in Postgres database`) - } + const res = await 
projectService.getProject(id) + return res.directProjectId } /** @@ -570,7 +567,7 @@ processUpdate.schema = { id: joi.numberId(), directProjectId: joi.optionalNumberId(), billingAccountId: joi.optionalNumberId().allow(null), - updatedBy: joi.numberId() + updatedBy: joi.number().integer().required() // could be negative for M2M token }).unknown(true).required() }).required() } @@ -618,7 +615,7 @@ async function addCopilot (userId, projectId, currentUser) { // get copilot profile id const copilotProfileId = await getCopilotProfileId(connection, userId) - // get direct project id from postgres database + // get direct project id from projects api const directProjectId = await getDirectProjectId(connection, projectId) // Check the current user permission on the project @@ -769,7 +766,7 @@ async function addManager (userId, projectId, isManager, currentUser) { // begin transaction await connection.beginTransactionAsync() - // get direct project id from postgres database + // get direct project id from projects api const directProjectId = await getDirectProjectId(connection, projectId) // Check the current user permission on the project @@ -846,7 +843,7 @@ async function removeCopilot (userId, projectId, currentUser) { // get copilot profile id const copilotProfileId = await getCopilotProfileId(connection, userId) - // get direct project id from postgres database + // get direct project id from projects api const directProjectId = await getDirectProjectId(connection, projectId) // Check the current user permission on the project @@ -937,7 +934,7 @@ async function removeManager (userId, projectId, isManager, currentUser) { // begin transaction await connection.beginTransactionAsync() - // get direct project id from postgres database + // get direct project id from projects api const directProjectId = await getDirectProjectId(connection, projectId) // Check the current user permission on the project diff --git a/src/services/ProjectService.js b/src/services/ProjectService.js new file mode 100644 index 0000000..9eb1734 --- /dev/null +++ b/src/services/ProjectService.js @@ -0,0 +1,46 @@ +/* + * Project Service + */ + +const config = require('config') +const request = require('superagent') +const helper = require('../common/helper') + +/** + * Get a project. + * + * @param {String} id the project id + * @returns {Promise} the project data + */ +async function getProject (id) { + const token = await helper.getM2MToken() + const res = await request.get(`${config.PROJECTS_API}/projects/${id}`) + .set({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }) + return res.body +} + +/** + * Update a project. 
+ * + * @param {String} id the project id + * @param {Object} data the project patch data + * @returns {Promise} the updated project data + */ +async function updateProject (id, data) { + const token = await helper.getM2MToken() + const res = await request.patch(`${config.PROJECTS_API}/projects/${id}`) + .set({ + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` + }) + .send(data) + return res.body +} + +module.exports = { + getProject, + updateProject +} diff --git a/test/prepare.js b/test/prepare.js new file mode 100644 index 0000000..e89e0d3 --- /dev/null +++ b/test/prepare.js @@ -0,0 +1,36 @@ +/* + * Setup mock server + */ +process.env.NODE_ENV = 'test' +require('../src/bootstrap') + +const prepare = require('mocha-prepare') +const nock = require('nock') + +prepare(function (done) { + let directProjectId + nock(/.*/) + .persist() + .filteringPath(path => { + if (path.includes('/projects')) { + return '/_projects' + } + return path + }) + .get('/_projects') + .reply((uri) => { + if (uri.match(/\/99$/)) { // simulate non-existent project id + return [404, { message: 'project not found for id 99' }] + } + return [200, { directProjectId }] + }) + .patch('/_projects') + .reply(200, (uri, requestBody) => { + directProjectId = requestBody.directProjectId + }) + .get('/health') + .reply(200, { checksRun: 1 }) + done() +}, function (done) { + done() +}) diff --git a/test/processor.test.js b/test/processor.test.js index 68e5bb2..b0799f1 100644 --- a/test/processor.test.js +++ b/test/processor.test.js @@ -2,8 +2,6 @@ * E2E test of the legacy project processor. */ -process.env.NODE_ENV = 'test' -require('../src/bootstrap') const _ = require('lodash') const config = require('config') const should = require('should') @@ -13,7 +11,10 @@ const logger = require('../src/common/logger') const { testTopics } = require('./testData') const { initDB } = require('../scripts/init-db') const { insertData } = require('../scripts/test-data') -const { getPostgresConnection, getInformixConnection, getKafkaOptions } = require('../src/common/helper') +const { getInformixConnection, getKafkaOptions } = require('../src/common/helper') +const projectService = require('../src/services/ProjectService') +const sinon = require('sinon') +const helper = require('../src/common/helper') describe('Topcoder - Legacy Project Processor E2E Test', () => { let app @@ -91,6 +92,8 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { } before(async () => { + sinon.stub(helper, 'getM2MToken').value(() => Promise.resolve('dummy-token')) + // inject logger with log collector logger.info = (message) => { infoLogs.push(message) @@ -141,6 +144,8 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { } await connection.closeAsync() + + sinon.reset() }) beforeEach(() => { @@ -206,9 +211,9 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { await sendMessage(testTopics.create.testMessages[0]) await waitJob() - // verify data in postgres database - let res = await getPostgresConnection().query(`select * from projects where id = 1000`) - const directProjectId = res[0][0].directProjectId + // verify data in projects api + let res = await projectService.getProject(testTopics.create.testMessages[0].payload.id) + const directProjectId = res.directProjectId directProjectId.should.be.above(0) // verify data in informix database @@ -265,8 +270,8 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { await sendMessage(testTopics.create.testMessages[1]) await waitJob() - let res 
= await getPostgresConnection().query(`select * from projects where id = 1001`) - const directProjectId = res[0][0].directProjectId + let res = await projectService.getProject(testTopics.create.testMessages[1].payload.id) + const directProjectId = res.directProjectId should.equal(directProjectId, 500) res = await connection.queryAsync(`select * from tc_direct_project p where p.project_id = ${directProjectId}`) @@ -319,8 +324,9 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { await sendMessage(testTopics.create.testMessages[2]) await waitJob() - let res = await getPostgresConnection().query(`select * from projects where id = 1001`) - const directProjectId = res[0][0].directProjectId + let res = await projectService.getProject(testTopics.create.testMessages[2].payload.id) + const directProjectId = res.directProjectId + directProjectId.should.be.above(0) should.equal(directProjectId, 500) res = await connection.queryAsync(`select * from tc_direct_project p where p.project_id = ${directProjectId}`) @@ -607,11 +613,11 @@ describe('Topcoder - Legacy Project Processor E2E Test', () => { it('processor update project fail, incorrect project id', async () => { let message = _.cloneDeep(testTopics.addMember.testMessages[0]) - message.payload.projectId = 100 + message.payload.projectId = 99 await sendMessage(message) await waitJob() - assertErrorMessage(`No project with id: 100 exist in Postgres database`) + assertErrorMessage(`project not found for id 99`) }) it('processor update project fail, add the same copilot', async () => {
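
As an aside, if you want to spot-check the new Projects API integration by hand (outside of the e2e suite), a minimal sketch along the following lines can be run from the repository root inside the processor's Docker container (the `ifxnjs` dependency needs the Informix Client SDK), assuming valid AUTH0_* and PROJECTS_API values are set in the environment; the project id 1 is only an example and the script itself is not part of this patch:

```js
// Hypothetical manual check (not part of this patch): read a project through
// the v5 Projects API using the same ProjectService the processor now relies on.
// Assumes AUTH0_* and PROJECTS_API are configured, and that the code runs in the
// Informix-enabled image so that requiring the helper (and ifxnjs) succeeds.
require('./src/bootstrap')
const projectService = require('./src/services/ProjectService')

async function main () {
  const project = await projectService.getProject(1) // example project id
  console.log('directProjectId:', project.directProjectId)
}

main().catch((err) => {
  console.error(err.message)
  process.exit(1)
})
```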