diff --git a/.gitignore b/.gitignore
index c970419..d65a3fa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,4 @@ node_modules
.env
.nyc_output
coverage/
+docker/api.env
diff --git a/ReadMe.md b/ReadMe.md
index b284461..0b73fd4 100644
--- a/ReadMe.md
+++ b/ReadMe.md
@@ -3,9 +3,8 @@
## Dependencies
- nodejs https://nodejs.org/en/ (v8)
-- Kafka
+- Kafka
- Informix
-- Postgres
- Docker, Docker Compose
## Configuration
@@ -21,13 +20,19 @@ The following parameters can be set in config files or in env variables:
if not provided, then SSL connection is not used, direct insecure connection is used;
if provided, it can be either path to private key file or private key content
- KAFKA_GROUP_ID: the Kafka group id, default value is 'legacy-project-processor'
-- CREATE_PROJECT_TOPIC: create project Kafka topic, default value is 'project.notification.create'
-- UPDATE_PROJECT_TOPIC: update project Kafka topic, default value is 'project.notification.update'
-- DELETE_PROJECT_TOPIC: delete project member Kafka topic, default value is 'project.notification.delete'
+- CREATE_PROJECT_TOPIC: create project Kafka topic, default value is 'project.action.create'
+- UPDATE_PROJECT_TOPIC: update project Kafka topic, default value is 'project.action.update'
+- DELETE_PROJECT_TOPIC: delete project member Kafka topic, default value is 'project.action.delete'
- INFORMIX: Informix database configuration parameters, refer `config/default.js` for more information
-- POSTGRES: Postgres database configuration parameters, refer `config/default.js` for more information
+- AUTH0_URL: AUTH0 URL, used to get M2M token
+- AUTH0_PROXY_SERVER_URL: AUTH0 proxy server URL, used to get M2M token
+- AUTH0_AUDIENCE: AUTH0 audience, used to get M2M token
+- TOKEN_CACHE_TIME: AUTH0 token cache time, used to get M2M token
+- AUTH0_CLIENT_ID: AUTH0 client id, used to get M2M token
+- AUTH0_CLIENT_SECRET: AUTH0 client secret, used to get M2M token
+- PROJECTS_API: the topcoder projects API
-generally, we only need to update INFORMIX_HOST, KAFKA_URL and POSTGRES_URL via environment variables, see INFORMIX_HOST, KAFKA_URL and POSTGRES_URL parameter in docker/sample.api.env
+generally, we only need to update INFORMIX_HOST, KAFKA_URL, PROJECTS_API and M2M-related configuration via environment variables, see the parameters in docker/sample.api.env
There is a `/health` endpoint that checks for the health of the app. This sets up an expressjs server and listens on the environment variable `PORT`. It's not part of the configuration file and needs to be passed as an environment variable
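
Editor's note on the configuration hunk above: the PR replaces the direct Postgres connection with M2M-authenticated calls to the Topcoder Projects API (PROJECTS_API plus the AUTH0_* variables). Below is a minimal sketch of how those parameters typically fit together in Topcoder processors; the use of `tc-core-library-js` and `superagent`, and the `getProject` helper, are assumptions for illustration, not the repository's actual code.

```js
// Illustrative only: fetch an M2M token with the AUTH0_* settings and call PROJECTS_API.
// Library choices (tc-core-library-js, superagent) are assumptions, not confirmed by this diff.
const config = require('config')
const superagent = require('superagent')
const m2mAuth = require('tc-core-library-js').auth.m2m

const m2m = m2mAuth({
  AUTH0_URL: config.AUTH0_URL,
  AUTH0_AUDIENCE: config.AUTH0_AUDIENCE,
  TOKEN_CACHE_TIME: config.TOKEN_CACHE_TIME,
  AUTH0_PROXY_SERVER_URL: config.AUTH0_PROXY_SERVER_URL
})

// Hypothetical helper: look up a project from the v5 Projects API.
async function getProject (projectId) {
  // getMachineToken caches the token for TOKEN_CACHE_TIME
  const token = await m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET)
  const res = await superagent
    .get(`${config.PROJECTS_API}/projects/${projectId}`)
    .set('Authorization', `Bearer ${token}`)
  return res.body
}

getProject(1000).then(p => console.log(p.id)).catch(console.error)
```

With this in place, `docker/api.env` only needs the Projects API base URL and the AUTH0 credentials instead of POSTGRES_URL, which is why that variable disappears from the docker configuration below.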
@@ -47,20 +52,20 @@ Configuration for the tests is at `config/test.js`, only add such new configurat
`bin/kafka-server-start.sh config/server.properties`
- note that the zookeeper server is at localhost:2181, and Kafka server is at localhost:9092
- use another terminal, go to same directory, create the needed topics:
- `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.notification.create`
+ `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.action.create`
- `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.notification.update`
+ `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.action.update`
- `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.notification.delete`
+ `bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic project.action.delete`
- verify that the topics are created:
`bin/kafka-topics.sh --list --zookeeper localhost:2181`,
it should list out the created topics
-- run the producer and then write some message into the console to send to the `project.notification.create` topic:
+- run the producer and then write some message into the console to send to the `project.action.create` topic:
in the console, write message, one message per line:
-  `{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1000, "name": "Develop website", "description": "This is description", "type": "Develop website", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 8547899 } }`
+  `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1000, "name": "Develop website", "description": "This is description", "type": "Develop website", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 8547899 } }`
- optionally, use another terminal, go to same directory, start a consumer to view the messages:
-  `bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic project.notification.create --from-beginning`
+  `bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic project.action.create --from-beginning`
- writing/reading messages to/from other topics are similar

## Topcoder Informix Database Setup
@@ -68,17 +73,6 @@ We will use Topcoder Informix database setup on Docker.
Go to `docker-ifx` folder and run `docker-compose up`

-## Postgres database setup
-
-- Checkout tc-project-service `v5-upgrade` branch
-```bash
-git clone https://github.com/topcoder-platform/tc-project-service.git
-git checkout v5-upgrade
-```
-- Modify `dbConfig.masterUrl` in `config/default.json`
-- Run command `npm install` to install dependencies
-- Run command `npm run sync:db` to create tables on Postgres database
-
## Local deployment
- Given the fact that the library used to access Informix DB depends on Informix Client SDK. We will run the application on Docker using a base image with Informix Client SDK installed and properly configured.
@@ -88,28 +82,30 @@ For deployment, please refer to next section 'Local Deployment with Docker'
To run the Legacy Project Processor using docker, follow the steps below
-1. Make sure that Kafka, Postgres and Informix are running as per instructions above.
+1. Make sure that Kafka, Project Service and Informix are running as per instructions above.
2. Go to `docker` folder
-3. Rename the file `sample.api.env` to `api.env` And properly update the IP addresses to match your environment for the variables : KAFKA_URL, INFORMIX_HOST and POSTGRES_URL( make sure to use IP address instead of hostname ( i.e localhost will not work)).Here is an example:
+3. Rename the file `sample.api.env` to `api.env` and uncomment the `env_file:` and `- api.env` lines in `docker-compose.yml`.
+
+4. Properly update the M2M-related configuration and the IP addresses to match your environment for the variables KAFKA_URL, INFORMIX_HOST and PROJECTS_API (make sure to use an IP address instead of a hostname, i.e. localhost will not work). Here is an example:
```
KAFKA_URL=192.168.31.8:9092
INFORMIX_HOST=192.168.31.8
-POSTGRES_URL=postgres://postgres:password@192.168.31.8:5432/postgres
+PROJECTS_API=192.168.31.8:8001/v5
```
-4. Once that is done, go to run the following command
+5. Once that is done, run the following command
```
docker-compose up
```
-5. When you are running the application for the first time, It will take some time initially to download the image and install the dependencies
+6. When you are running the application for the first time, it will take some time to download the image and install the dependencies

## Running e2e tests
You need to run `docker-compose build` if modify source files.
-Make sure run `docker-compose up` in `docker` folder once to make sure application will install dependencies and run successfully with Kafka, Postgres and Informix.
+Make sure to run `docker-compose up` in the `docker` folder once so that the application installs dependencies and runs successfully with Kafka and Informix.

To run e2e tests
Modify `docker/docker-compose.yml` with `command: run test`(uncomment it) and run `docker-compose up` in `docker` folder
diff --git a/Verification.md b/Verification.md
index f1b7c70..e5de17f 100644
--- a/Verification.md
+++ b/Verification.md
@@ -15,45 +15,45 @@ npm run test-data
```

## Verification
-1. start kafka-console-producer to write messages to `project.notification.create` topic:
-  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.notification.create`
+1. start kafka-console-producer to write messages to `project.action.create` topic:
+  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create`
2. write message:
-  `{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1000, "name": "Develop website", "description": "This is description", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 132458 } }`
+  `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1, "name": "Develop website", "description": "This is description", "directProjectId": null, "billingAccountId": 70015983, "type": "Web Application", "createdBy": 132458 } }`
3. check the app console to verify message has been properly handled.
4. Again, write another message(directProjectId is provided at this time):
-  `{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "name": "This is description", "directProjectId": 500, "billingAccountId": null, "type": "Web", "createdBy": 132458 } }`
+  `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "name": "This is description", "directProjectId": 500, "billingAccountId": null, "type": "Web", "createdBy": 132458 } }`
5. check the app console to verify message has been properly handled.
6. Try to write an invalid message:
-  `{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "name": "This is description", "directProjectId": 500, "billingAccountId": 100, "type": "Web", "createdBy": 132458 } }`
+  `{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "name": "This is description", "directProjectId": 500, "billingAccountId": 100, "type": "Web", "createdBy": 132458 } }`
7. You will see error message in the app console.
-8. start kafka-console-producer to write messages to `project.notification.update` topic:
-  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.notification.update`
+8. start kafka-console-producer to write messages to `project.action.update` topic:
+  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.update`
9. write message:
-  `{ "topic": "project.notification.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "directProjectId": 500, "billingAccountId": 70015984, "updatedBy": 132458 } }`
+  `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "directProjectId": 500, "billingAccountId": 70015984, "updatedBy": 132458 } }`
10. check the app console to verify message has been properly handled.
11. Try to write an invalid message:
-  `{ "topic": "project.notification.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 1001, "directProjectId": 500, "billingAccountId": 1, "updatedBy": 132458 } }`
+  `{ "topic": "project.action.update", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project", "id": 2, "directProjectId": 500, "billingAccountId": 1, "updatedBy": 132458 } }`
12. You will see error message in the app console.
-13. start kafka-console-producer to write messages to `project.notification.update` topic:
-  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.notification.create`
+13. start kafka-console-producer to write messages to `project.action.create` topic:
+  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.create`
14. write messages:
-`{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 132457, "role": "copilot", "createdBy": 132458 } }`
+`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 132457, "role": "copilot", "createdBy": 132458 } }`

-`{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124835, "role": "manager", "createdBy": 132458 } }`
+`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124835, "role": "manager", "createdBy": 132458 } }`

-`{ "topic": "project.notification.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124836, "role": "account_manager", "createdBy": 132458 } }`
+`{ "topic": "project.action.create", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124836, "role": "account_manager", "createdBy": 132458 } }`
15. check the app console to verify messages has been properly handled.
16. Repeat step 14 again.
17. You will see error messages in the app console.
-18. start kafka-console-producer to write messages to `project.notification.update` topic:
-  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.notification.delete`
+18. start kafka-console-producer to write messages to `project.action.delete` topic:
+  `bin/kafka-console-producer.sh --broker-list localhost:9092 --topic project.action.delete`
19. write messages:
-`{ "topic": "project.notification.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 132457, "role": "copilot", "deletedBy": 132458 } }`
+`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 132457, "role": "copilot", "deletedBy": 132458 } }`

-`{ "topic": "project.notification.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124835, "role": "manager", "deletedBy": 132458 } }`
+`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124835, "role": "manager", "deletedBy": 132458 } }`

-`{ "topic": "project.notification.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 1001, "userId": 124836, "role": "account_manager", "deletedBy": 132458 } }`
+`{ "topic": "project.action.delete", "originator": "project-api", "timestamp": "2018-07-02T00:00:00", "mime-type": "application/json", "payload": { "resource": "project.member", "projectId": 2, "userId": 124836, "role": "account_manager", "deletedBy": 132458 } }`
20. check the app console to verify messages has been properly handled.
21. Repeat step 14 again.
@@ -77,20 +77,29 @@ select * from projects;
## E2E tests coverage

-  103 passing (3m)
+``` code
+  103 passing (2m)

-  File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
-  ----------------------|----------|----------|----------|----------|------------------
-  All files | 98.23 | 91.98 | 100 | 98.21 |
-   config | 100 | 89.74 | 100 | 100 |
-    default.js | 100 | 89.74 | 100 | 100 | 8,25,36
-    test.js | 100 | 100 | 100 | 100 |
-   src | 98.57 | 85 | 100 | 98.51 |
-    app.js | 98.41 | 85 | 100 | 98.39 | 85
-    bootstrap.js | 100 | 100 | 100 | 100 |
-    constants.js | 100 | 100 | 100 | 100 |
-   src/common | 92.59 | 70.83 | 100 | 92.59 |
-    helper.js | 100 | 100 | 100 | 100 |
-    logger.js | 90.63 | 65 | 100 | 90.63 |32,55,60,84,98,118
-   src/services | 99.67 | 99.04 | 100 | 99.66 |
-    ProcessorService.js | 99.67 | 99.04 | 100 | 99.66 | 875
+
+  > legacy-project-processor@1.0.0 cover:report /legacy-project-processor
+  > nyc report --reporter=html --reporter=text
+
+  ----------------------|----------|----------|----------|----------|-------------------|
+  File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
+  ----------------------|----------|----------|----------|----------|-------------------|
+  All files | 96.75 | 91.01 | 96.72 | 96.72 | |
+   config | 100 | 93.75 | 100 | 100 | |
+    default.js | 100 | 93.75 | 100 | 100 | 8,25 |
+    test.js | 100 | 100 | 100 | 100 | |
+   src | 90 | 75 | 71.43 | 89.55 | |
+    app.js | 88.89 | 75 | 60 | 88.71 |... 87,88,89,90,92 |
+    bootstrap.js | 100 | 100 | 100 | 100 | |
+    constants.js | 100 | 100 | 100 | 100 | |
+   src/common | 92.5 | 70.83 | 100 | 92.5 | |
+    helper.js | 100 | 100 | 100 | 100 | |
+    logger.js | 90.63 | 65 | 100 | 90.63 |32,55,60,84,98,118 |
+   src/services | 99.35 | 98.04 | 100 | 99.35 | |
+    ProcessorService.js | 99.33 | 98.04 | 100 | 99.33 | 712,882 |
+    ProjectService.js | 100 | 100 | 100 | 100 | |
+  ----------------------|----------|----------|----------|----------|-------------------|
+```
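
Editor's note on the verification steps above: they publish the sample payloads by hand with kafka-console-producer. For scripted checks, a roughly equivalent sketch that publishes the step-2 message to `project.action.create` from Node.js is shown below; the `no-kafka` client is an assumption about the environment, so substitute whichever Kafka client the project actually uses.

```js
// Sketch: publish one of the sample messages to project.action.create programmatically.
// Assumes the no-kafka client; KAFKA_URL should point at your broker (e.g. localhost:9092).
const Kafka = require('no-kafka')

async function publishSample () {
  const producer = new Kafka.Producer({ connectionString: process.env.KAFKA_URL || 'localhost:9092' })
  await producer.init()
  const message = {
    topic: 'project.action.create',
    originator: 'project-api',
    timestamp: '2018-07-02T00:00:00',
    'mime-type': 'application/json',
    payload: { resource: 'project', id: 1, name: 'Develop website', description: 'This is description', directProjectId: null, billingAccountId: 70015983, type: 'Web Application', createdBy: 132458 }
  }
  // no-kafka expects the message value as a string
  await producer.send({ topic: message.topic, message: { value: JSON.stringify(message) } })
  await producer.end()
}

publishSample().catch(console.error)
```

The payload mirrors step 2 above, so the processor should handle it exactly as it handles the console-producer version.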
diff --git a/config/default.js b/config/default.js
index 884da49..1e64183 100644
--- a/config/default.js
+++ b/config/default.js
@@ -14,9 +14,9 @@ module.exports = {
   // Kafka group id
   KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'legacy-project-processor',

-  CREATE_PROJECT_TOPIC: process.env.CREATE_PROJECT_TOPIC || 'project.notification.create',
-  UPDATE_PROJECT_TOPIC: process.env.UPDATE_PROJECT_TOPIC || 'project.notification.update',
-  DELETE_PROJECT_TOPIC: process.env.DELETE_PROJECT_TOPIC || 'project.notification.delete',
+  CREATE_PROJECT_TOPIC: process.env.CREATE_PROJECT_TOPIC || 'project.action.create',
+  UPDATE_PROJECT_TOPIC: process.env.UPDATE_PROJECT_TOPIC || 'project.action.update',
+  DELETE_PROJECT_TOPIC: process.env.DELETE_PROJECT_TOPIC || 'project.action.delete',

   // informix database configuration
   INFORMIX: {
@@ -31,12 +31,13 @@ module.exports = {
     POOL_MAX_SIZE: parseInt(process.env.IFX_POOL_MAX_SIZE) || 10 // use connection pool in processor, the pool size
   },

-  // postgres database configuration
-  POSTGRES: {
-    URL: process.env.POSTGRES_URL || 'postgres://coder:mysecretpassword@dockerhost:5432/projectsdb', // url
-    MAX_POOL_SIZE: parseInt(process.env.POSTGRES_MAX_POOL_SIZE) || 50, // max pool size
-    MIN_POOL_SIZE: parseInt(process.env.POSTGRES_MIN_POOL_SIZE) || 4, // min pool size
-    IDLE_TIME_OUT: parseInt(process.env.POSTGRES_IDLE_TIME_OUT) || 1000, // idle time
-    PROJECT_TABLE_NAME: 'projects' // project table name
-  }
+  // used to get M2M token
+  AUTH0_URL: process.env.AUTH0_URL,
+  AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
+  AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
+  TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
+  AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
+  AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
+
+  PROJECTS_API: process.env.PROJECTS_API || 'http://localhost:8001/v5'
 }
diff --git a/docker/api.env b/docker/api.env
deleted file mode 100644
index 45722ee..0000000
--- a/docker/api.env
+++ /dev/null
@@ -1,3 +0,0 @@
-KAFKA_URL=127.0.0.1:9092
-INFORMIX_HOST=127.0.0.1
-POSTGRES_URL=postgres://coder:mysecretpassword@localhost:5432/projectsdb
\ No newline at end of file
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 8f486e1..d698a67 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -7,7 +7,7 @@ services:
    build:
      context: ../
      dockerfile: docker/Dockerfile
    # env_file:
-    #  - api.env
-    # command: run start
+    #  - api.env
+    command: run start
    # command: run test
-    command: run test:cov
+    # command: run test:cov
diff --git a/docker/sample.api.env b/docker/sample.api.env
index de66eff..8cb4f0d 100644
--- a/docker/sample.api.env
+++ b/docker/sample.api.env
@@ -1,3 +1,8 @@
-KAFKA_URL=