From b8f1e54b8d0e8a47156df49727a2bc1a2f19fde0 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 12:59:26 +0530 Subject: [PATCH 01/20] Trying multi container tasks --- deploy.sh | 35 ++++++++++++++++------------- package.json | 1 + src/index-kafka.js | 51 +++++++++++++++++++++++++++++++++++++++++++ src/services/index.js | 12 +++++----- 4 files changed, 78 insertions(+), 21 deletions(-) create mode 100644 src/index-kafka.js diff --git a/deploy.sh b/deploy.sh index 1d4ea007..131099aa 100755 --- a/deploy.sh +++ b/deploy.sh @@ -7,7 +7,8 @@ ENV=$1 COUNTER_LIMIT=20 ACCOUNT_ID=$(eval "echo \$${ENV}_AWS_ACCOUNT_ID") AWS_REGION=$(eval "echo \$${ENV}_AWS_REGION") -AWS_ECS_CONTAINER_NAME="tc-project-service" +API_CONTAINER_NAME="tc-project-service" +BUS_CONSUMER_CONTAINER_NAME="tc-project-bus-consumer" AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY") AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER") AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE") @@ -42,20 +43,13 @@ deploy_cluster() { } make_task_def(){ - task_template='{ - "family": "%s", - "requiresCompatibilities": ["EC2", "FARGATE"], - "networkMode": "awsvpc", - "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", - "cpu": "1024", - "memory": "2048", - "containerDefinitions": [ - { + container_template='{ "name": "%s", "image": "%s.dkr.ecr.%s.amazonaws.com/%s:%s", "essential": true, - "memory": 1536, - "cpu": 768, + "entryPoint": ["%s", "%s"], + "memory": 768, + "cpu": 384, "environment": [ { "name": "NODE_ENV", @@ -213,7 +207,17 @@ make_task_def(){ "awslogs-stream-prefix": "%s" } } - } + }' + task_template='{ + "family": "%s", + "requiresCompatibilities": ["EC2", "FARGATE"], + "networkMode": "awsvpc", + "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", + "cpu": "1024", + "memory": "2048", + "containerDefinitions": [ + %s, + %s ]}' API_VERSION=$(eval "echo \$${ENV}_API_VERSION") DB_MASTER_URL=$(eval "echo \$${ENV}_DB_MASTER_URL") @@ -252,8 +256,9 @@ 
make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - - task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL 
$CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") } push_ecr_image(){ diff --git a/package.json b/package.json index 008d7ad1..06bf431c 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,7 @@ "migrate:es": "./node_modules/.bin/babel-node migrations/seedElasticsearchIndex.js", "prestart": "npm run -s build", "start": "node dist", + "startKafkaConsumers": "npm run -s build && node dist/index-kafka.js", "start:dev": "NODE_ENV=development PORT=8001 nodemon -w src --exec \"babel-node src --presets es2015\" | ./node_modules/.bin/bunyan", "test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --compilers js:babel-core/register $(find src -path '*spec.js*')", "test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --compilers js:babel-core/register $(find src -path '*spec.js*')", diff --git a/src/index-kafka.js b/src/index-kafka.js new file mode 100644 index 00000000..aa0123e5 --- /dev/null +++ b/src/index-kafka.js @@ -0,0 +1,51 @@ +import _ from 'lodash'; +import config from 'config'; +import startKafkaConsumer from './services/kafkaConsumer'; +import { kafkaHandlers } from './events'; +import models from 
'./models'; + +const coreLib = require('tc-core-library-js'); + + +// ======================= +// Loger ========= +// ======================= +let appName = 'tc-projects-consumer'; +switch (process.env.NODE_ENV.toLowerCase()) { + case 'development': + appName += '-dev'; + break; + case 'qa': + appName += '-qa'; + break; + case 'production': + default: + appName += '-prod'; + break; +} + +const logger = coreLib.logger({ + name: appName, + level: _.get(config, 'logLevel', 'debug').toLowerCase(), + captureLogs: config.get('captureLogs'), + logentriesToken: _.get(config, 'logentriesToken', null), +}); + +// ======================= +// Database ========= +// ======================= +logger.info('Registering models ... ', !!models); + +/** + * Handle server shutdown gracefully + * @returns {undefined} + */ +function gracefulShutdown() { + // TODO +} +process.on('SIGTERM', gracefulShutdown); +process.on('SIGINT', gracefulShutdown); + +const app = { logger, models }; + +module.exports = startKafkaConsumer(kafkaHandlers, app, logger); diff --git a/src/services/index.js b/src/services/index.js index 017a6ec2..6c8306c8 100644 --- a/src/services/index.js +++ b/src/services/index.js @@ -2,8 +2,8 @@ import config from 'config'; import RabbitMQService from './rabbitmq'; -import startKafkaConsumer from './kafkaConsumer'; -import { kafkaHandlers } from '../events'; +// import startKafkaConsumer from './kafkaConsumer'; +// import { kafkaHandlers } from '../events'; /** * Responsible for establishing connections to all external services @@ -33,10 +33,10 @@ module.exports = (fapp, logger) => { .then(() => { logger.info('RabbitMQ service initialized'); }) - .then(() => startKafkaConsumer(kafkaHandlers, app, logger)) - .then(() => { - logger.info('Kafka consumer service initialized'); - }) + // .then(() => startKafkaConsumer(kafkaHandlers, app, logger)) + // .then(() => { + // logger.info('Kafka consumer service initialized'); + // }) .catch((err) => { logger.error('Error initializing 
services', err); // gracefulShutdown() From 9937ca892dc0626a3938817c12b584e948c6c272 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 13:03:32 +0530 Subject: [PATCH 02/20] Making the feature branch deployable temporarily --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 459ec723..08b90436 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -76,7 +76,7 @@ workflows: - test filters: branches: - only: ['dev'] + only: ['dev', 'feature/multi-container-task'] - deployProd: requires: - test From 1f1cfa4002c1fe481681d520631b26771235132a Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 16:00:45 +0530 Subject: [PATCH 03/20] fixed issue with task register aws command --- deploy.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/deploy.sh b/deploy.sh index 131099aa..3de5a4fb 100755 --- a/deploy.sh +++ b/deploy.sh @@ -15,7 +15,8 @@ AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE") AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN") AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET") VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS") -PORT=3000 +API_PORT=3000 +CONSUMER_PORT=3001 family="tc-project-service" # configures aws cli for further usage @@ -256,9 +257,10 @@ make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL 
$DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $API_PORT 
$API_PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $CONSUMER_PORT $CONSUMER_PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") + # echo $task_def } push_ecr_image(){ From 2a50803aead55918d1e25be3dab750c2ba94c3f2 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 16:13:18 +0530 Subject: [PATCH 04/20] Fixing the way custom scripts are run --- deploy.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deploy.sh b/deploy.sh index 3de5a4fb..bae0cde1 100755 --- a/deploy.sh +++ b/deploy.sh @@ -48,7 +48,7 @@ make_task_def(){ "name": "%s", "image": "%s.dkr.ecr.%s.amazonaws.com/%s:%s", "essential": true, - "entryPoint": ["%s", "%s"], + "entryPoint": ["%s", "%s", "%s"], "memory": 768, "cpu": 384, "environment": [ @@ -257,8 +257,8 @@ make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS 
$LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $API_PORT $API_PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $CONSUMER_PORT $CONSUMER_PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID 
$SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $API_PORT $API_PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $CONSUMER_PORT $CONSUMER_PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") # echo $task_def } From 3e49aeb332842e5966545ee868b221dcad50a3bb Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 17:51:05 +0530 Subject: [PATCH 05/20] trying to get rid of port of consumer container --- deploy.sh | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/deploy.sh b/deploy.sh index bae0cde1..bb1faedc 100755 --- a/deploy.sh +++ b/deploy.sh @@ -44,6 +44,13 @@ deploy_cluster() { } make_task_def(){ + port_mappings = '"portMappings": [ + { + "hostPort": %s, + "protocol": "tcp", + "containerPort": %s + } + ],' 
container_template='{ "name": "%s", "image": "%s.dkr.ecr.%s.amazonaws.com/%s:%s", @@ -193,13 +200,7 @@ make_task_def(){ "value": "%s" } ], - "portMappings": [ - { - "hostPort": %s, - "protocol": "tcp", - "containerPort": %s - } - ], + %s "logConfiguration": { "logDriver": "awslogs", "options": { @@ -257,8 +258,10 @@ make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $API_PORT $API_PORT $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID 
"$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $CONSUMER_PORT $CONSUMER_PORT $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + api_container_port_mapping = $(printf "$port_mappings" $API_PORT $API_PORT) + bus_container_port_mapping = '"portMappings": [],' + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $api_container_port_mapping $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID 
$KAFKA_URL $bus_container_port_mapping $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") # echo $task_def } From 17105a8c6dfdb8e56522d1e459f0cc5d5eda93a7 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Fri, 12 Oct 2018 18:06:03 +0530 Subject: [PATCH 06/20] syntax error fix --- deploy.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deploy.sh b/deploy.sh index bb1faedc..e47e7e2c 100755 --- a/deploy.sh +++ b/deploy.sh @@ -44,7 +44,7 @@ deploy_cluster() { } make_task_def(){ - port_mappings = '"portMappings": [ + port_mappings='"portMappings": [ { "hostPort": %s, "protocol": "tcp", @@ -258,8 +258,8 @@ make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - api_container_port_mapping = $(printf "$port_mappings" $API_PORT $API_PORT) - bus_container_port_mapping = '"portMappings": [],' + api_container_port_mapping=$(printf "$port_mappings" $API_PORT $API_PORT) + bus_container_port_mapping='"portMappings": [],' api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $api_container_port_mapping $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) bus_container_def=$(printf 
"$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $bus_container_port_mapping $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") From 54f4d41f253e4308c71021107857ff65437de83a Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 17 Oct 2018 16:39:35 +0530 Subject: [PATCH 07/20] fixed replacement of portMappings --- deploy.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy.sh b/deploy.sh index e47e7e2c..efcc123e 100755 --- a/deploy.sh +++ b/deploy.sh @@ -260,8 +260,8 @@ make_task_def(){ api_container_port_mapping=$(printf "$port_mappings" $API_PORT $API_PORT) bus_container_port_mapping='"portMappings": [],' - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME 
$RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $api_container_port_mapping $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $bus_container_port_mapping $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" 
"$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$api_container_port_mapping" $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$bus_container_port_mapping" $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") # echo $task_def } From 015947217a1eddcc7b9ebca0e4a0ad83e3e0b217 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 17 Oct 2018 17:31:37 +0530 Subject: [PATCH 08/20] trying to give more memory and cpu to kafka consumers --- deploy.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/deploy.sh b/deploy.sh index efcc123e..836c8e6d 100755 --- a/deploy.sh +++ b/deploy.sh @@ -56,8 +56,8 @@ make_task_def(){ "image": "%s.dkr.ecr.%s.amazonaws.com/%s:%s", "essential": true, "entryPoint": ["%s", "%s", "%s"], - "memory": 768, - "cpu": 384, + "memory": %s, + "cpu": %s, "environment": [ { "name": "NODE_ENV", @@ -260,8 +260,8 @@ make_task_def(){ api_container_port_mapping=$(printf "$port_mappings" $API_PORT $API_PORT) bus_container_port_mapping='"portMappings": [],' - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID 
$AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$api_container_port_mapping" $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$bus_container_port_mapping" $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) + api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" "512" "256" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN 
$AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$api_container_port_mapping" $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) + bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" "1024" "512" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$bus_container_port_mapping" $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") # echo $task_def } From 22a822aba1580f16b2b4e6e641e7d51df9afdc00 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 18 Oct 2018 17:21:54 +0530 Subject: [PATCH 09/20] =?UTF-8?q?Trying=20separate=20service=20instead=20o?= =?UTF-8?q?f=20separate=20containers=20in=20the=20same=20task=20because=20?= =?UTF-8?q?aws=20target=20group=E2=80=99s=20health=20check=20fails=20the?= 
=?UTF-8?q?=20consumer=20container=20because=20it=20does=20not=20open=20up?= =?UTF-8?q?=20the=20same=20port?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- deploy.sh | 80 +++++++++++++++++++++++++++---------------------------- 1 file changed, 39 insertions(+), 41 deletions(-) diff --git a/deploy.sh b/deploy.sh index 836c8e6d..34a74f58 100755 --- a/deploy.sh +++ b/deploy.sh @@ -7,16 +7,16 @@ ENV=$1 COUNTER_LIMIT=20 ACCOUNT_ID=$(eval "echo \$${ENV}_AWS_ACCOUNT_ID") AWS_REGION=$(eval "echo \$${ENV}_AWS_REGION") -API_CONTAINER_NAME="tc-project-service" -BUS_CONSUMER_CONTAINER_NAME="tc-project-bus-consumer" +AWS_ECS_CONTAINER_NAME="tc-project-service" AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY") +AWS_CONSUMERS_REPOSITORY=$(eval "echo \$${ENV}_AWS_CONSUMERS_REPOSITORY") AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER") AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE") +AWS_ECS_CONSUMERS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_CONSUMERS_SERVICE") AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN") AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET") VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS") -API_PORT=3000 -CONSUMER_PORT=3001 +PORT=3000 family="tc-project-service" # configures aws cli for further usage @@ -31,9 +31,9 @@ configure_aws_cli() { # deploys the app to the ecs cluster deploy_cluster() { - make_task_def - register_definition - if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $AWS_ECS_SERVICE --task-definition $revision | \ + make_task_def $2 $3 $4 + register_definition $1 + if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $1 --task-definition $revision | \ $JQ '.service.taskDefinition') != $revision ]]; then echo "Error updating service." 
return 1 @@ -44,20 +44,21 @@ deploy_cluster() { } make_task_def(){ - port_mappings='"portMappings": [ - { - "hostPort": %s, - "protocol": "tcp", - "containerPort": %s - } - ],' - container_template='{ + task_template='{ + "family": "%s", + "requiresCompatibilities": ["EC2", "FARGATE"], + "networkMode": "awsvpc", + "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", + "cpu": "1024", + "memory": "2048", + "containerDefinitions": [ + { "name": "%s", "image": "%s.dkr.ecr.%s.amazonaws.com/%s:%s", "essential": true, + "memory": 1536, + "cpu": 768, "entryPoint": ["%s", "%s", "%s"], - "memory": %s, - "cpu": %s, "environment": [ { "name": "NODE_ENV", @@ -200,7 +201,13 @@ make_task_def(){ "value": "%s" } ], - %s + "portMappings": [ + { + "hostPort": %s, + "protocol": "tcp", + "containerPort": %s + } + ], "logConfiguration": { "logDriver": "awslogs", "options": { @@ -209,17 +216,7 @@ make_task_def(){ "awslogs-stream-prefix": "%s" } } - }' - task_template='{ - "family": "%s", - "requiresCompatibilities": ["EC2", "FARGATE"], - "networkMode": "awsvpc", - "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", - "cpu": "1024", - "memory": "2048", - "containerDefinitions": [ - %s, - %s + } ]}' API_VERSION=$(eval "echo \$${ENV}_API_VERSION") DB_MASTER_URL=$(eval "echo \$${ENV}_DB_MASTER_URL") @@ -258,17 +255,13 @@ make_task_def(){ KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID") KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - api_container_port_mapping=$(printf "$port_mappings" $API_PORT $API_PORT) - bus_container_port_mapping='"portMappings": [],' - api_container_def=$(printf "$container_template" $API_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "start" "512" "256" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL 
$MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$api_container_port_mapping" $API_CONTAINER_NAME $AWS_REGION $NODE_ENV) - bus_container_def=$(printf "$container_template" $BUS_CONSUMER_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 "npm" "run" "startKafkaConsumers" "1024" "512" $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$bus_container_port_mapping" $BUS_CONSUMER_CONTAINER_NAME $AWS_REGION $NODE_ENV) - task_def=$(printf "$task_template" $family $ACCOUNT_ID "$api_container_def" "$bus_container_def") - # echo $task_def + + task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $1 $2 $3 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL 
$PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) } push_ecr_image(){ eval $(aws ecr get-login --region $AWS_REGION --no-include-email) - docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$AWS_REPOSITORY:$CIRCLE_SHA1 + docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$1:$CIRCLE_SHA1 } register_definition() { @@ -283,13 +276,13 @@ register_definition() { check_service_status() { counter=0 sleep 60 - servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'` + servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'` while [[ $servicestatus != *"steady state"* ]] do echo "Current event message : $servicestatus" echo "Waiting for 30 seconds to check the service status...." sleep 30 - servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'` + servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'` counter=`expr $counter + 1` if [[ $counter -gt $COUNTER_LIMIT ]] ; then echo "Service does not reach steady state within 10 minutes. 
Please check" @@ -300,6 +293,11 @@ check_service_status() { } configure_aws_cli -push_ecr_image -deploy_cluster -check_service_status +push_ecr_image $AWS_REPOSITORY +deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start" + +push_ecr_image $AWS_CONSUMERS_REPOSITORY +deploy_cluster $AWS_ECS_CONSUMERS_SERVICE "npm" "run" "startKafkaConsumers" + +check_service_status $AWS_ECS_SERVICE +check_service_status $AWS_ECS_CONSUMERS_SERVICE \ No newline at end of file From 1e94a9702a48faebd66cea63390a3c6f961bb585 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 18 Oct 2018 17:33:55 +0530 Subject: [PATCH 10/20] Not using separate repository for consumers --- deploy.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy.sh b/deploy.sh index 34a74f58..70b40d6e 100755 --- a/deploy.sh +++ b/deploy.sh @@ -296,7 +296,7 @@ configure_aws_cli push_ecr_image $AWS_REPOSITORY deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start" -push_ecr_image $AWS_CONSUMERS_REPOSITORY +# push_ecr_image $AWS_CONSUMERS_REPOSITORY deploy_cluster $AWS_ECS_CONSUMERS_SERVICE "npm" "run" "startKafkaConsumers" check_service_status $AWS_ECS_SERVICE From 27519d86752833ca62f148538ba657ddba5e79fa Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 18 Oct 2018 17:47:48 +0530 Subject: [PATCH 11/20] Removed unused variable and commands --- deploy.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/deploy.sh b/deploy.sh index 70b40d6e..c9b6016b 100755 --- a/deploy.sh +++ b/deploy.sh @@ -9,7 +9,6 @@ ACCOUNT_ID=$(eval "echo \$${ENV}_AWS_ACCOUNT_ID") AWS_REGION=$(eval "echo \$${ENV}_AWS_REGION") AWS_ECS_CONTAINER_NAME="tc-project-service" AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY") -AWS_CONSUMERS_REPOSITORY=$(eval "echo \$${ENV}_AWS_CONSUMERS_REPOSITORY") AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER") AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE") AWS_ECS_CONSUMERS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_CONSUMERS_SERVICE") @@ -296,7 +295,6 @@ configure_aws_cli 
push_ecr_image $AWS_REPOSITORY deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start" -# push_ecr_image $AWS_CONSUMERS_REPOSITORY deploy_cluster $AWS_ECS_CONSUMERS_SERVICE "npm" "run" "startKafkaConsumers" check_service_status $AWS_ECS_SERVICE From e1e0e615cee99bc7406c170aa8d789ce0e925021 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 18 Oct 2018 17:54:17 +0530 Subject: [PATCH 12/20] trying separate task definitions for api and consumers --- deploy.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy.sh b/deploy.sh index c9b6016b..91843808 100755 --- a/deploy.sh +++ b/deploy.sh @@ -30,7 +30,7 @@ configure_aws_cli() { # deploys the app to the ecs cluster deploy_cluster() { - make_task_def $2 $3 $4 + make_task_def $1 $2 $3 $4 register_definition $1 if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $1 --task-definition $revision | \ $JQ '.service.taskDefinition') != $revision ]]; then @@ -255,7 +255,7 @@ make_task_def(){ KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $1 $2 $3 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) + task_def=$(printf "$task_template" $1 $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY 
$CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) } push_ecr_image(){ From f89dca164ca6351fdd15ed37c3a0df674f0d984a Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 24 Oct 2018 17:16:27 +0530 Subject: [PATCH 13/20] Better naming for consumers service env variable --- deploy.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deploy.sh b/deploy.sh index 91843808..65c30cb8 100755 --- a/deploy.sh +++ b/deploy.sh @@ -11,7 +11,7 @@ AWS_ECS_CONTAINER_NAME="tc-project-service" AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY") AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER") AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE") -AWS_ECS_CONSUMERS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_CONSUMERS_SERVICE") +AWS_ECS_SERVICE_CONSUMERS=$(eval "echo \$${ENV}_AWS_ECS_SERVICE_CONSUMERS") AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN") AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET") VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS") @@ -295,7 +295,7 @@ configure_aws_cli push_ecr_image $AWS_REPOSITORY deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start" -deploy_cluster $AWS_ECS_CONSUMERS_SERVICE "npm" "run" "startKafkaConsumers" +deploy_cluster $AWS_ECS_SERVICE_CONSUMERS "npm" "run" "startKafkaConsumers" check_service_status $AWS_ECS_SERVICE -check_service_status 
$AWS_ECS_CONSUMERS_SERVICE \ No newline at end of file +check_service_status $AWS_ECS_SERVICE_CONSUMERS \ No newline at end of file From 726725c8c0417762fbf01af14f7a168d48a47c32 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 24 Oct 2018 17:32:01 +0530 Subject: [PATCH 14/20] trying deployment without AWS keys, to avoid unnecessary maintenance and exposure of keys. Trying to use task execution role instead --- deploy.sh | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/deploy.sh b/deploy.sh index 65c30cb8..a5b7c795 100755 --- a/deploy.sh +++ b/deploy.sh @@ -47,7 +47,7 @@ make_task_def(){ "family": "%s", "requiresCompatibilities": ["EC2", "FARGATE"], "networkMode": "awsvpc", - "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", + "executionRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role", "cpu": "1024", "memory": "2048", "containerDefinitions": [ @@ -87,14 +87,6 @@ make_task_def(){ "name": "AWS_REGION", "value": "%s" }, - { - "name": "AWS_ACCESS_KEY_ID", - "value": "%s" - }, - { - "name": "AWS_SECRET_ACCESS_KEY", - "value": "%s" - }, { "name": "AUTH_DOMAIN", "value": "%s" @@ -255,7 +247,7 @@ make_task_def(){ KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - task_def=$(printf "$task_template" $1 $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" 
$KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) + task_def=$(printf "$task_template" $1 $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) } push_ecr_image(){ From b3a9d6d8216984ee116b46efd235518760b0b61a Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 24 Oct 2018 17:54:46 +0530 Subject: [PATCH 15/20] trying to avoid passing explicit aws config to let the connector decide the credentials --- src/util.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/util.js b/src/util.js index 906f1389..e810a657 100644 --- a/src/util.js +++ b/src/util.js @@ -317,10 +317,10 @@ _.assignIn(util, { apiVersion: config.get('elasticsearchConfig.apiVersion'), hosts: esHost, connectionClass: require('http-aws-es'), // eslint-disable-line global-require - amazonES: { - region: 'us-east-1', - credentials: new AWS.EnvironmentCredentials('AWS'), - }, + // amazonES: { + // region: 'us-east-1', + // credentials: new AWS.EnvironmentCredentials('AWS'), + // }, }); } else { esClient = new elasticsearch.Client(_.cloneDeep(config.elasticsearchConfig)); From 8f0078ec9a496a3806f2e482a8ca4f7b59d433bd Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Wed, 24 Oct 2018 17:56:50 +0530 Subject: [PATCH 16/20] trying to avoid passing explicit aws 
config to let the connector decide the credentials --- src/util.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.js b/src/util.js index e810a657..542c2ee1 100644 --- a/src/util.js +++ b/src/util.js @@ -16,7 +16,7 @@ import config from 'config'; import urlencode from 'urlencode'; import elasticsearch from 'elasticsearch'; import Promise from 'bluebird'; -import AWS from 'aws-sdk'; +// import AWS from 'aws-sdk'; import { ADMIN_ROLES, TOKEN_SCOPES } from './constants'; From a1bd643c2d77b44b00aacbcbab4242387dcc2a43 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 25 Oct 2018 11:46:09 +0530 Subject: [PATCH 17/20] dependency update --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 06bf431c..64aa382d 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,7 @@ "express-request-id": "^1.1.0", "express-sanitizer": "^1.0.2", "express-validation": "^0.6.0", - "http-aws-es": "^1.1.3", + "http-aws-es": "^6.0.0", "joi": "^8.0.5", "jsonwebtoken": "^8.3.0", "lodash": "^4.16.4", From 5bde4206cdcf2b732e18808d74f5b3245932537b Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 25 Oct 2018 11:48:07 +0530 Subject: [PATCH 18/20] using stable version instead of latest --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 64aa382d..9aadead8 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,7 @@ "express-request-id": "^1.1.0", "express-sanitizer": "^1.0.2", "express-validation": "^0.6.0", - "http-aws-es": "^6.0.0", + "http-aws-es": "^4.0.0", "joi": "^8.0.5", "jsonwebtoken": "^8.3.0", "lodash": "^4.16.4", From b275a8e9d0d564390c95043f14902a8818605d52 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 25 Oct 2018 12:39:39 +0530 Subject: [PATCH 19/20] added task role to the task definition --- deploy.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deploy.sh b/deploy.sh index 
a5b7c795..87f6fd23 100755 --- a/deploy.sh +++ b/deploy.sh @@ -47,7 +47,8 @@ make_task_def(){ "family": "%s", "requiresCompatibilities": ["EC2", "FARGATE"], "networkMode": "awsvpc", - "executionRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role", + "taskRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role", + "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole", "cpu": "1024", "memory": "2048", "containerDefinitions": [ @@ -247,7 +248,7 @@ make_task_def(){ KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL") - task_def=$(printf "$task_template" $1 $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) + task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID 
"$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) } push_ecr_image(){ From 6a44f74e6772901d60ae700bb34b5ede4109df12 Mon Sep 17 00:00:00 2001 From: Vikas Agarwal Date: Thu, 25 Oct 2018 15:00:21 +0530 Subject: [PATCH 20/20] removed feature branch from deployable branch list --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 08b90436..459ec723 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -76,7 +76,7 @@ workflows: - test filters: branches: - only: ['dev', 'feature/multi-container-task'] + only: ['dev'] - deployProd: requires: - test