diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..573a9b9 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,76 @@ +version: 2 + +# Node image for the Node.js project +node_env: &node_env + docker: + - image: circleci/node:6.14.3-stretch-browsers + +# Python image to run AWS utilities +python_env: &python_env + docker: + - image: circleci/python:2.7-stretch-browsers + +# Instructions for installing the AWS CLI utilities +install_awscli: &install_awscli + name: "Install awscli" + command: | + sudo pip install awscli awsebcli --upgrade + aws --version + eb --version + +# Deployment instructions +deploy_steps: &deploy_steps + - checkout + - attach_workspace: + at: . + - run: *install_awscli + - setup_remote_docker + - run: cd consumer && ./deploy/eb-deploy.sh tc-connect2sf $DEPLOY_ENV $CIRCLE_BUILD_NUM + +jobs: + build: + <<: *node_env + steps: + - checkout + - restore_cache: + key: node-modules-{{ checksum "consumer/package.json" }} + - run: cd consumer && npm install + - save_cache: + key: node-modules-{{ checksum "consumer/package.json" }} + paths: + - consumer/node_modules + - run: cd consumer && npm run test + - persist_to_workspace: + root: . + paths: + - ./consumer/node_modules + + deploy_prod: + <<: *python_env + environment: + DEPLOY_ENV: "PROD" + steps: *deploy_steps + + deploy_dev: + <<: *python_env + environment: + DEPLOY_ENV: "DEV" + steps: *deploy_steps + +workflows: + version: 2 + build-and-deploy: + jobs: + - build + - deploy_dev: + filters: + branches: + only: [ dev, dev-circleci2 ] + requires: + - build + - deploy_prod: + filters: + branches: + only: master + requires: + - build diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 1ef76a5..0000000 --- a/circle.yml +++ /dev/null @@ -1,24 +0,0 @@ -general: - build_dir: consumer - -machine: - services: - - docker - -dependencies: - pre: - - pip install awsebcli - -test: - override: - - npm run test - -deployment: - development: - branch: dev - commands: - - ./deploy/eb-deploy.sh tc-connect2sf DEV $CIRCLE_BUILD_NUM - production: - branch: master - commands: - - ./deploy/eb-deploy.sh tc-connect2sf PROD $CIRCLE_BUILD_NUM diff --git a/consumer/.ebextensions/01-environment-variables.config b/consumer/.ebextensions/01-environment-variables.config index 7e2710c..9361caf 100644 --- a/consumer/.ebextensions/01-environment-variables.config +++ b/consumer/.ebextensions/01-environment-variables.config @@ -56,3 +56,15 @@ option_settings: - namespace: aws:elasticbeanstalk:application:environment option_name: SCHEDULED_WORKER_SCHEDULE value: '*/5 * * * *' + - namespace: aws:elasticbeanstalk:application:environment + option_name: PORT + value: 3000 + - namespace: aws:elasticbeanstalk:application:environment + option_name: API_VERSION + value: v4 + - namespace: aws:elasticbeanstalk:application:environment + option_name: AUTH_SECRET + value: TBD + - namespace: aws:elasticbeanstalk:application:environment + option_name: VALID_ISSUERS + value: TBD diff --git a/consumer/README.md b/consumer/README.md index 214edd3..d17f95d 100644 --- a/consumer/README.md +++ b/consumer/README.md @@ -255,3 +255,26 @@ For example: a duplicate project id added to the queue, a Lead that cannot be found, etc. In such a situation, the message from rabbitmq will be marked as ACK (removed). If we don't remove it from the queue, the message will be stuck forever. For any other type of error, the message from rabbitmq will be marked as ACK as well; however, it will be requeued into another queue for later inspection.
Right now, it publishes the message content to the same rabbitmq exchange (configured as mentioned in the Configuration section) with the routing key `connect2sf.failed`. So, we have to map the exchange and routing key combination to a queue to which no consumer is listening, e.g. `tc-connect2sf.failed` is used in the dev environment. Now we can see the messages in this queue, via the rabbitmq management UI, to check whether any messages failed and what the id of the failed project was. We can either remove those messages from the queue, if we are going to add those leads manually in Salesforce, or move them back to the original queue after fixing the deployed environment. + + +## Express REST API verification + +### Pre-requisites + +1. Postman + +### Steps to verify + +1. Open Postman + +2. Import the Postman collection and environment from the `docs` directory + +3. Assuming that the API is served at http://localhost:3150/v5 and the AUTH_SECRET used by the API is `mysecret`, the requests in the Postman collection can be triggered + +4. The requests in the Postman collection cover the status codes 200, 400 and 403 + +### Notes + +1. The Express REST API will be served at http://localhost:3150/v5 if you use the values from `config/sample-local.json`. + +2. The JWT token in the Postman collection is signed with the secret `mysecret` \ No newline at end of file diff --git a/consumer/config/constants.js b/consumer/config/constants.js index 1d4c5af..462099d 100644 --- a/consumer/config/constants.js +++ b/consumer/config/constants.js @@ -7,3 +7,9 @@ export const EVENT = { FAILED_SUFFIX: '.failed' }, }; + +export const ERROR = { + SERVER_ERROR: 500, + CLIENT_ERROR: 400, + MESSAGE: 'Internal Server Error' +}; \ No newline at end of file diff --git a/consumer/config/custom-environment-variables.json b/consumer/config/custom-environment-variables.json index 510378d..f4cae3e 100644 --- a/consumer/config/custom-environment-variables.json +++ b/consumer/config/custom-environment-variables.json @@ -27,5 +27,9 @@ "project": "QUEUE_PROJECTS", "connect2sf": "QUEUE_CONNECT2SF" } - } + }, + "apiVersion": "API_VERSION", + "port": "PORT", + "authSecret": "AUTH_SECRET", + "validIssuers": "VALID_ISSUERS" } diff --git a/consumer/config/sample-local.json b/consumer/config/sample-local.json index 1c72b90..2c7936b 100644 --- a/consumer/config/sample-local.json +++ b/consumer/config/sample-local.json @@ -2,6 +2,7 @@ "logLevel": "error", "rabbitmqURL": "****UPDATE****", "ownerId": "****UPDATE****", + "scheduledWorkerSchedule": "* * * 1 * *", "aws": { "endpoint": "http://dockerhost:7777", "region": "us-east-1", @@ -23,5 +24,9 @@ "queues": { "project": "dev.project.service" } - } + }, + "apiVersion": "/v5", + "port": 3150, + "authSecret": "mysecret", + "validIssuers": "[\"https://api.topcoder.com\",\"https://topcoder-dev.auth0.com\", \"https://topcoder-newauth.auth0.com\"]" } \ No newline at end of file diff --git a/consumer/docs/tc-connects2f.postman_collection.json b/consumer/docs/tc-connects2f.postman_collection.json new file mode 100644 index 0000000..97aa00a --- /dev/null +++ b/consumer/docs/tc-connects2f.postman_collection.json @@ -0,0 +1,102 @@ +{ + "info": { + "_postman_id": "23a8d300-23f5-40e8-ae69-edd7b7ff357e", + "name": "TC Connect S2F", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Valid LeadInfo request", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Authorization", + "value": "Bearer {{ADMIN_TOKEN}}" + } + ], + 
"body": { + "mode": "raw", + "raw": "{\n\t\"firstName\": \"Test\",\n\t\"lastName\": \"Work\",\n\t\"businessEmail\": \"abc@tes\",\n\t\"title\": \"Mr\",\n\t\"companyName\": \"Topcoder\",\n\t\"companySize\": \"Big\",\n\t\"userName\": \"abcd\"\n}" + }, + "url": { + "raw": "{{URL}}/connect2sf/leadInfo", + "host": [ + "{{URL}}" + ], + "path": [ + "connect2sf", + "leadInfo" + ] + } + }, + "response": [] + }, + { + "name": "Invalid LeadInfo request", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Authorization", + "value": "Bearer {{ADMIN_TOKEN}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\n\t\"firstName\": \"Test\",\n\t\"lastName\": \"Work\",\n\t\"businessEmail\": \"abc@tes\",\n\t\"title\": \"Mr\",\n\t\"companyName\": \"Topcoder\",\n\t\"companySize\": \"Big\"\n}" + }, + "url": { + "raw": "{{URL}}/connect2sf/leadInfo", + "host": [ + "{{URL}}" + ], + "path": [ + "connect2sf", + "leadInfo" + ] + } + }, + "response": [] + }, + { + "name": "LeadInfo request with invalid token", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Authorization", + "value": "Bearer 123" + } + ], + "body": { + "mode": "raw", + "raw": "{\n\t\"firstName\": \"Test\",\n\t\"lastName\": \"Work\",\n\t\"businessEmail\": \"abc@tes\",\n\t\"title\": \"Mr\",\n\t\"companyName\": \"Topcoder\",\n\t\"companySize\": \"Big\",\n\t\"userName\": \"abcd\"\n}" + }, + "url": { + "raw": "{{URL}}/connect2sf/leadInfo", + "host": [ + "{{URL}}" + ], + "path": [ + "connect2sf", + "leadInfo" + ] + } + }, + "response": [] + } + ] +} \ No newline at end of file diff --git a/consumer/docs/tc-connects2f.postman_environment.json b/consumer/docs/tc-connects2f.postman_environment.json new file mode 100644 index 0000000..3b4af56 --- /dev/null +++ b/consumer/docs/tc-connects2f.postman_environment.json @@ -0,0 +1,23 @@ +{ + "id": "493febdc-2611-442a-ae08-b42ed24688fd", + "name": "tc-connects2f", + "values": [ + { + "key": "ADMIN_TOKEN", + "value": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJyb2xlcyI6WyJBZG1pbmlzdHJhdG9yIl0sImlzcyI6Imh0dHBzOi8vYXBpLnRvcGNvZGVyLmNvbSIsImhhbmRsZSI6IlRvbnlKIiwiZXhwIjo1NTUzMDE5OTI1OSwidXNlcklkIjoiNDA0MzMyODgiLCJpYXQiOjE1MzAxOTg2NTksImVtYWlsIjoiYWRtaW5AdG9wY29kZXIuY29tIiwianRpIjoiYzNhYzYwOGEtNTZiZS00NWQwLThmNmEtMzFmZTk0Yjk1NjFjIn0.pIHUtMwIV07ZgfaUk9916X49rgjKclM9kzQP419LBo0", + "description": "", + "type": "text", + "enabled": true + }, + { + "key": "URL", + "value": "http://localhost:3150/v5", + "description": "", + "type": "text", + "enabled": true + } + ], + "_postman_variable_scope": "environment", + "_postman_exported_at": "2018-09-24T08:55:49.448Z", + "_postman_exported_using": "Postman/6.1.3" +} \ No newline at end of file diff --git a/consumer/package.json b/consumer/package.json index 64c3dc3..1a2acb3 100644 --- a/consumer/package.json +++ b/consumer/package.json @@ -41,14 +41,18 @@ "babel-preset-stage-0": "^6.5.0", "babel-runtime": "^6.23.0", "better-npm-run": "0.0.10", + "body-parser": "^1.18.3", "config": "^1.21.0", + "cors": "^2.8.4", "debug": "^2.2.0", + "express": "^4.16.3", "joi": "^9.0.4", "jsonwebtoken": "^7.1.7", "lodash": "^4.14.2", "node-cron": "^1.1.3", "superagent": "^2.1.0", "superagent-promise": "^1.1.0", + "tc-core-library-js": "appirio-tech/tc-core-library-js.git", "winston": "^2.2.0" }, "devDependencies": { diff --git a/consumer/src/services/ConsumerService.js b/consumer/src/services/ConsumerService.js index 77bba75..40d045f 100644 --- 
a/consumer/src/services/ConsumerService.js +++ b/consumer/src/services/ConsumerService.js @@ -83,7 +83,7 @@ class ConsumerService { const campaignId = responses[0]; const user = responses[1]; const { accessToken, instanceUrl } = responses[2]; - const lead = { + const leadData = { FirstName: user.firstName, LastName: user.lastName, Email: user.email, @@ -98,20 +98,36 @@ class ConsumerService { TC_Connect_Cancel_Reason__c: _.get(project,"cancelReason",""), TC_Connect_Raw_Project__c: JSON.stringify(project), }; - return SalesforceService.createObject('Lead', lead, accessToken, instanceUrl) - .then((leadId) => { - const campaignMember = { - LeadId: leadId, - CampaignId: campaignId, - }; - return SalesforceService.createObject('CampaignMember', campaignMember, accessToken, instanceUrl); - }).catch( (e) => { - if (e.response && e.response.text && duplicateRecordRegex.test(e.response.text)) { - throw new UnprocessableError(`Lead already existing for project ${project.id}`); + let sql = `SELECT id,IsConverted FROM Lead WHERE Email = '${user.email}' AND LeadSource = 'Connect'`; + return SalesforceService.query(sql, accessToken, instanceUrl) + .then((response) => { + const {records: [lead]} = response; + if (!lead) { + // if the lead does not exist, create a new one + return SalesforceService.createObject('Lead', leadData, accessToken, instanceUrl) + .then((leadId) => { + const campaignMember = { + LeadId: leadId, + CampaignId: campaignId, + }; + return SalesforceService.createObject('CampaignMember', campaignMember, accessToken, instanceUrl); + }).catch( (e) => { + if (e.response && e.response.text && duplicateRecordRegex.test(e.response.text)) { + throw new UnprocessableError(`Lead already existing for project ${project.id}`); + } + throw e; + }) + } else { + // if the lead already exists, update it with the project data + if (lead.IsConverted != true && !_.isEmpty(leadData)) { + return SalesforceService.updateObject(lead.Id, 'Lead', leadData, accessToken, instanceUrl); + } } - throw e; }) }).catch((error) => { + if (error.status === 400) { + error.shouldAck = true; // ignore bad requests, most probably caused by malformed data + } throw error; }); } @@ -127,7 +143,6 @@ class ConsumerService { var project = projectEvent.original; var projectUpdated = projectEvent.updated; - return Promise.all([ ConfigurationService.getSalesforceCampaignId(), SalesforceService.authenticate(), @@ -159,7 +174,12 @@ class ConsumerService { // } // return SalesforceService.deleteObject('CampaignMember', member.Id, accessToken, instanceUrl); // }) - }) + }).catch((error) => { + if (error.status === 400) { + error.shouldAck = true; // ignore bad requests, most probably caused by malformed data + } + throw error; + }); }); } } diff --git a/consumer/src/services/LeadService.js b/consumer/src/services/LeadService.js new file mode 100644 index 0000000..5a50e1a --- /dev/null +++ b/consumer/src/services/LeadService.js @@ -0,0 +1,72 @@ +/** + * Represents the REST API service for leads + */ + +import Joi from 'joi'; +import config from 'config'; +import {logAndValidate} from '../common/decorators'; +import ConfigurationService from './ConfigurationService'; +import SalesforceService from './SalesforceService'; + +const postLeadSchema = Joi.object().keys({ + user: Joi.object().keys({ + firstName: Joi.string().required(), + lastName: Joi.string().required(), + businessEmail: Joi.string().email().required(), + title: Joi.string().required(), + companyName: Joi.string().required(), + companySize: Joi.string().required(), + userName: 
Joi.string().required(), + }), +}).required(); + +const leadSource = 'Connect'; + +class LeadService { + + /** + * Post the lead info to Salesforce + * @param {Object} user Request body + * @returns {Object} sample response + */ + @logAndValidate(['user'], postLeadSchema) + postLead(user) { // eslint-disable-line no-unused-vars + console.log(user, 'user'); + let leadId = 0; + return Promise.all([ + ConfigurationService.getSalesforceCampaignId(), + SalesforceService.authenticate(), + ]).then((responses) => { + const campaignId = responses[0]; + const { accessToken, instanceUrl } = responses[1]; + const lead = { + FirstName: user.firstName, + LastName: user.lastName, + Email: user.businessEmail, + LeadSource: leadSource, + Company: user.companyName, + No_of_Employees__c: user.companySize, + OwnerId: config.ownerId, + TC_Handle__c: user.userName, + }; + return SalesforceService.createObject('Lead', lead, accessToken, instanceUrl) + .then((_leadId) => { + leadId = _leadId; + const campaignMember = { + LeadId: _leadId, + CampaignId: campaignId, + }; + return SalesforceService.createObject('CampaignMember', campaignMember, accessToken, instanceUrl); + }).catch( (e) => { + throw e; + }) + }).then(() => { + return {success: true, leadId }; + }).catch((error) => { + throw error; + }); + } + +} + +export default new LeadService(); diff --git a/consumer/src/worker.js b/consumer/src/worker.js index 7fc835a..306e962 100644 --- a/consumer/src/worker.js +++ b/consumer/src/worker.js @@ -4,12 +4,17 @@ import config from 'config'; import amqp from 'amqplib'; +import express from 'express'; +import cors from 'cors'; +import cron from 'node-cron'; +import bodyParser from 'body-parser'; import _ from 'lodash'; +import { middleware } from 'tc-core-library-js'; import logger from './common/logger'; import ConsumerService from './services/ConsumerService'; -import { EVENT } from '../config/constants'; -import cron from 'node-cron'; -import { start as scheduleStart } from './scheduled-worker' +import LeadService from './services/LeadService'; +import { EVENT, ERROR } from '../config/constants'; +import { start as scheduleStart } from './scheduled-worker'; const debug = require('debug')('app:worker'); @@ -24,8 +29,8 @@ process.once('SIGINT', () => { let EVENT_HANDLERS = { [EVENT.ROUTING_KEY.PROJECT_DRAFT_CREATED]: ConsumerService.processProjectCreated, - [EVENT.ROUTING_KEY.PROJECT_UPDATED]: ConsumerService.processProjectUpdated -} + [EVENT.ROUTING_KEY.PROJECT_UPDATED]: ConsumerService.processProjectUpdated, +}; export function initHandlers(handlers) { EVENT_HANDLERS = handlers; @@ -59,7 +64,7 @@ export async function consume(channel, exchangeName, queue, publishChannel) { handler = EVENT_HANDLERS[key]; if (!_.isFunction(handler)) { logger.error(`Unknown message type: ${key}, NACKing... 
`); - channel.nack(msg, false, false) + channel.nack(msg, false, false); } data = JSON.parse(msg.content.toString()); } catch (ignore) { @@ -88,7 +93,7 @@ export async function consume(channel, exchangeName, queue, publishChannel) { key, new Buffer(msg.content.toString()) ); - } catch(e) { + } catch (e) { // TODO decide if we want nack the original msg here // for now just ignoring the error in requeue logger.logFullError(e, `Error in publising Exchange to ${exchangeName}`); @@ -102,11 +107,11 @@ export async function consume(channel, exchangeName, queue, publishChannel) { /** * Start the worker */ -async function start() { +async function startWorker() { try { - console.log("Worker Connecting to RabbitMQ: " + config.rabbitmqURL.substr(-5)); + console.log(`Worker Connecting to RabbitMQ: ${config.rabbitmqURL.substr(-5)}`); connection = await amqp.connect(config.rabbitmqURL); - debug('created connection successfully with URL: ' + config.rabbitmqURL); + debug(`created connection successfully with URL: ${config.rabbitmqURL}`); const channel = await connection.createConfirmChannel(); debug('Channel created for projects exchange ...'); const publishChannel = await connection.createConfirmChannel(); @@ -122,10 +127,61 @@ async function start() { } } +/* + * Error handler wrapper for async route handlers + */ +const asyncHandler = fn => (req, res, next) => { + Promise + .resolve(fn(req, res, next)) + .catch(next); +}; + if (!module.parent) { - start(); - - cron.schedule(config.scheduledWorkerSchedule, function(){ + startWorker(); + + if (!process.env.NODE_ENV) { + process.env.NODE_ENV = 'development'; + } + + const app = express(); + + app.use(cors()); + app.use(bodyParser.json()); + app.use(bodyParser.urlencoded({ + extended: true, + })); + + app.use((req, res, next) => { + middleware.jwtAuthenticator({ + AUTH_SECRET: config.authSecret, + VALID_ISSUERS: config.validIssuers, + })(req, res, next); + }); + + app.post(`/${config.apiVersion}/connect2sf/leadInfo`, asyncHandler(async (req, res, next) => { + const result = await LeadService.postLead(req.body); + res.json(result); + })); + + // Error handler + app.use(async (err, req, res, next) => { + let status = ERROR.SERVER_ERROR; + let message = err.message; + // Fetch actual error message from details for JOI errors + if (err.isJoi) { + status = ERROR.CLIENT_ERROR; + message = err.details[0].message; + } + if (!message) { + message = ERROR.MESSAGE; + } + res.status(status).send({ message }); + }); + + app.listen(config.port); + debug(`Express server listening on port ${config.port} in ${process.env.NODE_ENV} mode`); + + cron.schedule(config.scheduledWorkerSchedule, () => { scheduleStart(); }); } diff --git a/consumer/test/ConsumerService.spec.js b/consumer/test/ConsumerService.spec.js index 08ffac1..1da74c4 100644 --- a/consumer/test/ConsumerService.spec.js +++ b/consumer/test/ConsumerService.spec.js @@ -82,8 +82,8 @@ describe('ConsumerService', () => { }); describe('processProjectCreated', () => { - it('should process project successfully', async() => { - + it('should process project successfully when lead does not exist', async() => { + const leadSql = `SELECT id,IsConverted FROM Lead WHERE Email = 'jd@example.com' AND LeadSource = 'Connect'`; const expectedLead = { FirstName: 'john', LastName: 'doe', @@ -105,17 +105,59 @@ describe('ConsumerService', () => { CampaignId: sfCampaignId, }; + const queryStub = sandbox.stub(SalesforceService, 'query'); + queryStub.onCall(0) + .returns(Promise.resolve({ records: [] })); + const createObjectStub = 
sandbox.stub(SalesforceService, 'createObject', async() => leadId); await ConsumerService.processProjectCreated(logger, project); getCampaignIdStub.should.have.been.called; getUserStub.should.have.been.calledWith(userId); authenticateStub.should.have.been.called; + queryStub.should.have.been.calledWith(leadSql, sfAuth.accessToken, sfAuth.instanceUrl); createObjectStub.should.have.been.calledWith('Lead', expectedLead, sfAuth.accessToken, sfAuth.instanceUrl); createObjectStub.should.have.been.calledWith('CampaignMember', expectedCampaignMember, sfAuth.accessToken, sfAuth.instanceUrl); }); + it('should process project successfully when lead already exists', async() => { + const leadSql = `SELECT id,IsConverted FROM Lead WHERE Email = 'jd@example.com' AND LeadSource = 'Connect'`; + const expectedLead = { + FirstName: 'john', + LastName: 'doe', + Email: 'jd@example.com', + LeadSource: 'Connect', + Company: 'Unknown', + OwnerId: config.ownerId, + TC_Handle__c: 'jdoe', + TC_Connect_Project_Id__c: 1, + TC_Connect_Project_Status__c: '', + TC_Connect_Cancel_Reason__c: null, + TC_Connect_Direct_Project_Id__c: '', + TC_Connect_Description__c:'', + TC_Connect_Raw_Project__c: JSON.stringify(project) + }; + + const expectedCampaignMember = { + LeadId: leadId, + CampaignId: sfCampaignId, + }; + + const queryStub = sandbox.stub(SalesforceService, 'query'); + queryStub.onCall(0) + .returns(Promise.resolve({ records: [{ Id: leadId }] })); + const createObjectStub = sandbox.stub(SalesforceService, 'createObject', async() => leadId); + const updateStub = sandbox.stub(SalesforceService,'updateObject', async() => {}); + + await ConsumerService.processProjectCreated(logger, project); + getCampaignIdStub.should.have.been.called; + getUserStub.should.have.been.calledWith(userId); + authenticateStub.should.have.been.called; + createObjectStub.should.not.have.been.called; + queryStub.should.have.been.calledWith(leadSql, sfAuth.accessToken, sfAuth.instanceUrl); + updateStub.should.have.been.calledWith(leadId, 'Lead', expectedLead, sfAuth.accessToken, sfAuth.instanceUrl); + }); + it('should throw UnprocessableError primary customer is not found', async() => { const projectWihoutMembers = { id: 1, @@ -131,7 +173,8 @@ describe('ConsumerService', () => { } }); - it('should throw UnprocessableError if Lead already exists', async() => { + // Not a valid use case any more; we now allow updating an existing lead in the project creation event as well + xit('should throw UnprocessableError if Lead already exists', async() => { const createObjectStub = sandbox.stub(SalesforceService, 'createObject', async() => { const err = new Error('Bad request'); err.response = { @@ -146,6 +189,9 @@ describe('ConsumerService', () => { }); it('should rethrow Error from createObject if error is not duplicate', async() => { + const queryStub = sandbox.stub(SalesforceService, 'query'); + queryStub.onCall(0) + .returns(Promise.resolve({ records: [] })); const createObjectStub = sandbox.stub(SalesforceService, 'createObject', async() => { throw new Error('Fake Error'); });