From e611f64d2ee1c3d95fb206a74afdc995cee22950 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 29 Nov 2019 17:26:06 +0530
Subject: [PATCH 001/101] circleci integration

---
 .circleci/config.yml |   80 ++
 Dockerfile           |    6 +-
 build.sh             |   23 +
 package-lock.json    | 2580 ++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 2686 insertions(+), 3 deletions(-)
 create mode 100644 .circleci/config.yml
 create mode 100755 build.sh
 create mode 100644 package-lock.json

diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000..189ebbe
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,80 @@
+version: 2
+defaults: &defaults
+  docker:
+    - image: circleci/python:2.7-stretch-browsers
+install_dependency: &install_dependency
+  name: Installation of build and deployment dependencies.
+  command: |
+    sudo apt install jq
+    sudo pip install awscli --upgrade
+    sudo pip install docker-compose
+install_deploysuite: &install_deploysuite
+  name: Installation of the deployment suite.
+  command: |
+    git clone --branch v1.4 https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript
+    cp ./../buildscript/master_deploy.sh .
+    cp ./../buildscript/buildenv.sh .
+    cp ./../buildscript/awsconfiguration.sh .
+restore_cache_settings_for_build: &restore_cache_settings_for_build
+  key: docker-node-modules-{{ checksum "package-lock.json" }}
+
+save_cache_settings: &save_cache_settings
+  key: docker-node-modules-{{ checksum "package-lock.json" }}
+  paths:
+    - node_modules
+
+builddeploy_steps: &builddeploy_steps
+  - checkout
+  - setup_remote_docker
+  - run: *install_dependency
+  - run: *install_deploysuite
+  - restore_cache: *restore_cache_settings_for_build
+  - run: ./build.sh ${APPNAME}
+  - save_cache: *save_cache_settings
+  - deploy:
+      name: Running MasterScript.
+      command: |
+        ./awsconfiguration.sh $DEPLOY_ENV
+        source awsenvconf
+# ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-deployvar
+# source buildenvvar
+# ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+
+
+jobs:
+  # Build & Deploy against development backend
+  "build-dev":
+    <<: *defaults
+    environment:
+      DEPLOY_ENV: "DEV"
+      LOGICAL_ENV: "dev"
+      APPNAME: "ifxpg-nodeserver"
+    steps: *builddeploy_steps
+
+  "build-prod":
+    <<: *defaults
+    environment:
+      DEPLOY_ENV: "PROD"
+      LOGICAL_ENV: "prod"
+      APPNAME: "ifxpg-nodeserver"
+    steps: *builddeploy_steps
+
+workflows:
+  version: 2
+  build:
+    jobs:
+      # Development builds are executed on the "dev" branch only.
+      - "build-dev":
+          context : org-global
+          filters:
+            branches:
+              only:
+                - dev
+
+      # Production builds are executed only on commits to the
+      # master branch.
+      - "build-prod":
+          context : org-global
+          filters:
+            branches:
+              only: master
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index ff90bb2..c7f7600 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -37,10 +37,10 @@ ENV LICENSE accept
 
 RUN rm /usr/bin/python && ln -s /usr/bin/python2.7 /usr/bin/python
 RUN echo "sqlexec 2021/tcp" >> /etc/services
 
-RUN mkdir /app
-COPY . /app
+RUN mkdir /ifxpg-nodeserver
+COPY . 
/ifxpg-nodeserver
 
-WORKDIR /app
+WORKDIR /ifxpg-nodeserver
 RUN rm -rf node_modules && npm install --unsafe-perm
 # ENTRYPOINT [ "/bin/bash" ]
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000..776967b
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -eo pipefail
+APP_NAME=$1
+UPDATE_CACHE=""
+#docker-compose -f docker/docker-compose.yml build $APP_NAME
+docker build -t $APP_NAME:latest .
+docker create --name app $APP_NAME:latest
+
+if [ -d node_modules ]
+then
+  mv package-lock.json old-package-lock.json
+  docker cp app:/$APP_NAME/package-lock.json package-lock.json
+  set +eo pipefail
+  UPDATE_CACHE=$(cmp package-lock.json old-package-lock.json)
+  set -eo pipefail
+else
+  UPDATE_CACHE=1
+fi
+
+if [ -n "$UPDATE_CACHE" ]
+then
+  docker cp app:/$APP_NAME/node_modules .
+fi
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000..024ad19
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,2580 @@
+{
+  "name": "kafka-local",
+  "version": "1.0.0",
+  "lockfileVersion": 1,
+  "requires": true,
+  "dependencies": {
+    "@types/bluebird": {
+      "version": "3.5.0",
+      "resolved": "https://registry.npmjs.org/@types/bluebird/-/bluebird-3.5.0.tgz",
+      "integrity": "sha1-JjNHCk6r6aR82aRf2yDtX5NAe8o="
+    },
+    "@types/lodash": {
+      "version": "4.14.149",
+      "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.149.tgz",
+      "integrity": "sha512-ijGqzZt/b7BfzcK9vTrS6MFljQRPn5BFWOx8oE0GYxribu6uV+aA9zZuXI1zc/etK9E8nrgdoF2+LgUw7+9tJQ=="
+    },
+    "@types/node": {
+      "version": "12.12.14",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.14.tgz",
+      "integrity": "sha512-u/SJDyXwuihpwjXy7hOOghagLEV1KdAST6syfnOk6QZAMzZuWZqXy5aYYZbh8Jdpd4escVFP0MvftHNDb9pruA=="
+    },
+    "abbrev": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
+      "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
+    },
+    "accepts": {
+      "version": "1.3.7",
+      "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
+      "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
+      "requires": {
+        "mime-types": "2.1.25",
+        "negotiator": "0.6.2"
+      }
+    },
+    "ajv": {
+      "version": "6.10.2",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
+      "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
+      "requires": {
+        "fast-deep-equal": "2.0.1",
+        "fast-json-stable-stringify": "2.0.0",
+        "json-schema-traverse": "0.4.1",
+        "uri-js": "4.2.2"
+      }
+    },
+    "ansi-colors": {
+      "version": "3.2.3",
+      "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz",
+      "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==",
+      "dev": true
+    },
+    "ansi-regex": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+      "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
+    },
+    "ansi-styles": {
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+      "dev": true,
+      "requires": {
+        "color-convert": "1.9.3"
+      }
+    },
+    "any-promise": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
+      "integrity": 
"sha1-q8av7tzqUugJzcA3au0845Y10X8=" + }, + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "are-we-there-yet": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", + "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "requires": { + "delegates": "1.0.0", + "readable-stream": "2.3.6" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "1.0.3" + } + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "requires": { + "safer-buffer": "2.1.2" + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "requires": { + "lodash": "4.17.15" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + } + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + }, + "aws4": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.0.tgz", + "integrity": "sha512-Uvq6hVe90D0B2WEnUqtdgY1bATGz3mw33nH9Y+dmA+w5DHvUmBgkr5rM/KCHpCsiFNRUfokW/szpPPgMK2hm4A==" + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "requires": { + "tweetnacl": "0.14.5" + } + }, + "bin-protocol": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/bin-protocol/-/bin-protocol-3.1.1.tgz", + "integrity": "sha512-9vCGfaHC2GBHZwGQdG+DpyXfmLvx9uKtf570wMLwIc9wmTIDgsdCBXQxTZu5X2GyogkfBks2Ode4N0sUVxJ2qQ==", + "requires": { + "lodash": "4.17.11", + "long": 
"4.0.0", + "protocol-buffers-schema": "3.3.2" + } + }, + "binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=", + "requires": { + "buffers": "0.1.1", + "chainsaw": "0.1.0" + } + }, + "bindings": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.1.tgz", + "integrity": "sha512-i47mqjF9UbjxJhxGf+pZ6kSxrnI3wBLlnGI2ArWJ4r0VrvDS7ZYXkprq/pLaBWYq4GM0r4zdHY+NNRqEMU7uew==" + }, + "block-stream": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", + "integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", + "requires": { + "inherits": "2.0.3" + } + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, + "body-parser": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "requires": { + "bytes": "3.1.0", + "content-type": "1.0.4", + "debug": "2.6.9", + "depd": "1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "1.6.18" + } + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=" + }, + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, + "buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s=" + }, + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "chai": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", + "integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==", + "dev": true, + "requires": { + "assertion-error": "1.1.0", + 
"check-error": "1.0.2", + "deep-eql": "3.0.1", + "get-func-name": "2.0.0", + "pathval": "1.1.0", + "type-detect": "4.0.8" + } + }, + "chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=", + "requires": { + "traverse": "0.3.9" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "3.2.1", + "escape-string-regexp": "1.0.5", + "supports-color": "5.5.0" + }, + "dependencies": { + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "3.0.0" + } + } + } + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "3.1.0", + "strip-ansi": "5.2.0", + "wrap-ansi": "5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "7.0.3", + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "5.2.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "4.1.0" + } + } + } + }, + "cls-bluebird": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cls-bluebird/-/cls-bluebird-2.1.0.tgz", + "integrity": "sha1-N+8eCAqP+1XC9BZPU28ZGeeWiu4=", + "requires": { + "is-bluebird": "1.0.2", + "shimmer": "1.2.1" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, + "color": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", + "integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==", + "requires": { + "color-convert": "1.9.3", + "color-string": "1.5.3" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "color-string": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", + "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", + "requires": { + "color-name": "1.1.3", + "simple-swizzle": "0.2.2" + } + }, + "colornames": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/colornames/-/colornames-1.1.1.tgz", + "integrity": "sha1-+IiQMGhcfE/54qVZ9Qd+t2qBb5Y=" + }, + "colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==" + }, + "colorspace": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.2.tgz", + "integrity": "sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ==", + "requires": { + "color": "3.0.0", + "text-hex": "1.0.0" + } + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "1.0.0" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "config": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/config/-/config-3.2.4.tgz", + "integrity": "sha512-H1XIGfnU1EAkfjSLn9ZvYDRx9lOezDViuzLDgiJ/lMeqjYe3q6iQfpcLt2NInckJgpAeekbNhQkmnnbdEDs9rw==", + "requires": { + "json5": "1.0.1" + } + }, + "connection-parse": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/connection-parse/-/connection-parse-0.0.7.tgz", + "integrity": "sha1-GOcxiqsGppkmc3KxDFIm0locmmk=" + }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, + "content-disposition": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, + "cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "core-util-is": { + 
"version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "1.0.0" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "requires": { + "type-detect": "4.0.8" + } + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "1.1.1" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "diagnostics": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/diagnostics/-/diagnostics-1.1.1.tgz", + "integrity": "sha512-8wn1PmdunLJ9Tqbx+Fx/ZEuHfJf4NKSN2ZBj7SJC/OWRWha843+WsTjqMe1B5E3p28jqBlp+mJ2fPVxPyNgYKQ==", + "requires": { + "colorspace": "1.1.2", + "enabled": "1.0.2", + "kuler": "1.0.1" + } + }, + "diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true + }, + "dottie": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.1.tgz", + "integrity": "sha512-ch5OQgvGDK2u8pSZeSYAQaV/lczImd7pMJ7BcEPXmnFVjy4yJIzP6CsODJUTH8mg1tyH1Z2abOiuJO3DjZ/GBw==" + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "requires": { + "jsbn": "0.1.1", + "safer-buffer": "2.1.2" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + 
"enabled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-1.0.2.tgz", + "integrity": "sha1-ll9lE9LC0cX0ZStkouM5ZGf8L5M=", + "requires": { + "env-variable": "0.0.5" + } + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + }, + "env-variable": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/env-variable/-/env-variable-0.0.5.tgz", + "integrity": "sha512-zoB603vQReOFvTg5xMl9I1P2PnHsHQQKTEowsKKD7nseUfJq6UWzK+4YtlWUO1nhiQUxe6XMkk+JleSZD1NZFA==" + }, + "es-abstract": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.16.2.tgz", + "integrity": "sha512-jYo/J8XU2emLXl3OLwfwtuFfuF2w6DYPs+xy9ZfVyPkDcrauu6LYrw/q2TyCtrbc/KUdCiC5e9UajRhgNkVopA==", + "dev": true, + "requires": { + "es-to-primitive": "1.2.1", + "function-bind": "1.1.1", + "has": "1.0.3", + "has-symbols": "1.0.1", + "is-callable": "1.1.4", + "is-regex": "1.0.4", + "object-inspect": "1.7.0", + "object-keys": "1.1.1", + "string.prototype.trimleft": "2.1.0", + "string.prototype.trimright": "2.1.0" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "1.1.4", + "is-date-object": "1.0.1", + "is-symbol": "1.0.3" + } + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, + "express": { + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", + "requires": { + "accepts": "1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "1.1.2", + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "etag": "1.8.1", + "finalhandler": "1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "1.1.2", + "on-finished": "2.3.0", + "parseurl": "1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "2.0.5", + "qs": "6.7.0", + "range-parser": "1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "1.5.0", + "type-is": "1.6.18", + "utils-merge": "1.0.1", + "vary": "1.1.2" + } + }, + "express-async-errors": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/express-async-errors/-/express-async-errors-3.1.1.tgz", + "integrity": 
"sha512-h6aK1da4tpqWSbyCa3FxB/V6Ehd4EEB15zyQq9qe75OZBp0krinNKuH4rAY+S/U/2I36vdLAUFSjQJ+TFmODng==" + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + }, + "fast-deep-equal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + }, + "fast-safe-stringify": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz", + "integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA==" + }, + "fecha": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-2.3.3.tgz", + "integrity": "sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg==" + }, + "finalhandler": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", + "requires": { + "debug": "2.6.9", + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "on-finished": "2.3.0", + "parseurl": "1.3.3", + "statuses": "1.5.0", + "unpipe": "1.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "3.0.0" + } + }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "dev": true, + "requires": { + "is-buffer": "2.0.4" + } + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "0.4.0", + "combined-stream": "1.0.8", + "mime-types": "2.1.25" + } + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "fstream": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", + "integrity": 
"sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", + "requires": { + "graceful-fs": "4.2.3", + "inherits": "2.0.3", + "mkdirp": "0.5.1", + "rimraf": "2.7.1" + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "requires": { + "aproba": "1.2.0", + "console-control-strings": "1.1.0", + "has-unicode": "2.0.1", + "object-assign": "4.1.1", + "signal-exit": "3.0.2", + "string-width": "1.0.2", + "strip-ansi": "3.0.1", + "wide-align": "1.1.3" + } + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "requires": { + "assert-plus": "1.0.0" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "1.0.0", + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + }, + "har-validator": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "requires": { + "ajv": "6.10.2", + "har-schema": "2.0.0" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": 
"sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, + "hashring": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/hashring/-/hashring-3.2.0.tgz", + "integrity": "sha1-/aTv3oqiLNuX+x0qZeiEAeHBRM4=", + "requires": { + "connection-parse": "0.0.7", + "simple-lru-cache": "0.0.2" + } + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "hoek": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz", + "integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==" + }, + "http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "requires": { + "depd": "1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": "1.5.0", + "toidentifier": "1.0.0" + } + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "1.0.0", + "jsprim": "1.4.1", + "sshpk": "1.16.1" + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "requires": { + "safer-buffer": "2.1.2" + } + }, + "ifxnjs": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/ifxnjs/-/ifxnjs-8.0.1.tgz", + "integrity": "sha1-8v8ywEXDZ4Hkqf4PVKMWUwhYOwo=", + "requires": { + "bindings": "1.3.1", + "fstream": "1.0.12", + "nan": "2.14.0", + "node-gyp": "3.8.0", + "unzip": "0.1.11" + } + }, + "inflection": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/inflection/-/inflection-1.12.0.tgz", + "integrity": "sha1-ogCTVlbW9fa8TcdQLhrstwMihBY=" + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "1.4.0", + "wrappy": "1.0.2" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "ipaddr.js": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", + "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==" + }, + "is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + }, + "is-bluebird": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-bluebird/-/is-bluebird-1.0.2.tgz", + "integrity": "sha1-CWQ5Bg9KpBGr7hkUOoTWpVNG1uI=" + }, + "is-buffer": { + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true + }, + "is-callable": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", + "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "dev": true + }, + "is-date-object": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", + "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "1.0.1" + } + }, + "is-regex": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", + "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", + "dev": true, + "requires": { + "has": "1.0.3" + } + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "requires": { + "has-symbols": "1.0.1" + } + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "requires": { + "punycode": "2.1.1" + } + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "joi": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/joi/-/joi-12.0.0.tgz", + "integrity": "sha512-z0FNlV4NGgjQN1fdtHYXf5kmgludM65fG/JlXzU6+rwkt9U5UWuXVYnXa2FpK0u6+qBuCmrm5byPNuiiddAHvQ==", + "requires": { + "hoek": "4.2.1", + "isemail": "3.2.0", + "topo": "2.0.2" + } + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "1.0.10", + "esprima": "4.0.1" + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "json-schema": { + "version": "0.2.3", + "resolved": 
"https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "requires": { + "minimist": "1.2.0" + } + }, + "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "kuler": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-1.0.1.tgz", + "integrity": "sha512-J9nVUucG1p/skKul6DU3PUZrhs0LPulNaeUOox0IyXDi8S4CztTHs1gQphhuZmzXG7VOQSf6NJfKuzteQLv9gQ==", + "requires": { + "colornames": "1.1.1" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "3.0.0", + "path-exists": "3.0.0" + } + }, + "lodash": { + "version": "4.17.11", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==" + }, + "log-symbols": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", + "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", + "dev": true, + "requires": { + "chalk": "2.4.2" + } + }, + "logform": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.1.2.tgz", + "integrity": "sha512-+lZh4OpERDBLqjiwDLpAWNQu6KMjnlXH2ByZwCuSqVPJletw0kTWJf5CgSNAUKn1KUkv3m2cUz/LK8zyEy7wzQ==", + "requires": { + "colors": "1.4.0", + "fast-safe-stringify": "2.0.7", + "fecha": "2.3.3", + "ms": "2.1.2", + "triple-beam": "1.3.0" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + }, + "match-stream": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/match-stream/-/match-stream-0.0.2.tgz", + "integrity": "sha1-mesFAJOzTf+t5CG5rAtBCpz6F88=", + "requires": { + "buffers": "0.1.1", + "readable-stream": "1.0.34" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + 
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + }, + "mime-db": { + "version": "1.42.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.42.0.tgz", + "integrity": "sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ==" + }, + "mime-types": { + "version": "2.1.25", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.25.tgz", + "integrity": "sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg==", + "requires": { + "mime-db": "1.42.0" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "1.1.11" + } + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "0.0.8" + }, + "dependencies": { + "minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + } + } + }, + "mocha": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-6.2.2.tgz", + "integrity": "sha512-FgDS9Re79yU1xz5d+C4rv1G7QagNGHZ+iXF81hO8zY35YZZcLEsJVfFolfsqKFWunATEvNzMK0r/CwWd/szO9A==", + "dev": true, + "requires": { + "ansi-colors": "3.2.3", + "browser-stdout": "1.3.1", + "debug": "3.2.6", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "find-up": "3.0.0", + "glob": "7.1.3", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "3.13.1", + "log-symbols": "2.2.0", + "minimatch": "3.0.4", + "mkdirp": "0.5.1", + "ms": "2.1.1", + "node-environment-flags": "1.0.5", + "object.assign": "4.1.0", + "strip-json-comments": "2.0.1", + "supports-color": "6.0.0", + "which": "1.3.1", + "wide-align": "1.1.3", + "yargs": "13.3.0", + "yargs-parser": "13.1.1", + "yargs-unparser": "1.6.0" + }, + "dependencies": { + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": 
"sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "2.1.1" + } + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "1.0.0", + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "dev": true + } + } + }, + "moment": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==" + }, + "moment-timezone": { + "version": "0.5.27", + "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.27.tgz", + "integrity": "sha512-EIKQs7h5sAsjhPCqN6ggx6cEbs94GK050254TIJySD1bzoM5JTYDwAU1IoVOeTOL6Gm27kYJ51/uuvq1kIlrbw==", + "requires": { + "moment": "2.24.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "murmur-hash-js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/murmur-hash-js/-/murmur-hash-js-1.0.0.tgz", + "integrity": "sha1-UEEEkmnJZjPIZjhpYLL0KJ515bA=" + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "natives": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/natives/-/natives-1.1.6.tgz", + "integrity": "sha512-6+TDFewD4yxY14ptjKaS63GVdtKiES1pTPyxn9Jb0rBqPMZ7VcCiooEhPNsr+mqHtMGxa/5c/HhcC4uPEUw/nA==" + }, + "negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" + }, + "nice-simple-logger": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/nice-simple-logger/-/nice-simple-logger-1.0.1.tgz", + "integrity": "sha1-D55khSe+e+PkmrdvqMjAmK+VG/Y=", + "requires": { + "lodash": "4.17.11" + } + }, + "no-kafka": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/no-kafka/-/no-kafka-3.4.3.tgz", + "integrity": "sha512-hYnkg1OWVdaxORdzVvdQ4ueWYpf7IICObPzd24BBiDyVG5219VkUnRxSH9wZmisFb6NpgABzlSIL1pIZaCKmXg==", + "requires": { + "@types/bluebird": "3.5.0", + "@types/lodash": "4.14.149", + "bin-protocol": "3.1.1", + "bluebird": "3.7.2", + "buffer-crc32": "0.2.13", + "hashring": "3.2.0", + "lodash": "4.17.11", + "murmur-hash-js": "1.0.0", + "nice-simple-logger": "1.0.1", + "wrr-pool": "1.1.4" + } + }, + "node-environment-flags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.5.tgz", + "integrity": "sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ==", + "dev": true, + "requires": { + "object.getownpropertydescriptors": "2.0.3", + "semver": "5.7.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "node-gyp": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", + "integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "requires": { + "fstream": "1.0.12", + "glob": "7.1.6", + "graceful-fs": "4.2.3", + "mkdirp": "0.5.1", + "nopt": "3.0.6", + "npmlog": "4.1.2", + "osenv": "0.1.5", + "request": "2.88.0", + "rimraf": "2.7.1", + "semver": "5.3.0", + "tar": "2.2.2", + "which": "1.3.1" + } + }, + "nopt": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", + "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", + "requires": { + "abbrev": "1.1.1" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "requires": { + "are-we-there-yet": "1.1.5", + "console-control-strings": "1.1.0", + "gauge": "2.7.4", + "set-blocking": "2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "1.1.3", + "function-bind": "1.1.1", + "has-symbols": "1.0.1", + "object-keys": "1.1.1" + } + }, + "object.getownpropertydescriptors": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", + "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=", + "dev": true, + "requires": { + "define-properties": "1.1.3", + "es-abstract": "1.16.2" + } + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1.0.2" + } + }, + "one-time": { + "version": 
"0.0.4", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-0.0.4.tgz", + "integrity": "sha1-+M33eISCb+Tf+T46nMN7HkSAdC4=" + }, + "optimist": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "integrity": "sha1-yQlBrVnkJzMokjB00s8ufLxuwNk=", + "requires": { + "wordwrap": "0.0.3" + } + }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" + }, + "osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "requires": { + "os-homedir": "1.0.2", + "os-tmpdir": "1.0.2" + } + }, + "over": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/over/-/over-0.0.5.tgz", + "integrity": "sha1-8phS5w/X4l82DgE6jsRMgq7bVwg=" + }, + "p-limit": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz", + "integrity": "sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg==", + "dev": true, + "requires": { + "p-try": "2.2.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "2.2.1" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "pathval": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=", + "dev": true + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, + "pg": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-7.14.0.tgz", + "integrity": 
"sha512-TLsdOWKFu44vHdejml4Uoo8h0EwCjdIj9Z9kpz7pA5i8iQxOTwVb1+Fy+X86kW5AXKxQpYpYDs4j/qPDbro/lg==", + "requires": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "0.1.3", + "pg-pool": "2.0.7", + "pg-types": "2.2.0", + "pgpass": "1.0.2", + "semver": "4.3.2" + }, + "dependencies": { + "semver": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", + "integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c=" + } + } + }, + "pg-connection-string": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz", + "integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=" + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-pool": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-2.0.7.tgz", + "integrity": "sha512-UiJyO5B9zZpu32GSlP0tXy8J2NsJ9EFGFfz5v6PSbdz/1hBLX1rNiiy5+mAm5iJJYwfCv4A0EBcQLGWwjbpzZw==" + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "2.0.0", + "postgres-bytea": "1.0.0", + "postgres-date": "1.0.4", + "postgres-interval": "1.2.0" + } + }, + "pgpass": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz", + "integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=", + "requires": { + "split": "1.0.1" + } + }, + "postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" + }, + "postgres-date": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.4.tgz", + "integrity": "sha512-bESRvKVuTrjoBluEcpv2346+6kgB7UlnqWZsnbnCccTNq/pqfj1j6oBaN5+b/NrDXepYUT/HKadqv3iS9lJuVA==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "4.0.2" + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "protocol-buffers-schema": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/protocol-buffers-schema/-/protocol-buffers-schema-3.3.2.tgz", + "integrity": "sha512-Xdayp8sB/mU+sUV4G7ws8xtYMGdQnxbeIfLjyO9TZZRJdztBGhlmbI5x1qcY4TG5hBkIKGnc28i7nXxaugu88w==" + }, + "proxy-addr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz", + "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==", + "requires": { + "forwarded": "0.1.2", + "ipaddr.js": "1.9.0" + } + 
}, + "psl": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.4.0.tgz", + "integrity": "sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw==" + }, + "pullstream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/pullstream/-/pullstream-0.4.1.tgz", + "integrity": "sha1-1vs79a7Wl+gxFQ6xACwlo/iuExQ=", + "requires": { + "over": "0.0.5", + "readable-stream": "1.0.34", + "setimmediate": "1.0.5", + "slice-stream": "1.0.0" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "q": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", + "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=" + }, + "qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" + }, + "range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" + }, + "raw-body": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", + "requires": { + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + }, + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "1.0.0", + "process-nextick-args": "2.0.1", + "safe-buffer": "5.1.2", + "string_decoder": "1.1.1", + "util-deprecate": "1.0.2" + } + }, + "rebuild": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/rebuild/-/rebuild-0.1.2.tgz", + "integrity": "sha1-A6zepVFRMLR5CSdG4JPa+M+IPpM=", + "requires": { + "optimist": "0.3.7" + } + }, + "request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "requires": { + "aws-sign2": "0.7.0", + "aws4": "1.9.0", + "caseless": "0.12.0", + "combined-stream": "1.0.8", + "extend": "3.0.2", + "forever-agent": "0.6.1", + "form-data": "2.3.3", + "har-validator": "5.1.3", + "http-signature": "1.2.0", + "is-typedarray": "1.0.0", + 
"isstream": "0.1.2", + "json-stringify-safe": "5.0.1", + "mime-types": "2.1.25", + "oauth-sign": "0.9.0", + "performance-now": "2.1.0", + "qs": "6.5.2", + "safe-buffer": "5.1.2", + "tough-cookie": "2.4.3", + "tunnel-agent": "0.6.0", + "uuid": "3.3.3" + }, + "dependencies": { + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + } + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "retry-as-promised": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-3.2.0.tgz", + "integrity": "sha512-CybGs60B7oYU/qSQ6kuaFmRd9sTZ6oXSc0toqePvV74Ac6/IFZSI1ReFQmtCN+uvW1Mtqdwpvt/LGOiCBAY2Mg==", + "requires": { + "any-promise": "1.3.0" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "7.1.6" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "semver": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=" + }, + "send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", + "requires": { + "debug": "2.6.9", + "depd": "1.1.2", + "destroy": "1.0.4", + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "etag": "1.8.1", + "fresh": "0.5.2", + "http-errors": "1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "2.3.0", + "range-parser": "1.2.1", + "statuses": "1.5.0" + }, + "dependencies": { + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + } + } + }, + "sequelize": { + "version": "5.21.2", + "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-5.21.2.tgz", + "integrity": "sha512-MEqJ9NwQi4oy/ylLb2WkfPmhki/BOXC/gJfc8uWUUTETcpLwD1y/5bI1kqVh+qWcECHNsE9G4lmhj5hFbsxqvA==", + "requires": { + "bluebird": "3.7.2", + "cls-bluebird": "2.1.0", + "debug": "4.1.1", + "dottie": "2.0.1", + "inflection": "1.12.0", + "lodash": "4.17.15", + "moment": "2.24.0", + "moment-timezone": "0.5.27", + "retry-as-promised": "3.2.0", + "semver": "6.3.0", + "sequelize-pool": "2.3.0", + "toposort-class": "1.0.1", + "uuid": "3.3.3", + "validator": 
"10.11.0", + "wkx": "0.4.8" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "2.1.2" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "sequelize-pool": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/sequelize-pool/-/sequelize-pool-2.3.0.tgz", + "integrity": "sha512-Ibz08vnXvkZ8LJTiUOxRcj1Ckdn7qafNZ2t59jYHMX1VIebTAOYefWdRYFt6z6+hy52WGthAHAoLc9hvk3onqA==" + }, + "serve-static": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", + "requires": { + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "parseurl": "1.3.3", + "send": "0.17.1" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + }, + "shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, + "simple-lru-cache": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/simple-lru-cache/-/simple-lru-cache-0.0.2.tgz", + "integrity": "sha1-1ZzDoZPBpdAyD4Tucy9uRxPlEd0=" + }, + "simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "requires": { + "is-arrayish": "0.3.2" + } + }, + "slice-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slice-stream/-/slice-stream-1.0.0.tgz", + "integrity": "sha1-WzO9ZvATsaf4ZGCwPUY97DmtPqA=", + "requires": { + "readable-stream": "1.0.34" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "requires": { + "through": "2.3.8" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "sshpk": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "requires": { + "asn1": "0.2.4", + "assert-plus": "1.0.0", + "bcrypt-pbkdf": "1.0.2", + "dashdash": "1.14.1", + "ecc-jsbn": "0.1.2", + "getpass": "0.1.7", + "jsbn": "0.1.1", + "safer-buffer": "2.1.2", + "tweetnacl": "0.14.5" + } + }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } + }, + "string.prototype.trimleft": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz", + "integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==", + "dev": true, + "requires": { + "define-properties": "1.1.3", + "function-bind": "1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz", + "integrity": "sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==", + "dev": true, + "requires": { + "define-properties": "1.1.3", + "function-bind": "1.1.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "2.1.1" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, + "supports-color": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", + "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", + "dev": true, + "requires": { + "has-flag": "3.0.0" + } + }, + "tar": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz", + "integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==", + "requires": { + "block-stream": "0.0.9", + "fstream": "1.0.12", + "inherits": "2.0.3" + } + }, + "text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + }, + "toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + }, + "topo": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/topo/-/topo-2.0.2.tgz", + "integrity": "sha1-zVYVdSU5BXwNwEkaYhw7xvvh0YI=", + "requires": { + "hoek": "4.2.1" + } + }, + "toposort-class": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz", + "integrity": "sha1-f/0feMi+KMO6Rc1OGj9e4ZO9mYg=" + }, + "tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "1.4.0", + "punycode": "1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + } + } + }, + "traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=" + }, + "triple-beam": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", + "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "requires": { + "media-typer": "0.3.0", + "mime-types": "2.1.25" + } + }, + "underscore": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.9.1.tgz", + 
"integrity": "sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg==" + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, + "unzip": { + "version": "0.1.11", + "resolved": "https://registry.npmjs.org/unzip/-/unzip-0.1.11.tgz", + "integrity": "sha1-iXScY7BY19kNYZ+GuYqhU107l/A=", + "requires": { + "binary": "0.3.0", + "fstream": "0.1.31", + "match-stream": "0.0.2", + "pullstream": "0.4.1", + "readable-stream": "1.0.34", + "setimmediate": "1.0.5" + }, + "dependencies": { + "fstream": { + "version": "0.1.31", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-0.1.31.tgz", + "integrity": "sha1-czfwWPu7vvqMn1YaKMqwhJICyYg=", + "requires": { + "graceful-fs": "3.0.12", + "inherits": "2.0.3", + "mkdirp": "0.5.1", + "rimraf": "2.7.1" + } + }, + "graceful-fs": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.12.tgz", + "integrity": "sha512-J55gaCS4iTTJfTXIxSVw3EMQckcqkpdRv3IR7gu6sq0+tbC363Zx6KH/SEwXASK9JRbhyZmVjJEVJIOxYsB3Qg==", + "requires": { + "natives": "1.1.6" + } + }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "0.0.1", + "string_decoder": "0.10.31" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "uri-js": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "requires": { + "punycode": "2.1.1" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "uuid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" + }, + "validator": { + "version": "10.11.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", + "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==" + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "1.3.0" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "requires": { + "string-width": "1.0.2" + } + }, + "winston": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.2.1.tgz", + "integrity": "sha512-zU6vgnS9dAWCEKg/QYigd6cgMVVNwyTzKs81XZtTFuRwJOcDdBg7AU0mXVyNbs7O5RH2zdv+BdNZUlx7mXPuOw==", + "requires": { + "async": "2.6.3", + "diagnostics": "1.1.1", + "is-stream": "1.1.0", + "logform": "2.1.2", + "one-time": "0.0.4", + "readable-stream": "3.4.0", + "stack-trace": "0.0.10", + "triple-beam": "1.3.0", + "winston-transport": "4.3.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "requires": { + "inherits": "2.0.3", + "string_decoder": "1.1.1", + "util-deprecate": "1.0.2" + } + } + } + }, + "winston-transport": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.3.0.tgz", + "integrity": "sha512-B2wPuwUi3vhzn/51Uukcao4dIduEiPOcOt9HJ3QeaXgkJ5Z7UwpBzxS4ZGNHtrxrUvTwemsQiSys0ihOf8Mp1A==", + "requires": { + "readable-stream": "2.3.6", + "triple-beam": "1.3.0" + } + }, + "wkx": { + "version": "0.4.8", + "resolved": "https://registry.npmjs.org/wkx/-/wkx-0.4.8.tgz", + "integrity": "sha512-ikPXMM9IR/gy/LwiOSqWlSL3X/J5uk9EO2hHNRXS41eTLXaUFEVw9fn/593jW/tE5tedNg8YjT5HkCa4FqQZyQ==", + "requires": { + "@types/node": "12.12.14" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=" + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "3.2.1", + "string-width": "3.1.0", + "strip-ansi": "5.2.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "7.0.3", + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "5.2.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "4.1.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "wrr-pool": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/wrr-pool/-/wrr-pool-1.1.4.tgz", + "integrity": "sha512-+lEdj42HlYqmzhvkZrx6xEymj0wzPBxqr7U1Xh9IWikMzOge03JSQT9YzTGq54SkOh/noViq32UejADZVzrgAg==", + "requires": { + "lodash": "4.17.11" + } + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, + "yargs": { + "version": "13.3.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", + "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "dev": true, + "requires": { + "cliui": "5.0.0", + "find-up": "3.0.0", + "get-caller-file": "2.0.5", + "require-directory": "2.1.1", + "require-main-filename": "2.0.0", + "set-blocking": "2.0.0", + "string-width": "3.1.0", + "which-module": "2.0.0", + "y18n": "4.0.0", + "yargs-parser": "13.1.1" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "7.0.3", + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "5.2.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "4.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "dev": true, + "requires": { + "camelcase": "5.3.1", + "decamelize": "1.2.0" + } + }, + "yargs-unparser": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.0.tgz", + "integrity": "sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==", + "dev": true, + "requires": { + "flat": "4.1.0", + "lodash": "4.17.15", + "yargs": "13.3.0" + }, + "dependencies": { + "lodash": { + "version": 
"4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } + } + } + } +} From df771bca2393e0dc099ea86ac711fcf14d69c7c2 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 3 Dec 2019 17:43:53 +0530 Subject: [PATCH 002/101] service deployment --- .circleci/config.yml | 10 +++++++--- .gitignore | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 .gitignore diff --git a/.circleci/config.yml b/.circleci/config.yml index 189ebbe..c8cc6db 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,9 +36,13 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf -# ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-deployvar -# source buildenvvar -# ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} jobs: diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e43b0f9 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.DS_Store From 4e7e1bcabdbecec1b1bb90b4d2ad1ac7f283c7fb Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 3 Dec 2019 17:56:37 +0530 Subject: [PATCH 003/101] docker file change --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c7f7600..5d30a1d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -44,4 +44,4 @@ WORKDIR /ifxpg-nodeserver RUN rm -rf node_modules && npm install --unsafe-perm # ENTRYPOINT [ "/bin/bash" ] -ENTRYPOINT [ "npm" ] \ No newline at end of file +ENTRYPOINT [ "npm","run" ] \ No newline at end of file From 7bfa4d7cbcba871dabb4aac7f5c127fc133548f0 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 16 Dec 2019 23:00:21 +0530 Subject: [PATCH 004/101] code changes to autodeploy --- .circleci/config.yml | 6 ++- config/default.js | 4 ++ informix_auditing/audit_util.c | 17 +++++-- package.json | 5 ++- src/api/migratedynamodb.js | 30 +++++++++++++ src/node-server-console-view.js | 45 +++++++++++++++++++ .../node-server-without-kafka.js | 9 ++-- src/nodeserver.js | 2 +- 8 files changed, 106 insertions(+), 12 deletions(-) create mode 100644 src/api/migratedynamodb.js create mode 100644 src/node-server-console-view.js rename node-server-without-kafka.js => src/node-server-without-kafka.js (83%) diff --git a/.circleci/config.yml b/.circleci/config.yml index c8cc6db..dceeef8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,7 +11,7 @@ install_dependency: &install_dependency install_deploysuite: &install_deploysuite name: Installation of install_deploysuite. command: | - git clone --branch v1.4 https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript + git clone --branch v1.4.2 https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript cp ./../buildscript/master_deploy.sh . cp ./../buildscript/buildenv.sh . cp ./../buildscript/awsconfiguration.sh . 
@@ -43,6 +43,10 @@ builddeploy_steps: &builddeploy_steps ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} jobs: diff --git a/config/default.js b/config/default.js index 03b2dae..1471051 100644 --- a/config/default.js +++ b/config/default.js @@ -41,5 +41,9 @@ module.exports = { USER: process.env.IFX_USER || 'informix', PASSWORD: process.env.IFX_PASSWORD || '1nf0rm1x', POOL_MAX_SIZE: parseInt(process.env.IFX_POOL_MAX_SIZE || '10') + }, + DYNAMODB: { + REGION: process.env.AWS_REGION || 'us-east-1', + TABLENAME: process.env.DYNAMODB_TABLENAME || 'ifxpg-migrator' } } diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index deb41fc..442878e 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -488,7 +488,9 @@ char* gettimestamp() int posttopic(char *jsondata) { char *postinfo = getenv("POSTTOPIC"); - char *localurl= "http://host.docker.internal:8080/events"; + char *fileeventsurl = "http://ifxpg-migrator.topcoder-dev.com/fileevents" + char *kafkaeventsurl = "http://ifxpg-migrator.topcoder-dev.com/kafkaevents" + //char *localurl= "http://host.docker.internal:8080/events"; //char *localurl= "http://localhost:8080/events"; char *posturl = getenv("POSTURL"); if (!postinfo) @@ -507,7 +509,8 @@ int posttopic(char *jsondata) printf("posting topic"); if (!posturl) { - posturl = localurl; + posturl = fileeventsurl; + //posturl = localurl; //printf("PATH : %s\n",ap); printf("no url provide in environment . 
So it is taking localurl"); } @@ -518,13 +521,19 @@ int posttopic(char *jsondata) headers = curl_slist_append(headers, "cache-control: no-cache"); headers = curl_slist_append(headers, "Content-Type: application/json"); curl_easy_setopt(hnd, CURLOPT_HTTPHEADER, headers); - curl_easy_setopt(hnd, CURLOPT_POSTFIELDS,jsondata); - CURLcode ret = curl_easy_perform(hnd); if(ret != CURLE_OK) + { + fprintf(stderr, "curl_easy_perform() failed: %s\n", + curl_easy_strerror(ret)); + } + curl_easy_setopt(hnd, CURLOPT_URL, kafkaeventsurl); + if(ret != CURLE_OK) + { fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(ret)); + } return 0; } diff --git a/package.json b/package.json index 4dceaa7..c82b313 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ "test": "echo \"Error: no test specified\" && exit 1", "producer": "node src/nodeserver.js", "consumer": "node src/consumer.js", - "producerwithoutkafka" : "node node-server-without-kafka.js" + "producerwithoutkafka" : "node src/node-server-without-kafka.js" }, "author": "", "license": "ISC", @@ -26,7 +26,8 @@ "sequelize": "^5.19.0", "underscore": "^1.9.1", "winston": "^3.1.0", - "is-utf8": "^0.2.1" + "is-utf8": "^0.2.1", + "aws-sdk": "latest" }, "devDependencies": { "chai": "^4.2.0", diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js new file mode 100644 index 0000000..941d626 --- /dev/null +++ b/src/api/migratedynamodb.js @@ -0,0 +1,30 @@ +const config = require('config') +const logger = require('./common/logger') +const _ = require('lodash') +var AWS = require("aws-sdk"); +async function pushToDynamoDb(payload) { + try { console.log('----Inside DynomoDB code -------'); + // console.log(payload) + var params = { + TableName: config.DYNAMODB.TABLENAME, + Item: { + SequenceID: payload.TIME, + pl_document: payload, + pl_table: payload.TABLENAME, + pl_schemaname: payload.SCHEMANAME, + pl_operation: payload.OPERATION, + pl_uniquedatatype: payload.uniquedatatype, + NodeSequenceID: Date.now() + } + } + var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.Region}); + docClient.put(params, function(err, data) { + if (err) console.log('DynamoDB error : ', err); + else console.log('DynamoDB Success : ',data); + }); + + } catch (e) { + console.log(e) + } +} +module.exports = pushToDynamoDb \ No newline at end of file diff --git a/src/node-server-console-view.js b/src/node-server-console-view.js new file mode 100644 index 0000000..d0cc982 --- /dev/null +++ b/src/node-server-console-view.js @@ -0,0 +1,45 @@ +const express = require('express') +const bodyParser = require('body-parser') + +const app = express() +const port = process.env.PORT || 8080; +app.use(bodyParser.json()); // to support JSON-encoded bodies +app.use(bodyParser.urlencoded({ // to support URL-encoded bodies + extended: true +})); +app.get('/', function (req, res) { + res.send('hello world') +}) + +app.post('/fileevents', function (req, res) { + const payload = req.body + // const topic = payload.topic + const topic = 'test-topic'; + console.log({ + topic: topic, + partition: 0, + message: { + value : JSON.stringify(payload) + } + }); + res.send('done'); + + // send response to client + //res.send('ok') + +}) + +// const producer = new Kafka.Producer() + +// producer.init().then(function () { +// console.log('connected to local kafka server on port 9092 ...'); + +// // start the server +// app.listen(port); +// console.log('Server started! 
At http://localhost:' + port); + +// } //end producer init +// ).catch(e => { console.log('Error : ', e) }); + +app.listen(port); +console.log('Server started! At http://localhost:' + port); diff --git a/node-server-without-kafka.js b/src/node-server-without-kafka.js similarity index 83% rename from node-server-without-kafka.js rename to src/node-server-without-kafka.js index d815a1a..e7775fb 100644 --- a/node-server-without-kafka.js +++ b/src/node-server-without-kafka.js @@ -1,6 +1,6 @@ const express = require('express') -//const Kafka = require('no-kafka') const bodyParser = require('body-parser') +const {pushToDynamoDb} = require('./api/migratedynamodb') const app = express() const port = process.env.PORT || 8080; @@ -12,17 +12,18 @@ app.get('/', function (req, res) { res.send('hello world') }) -app.post('/events', function (req, res) { +app.post('/fileevents', function (req, res) { const payload = req.body // const topic = payload.topic const topic = 'test-topic'; console.log({ - topic: topic, - partition: 0, + topic: config.topic.NAME, + partition: config.topic.PARTITION, message: { value : JSON.stringify(payload) } }); + await pushToDynamoDb(payload); res.send('done'); // send response to client diff --git a/src/nodeserver.js b/src/nodeserver.js index 75f3a45..6406cb9 100644 --- a/src/nodeserver.js +++ b/src/nodeserver.js @@ -15,7 +15,7 @@ app.get('/', function (req, res) { }) -app.post('/events', async (req, res, next) => { +app.post('/kafkaevents', async (req, res, next) => { const payload = req.body let seqID = 0 From f1f0188ba0b6f7a3ce600481a4cd81df55059009 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 11:14:47 +0530 Subject: [PATCH 005/101] fix for async error --- src/node-server-without-kafka.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node-server-without-kafka.js b/src/node-server-without-kafka.js index e7775fb..d060d6f 100644 --- a/src/node-server-without-kafka.js +++ b/src/node-server-without-kafka.js @@ -12,7 +12,7 @@ app.get('/', function (req, res) { res.send('hello world') }) -app.post('/fileevents', function (req, res) { +app.post('/fileevents', async function (req, res) { const payload = req.body // const topic = payload.topic const topic = 'test-topic'; From 5c20b0294a57b235d0e0ed3cd9735eed6d3a461c Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 13:29:46 +0530 Subject: [PATCH 006/101] fix for async error --- src/api/migratedynamodb.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index 941d626..22c7b17 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -1,5 +1,5 @@ const config = require('config') -const logger = require('./common/logger') +const logger = require('../common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); async function pushToDynamoDb(payload) { From e694fa93094b47892a062dac06c59d3e4e237e29 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 14:46:43 +0530 Subject: [PATCH 007/101] [skip ci] --- informix_auditing/{UNIX.mak => UNIX_x64.mak} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename informix_auditing/{UNIX.mak => UNIX_x64.mak} (100%) diff --git a/informix_auditing/UNIX.mak b/informix_auditing/UNIX_x64.mak similarity index 100% rename from informix_auditing/UNIX.mak rename to informix_auditing/UNIX_x64.mak From 0573e4236cdf99ebc1b4947a7b714fe14985bee4 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 14:58:28 +0530 Subject: 
[PATCH 008/101] [skip ci] --- informix_auditing/audit_util.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 442878e..794b5b8 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -488,8 +488,8 @@ char* gettimestamp() int posttopic(char *jsondata) { char *postinfo = getenv("POSTTOPIC"); - char *fileeventsurl = "http://ifxpg-migrator.topcoder-dev.com/fileevents" - char *kafkaeventsurl = "http://ifxpg-migrator.topcoder-dev.com/kafkaevents" + char *fileeventsurl = "http://ifxpg-migrator.topcoder-dev.com/fileevents"; + char *kafkaeventsurl = "http://ifxpg-migrator.topcoder-dev.com/kafkaevents"; //char *localurl= "http://host.docker.internal:8080/events"; //char *localurl= "http://localhost:8080/events"; char *posturl = getenv("POSTURL"); From 2eac3a7c5a395409da00fff3ae3dce77ce2f78d6 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 16:11:55 +0530 Subject: [PATCH 009/101] Update migratedynamodb.js --- src/api/migratedynamodb.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index 22c7b17..d9094eb 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -2,6 +2,7 @@ const config = require('config') const logger = require('../common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); +const config = require('config'); async function pushToDynamoDb(payload) { try { console.log('----Inside DynomoDB code -------'); // console.log(payload) @@ -27,4 +28,4 @@ async function pushToDynamoDb(payload) { console.log(e) } } -module.exports = pushToDynamoDb \ No newline at end of file +module.exports = pushToDynamoDb From a0fe97c562cffcc6bc08e854ae30db6cd88ffb48 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 16:12:19 +0530 Subject: [PATCH 010/101] Update migratedynamodb.js [skip ci] --- src/api/migratedynamodb.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index d9094eb..97064aa 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -2,7 +2,6 @@ const config = require('config') const logger = require('../common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); -const config = require('config'); async function pushToDynamoDb(payload) { try { console.log('----Inside DynomoDB code -------'); // console.log(payload) From 6d4e8f30f0e39862bedd98b3066112032abfb255 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 16:12:39 +0530 Subject: [PATCH 011/101] Update node-server-without-kafka.js --- src/node-server-without-kafka.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/node-server-without-kafka.js b/src/node-server-without-kafka.js index d060d6f..c4c78fc 100644 --- a/src/node-server-without-kafka.js +++ b/src/node-server-without-kafka.js @@ -1,6 +1,7 @@ const express = require('express') const bodyParser = require('body-parser') const {pushToDynamoDb} = require('./api/migratedynamodb') +const config = require('config'); const app = express() const port = process.env.PORT || 8080; From 2359647f8d5b32f62b9fec1c72e58fca1be58fa0 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 17 Dec 2019 21:54:32 +0530 Subject: [PATCH 012/101] [skip ci] --- testing/curltest.sh | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 testing/curltest.sh diff --git a/testing/curltest.sh b/testing/curltest.sh new file mode 100644 
index 0000000..0e5eda3 --- /dev/null +++ b/testing/curltest.sh @@ -0,0 +1,17 @@ +curl -X POST \ + http://localhost:8080/events \ + -H 'Content-Type: application/json' \ + -H 'Postman-Token: 807a8b78-efc9-45c1-b921-57fc5774b1f5' \ + -H 'cache-control: no-cache' \ + -d '{ + "TIME": "2019-11-06T07:12:43.553779Z", + "SCHEMANAME": "testdb", + "TABLENAME": "test7_2", + "OPERATION": "INSERT", + "DATA": { + "abc": "1", + "bcd": "2", + "testingbyte": "no" + }, + "uniquedatatype": "false" +}' \ No newline at end of file From 8342f12f335fdc78bb28e903e9c4c7debe8b82ca Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 09:55:31 +0530 Subject: [PATCH 013/101] error fixing --- src/node-server-without-kafka.js | 2 +- testing/curltest.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/node-server-without-kafka.js b/src/node-server-without-kafka.js index c4c78fc..4c02455 100644 --- a/src/node-server-without-kafka.js +++ b/src/node-server-without-kafka.js @@ -1,6 +1,6 @@ const express = require('express') const bodyParser = require('body-parser') -const {pushToDynamoDb} = require('./api/migratedynamodb') +const pushToDynamoDb = require('./api/migratedynamodb') const config = require('config'); const app = express() diff --git a/testing/curltest.sh b/testing/curltest.sh index 0e5eda3..1b36ed3 100644 --- a/testing/curltest.sh +++ b/testing/curltest.sh @@ -1,5 +1,5 @@ curl -X POST \ - http://localhost:8080/events \ + http://ifxpg-migrator.topcoder-dev.com/fileevents \ -H 'Content-Type: application/json' \ -H 'Postman-Token: 807a8b78-efc9-45c1-b921-57fc5774b1f5' \ -H 'cache-control: no-cache' \ From 9977d5b9925344d40297880b399f0bd8b7dfbb64 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 10:30:58 +0530 Subject: [PATCH 014/101] error fixing --- .circleci/config.yml | 16 ++++++++-------- src/api/migratedynamodb.js | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index dceeef8..a7bc3aa 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,14 +36,14 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index 97064aa..fe80f07 100644 --- 
a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -17,7 +17,7 @@ async function pushToDynamoDb(payload) { NodeSequenceID: Date.now() } } - var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.Region}); + var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION}); docClient.put(params, function(err, data) { if (err) console.log('DynamoDB error : ', err); else console.log('DynamoDB Success : ',data); From 9e7a1aeb2b5a5d78029cab09a08e4e6cbf219ab1 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 10:32:25 +0530 Subject: [PATCH 015/101] error fixing --- .circleci/config.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a7bc3aa..d87d2c7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,14 +36,14 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} From 218c2d46bf91c76b51ae39d51ce2af3543c1ac10 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 16:19:56 +0530 Subject: [PATCH 016/101] Update migratedynamodb.js --- src/api/migratedynamodb.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index fe80f07..ccf7b41 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -17,7 +17,7 @@ async function pushToDynamoDb(payload) { NodeSequenceID: Date.now() } } - var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION}); + var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION,convertEmptyValues: true}}); docClient.put(params, function(err, data) { if (err) console.log('DynamoDB error : ', err); else console.log('DynamoDB Success : ',data); From ae7580d2148883d4853341e0145dfbfda09f4e21 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 20:16:34 +0530 Subject: [PATCH 017/101] fixing error --- informix_auditing/audit_util.c | 14 ++++++++------ informix_auditing/auditing2.c | 3 ++- src/api/migratedynamodb.js | 2 +- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 794b5b8..0c802dc 100644 --- 
a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -485,14 +485,14 @@ char* gettimestamp() /*--------------------------------------------------------------*/ /* post topic base don condition*/ -int posttopic(char *jsondata) +int posttopic(char *jsondata, char *posturl) { - char *postinfo = getenv("POSTTOPIC"); - char *fileeventsurl = "http://ifxpg-migrator.topcoder-dev.com/fileevents"; - char *kafkaeventsurl = "http://ifxpg-migrator.topcoder-dev.com/kafkaevents"; + // char *postinfo = getenv("POSTTOPIC"); + // char *fileeventsurl = "http://ifxpg-migrator.topcoder-dev.com/fileevents"; + // char *kafkaeventsurl = "http://ifxpg-migrator.topcoder-dev.com/kafkaevents"; //char *localurl= "http://host.docker.internal:8080/events"; //char *localurl= "http://localhost:8080/events"; - char *posturl = getenv("POSTURL"); + /* char *posturl = getenv("POSTURL"); if (!postinfo) { printf("no post topic set true or false. defualt it will post topic"); @@ -514,6 +514,7 @@ int posttopic(char *jsondata) //printf("PATH : %s\n",ap); printf("no url provide in environment . So it is taking localurl"); } +*/ CURL *hnd = curl_easy_init(); curl_easy_setopt(hnd, CURLOPT_CUSTOMREQUEST, "POST"); curl_easy_setopt(hnd, CURLOPT_URL, posturl); @@ -528,12 +529,13 @@ int posttopic(char *jsondata) fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(ret)); } - curl_easy_setopt(hnd, CURLOPT_URL, kafkaeventsurl); + /* curl_easy_setopt(hnd, CURLOPT_URL, kafkaeventsurl); if(ret != CURLE_OK) { fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(ret)); } + */ return 0; } diff --git a/informix_auditing/auditing2.c b/informix_auditing/auditing2.c index cb245bd..108f8a5 100644 --- a/informix_auditing/auditing2.c +++ b/informix_auditing/auditing2.c @@ -180,7 +180,8 @@ DPRINTF("logger", 80, ("cbfunc(): about to open file %s", buffer)); DPRINTF("logger", 80, ("cbfunc(): pcur->json is null")); } else { ret = mi_file_write(fd, pcur->json, strlen(pcur->json)); - int res=posttopic(pcur->json); + int res=posttopic(pcur->json, "http://ifxpg-migrator.topcoder-dev.com/fileevents"); + int res1=posttopic(pcur->json, "http://ifxpg-migrator.topcoder-dev.com/kafkaevents"); mi_file_close(fd); mi_free(pcur->json); } diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index ccf7b41..227d16f 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -17,7 +17,7 @@ async function pushToDynamoDb(payload) { NodeSequenceID: Date.now() } } - var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION,convertEmptyValues: true}}); + var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION,convertEmptyValues: true}); docClient.put(params, function(err, data) { if (err) console.log('DynamoDB error : ', err); else console.log('DynamoDB Success : ',data); From ae0493d841c40b361ef80d7a17b2b93fd066f025 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 20:20:30 +0530 Subject: [PATCH 018/101] [skip ci] --- informix_auditing/audit_util.c | 1 + 1 file changed, 1 insertion(+) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 0c802dc..453803a 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -515,6 +515,7 @@ int posttopic(char *jsondata, char *posturl) printf("no url provide in environment . 
So it is taking localurl"); } */ + printf("posting topic to url %s", posturl); CURL *hnd = curl_easy_init(); curl_easy_setopt(hnd, CURLOPT_CUSTOMREQUEST, "POST"); curl_easy_setopt(hnd, CURLOPT_URL, posturl); From 3a41ea06c2cd7d015adffac06e206c0bfa21ee9a Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 18 Dec 2019 20:25:26 +0530 Subject: [PATCH 019/101] [skip ci] --- informix_auditing/audit_util.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/informix_auditing/audit_util.h b/informix_auditing/audit_util.h index fe4741a..d222c35 100644 --- a/informix_auditing/audit_util.h +++ b/informix_auditing/audit_util.h @@ -53,4 +53,4 @@ mi_string *doUpdateCN(); mi_integer set_tracing(mi_lvarchar *class, mi_integer lvl, mi_lvarchar *tfile, MI_FPARAM *fparam); char* gettimestamp(); -int posttopic(char *jsondata); +int posttopic(char *jsondata, char *posturl); From 439d9f0d66e5fcd71295a6678b11ee0100669cf8 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 14:29:24 +0530 Subject: [PATCH 020/101] notify service integration --- .circleci/config.yml | 12 ++++++++-- config/default.js | 7 +++++- package.json | 8 +++++-- src/api/postslackinfo.js | 44 +++++++++++++++++++++++++++++++++++++ src/consumer-slacknotify.js | 36 ++++++++++++++++++++++++++++++ 5 files changed, 102 insertions(+), 5 deletions(-) create mode 100644 src/api/postslackinfo.js create mode 100644 src/consumer-slacknotify.js diff --git a/.circleci/config.yml b/.circleci/config.yml index d87d2c7..e239483 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,17 +36,25 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf + # producer deployment # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # consumer deployment # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # without kafka dynamodb # rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # notify deployment + # rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/config/default.js b/config/default.js index 1471051..7394e97 100644 --- a/config/default.js +++ b/config/default.js @@ -45,5 +45,10 @@ module.exports = { DYNAMODB: { REGION: process.env.AWS_REGION || 'us-east-1', TABLENAME: process.env.DYNAMODB_TABLENAME || 'ifxpg-migrator' - } + }, + SLACK: { + URL: process.env.SLACKURL || 'us-east-1', + SLACKCHANNEL: process.env.SLACKCHANNEL || 'ifxpg-migrator', + SLACKNOTIFY: process.env.SLACKNOTIFY || 'false' + } } diff --git a/package.json b/package.json index c82b313..5098a81 100644 --- a/package.json +++ 
b/package.json @@ -7,7 +7,8 @@ "test": "echo \"Error: no test specified\" && exit 1", "producer": "node src/nodeserver.js", "consumer": "node src/consumer.js", - "producerwithoutkafka" : "node src/node-server-without-kafka.js" + "producerwithoutkafka" : "node src/node-server-without-kafka.js", + "ifxpgnotify": "node src/consumer-slacknotify.js" }, "author": "", "license": "ISC", @@ -27,7 +28,10 @@ "underscore": "^1.9.1", "winston": "^3.1.0", "is-utf8": "^0.2.1", - "aws-sdk": "latest" + "aws-sdk": "latest", + "zlib": "latest", + "url": "latest", + "https": "latest" }, "devDependencies": { "chai": "^4.2.0", diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js new file mode 100644 index 0000000..33b0144 --- /dev/null +++ b/src/api/postslackinfo.js @@ -0,0 +1,44 @@ +const config = require('config'); +const zlib = require('zlib'); +const url = require('url'); +const https = require('https'); +hookUrl = config.SLACK.URL +slackChannel=config.SLACK.SLACKCHANNEL + +function postMessage(message, callback) { + var slackMessage = { + channel: `${slackChannel}`, + text: `${message}`, + } + const body = JSON.stringify(slackMessage); + const options = url.parse(hookUrl); + options.method = 'POST'; + options.headers = { + 'Content-Type': 'application/json', + 'Content-Length': Buffer.byteLength(body), + }; + + const postReq = https.request(options, (res) => { + const chunks = []; + res.setEncoding('utf8'); + res.on('data', (chunk) => chunks.push(chunk)); + res.on('end', () => { + if (callback) { + callback({ + body: chunks.join(''), + statusCode: res.statusCode, + statusMessage: res.statusMessage, + }); + } + }); + return res; + }); + + postReq.write(body); + postReq.end(); +} + +module.exports = { + postMessage + } + \ No newline at end of file diff --git a/src/consumer-slacknotify.js b/src/consumer-slacknotify.js new file mode 100644 index 0000000..42e0b1a --- /dev/null +++ b/src/consumer-slacknotify.js @@ -0,0 +1,36 @@ +const Kafka = require('no-kafka'); +const Promise = require('bluebird'); +const config = require('config'); +const postMessage = require('./api/postslackinfo'); +const consumer = new Kafka.GroupConsumer(); + +const dataHandler = function (messageSet, topic, partition) { + return Promise.each(messageSet, async function (m) { + const payload = JSON.parse(m.message.value) + if(config.SLACK.SLACKNOTIFY === 'true') { + postMessage(payload, (response) => { + if (response.statusCode < 400) { + console.info('Message posted successfully'); + // callback(null); + } else if (response.statusCode < 500) { + console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); + // callback(null); // Don't retry because the error is due to a problem with the request + } else { + // Let Lambda retry + console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + //callback(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + } + }); + } + + // commit offset + return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) + }).catch(err => console.log(err)) +}; + +const strategies = [{ + subscriptions: [config.topic_error.NAME], + handler: dataHandler +}]; + +consumer.init(strategies); From ded775f02e0780df0c520bc05754d4795ed3e176 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 15:11:51 +0530 Subject: [PATCH 021/101] notify service integration --- src/api/postslackinfo.js | 5 ++--- 
src/consumer-slacknotify.js | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js index 33b0144..e97bcac 100644 --- a/src/api/postslackinfo.js +++ b/src/api/postslackinfo.js @@ -38,7 +38,6 @@ function postMessage(message, callback) { postReq.end(); } -module.exports = { - postMessage - } +module.exports = postMessage + \ No newline at end of file diff --git a/src/consumer-slacknotify.js b/src/consumer-slacknotify.js index 42e0b1a..8883d98 100644 --- a/src/consumer-slacknotify.js +++ b/src/consumer-slacknotify.js @@ -8,6 +8,7 @@ const dataHandler = function (messageSet, topic, partition) { return Promise.each(messageSet, async function (m) { const payload = JSON.parse(m.message.value) if(config.SLACK.SLACKNOTIFY === 'true') { + console.log(payload) postMessage(payload, (response) => { if (response.statusCode < 400) { console.info('Message posted successfully'); From fa0ce71213564b84544b1fe7ae80563be042e847 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 15:56:26 +0530 Subject: [PATCH 022/101] notify service integration --- src/api/postslackinfo.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js index e97bcac..c4cf06f 100644 --- a/src/api/postslackinfo.js +++ b/src/api/postslackinfo.js @@ -6,10 +6,15 @@ hookUrl = config.SLACK.URL slackChannel=config.SLACK.SLACKCHANNEL function postMessage(message, callback) { + var slackMessage = { channel: `${slackChannel}`, text: `${message}`, } + console.log("stringfied slack message"); + console.log(JSON.stringify(slackMessage)) + console.log("slack message"); + console.log(slackMessage) const body = JSON.stringify(slackMessage); const options = url.parse(hookUrl); options.method = 'POST'; From 026ac352828cc9d23bd8eb2953d78373e005af6c Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 16:27:28 +0530 Subject: [PATCH 023/101] notify service integration --- src/consumer-slacknotify.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer-slacknotify.js b/src/consumer-slacknotify.js index 8883d98..df8cbcd 100644 --- a/src/consumer-slacknotify.js +++ b/src/consumer-slacknotify.js @@ -9,7 +9,7 @@ const dataHandler = function (messageSet, topic, partition) { const payload = JSON.parse(m.message.value) if(config.SLACK.SLACKNOTIFY === 'true') { console.log(payload) - postMessage(payload, (response) => { + postMessage(Object.values(payload), (response) => { if (response.statusCode < 400) { console.info('Message posted successfully'); // callback(null); From b37a12e7656fe6b10a70defcad59eecbe996de31 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 20:34:53 +0530 Subject: [PATCH 024/101] introducing retry count --- .circleci/config.yml | 12 ++++----- config/default.js | 1 + src/consumer.js | 63 +++++++++++++++++++++++++++++++++++++------- 3 files changed, 60 insertions(+), 16 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e239483..b6f8ca1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -42,9 +42,9 @@ builddeploy_steps: &builddeploy_steps # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s 
${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # without kafka dynamodb # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar @@ -52,9 +52,9 @@ builddeploy_steps: &builddeploy_steps # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # notify deployment # rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/config/default.js b/config/default.js index 7394e97..f8391f0 100644 --- a/config/default.js +++ b/config/default.js @@ -8,6 +8,7 @@ module.exports = { PARTITION: 0 }, RETRY_COUNTER: 3, + KAFKA_REPOST_COUNT: 5, topic_error: { NAME: 'db.ifxpgmigrate.error', PARTITION: 0, diff --git a/src/consumer.js b/src/consumer.js index 4ce64b8..2e3ed06 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -104,6 +104,8 @@ const dataHandler = function (messageSet, topic, partition) { return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) } +//================ + //audit failure log await cAuditLog({ SEQ_ID: payload.SEQ_ID, @@ -115,15 +117,46 @@ const dataHandler = function (messageSet, topic, partition) { let msgValue = { ...postgreErr, - recipients: config.topic_error.EMAIL + recipients: config.topic_error.EMAIL, + payloadposted: JSON.stringify(payload) } - //send postgres_error message - await producer.send({ - topic: config.topic_error.NAME, - partition: config.topic_error.PARTITION, - message: { - value : JSON.stringify(msgValue), - } + +//=================================== + + if (!payload.retryCount) { + payload.retryCount = 0 + logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA.maxRetry); + } + if (payload.retryCount >= config.KAFKA_REPOST_COUNT) { + logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); + await producer.send({ + topic: config.topic_error.NAME, + partition: config.topic_error.PARTITION, + message: { + value : JSON.stringify(msgValue), + } + },{ + retries: { + attempts: config.RETRY_COUNTER, + delay: { + min: 100, + max: 300 + } + } + }).then(function (result) { + console.log(result) + }) + } + else + { + payload['retryCount'] = payload.retryCount + 1; + + await producer.send({ + topic: config.topic.NAME, + partition: config.topic.PARTITION, + message: { + value : JSON.stringify(payload) + } },{ retries: { attempts: config.RETRY_COUNTER, @@ -133,9 +166,19 @@ const dataHandler = function (messageSet, topic, partition) { } } }).then(function (result) { - console.log(result) - }) + if(result[0].error) + kafka_error = result[0].error + + console.log(kafka_error) + }) + + //await auditTrail([message.payload.payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn, + // 
message.payload.operation,"Error",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer') + // await pushToKafka(message) + } + //send postgres_error message +//=============================================== // commit offset return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) }).catch(err => console.log(err)) From 9750f51b41c8e95a1747dcfea7cb0ed1a2a6f925 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 19 Dec 2019 20:48:18 +0530 Subject: [PATCH 025/101] introducing retry count --- src/consumer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index 2e3ed06..06fd047 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -125,7 +125,7 @@ const dataHandler = function (messageSet, topic, partition) { if (!payload.retryCount) { payload.retryCount = 0 - logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA.maxRetry); + logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA_REPOST_COUNT); } if (payload.retryCount >= config.KAFKA_REPOST_COUNT) { logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); From 9860d7713244f1c45eb58175e6d187ba9c769cd4 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 12:21:04 +0530 Subject: [PATCH 026/101] introducing retry count --- src/consumer.js | 64 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/src/consumer.js b/src/consumer.js index 06fd047..733eae2 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -3,6 +3,7 @@ const Promise = require('bluebird'); const config = require('config'); const consumer = new Kafka.GroupConsumer(); +const { producerLog, pAuditLog } = require('./api/audit') const { consumerLog, cAuditLog } = require('./api/audit') //const { migrateDelete, migrateInsert, migrateUpdate } = require('./api/migrate') const { migratepgDelete, migratepgInsert, migratepgUpdate } = require('./api/migratepg') @@ -151,6 +152,24 @@ const dataHandler = function (messageSet, topic, partition) { { payload['retryCount'] = payload.retryCount + 1; + let seqID = 0 + //add producer_log + await producerLog({ + TOPICNAME: config.topic.NAME, + SOURCE: config.SOURCE, + SCHEMA_NAME: payload.SCHEMANAME, + TABLE_NAME: payload.TABLENAME, + PRODUCER_PAYLOAD: payload.DATA, + OPERATION: payload.OPERATION + }).then((log) => seqID = log.SEQ_ID) + + if(!seqID){ + console.log('ProducerLog Failure') + return + } + console.log('ProducerLog Success') + payload['SEQ_ID'] = seqID; + //SEQ_ID: seqID await producer.send({ topic: config.topic.NAME, partition: config.topic.PARTITION, @@ -171,6 +190,51 @@ const dataHandler = function (messageSet, topic, partition) { console.log(kafka_error) }) + + //add auditlog + if(!kafka_error){ + await pAuditLog({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'success', + PRODUCER_PUBLISH_TIME: Date.now() + }).then((log) => console.log('Send Success')) + res.send('done') + return + } + + //add auditlog + await pAuditLog({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'failure', + PRODUCER_FAILURE_LOG: kafka_error, + PRODUCER_PUBLISH_TIME: Date.now() + }).then((log) => console.log('Send Failure')) + + msgValue = { + ...kafka_error, + SEQ_ID: seqID, + recipients: config.topic_error.EMAIL, + msgoriginator: "consumer-producer" + } + + //send error message to kafka + await producer.send({ + topic: config.topic_error.NAME, + partition: 
config.topic_error.PARTITION, + message: { + value : JSON.stringify(msgValue), + } + },{ + retries: { + attempts: config.RETRY_COUNTER, + delay: { + min: 100, + max: 300 + } + } + }).then(function (result) { + console.log(result) + }) //await auditTrail([message.payload.payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn, // message.payload.operation,"Error",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer') From 9e0310f871f74fac598957e26cf0706a24e390ad Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 12:44:47 +0530 Subject: [PATCH 027/101] introducing retry count --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b6f8ca1..676fbc6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - # rm -rf buildenvvar + rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} From b0108895930f82c43adeadf23fad884662bb852e Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 13:20:24 +0530 Subject: [PATCH 028/101] introducing retry count --- .circleci/config.yml | 8 ++++---- src/consumer.js | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 676fbc6..b6f8ca1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - rm -rf buildenvvar + # rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/src/consumer.js b/src/consumer.js index 733eae2..20985f5 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -1,7 +1,7 @@ const Kafka = require('no-kafka'); const Promise = require('bluebird'); const config = require('config'); - +const logger = require('./common/logger'); const consumer = new Kafka.GroupConsumer(); const { producerLog, pAuditLog } = require('./api/audit') const { consumerLog, cAuditLog } = 
require('./api/audit') From 27943cd30eda87f4677ef115ca4078edce9ee946 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 13:29:02 +0530 Subject: [PATCH 029/101] introducing retry count --- src/consumer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index 20985f5..3f627e2 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -198,7 +198,7 @@ const dataHandler = function (messageSet, topic, partition) { PRODUCER_PUBLISH_STATUS: 'success', PRODUCER_PUBLISH_TIME: Date.now() }).then((log) => console.log('Send Success')) - res.send('done') + //res.send('done') return } From 24e63d40b482ab2ed64fae809e8fca0ead6b52ec Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 13:42:30 +0530 Subject: [PATCH 030/101] introducing retry count --- src/consumer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index 3f627e2..f3b9ff9 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -199,7 +199,7 @@ const dataHandler = function (messageSet, topic, partition) { PRODUCER_PUBLISH_TIME: Date.now() }).then((log) => console.log('Send Success')) //res.send('done') - return + return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) } //add auditlog From 8757ee41a6b0c15b1aeafe8f02a3e0c3a619d775 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 16:02:19 +0530 Subject: [PATCH 031/101] introducing retry count --- src/api/pushToKafka.js | 36 +++++ src/consumer.js | 309 ++++++++++++++++++----------------------- 2 files changed, 174 insertions(+), 171 deletions(-) create mode 100644 src/api/pushToKafka.js diff --git a/src/api/pushToKafka.js b/src/api/pushToKafka.js new file mode 100644 index 0000000..54f9232 --- /dev/null +++ b/src/api/pushToKafka.js @@ -0,0 +1,36 @@ +/* + * Kafka producer that sends messages to Kafka server. 
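 * The helper below wraps producer.send() with the retry policy taken from
 * config (RETRY_COUNTER attempts, 100-300 ms delay) and is meant to hand
 * any Kafka send error back to the caller. Note one subtlety in this first
 * version: the inner `return kafka_error` only returns from the .then()
 * callback, so pushToKafka itself still resolves to undefined either way.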
+ */ +const config = require('config') +const logger = require('../common/logger') +const _ = require('lodash') + +async function pushToKafka(producer, topicname, payload) { + let kafka_error + await producer.send({ + topic: topicname, + partition: config.topic.PARTITION, + message: { + value : JSON.stringify(payload) + } + },{ + retries: { + attempts: config.RETRY_COUNTER, + delay: { + min: 100, + max: 300 + } + } + }).then(function (result) { + if(result[0].error) + { + kafka_error = result[0].error + console.log(kafka_error) + return kafka_error + } + console.log(result) + }) + return +} + +module.exports = pushToKafka \ No newline at end of file diff --git a/src/consumer.js b/src/consumer.js index f3b9ff9..2a9fe01 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -3,35 +3,50 @@ const Promise = require('bluebird'); const config = require('config'); const logger = require('./common/logger'); const consumer = new Kafka.GroupConsumer(); -const { producerLog, pAuditLog } = require('./api/audit') -const { consumerLog, cAuditLog } = require('./api/audit') +const { + producerLog, + pAuditLog +} = require('./api/audit') +const { + consumerLog, + cAuditLog +} = require('./api/audit') //const { migrateDelete, migrateInsert, migrateUpdate } = require('./api/migrate') -const { migratepgDelete, migratepgInsert, migratepgUpdate } = require('./api/migratepg') -const { migrateifxinsertdata,migrateifxupdatedata } = require('./api/migrateifxpg') +const { + migratepgDelete, + migratepgInsert, + migratepgUpdate +} = require('./api/migratepg') +const { + migrateifxinsertdata, + migrateifxupdatedata +} = require('./api/migrateifxpg') +const pushToKafka = require('./api/pushToKafka') //const { migrateinsertdata } = require('./api/migrate-data') const producer = new Kafka.Producer() producer.init().then(function () { console.log('connected to local kafka server on port 9092 ...'); -} -).catch(e => { console.log('Error : ', e) }); +}).catch(e => { + console.log('Error : ', e) +}); const { createPool, } = require('./common/postgresWrapper'); -database=config.get('POSTGRES.database'); +database = config.get('POSTGRES.database'); const pool = createPool(database); pool.on('remove', client => { -console.log("setting property to on query completion"); + console.log("setting property to on query completion"); }) console.log('---------------------------------'); const dataHandler = function (messageSet, topic, partition) { - return Promise.each(messageSet, async function (m) { - const payload = JSON.parse(m.message.value) + return Promise.each(messageSet, async function (m) { + const payload = JSON.parse(m.message.value) - // insert consumer_log - consumerLog({ + // insert consumer_log + consumerLog({ SEQ_ID: payload.SEQ_ID, TOPICNAME: topic, SCHEMA_NAME: payload.SCHEMANAME, @@ -41,34 +56,32 @@ const dataHandler = function (messageSet, topic, partition) { }, DESTINATION: config.DESTINATION }).then(log => console.log('Add Consumer Log')) - .catch(err => console.log(err)) - - //update postgres table - let postgreErr - if(payload.uniquedatatype === 'true') { - //retrive teh data from info and insert in postgres - console.log("welcome") - //await migrateinsertdata(payload, pool) - console.log(pool); - if(payload.OPERATION === 'INSERT') { - await migrateifxinsertdata(payload, pool) - .catch(err => { - postgreErr = err - console.log(err) - }) - } - if(payload.OPERATION === 'UPDATE') { - await migrateifxupdatedata(payload, pool) - .catch(err => { - postgreErr = err - console.log(err) - }) - } - console.log("Different 
approach") + .catch(err => console.log(err)) + + //update postgres table + let postgreErr + if (payload.uniquedatatype === 'true') { + //retrive teh data from info and insert in postgres + console.log("welcome") + //await migrateinsertdata(payload, pool) + console.log(pool); + if (payload.OPERATION === 'INSERT') { + await migrateifxinsertdata(payload, pool) + .catch(err => { + postgreErr = err + console.log(err) + }) + } + if (payload.OPERATION === 'UPDATE') { + await migrateifxupdatedata(payload, pool) + .catch(err => { + postgreErr = err + console.log(err) + }) } - else - { - if(payload.OPERATION === 'INSERT') { + console.log("Different approach") + } else { + if (payload.OPERATION === 'INSERT') { let entity = payload.DATA await migratepgInsert(pool, entity, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { @@ -76,14 +89,14 @@ const dataHandler = function (messageSet, topic, partition) { console.log(err) }) - } else if(payload.OPERATION === 'UPDATE') { + } else if (payload.OPERATION === 'UPDATE') { await migratepgUpdate(pool, payload.DATA, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { postgreErr = err console.log(err) }) - } else if(payload.OPERATION === 'DELETE') { + } else if (payload.OPERATION === 'DELETE') { let entity = payload.DATA await migratepgDelete(pool, entity, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { @@ -93,27 +106,30 @@ const dataHandler = function (messageSet, topic, partition) { } } - //audit success log - if(!postgreErr){ + //audit success log + if (!postgreErr) { await cAuditLog({ - SEQ_ID: payload.SEQ_ID, - CONSUMER_DEPLOY_STATUS: 'success', - CONSUMER_UPDATE_TIME: Date.now() - }).then(log => console.log('postgres '+ payload.OPERATION + ' success')) + SEQ_ID: payload.SEQ_ID, + CONSUMER_DEPLOY_STATUS: 'success', + CONSUMER_UPDATE_TIME: Date.now() + }).then(log => console.log('postgres ' + payload.OPERATION + ' success')) .catch(err => console.log(err)) - return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) - } - -//================ + return consumer.commitOffset({ + topic: topic, + partition: partition, + offset: m.offset, + metadata: 'optional' + }) + } else { //audit failure log await cAuditLog({ - SEQ_ID: payload.SEQ_ID, - CONSUMER_DEPLOY_STATUS: 'failure', - CONSUMER_FAILURE_LOG: postgreErr, - CONSUMER_UPDATE_TIME: Date.now() - }).then((log) => console.log('postgres '+ payload.OPERATION + ' failure')) + SEQ_ID: payload.SEQ_ID, + CONSUMER_DEPLOY_STATUS: 'failure', + CONSUMER_FAILURE_LOG: postgreErr, + CONSUMER_UPDATE_TIME: Date.now() + }).then((log) => console.log('postgres ' + payload.OPERATION + ' failure')) .catch(err => console.log(err)) let msgValue = { @@ -122,135 +138,86 @@ const dataHandler = function (messageSet, topic, partition) { payloadposted: JSON.stringify(payload) } -//=================================== - if (!payload.retryCount) { payload.retryCount = 0 logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA_REPOST_COUNT); } if (payload.retryCount >= config.KAFKA_REPOST_COUNT) { logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); - await producer.send({ - topic: config.topic_error.NAME, - partition: config.topic_error.PARTITION, - message: { - value : JSON.stringify(msgValue), - } - },{ - retries: { - attempts: config.RETRY_COUNTER, - delay: { - min: 100, - max: 300 - } - } - }).then(function (result) { - console.log(result) - }) - } - else - { - payload['retryCount'] = payload.retryCount + 1; - 
- let seqID = 0 - //add producer_log - await producerLog({ - TOPICNAME: config.topic.NAME, - SOURCE: config.SOURCE, - SCHEMA_NAME: payload.SCHEMANAME, - TABLE_NAME: payload.TABLENAME, - PRODUCER_PAYLOAD: payload.DATA, - OPERATION: payload.OPERATION - }).then((log) => seqID = log.SEQ_ID) - - if(!seqID){ - console.log('ProducerLog Failure') - return - } - console.log('ProducerLog Success') - payload['SEQ_ID'] = seqID; - //SEQ_ID: seqID - await producer.send({ - topic: config.topic.NAME, - partition: config.topic.PARTITION, - message: { - value : JSON.stringify(payload) - } - },{ - retries: { - attempts: config.RETRY_COUNTER, - delay: { - min: 100, - max: 300 - } - } - }).then(function (result) { - if(result[0].error) - kafka_error = result[0].error - - console.log(kafka_error) - }) - - //add auditlog - if(!kafka_error){ - await pAuditLog({ - SEQ_ID: seqID, - PRODUCER_PUBLISH_STATUS: 'success', - PRODUCER_PUBLISH_TIME: Date.now() - }).then((log) => console.log('Send Success')) - //res.send('done') - return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) - } - - //add auditlog - await pAuditLog({ - SEQ_ID: seqID, - PRODUCER_PUBLISH_STATUS: 'failure', - PRODUCER_FAILURE_LOG: kafka_error, - PRODUCER_PUBLISH_TIME: Date.now() - }).then((log) => console.log('Send Failure')) - - msgValue = { - ...kafka_error, - SEQ_ID: seqID, - recipients: config.topic_error.EMAIL, - msgoriginator: "consumer-producer" - } - - //send error message to kafka - await producer.send({ - topic: config.topic_error.NAME, - partition: config.topic_error.PARTITION, - message: { - value : JSON.stringify(msgValue), - } - },{ - retries: { - attempts: config.RETRY_COUNTER, - delay: { - min: 100, - max: 300 + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) + + } else { + payload['retryCount'] = payload.retryCount + 1; + let seqID = 0 + //add producer_log + await producerLog({ + TOPICNAME: config.topic.NAME, + SOURCE: config.SOURCE, + SCHEMA_NAME: payload.SCHEMANAME, + TABLE_NAME: payload.TABLENAME, + PRODUCER_PAYLOAD: payload.DATA, + OPERATION: payload.OPERATION + }).then((log) => seqID = log.SEQ_ID) + + if (!seqID) { + console.log('ProducerLog Failure') + return + } + console.log('ProducerLog Success') + payload['SEQ_ID'] = seqID; + //SEQ_ID: seqID + kafka_error = await pushToKafka(producer, config.topic.NAME, payload) + //add auditlog + if (!kafka_error) { + await pAuditLog({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'success', + PRODUCER_PUBLISH_TIME: Date.now() + }).then((log) => console.log('Send Success')) + //res.send('done') + return consumer.commitOffset({ + topic: topic, + partition: partition, + offset: m.offset, + metadata: 'optional' + }) + } else { + //add auditlog + await pAuditLog({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'failure', + PRODUCER_FAILURE_LOG: kafka_error, + PRODUCER_PUBLISH_TIME: Date.now() + }).then((log) => console.log('Send Failure')) + + msgValue = { + ...kafka_error, + SEQ_ID: seqID, + recipients: config.topic_error.EMAIL, + msgoriginator: "consumer-producer" } + //send error message to kafka + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) } - }).then(function (result) { - console.log(result) - }) - - //await auditTrail([message.payload.payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn, - // message.payload.operation,"Error",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer') - // await 
pushToKafka(message) } //send postgres_error message -//=============================================== + //=============================================== // commit offset - return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) - }).catch(err => console.log(err)) + return consumer.commitOffset({ + topic: topic, + partition: partition, + offset: m.offset, + metadata: 'optional' + }) + } + }).catch(err => console.log(err)) + }; const strategies = [{ - subscriptions: [config.topic.NAME], - handler: dataHandler + subscriptions: [config.topic.NAME], + handler: dataHandler }]; -consumer.init(strategies); +consumer.init(strategies); \ No newline at end of file From 4b438eb8cf0e55ae13af8c9ee4418b640610fdc6 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 16:40:04 +0530 Subject: [PATCH 032/101] introducing retry count --- package.json | 5 +++-- src/consumer.js | 10 +++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index 5098a81..95d7771 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "scripts": { "test": "echo \"Error: no test specified\" && exit 1", "producer": "node src/nodeserver.js", - "consumer": "node src/consumer.js", + "consumer": "nodemon src/consumer.js", "producerwithoutkafka" : "node src/node-server-without-kafka.js", "ifxpgnotify": "node src/consumer-slacknotify.js" }, @@ -31,7 +31,8 @@ "aws-sdk": "latest", "zlib": "latest", "url": "latest", - "https": "latest" + "https": "latest", + "nodemon": "latest" }, "devDependencies": { "chai": "^4.2.0", diff --git a/src/consumer.js b/src/consumer.js index 2a9fe01..faf326d 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -69,14 +69,14 @@ const dataHandler = function (messageSet, topic, partition) { await migrateifxinsertdata(payload, pool) .catch(err => { postgreErr = err - console.log(err) + //console.log(err) }) } if (payload.OPERATION === 'UPDATE') { await migrateifxupdatedata(payload, pool) .catch(err => { postgreErr = err - console.log(err) + //console.log(err) }) } console.log("Different approach") @@ -86,14 +86,14 @@ const dataHandler = function (messageSet, topic, partition) { await migratepgInsert(pool, entity, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { postgreErr = err - console.log(err) + //console.log(err) }) } else if (payload.OPERATION === 'UPDATE') { await migratepgUpdate(pool, payload.DATA, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { postgreErr = err - console.log(err) + //console.log(err) }) } else if (payload.OPERATION === 'DELETE') { @@ -101,7 +101,7 @@ const dataHandler = function (messageSet, topic, partition) { await migratepgDelete(pool, entity, payload.SCHEMANAME, payload.TABLENAME) .catch(err => { postgreErr = err - console.log(err) + //console.log(err) }) } From 2ec66af9f8902a923965930bde9ca6644bc1080a Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 17:45:15 +0530 Subject: [PATCH 033/101] introducing retry count --- .circleci/config.yml | 8 ++++---- package.json | 2 +- src/api/pushToKafka.js | 2 +- src/consumer.js | 5 ++++- src/nodeserver.js | 4 ++-- 5 files changed, 12 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b6f8ca1..676fbc6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b 
${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - # rm -rf buildenvvar + rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/package.json b/package.json index 95d7771..c737507 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "main": "src/nodeserver.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1", - "producer": "node src/nodeserver.js", + "producer": "nodemon src/nodeserver.js", "consumer": "nodemon src/consumer.js", "producerwithoutkafka" : "node src/node-server-without-kafka.js", "ifxpgnotify": "node src/consumer-slacknotify.js" diff --git a/src/api/pushToKafka.js b/src/api/pushToKafka.js index 54f9232..6cf31be 100644 --- a/src/api/pushToKafka.js +++ b/src/api/pushToKafka.js @@ -9,7 +9,7 @@ async function pushToKafka(producer, topicname, payload) { let kafka_error await producer.send({ topic: topicname, - partition: config.topic.PARTITION, + // partition: config.topic.PARTITION, message: { value : JSON.stringify(payload) } diff --git a/src/consumer.js b/src/consumer.js index faf326d..493d603 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -46,6 +46,9 @@ const dataHandler = function (messageSet, topic, partition) { const payload = JSON.parse(m.message.value) // insert consumer_log + try + { + console.log("payload sequece ID : " + payload.SEQ_ID ) consumerLog({ SEQ_ID: payload.SEQ_ID, TOPICNAME: topic, @@ -57,7 +60,7 @@ const dataHandler = function (messageSet, topic, partition) { DESTINATION: config.DESTINATION }).then(log => console.log('Add Consumer Log')) .catch(err => console.log(err)) - + } catch(error) {console.log(error)} //update postgres table let postgreErr if (payload.uniquedatatype === 'true') { diff --git a/src/nodeserver.js b/src/nodeserver.js index 6406cb9..5cea281 100644 --- a/src/nodeserver.js +++ b/src/nodeserver.js @@ -44,7 +44,7 @@ app.post('/kafkaevents', async (req, res, next) => { await producer.send({ topic: config.topic.NAME, - partition: config.topic.PARTITION, + //partition: config.topic.PARTITION, message: { value : JSON.stringify(msgValue) } @@ -89,7 +89,7 @@ app.post('/kafkaevents', async (req, res, next) => { //send error message to kafka await producer.send({ topic: config.topic_error.NAME, - partition: config.topic_error.PARTITION, + // partition: config.topic_error.PARTITION, message: { value : JSON.stringify(msgValue), } From 18012f16d2ad1652c0f2bf191089a59f48c615c8 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 18:35:51 +0530 Subject: [PATCH 034/101] introducing without retry count --- src/consumer.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index 493d603..d6b84a9 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -140,7 +140,7 @@ const dataHandler = function (messageSet, topic, partition) { recipients: config.topic_error.EMAIL, payloadposted: JSON.stringify(payload) } - +/* if (!payload.retryCount) { payload.retryCount = 0 
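   (Everything from the /* just added above down to the matching */ in the
   next hunk is the retry branch this patch switches off: with it commented
   out, every failed payload goes straight to config.topic_error.NAME via
   pushToKafka, as the lines added after the closing */ show.)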
logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA_REPOST_COUNT); @@ -204,7 +204,10 @@ const dataHandler = function (messageSet, topic, partition) { } } //send postgres_error message +*/ + logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) //=============================================== // commit offset return consumer.commitOffset({ From 94290eeff5d8cf525f2eecc5a18c769ac95a9e03 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 20:34:27 +0530 Subject: [PATCH 035/101] Fixed consumer struck issue --- .circleci/config.yml | 16 +- src/api/migratedynamodb.js | 46 +++--- src/api/migrateifxpg.js | 303 ++++++++++++++++++------------------- src/api/migratepg.js | 97 ++++++------ src/api/postslackinfo.js | 10 +- src/api/pushToKafka.js | 49 +++--- src/consumer.js | 42 ++--- src/nodeserver.js | 58 ++++--- 8 files changed, 316 insertions(+), 305 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 676fbc6..3d21d1f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,15 +46,15 @@ builddeploy_steps: &builddeploy_steps source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # without kafka dynamodb - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # notify deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js index 227d16f..8c115bd 100644 --- a/src/api/migratedynamodb.js +++ b/src/api/migratedynamodb.js @@ -3,28 +3,32 @@ const logger = require('../common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); async function pushToDynamoDb(payload) { - try { console.log('----Inside DynomoDB code -------'); - // console.log(payload) - var params = { - TableName: config.DYNAMODB.TABLENAME, - Item: { - SequenceID: payload.TIME, - pl_document: payload, - pl_table: payload.TABLENAME, - pl_schemaname: payload.SCHEMANAME, - pl_operation: payload.OPERATION, - pl_uniquedatatype: payload.uniquedatatype, - NodeSequenceID: Date.now() - } - } - var docClient = new AWS.DynamoDB.DocumentClient({region: config.DYNAMODB.REGION,convertEmptyValues: true}); - docClient.put(params, function(err, data) { - if (err) console.log('DynamoDB error : ', err); - else console.log('DynamoDB Success : ',data); - }); + try { + console.log('----Inside DynomoDB code -------'); + // console.log(payload) + var params = { + TableName: 
config.DYNAMODB.TABLENAME, + Item: { + SequenceID: payload.TIME, + pl_document: payload, + pl_table: payload.TABLENAME, + pl_schemaname: payload.SCHEMANAME, + pl_operation: payload.OPERATION, + pl_uniquedatatype: payload.uniquedatatype, + NodeSequenceID: Date.now() + } + } + var docClient = new AWS.DynamoDB.DocumentClient({ + region: config.DYNAMODB.REGION, + convertEmptyValues: true + }); + docClient.put(params, function (err, data) { + if (err) console.log('DynamoDB error : ', err); + else console.log('DynamoDB Success : ', data); + }); } catch (e) { - console.log(e) + console.log(e) } } -module.exports = pushToDynamoDb +module.exports = pushToDynamoDb \ No newline at end of file diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index e429cfc..d326bed 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -8,191 +8,186 @@ const logger = require('../common/logger'); // executeQueryAsync //} = require('../common/informixWrapper'); -const { getInformixConnection, prepare, wrapTransaction } = require('../common/informix_ifxnjs') +const { + getInformixConnection, + prepare, + wrapTransaction +} = require('../common/informix_ifxnjs') // const { // createPool, // extractPostgresTablesInfoAsync, // } = require('../common/postgresWrapper'); const pg_dbname = config.get('POSTGRES.database') -async function migrateifxinsertdata( payload, client) { -//retrive data and construc query -console.log("work1---------------------------------------") -console.log(payload) -const columns = payload.DATA -console.log(columns) -const columnNames = Object.keys(columns) -const tablename = payload.TABLENAME -console.log("work2---------------------------------------") -const db_schema = payload.SCHEMANAME -var conditionstr = "" -const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(','); -const insertSql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`; -bufffercond=0 -console.log("work2---------------------------------------") -columnNames.forEach((colName) => { +async function migrateifxinsertdata(payload, client) { + //retrive data and construc query + console.log("work1---------------------------------------") + console.log(payload) + const columns = payload.DATA + console.log(columns) + const columnNames = Object.keys(columns) + const tablename = payload.TABLENAME + console.log("work2---------------------------------------") + const db_schema = payload.SCHEMANAME + var conditionstr = "" + const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(','); + const insertSql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`; + bufffercond = 0 + console.log("work2---------------------------------------") + columnNames.forEach((colName) => { console.log(colName) tempvar = columns[colName] console.log(tempvar) - if (columns[colName] != 'unsupportedtype') - { - if ( bufffercond == 1 ) { + if (columns[colName] != 'unsupportedtype') { + if (bufffercond == 1) { conditionstr = conditionstr + " and " } - console.log(columns[colName]) - tempvar = columns[colName] - conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] +"' " - bufffercond = 1 - } -}); -infsql = `select * from ${tablename} where ${conditionstr};` // "insert into : (col_1, col_2, ...) 
values (val_1, val_2, ...)" -console.log(`informix query ${infsql}`); -logger.debug(`informix query ${infsql}`); -//sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `'${columns[k]}'`).join(', ')});` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" + console.log(columns[colName]) + tempvar = columns[colName] + conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + bufffercond = 1 + } + }); + infsql = `select * from ${tablename} where ${conditionstr};` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" + console.log(`informix query ${infsql}`); + logger.debug(`informix query ${infsql}`); + //sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `'${columns[k]}'`).join(', ')});` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" //const data = await executeQueryAsync(db_schema, infsql); - // if (!data.length) { - // break; - // } + // if (!data.length) { + // break; + // } try { -const connection = await getInformixConnection(db_schema) -const queryStmt = await prepare(connection, infsql) -const queryResult = Promise.promisifyAll((await queryStmt.executeAsync())) -const data = await queryResult.fetchAllAsync(); -//console.log(data); -connection.closeAsync(); -//} catch(e) {console.log('Error', e )} + const connection = await getInformixConnection(db_schema) + const queryStmt = await prepare(connection, infsql) + const queryResult = Promise.promisifyAll((await queryStmt.executeAsync())) + const data = await queryResult.fetchAllAsync(); + //console.log(data); + connection.closeAsync(); + //} catch(e) {console.log('Error', e )} - for (const row of data) { - const values = []; - columnNames.forEach((colName) => { - if ( isUtf8(row[colName]) ) - { - console.log(`utf8 format ${colName}`); -// values.push(new Buffer.from(row[colName],'binary')); - values.push(row[colName]); - } - else - { - // values.push(row[colName]); - values.push(new Buffer.from(row[colName],'binary')); - } - //values.push(new Buffer.from(row[colName],'binary')); - }); - let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; - sql = `SET search_path TO ${schemaname};`; - console.log(sql); - await client.query(sql); - logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); - console.log(client); - await client.query(insertSql, values); - } - } catch(e) { - console.log('Error', e ); + for (const row of data) { + const values = []; + columnNames.forEach((colName) => { + if (isUtf8(row[colName])) { + console.log(`utf8 format ${colName}`); + // values.push(new Buffer.from(row[colName],'binary')); + values.push(row[colName]); + } else { + // values.push(row[colName]); + values.push(new Buffer.from(row[colName], 'binary')); + } + //values.push(new Buffer.from(row[colName],'binary')); + }); + let schemaname = (db_schema == pg_dbname) ? 
'public' : db_schema; + sql = `SET search_path TO ${schemaname};`; + console.log(sql); + await client.query(sql); + logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); + console.log(client); + await client.query(insertSql, values); + } + } catch (e) { + console.log('Error', e); throw e; } } -async function migrateifxupdatedata( payload, client) { -console.log("--------------- Executing fun migrateifxupdatedata --------------------"); -console.log(payload); -console.log("work1---------------------------------------") -console.log(payload) -const columns = payload.DATA -console.log(columns) -const columnNames = Object.keys(columns) -console.log(columnNames); -const tablename = payload.TABLENAME -console.log("work2---------------------------------------") -const db_schema = payload.SCHEMANAME -console.log(tablename); -var conditionstr = "" -var updatestr = "" -bufffercond=0 -buffferupcond=0 -console.log("work2---------------------------------------") -bufffernewcond=0 -buffferoldcond=0 -var oldconditionstr="" -columnNames.forEach((colName) => { +async function migrateifxupdatedata(payload, client) { + console.log("--------------- Executing fun migrateifxupdatedata --------------------"); + console.log(payload); + console.log("work1---------------------------------------") + console.log(payload) + const columns = payload.DATA + console.log(columns) + const columnNames = Object.keys(columns) + console.log(columnNames); + const tablename = payload.TABLENAME + console.log("work2---------------------------------------") + const db_schema = payload.SCHEMANAME + console.log(tablename); + var conditionstr = "" + var updatestr = "" + bufffercond = 0 + buffferupcond = 0 + console.log("work2---------------------------------------") + bufffernewcond = 0 + buffferoldcond = 0 + var oldconditionstr = "" + columnNames.forEach((colName) => { console.log(colName) colobj = columns[colName] - console.log(typeof(colobj)) + console.log(typeof (colobj)) console.log(colobj) console.log(colobj.new) - if (colobj.new != 'unsupportedtype') - { - if ( bufffernewcond == 1 ) { + if (colobj.new != 'unsupportedtype') { + if (bufffernewcond == 1) { conditionstr = conditionstr + " and " - } - conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new +"' " - bufffernewcond = 1 + } + conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + bufffernewcond = 1 } - if (colobj['old'] != 'unsupportedtype') - { - if ( buffferoldcond == 1 ) { + if (colobj['old'] != 'unsupportedtype') { + if (buffferoldcond == 1) { oldconditionstr = oldconditionstr + " and " } - console.log(colobj.old); - oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old +"' " - buffferoldcond = 1 - } - -}); + console.log(colobj.old); + oldconditionstr = oldconditionstr + tablename + "." 
+ colName + "= '" + colobj.old + "' " + buffferoldcond = 1 + } -console.log(conditionstr) -console.log(oldconditionstr); -infsql = `select * from ${tablename} where ${conditionstr};` -console.log(infsql) + }); + + console.log(conditionstr) + console.log(oldconditionstr); + infsql = `select * from ${tablename} where ${conditionstr};` + console.log(infsql) try { - const connection = await getInformixConnection(db_schema) - const queryStmt = await prepare(connection, infsql) - const queryResult = Promise.promisifyAll((await queryStmt.executeAsync())) - const data = await queryResult.fetchAllAsync(); - //console.log(data); - connection.closeAsync(); + const connection = await getInformixConnection(db_schema) + const queryStmt = await prepare(connection, infsql) + const queryResult = Promise.promisifyAll((await queryStmt.executeAsync())) + const data = await queryResult.fetchAllAsync(); + //console.log(data); + connection.closeAsync(); - var updatesql="" - updatesql=`UPDATE ${tablename} SET ` - counter=1 - for (const row of data) { - const values = []; - columnNames.forEach((colName) => { - if ( buffferupcond == 1 ) { - updatestr = updatestr + " , " - } - if ( isUtf8(row[colName]) ) - { - //console.log(`utf8 format ${colName}`); - values.push(row[colName]); - updatestr = updatestr + "\"" + colName + "\"= \$" + counter +" " - buffferupcond = 1 - counter = counter + 1 - } - else - { - values.push(new Buffer.from(row[colName],'binary')); - updatestr = updatestr + "\"" + colName + "\"= \$" + counter +" " - buffferupcond = 1 - counter = counter + 1 - } - }); - //logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); + var updatesql = "" + updatesql = `UPDATE ${tablename} SET ` + counter = 1 + for (const row of data) { + const values = []; + columnNames.forEach((colName) => { + if (buffferupcond == 1) { + updatestr = updatestr + " , " + } + if (isUtf8(row[colName])) { + //console.log(`utf8 format ${colName}`); + values.push(row[colName]); + updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " + buffferupcond = 1 + counter = counter + 1 + } else { + values.push(new Buffer.from(row[colName], 'binary')); + updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " + buffferupcond = 1 + counter = counter + 1 + } + }); + //logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); - let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; - sql = `SET search_path TO ${schemaname};`; - console.log(sql); - await client.query(sql); + let schemaname = (db_schema == pg_dbname) ? 
'public' : db_schema; + sql = `SET search_path TO ${schemaname};`; + console.log(sql); + await client.query(sql); - updatesql = updatesql + updatestr + " where " + oldconditionstr + " ;" - console.log(updatesql) - await client.query(updatesql,values) + updatesql = updatesql + updatestr + " where " + oldconditionstr + " ;" + console.log(updatesql) + await client.query(updatesql, values) + } + } catch (e) { + console.log('Error', e); + throw e; } -} catch(e) { - console.log('Error', e ); - throw e; -} } // async function migrateupdatedata(client, database, tableName, informixTable, postgresTable) { @@ -202,5 +197,7 @@ console.log(infsql) -module.exports = {migrateifxinsertdata, - migrateifxupdatedata}; +module.exports = { + migrateifxinsertdata, + migrateifxupdatedata +}; \ No newline at end of file diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 6997a40..8176c22 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -7,21 +7,22 @@ const pg_dbname = config.get('POSTGRES.database') async function migratepgInsert(dbpool, payload, dbname, table) { console.log(payload); try { - const client = await dbpool.connect(); - //console.log("welcome123"); - const columnNames = Object.keys(payload) - let schemaname = (dbname == pg_dbname) ? 'public' : dbname; - sql = `SET search_path TO ${schemaname};`; - console.log(sql); - await client.query(sql); - sql = `insert into ${table} (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into :
-        console.log(sql);
-        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
-        await client.query(sql);
-        await client.release(true);
-        console.log(`end connection of postgres for database`);
+        //const client = await dbpool.connect();
+        const client = dbpool;
+        //console.log("welcome123");
+        const columnNames = Object.keys(payload)
+        let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
+        sql = `SET search_path TO ${schemaname};`;
+        console.log(sql);
+        await client.query(sql);
+        sql = `insert into ${table} (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into <table> (col_1, col_2, ...) values (val_1, val_2, ...)"
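
The added line above interpolates every payload value directly into the INSERT text, which is why the values must arrive pre-quoted. A parameterized equivalent, shown only as a sketch (it assumes `payload` is the same flat { column: value } object the surrounding function receives):

    // Sketch: the same insert, with pg doing the quoting via $n placeholders.
    async function insertRow(client, table, payload) {
        const cols = Object.keys(payload);
        const params = cols.map((_, i) => `$${i + 1}`).join(', ');
        const sql = `insert into ${table} ("${cols.join('", "')}") values (${params});`;
        await client.query(sql, cols.map((c) => payload[c]));
    }
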
+        console.log(sql);
+        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
+        await client.query(sql);
+        //await client.release(true);
+        console.log(`end connection of postgres for database`);
     } catch (e) {
-        throw e;
+        throw e;
     }
 }
 //update payload
@@ -29,22 +30,23 @@ async function migratepgUpdate(dbpool, payload, dbname, table) {
     console.log("-----------------------old update migratepgUpdate----------------");
     console.log(payload);
     try {
-        const client = await dbpool.connect();
-        //console.log("welcome123");
-        const columnNames = Object.keys(payload)
-        let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
-        sql = `SET search_path TO ${schemaname};`;
-        console.log(sql);
-        await client.query(sql);
-        sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update <table> set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val"
-        console.log(sql);
-//update test5 set id='[object Object].new', cityname='[object Object].new' where id='[object Object].old' AND cityname='[obddject Object].old' ;
-        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
-        await client.query(sql);
-        await client.release(true);
-        console.log(`end connection of postgres for database`);
+        //const client = await dbpool.connect();
+        const client = dbpool;
+        //console.log("welcome123");
+        const columnNames = Object.keys(payload)
+        let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
+        sql = `SET search_path TO ${schemaname};`;
+        console.log(sql);
+        await client.query(sql);
+        sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update <table> set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val"
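
For updates the payload carries both images of every column, so the generated statement matches on the old values and writes the new ones. The assumed shape and its output, with illustrative names taken from the debug comment just below:

    // Assumed payload shape for an UPDATE event:
    const payload = {
        id:       { old: '7',     new: '7' },
        cityname: { old: 'Paris', new: 'Lyon' }
    };
    // The interpolation above would then yield roughly:
    //   update test5 set "id"='7', "cityname"='Lyon' where "id"='7' AND "cityname"='Paris' ;
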
+        console.log(sql);
+        //update test5 set id='[object Object].new', cityname='[object Object].new' where id='[object Object].old' AND cityname='[obddject Object].old' ;
+        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
+        await client.query(sql);
+        //await client.release(true);
+        console.log(`end connection of postgres for database`);
     } catch (e) {
-        throw e;
+        throw e;
     }
 }

@@ -53,28 +55,29 @@ async function migratepgDelete(dbpool, payload, dbname, table) {
     console.log(payload);

     try {
-
-        const client = await dbpool.connect();
-        //console.log("welcome123");
-        const columnNames = Object.keys(payload)
-        let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
-        sql = `SET search_path TO ${schemaname};`;
-        console.log(sql);
-        await client.query(sql);
-        sql = `delete from ${table} where ${Object.keys(payload).map((key) => `${key}='${payload[key]['new']}'`).join(' AND ')} ;` // "delete query
-        console.log(sql);
-        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
-        await client.query(sql);
-        await client.release(true);
-        console.log(`end connection of postgres for database`);
+
+        //const client = await dbpool.connect();
+        const client = dbpool;
+        //console.log("welcome123");
+        const columnNames = Object.keys(payload)
+        let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
+        sql = `SET search_path TO ${schemaname};`;
+        console.log(sql);
+        await client.query(sql);
+        sql = `delete from ${table} where ${Object.keys(payload).map((key) => `${key}='${payload[key]['new']}'`).join(' AND ')} ;` // "delete query
+        console.log(sql);
+        // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
+        await client.query(sql);
+        //await client.release(true);
+        console.log(`end connection of postgres for database`);
     } catch (e) {
-        throw e;
-    }
-
+        throw e;
+    }
+
 }

 module.exports = {
     migratepgDelete,
     migratepgInsert,
     migratepgUpdate
-}
+}
\ No newline at end of file
diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js
index c4cf06f..fabe519 100644
--- a/src/api/postslackinfo.js
+++ b/src/api/postslackinfo.js
@@ -3,14 +3,14 @@ const zlib = require('zlib');
 const url = require('url');
 const https = require('https');
 hookUrl = config.SLACK.URL
-slackChannel=config.SLACK.SLACKCHANNEL
+slackChannel = config.SLACK.SLACKCHANNEL

 function postMessage(message, callback) {
     var slackMessage = {
         channel: `${slackChannel}`,
-        text: `${message}`,
-    }
+        text: `${message}`,
+    }
     console.log("stringfied slack message");
     console.log(JSON.stringify(slackMessage))
     console.log("slack message");
@@ -43,6 +43,4 @@ function postMessage(message, callback) {
     postReq.end();
 }

-module.exports = postMessage
-
-
\ No newline at end of file
+module.exports = postMessage
\ No newline at end of file
diff --git a/src/api/pushToKafka.js b/src/api/pushToKafka.js
index 6cf31be..e033c43 100644
--- a/src/api/pushToKafka.js
+++ b/src/api/pushToKafka.js
@@ -6,31 +6,30 @@ const logger = require('../common/logger')
 const _ = require('lodash')

 async function pushToKafka(producer, topicname, payload) {
-    let kafka_error
-    await producer.send({
-        topic: topicname,
-        // partition: config.topic.PARTITION,
-        message: {
-            value : JSON.stringify(payload)
-        }
-    },{
-        retries: {
-            attempts: config.RETRY_COUNTER,
-            delay: {
-                min: 100,
-                max: 300
-            }
-        }
-    }).then(function (result) {
-        if(result[0].error)
-        {
-            kafka_error = result[0].error
-            console.log(kafka_error)
-            return kafka_error
-        }
-        console.log(result)
-    })
-    return
+
let kafka_error + await producer.send({ + topic: topicname, + // partition: config.topic.PARTITION, + message: { + value: JSON.stringify(payload) + } + }, { + retries: { + attempts: config.RETRY_COUNTER, + delay: { + min: 100, + max: 300 + } + } + }).then(function (result) { + if (result[0].error) { + kafka_error = result[0].error + console.log(kafka_error) + return kafka_error + } + console.log(result) + }) + return } module.exports = pushToKafka \ No newline at end of file diff --git a/src/consumer.js b/src/consumer.js index d6b84a9..42b5eb6 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -46,21 +46,22 @@ const dataHandler = function (messageSet, topic, partition) { const payload = JSON.parse(m.message.value) // insert consumer_log - try - { - console.log("payload sequece ID : " + payload.SEQ_ID ) - consumerLog({ - SEQ_ID: payload.SEQ_ID, - TOPICNAME: topic, - SCHEMA_NAME: payload.SCHEMANAME, - CONSUMAER_QUERY: { - OPERATION: payload.OPERATION, - DATA: payload.DATA - }, - DESTINATION: config.DESTINATION - }).then(log => console.log('Add Consumer Log')) - .catch(err => console.log(err)) - } catch(error) {console.log(error)} + try { + console.log("payload sequece ID : " + payload.SEQ_ID) + consumerLog({ + SEQ_ID: payload.SEQ_ID, + TOPICNAME: topic, + SCHEMA_NAME: payload.SCHEMANAME, + CONSUMAER_QUERY: { + OPERATION: payload.OPERATION, + DATA: payload.DATA + }, + DESTINATION: config.DESTINATION + }).then(log => console.log('Add Consumer Log')) + .catch(err => console.log(err)) + } catch (error) { + console.log(error) + } //update postgres table let postgreErr if (payload.uniquedatatype === 'true') { @@ -138,9 +139,10 @@ const dataHandler = function (messageSet, topic, partition) { let msgValue = { ...postgreErr, recipients: config.topic_error.EMAIL, - payloadposted: JSON.stringify(payload) + payloadposted: JSON.stringify(payload), + msgoriginator: "consumer-producer" } -/* + if (!payload.retryCount) { payload.retryCount = 0 logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA_REPOST_COUNT); @@ -204,10 +206,10 @@ const dataHandler = function (messageSet, topic, partition) { } } //send postgres_error message -*/ - logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); - kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) + + // logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); + // kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) //=============================================== // commit offset return consumer.commitOffset({ diff --git a/src/nodeserver.js b/src/nodeserver.js index 5cea281..926a4d9 100644 --- a/src/nodeserver.js +++ b/src/nodeserver.js @@ -3,11 +3,15 @@ require('express-async-errors'); const Kafka = require('no-kafka') const config = require('config') const bodyParser = require('body-parser') -const { producerLog, pAuditLog } = require('./api/audit') +const { + producerLog, + pAuditLog +} = require('./api/audit') +const pushToKafka = require('./api/pushToKafka') const app = express() -app.use(bodyParser.json()); // to support JSON-encoded bodies -app.use(bodyParser.urlencoded({ // to support URL-encoded bodies +app.use(bodyParser.json()); // to support JSON-encoded bodies +app.use(bodyParser.urlencoded({ // to support URL-encoded bodies extended: true })); app.get('/', function (req, res) { @@ -29,7 +33,7 @@ app.post('/kafkaevents', async (req, res, next) => { OPERATION: payload.OPERATION 
}).then((log) => seqID = log.SEQ_ID) - if(!seqID){ + if (!seqID) { console.log('ProducerLog Failure') return } @@ -41,7 +45,8 @@ app.post('/kafkaevents', async (req, res, next) => { ...payload, SEQ_ID: seqID } - + kafka_error = await pushToKafka(producer, config.topic.NAME, msgValue) + /* await producer.send({ topic: config.topic.NAME, //partition: config.topic.PARTITION, @@ -60,16 +65,16 @@ app.post('/kafkaevents', async (req, res, next) => { if(result[0].error) kafka_error = result[0].error }) - + */ //add auditlog - if(!kafka_error){ - await pAuditLog({ - SEQ_ID: seqID, - PRODUCER_PUBLISH_STATUS: 'success', - PRODUCER_PUBLISH_TIME: Date.now() - }).then((log) => console.log('Send Success')) - res.send('done') - return + if (!kafka_error) { + await pAuditLog({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'success', + PRODUCER_PUBLISH_TIME: Date.now() + }).then((log) => console.log('Send Success')) + res.send('done') + return } //add auditlog @@ -83,11 +88,12 @@ app.post('/kafkaevents', async (req, res, next) => { msgValue = { ...kafka_error, SEQ_ID: seqID, - recipients: config.topic_error.EMAIL + recipients: config.topic_error.EMAIL, + msgoriginator: "producer" } - + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) //send error message to kafka - await producer.send({ + /* await producer.send({ topic: config.topic_error.NAME, // partition: config.topic_error.PARTITION, message: { @@ -104,8 +110,8 @@ app.post('/kafkaevents', async (req, res, next) => { }).then(function (result) { console.log(result) }) - - res.send('error') +*/ + res.send('error') }) @@ -113,11 +119,13 @@ app.post('/kafkaevents', async (req, res, next) => { const producer = new Kafka.Producer() producer.init().then(function () { - console.log('connected to local kafka server on port 9092 ...'); + console.log('connected to local kafka server on port 9092 ...'); - // start the server - app.listen(config.PORT); - console.log('Server started! At http://localhost:' + config.PORT); + // start the server + app.listen(config.PORT); + console.log('Server started! 
At http://localhost:' + config.PORT); -} //end producer init -).catch(e => { console.log('Error : ', e) }); + } //end producer init +).catch(e => { + console.log('Error : ', e) +}); \ No newline at end of file From c62b86e6fd6de371e38715b1f7b52b39e1f78ecd Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 22:24:37 +0530 Subject: [PATCH 036/101] Added notify information --- .circleci/config.yml | 16 ++++++------- src/consumer.js | 33 ++++++++++++++++++++++++++ src/nodeserver.js | 55 ++++++++++++++------------------------------ 3 files changed, 58 insertions(+), 46 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3d21d1f..676fbc6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,15 +46,15 @@ builddeploy_steps: &builddeploy_steps source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # without kafka dynamodb - rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # notify deployment - rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/src/consumer.js b/src/consumer.js index 42b5eb6..f1ab931 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -22,6 +22,7 @@ const { migrateifxupdatedata } = require('./api/migrateifxpg') const pushToKafka = require('./api/pushToKafka') +const postMessage = require('./api/postslackinfo') //const { migrateinsertdata } = require('./api/migrate-data') const producer = new Kafka.Producer() @@ -150,6 +151,22 @@ const dataHandler = function (messageSet, topic, partition) { if (payload.retryCount >= config.KAFKA_REPOST_COUNT) { logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) + if (!kafka_error) { + console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) + } else { + if (config.SLACK.SLACKNOTIFY === 'true') { + postMessage("producer - kafka post fails", (response) => { + if (response.statusCode < 400) { + console.info('Message posted successfully'); + } else if (response.statusCode < 500) { + console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); + } else { + console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + } + }); + } + } + } else { payload['retryCount'] = payload.retryCount + 1; @@ -203,6 +220,22 @@ const dataHandler = function (messageSet, topic, partition) { } //send error message to kafka kafka_error = await pushToKafka(producer, 
config.topic_error.NAME, msgValue) + if (!kafka_error) { + console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) + } else { + if (config.SLACK.SLACKNOTIFY === 'true') { + postMessage("consumer - kafka post fails", (response) => { + if (response.statusCode < 400) { + console.info('Message posted successfully'); + } else if (response.statusCode < 500) { + console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); + } else { + console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + } + }); + } + } + } } //send postgres_error message diff --git a/src/nodeserver.js b/src/nodeserver.js index 926a4d9..b229dfb 100644 --- a/src/nodeserver.js +++ b/src/nodeserver.js @@ -8,6 +8,7 @@ const { pAuditLog } = require('./api/audit') const pushToKafka = require('./api/pushToKafka') +const postMessage = require('./api/postslackinfo') const app = express() app.use(bodyParser.json()); // to support JSON-encoded bodies @@ -46,26 +47,6 @@ app.post('/kafkaevents', async (req, res, next) => { SEQ_ID: seqID } kafka_error = await pushToKafka(producer, config.topic.NAME, msgValue) - /* - await producer.send({ - topic: config.topic.NAME, - //partition: config.topic.PARTITION, - message: { - value : JSON.stringify(msgValue) - } - },{ - retries: { - attempts: config.RETRY_COUNTER, - delay: { - min: 100, - max: 300 - } - } - }).then(function (result) { - if(result[0].error) - kafka_error = result[0].error - }) - */ //add auditlog if (!kafka_error) { await pAuditLog({ @@ -91,26 +72,24 @@ app.post('/kafkaevents', async (req, res, next) => { recipients: config.topic_error.EMAIL, msgoriginator: "producer" } - kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) //send error message to kafka - /* await producer.send({ - topic: config.topic_error.NAME, - // partition: config.topic_error.PARTITION, - message: { - value : JSON.stringify(msgValue), - } - },{ - retries: { - attempts: config.RETRY_COUNTER, - delay: { - min: 100, - max: 300 + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) + if (!kafka_error) { + console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) + } else { + if (config.SLACK.SLACKNOTIFY === 'true') { + postMessage("producer - kafka post fails", (response) => { + if (response.statusCode < 400) { + console.info('Message posted successfully'); + } else if (response.statusCode < 500) { + console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); + } else { + console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); } - } - }).then(function (result) { - console.log(result) - }) -*/ + }); + } + } + res.send('error') }) From 83e5c1ae1ea0d2dbc4001d74357f6818794776e0 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 20 Dec 2019 23:34:57 +0530 Subject: [PATCH 037/101] health checck integration in consumer --- .circleci/config.yml | 8 ++++---- package.json | 3 ++- src/consumer.js | 43 +++++++++++++++++++++++++++++++++++++------ 3 files changed, 43 insertions(+), 11 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 676fbc6..b6f8ca1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b 
${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - rm -rf buildenvvar + # rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/package.json b/package.json index c737507..a0325dc 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,8 @@ "zlib": "latest", "url": "latest", "https": "latest", - "nodemon": "latest" + "nodemon": "latest", + "topcoder-healthcheck-dropin": "^1.0.3" }, "devDependencies": { "chai": "^4.2.0", diff --git a/src/consumer.js b/src/consumer.js index f1ab931..09164de 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -2,6 +2,7 @@ const Kafka = require('no-kafka'); const Promise = require('bluebird'); const config = require('config'); const logger = require('./common/logger'); +const healthcheck = require('topcoder-healthcheck-dropin'); const consumer = new Kafka.GroupConsumer(); const { producerLog, @@ -42,7 +43,7 @@ pool.on('remove', client => { }) console.log('---------------------------------'); -const dataHandler = function (messageSet, topic, partition) { +async function dataHandler(messageSet, topic, partition) { return Promise.each(messageSet, async function (m) { const payload = JSON.parse(m.message.value) @@ -256,9 +257,39 @@ const dataHandler = function (messageSet, topic, partition) { }; -const strategies = [{ - subscriptions: [config.topic.NAME], - handler: dataHandler -}]; +const check = function () { + if (!consumer.client.initialBrokers && !consumer.client.initialBrokers.length) { + return false; + } + let connected = true; + consumer.client.initialBrokers.forEach(conn => { + logger.debug(`url ${conn.server()} - connected=${conn.connected}`); + connected = conn.connected & connected; + }); + return connected; +}; + + + +/** + * Initialize kafka consumer + */ +async function setupKafkaConsumer() { + try { + const strategies = [{ + subscriptions: [config.topic.NAME], + handler: dataHandler + }]; + + await consumer.init(strategies); + + logger.info('Initialized kafka consumer') + healthcheck.init([check]) + } catch (err) { + logger.error('Could not setup kafka consumer') + logger.logFullError(err) + terminate() + } +} -consumer.init(strategies); \ No newline at end of file +setupKafkaConsumer() From 736365ba6f3020a0dc38d531362daeb103b3f166 Mon Sep 17 00:00:00 2001 From: informix Date: Tue, 7 Jan 2020 06:40:37 -0500 Subject: [PATCH 038/101] informix trigger changes [skip ci] --- informix_auditing/UNIX.mak | 105 ++++++++++++++++++++ informix_auditing/auditing2.c | 10 +- informix_auditing/build.sh | 4 + informix_auditing/infoserver.sh | 3 + informix_auditing/loglocation.sh | 1 + informix_auditing/samplesessionusertrig.sql | 8 ++ 6 files changed, 130 insertions(+), 1 deletion(-) create mode 100644 informix_auditing/UNIX.mak create mode 100755 informix_auditing/build.sh create mode 100755 informix_auditing/infoserver.sh create mode 100755 informix_auditing/loglocation.sh create mode 100644 informix_auditing/samplesessionusertrig.sql diff --git 
a/informix_auditing/UNIX.mak b/informix_auditing/UNIX.mak new file mode 100644 index 0000000..50e5ca4 --- /dev/null +++ b/informix_auditing/UNIX.mak @@ -0,0 +1,105 @@ +# (c) Copyright IBM Corp. 2004 All rights reserved. */ +# */ +# This sample program is owned by International Business Machines */ +# Corporation or one of its subsidiaries ("IBM") and is copyrighted */ +# and licensed, not sold. */ +# */ +# You may copy, modify, and distribute this sample program in any */ +# form without payment to IBM, for any purpose including developing,*/ +# using, marketing or distributing programs that include or are */ +# derivative works of the sample program. */ +# */ +# The sample program is provided to you on an "AS IS" basis, without */ +# warranty of any kind. IBM HEREBY EXPRESSLY DISCLAIMS ALL */ +# WARRANTIES EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO*/ +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTIC-*/ +# ULAR PURPOSE. Some jurisdictions do not allow for the exclusion or */ +# limitation of implied warranties, so the above limitations or */ +# exclusions may not apply to you. IBM shall not be liable for any */ +# damages you suffer as a result of using, modifying or distributing */ +# the sample program or its derivatives. */ +# */ +# Each copy of any portion of this sample program or any derivative */ +# work, must include a the above copyright notice and disclaimer of */ +# warranty. */ +# */ +# ********************************************************************/ + +# This Makefile builds the logger libraries +# TARGET must be set to the location/filename +# of the platform-specific make include file. + +TARGET=$(INFORMIXDIR)/incl/dbdk/makeinc.linux +include $(TARGET) +# =============================================================== +# This is the project title. +PROJECT_TITLE = auditing + +BINDIR = $(OS_NAME)-$(PLATFORM) + +# Platform independent code goes here. +# The following code was generated by BladeSmith. +LDFLAGS = -m32 +LCURL = -lcurl +MI_INCL = $(INFORMIXDIR)/incl +COPTS=-O -DMI_SERVBUILD -DMITRACE_OFF=1 +CFLAGS =$(LDFLAGS) $(COPTS) $(CC_PIC) -I$(MI_INCL)/public -I$(MI_INCL) +PTCFLAGS =$(LDFLAGS) $(LCURL) $(COPTS) $(CC_PIC) -I$(MI_INCL)/public -I$(MI_INCL) +LINKFLAGS = $(SHLIBLFLAG) $(SYMFLAG) +LIBS = + +PROJECT_OBJS= $(BINDIR)/auditing1.$(OBJSUFF) $(BINDIR)/auditing2.$(OBJSUFF) $(BINDIR)/auditing3.$(OBJSUFF) $(BINDIR)/audit_util.$(OBJSUFF) + +PROJECT_LIBS=$(BINDIR)/$(PROJECT_TITLE).$(BLDLIB_SUFF) + +all: $(BINDIR) RecordAudit.jar + $(MAKE) $(MAKEFLAGS) -f UNIX.mak server + +# Construct the object file. + +$(BINDIR)/auditing1.$(OBJSUFF) : auditing1.c + $(CC) $(CFLAGS) -o $@ -c $? + +$(BINDIR)/auditing2.$(OBJSUFF) : auditing2.c + $(CC) $(PTCFLAGS) -o $@ -c $? + +$(BINDIR)/auditing3.$(OBJSUFF) : auditing3.c + $(CC) $(CFLAGS) -o $@ -c $? + +$(BINDIR)/audit_util.$(OBJSUFF) : audit_util.c + $(CC) $(PTCFLAGS) -o $@ -c $? + +RecordAudit.class: RecordAudit.java + javac RecordAudit.java + +RecordAudit.jar: RecordAudit.class + jar cf RecordAudit.jar RecordAudit.class + +# Construct the shared library. +# Do *NOT* link with client side libraries. +# You will see many undefined symbols during linking. This is +# normal since those symbols are resolved when the server loads +# your shared object. +# +# ATTENTION: +# The ld "Symbol referencing errors" warning is normal. +# These unresolved symbols are resolved when the server +# loads the shared object. 
This list should be examined, however, +# for symbol names that may have been inadvertently misspelled. +# Misspelled symbol names will not be resolved here or at load time. +# +$(PROJECT_LIBS) : $(PROJECT_OBJS) + $(SHLIBLOD) $(LDFLAGS) $(LCURL) $(LINKFLAGS) -o $(PROJECT_LIBS) \ + $(PROJECT_OBJS) $(LIBS) $(DATABLADE_LIBS) 2> link.errs + +server: $(PROJECT_LIBS) + +clean: + $(RM) $(RMFLAGS) $(PROJECT_LIBS) $(PROJECT_OBJS) RecordAudit.class RecordAudit.jar + +$(BINDIR): + -mkdir $(BINDIR) + +INSTALL: + cp $(BINDIR)/$(PROJECT_TITLE).bld $(INFORMIXDIR)/extend/auditing/$(PROJECT_TITLE).bld + cp RecordAudit.jar $(INFORMIXDIR)/extend/auditing/RecordAudit.jar diff --git a/informix_auditing/auditing2.c b/informix_auditing/auditing2.c index 108f8a5..7aab476 100644 --- a/informix_auditing/auditing2.c +++ b/informix_auditing/auditing2.c @@ -49,7 +49,8 @@ MI_CALLBACK_STATUS MI_PROC_CALLBACK cbfunc(MI_EVENT_TYPE event_type, MI_CONNECTION *conn, void *event_data, void *user_data); /*--------------------------------------------------------------*/ -void do_auditing2(MI_FPARAM *fp) + +void do_auditing2( mi_lvarchar *sessionusername, MI_FPARAM *fp) { MI_CONNECTION *sessionConnection; MI_CALLBACK_HANDLE *cbhandle; @@ -58,6 +59,13 @@ void do_auditing2(MI_FPARAM *fp) chains_t *curChain; mi_string buffer[32], *pdata; + DPRINTF("logger", 80, ("connected user %s", mi_lvarchar_to_string(sessionusername))); + printf("operating user %s welcome test",mi_lvarchar_to_string(sessionusername)); + if (strcmp(mi_lvarchar_to_string(sessionusername), "ifxsyncuser") == 0) + { + printf("automated user. skipping trigger"); + return; + } DPRINTF("logger", 80, ("Entering do_auditing2()")); /* Get the trigger event and make sure we are in a trigger */ trigger_operation = mi_trigger_event(); diff --git a/informix_auditing/build.sh b/informix_auditing/build.sh new file mode 100755 index 0000000..e06cddd --- /dev/null +++ b/informix_auditing/build.sh @@ -0,0 +1,4 @@ + make -f UNIX.mak clean + make -f UNIX.mak + make -f UNIX.mak INSTALL + diff --git a/informix_auditing/infoserver.sh b/informix_auditing/infoserver.sh new file mode 100755 index 0000000..4e58f41 --- /dev/null +++ b/informix_auditing/infoserver.sh @@ -0,0 +1,3 @@ +onmode -ky +oninit -vy + diff --git a/informix_auditing/loglocation.sh b/informix_auditing/loglocation.sh new file mode 100755 index 0000000..c429954 --- /dev/null +++ b/informix_auditing/loglocation.sh @@ -0,0 +1 @@ +cd /var/informix/messages/ diff --git a/informix_auditing/samplesessionusertrig.sql b/informix_auditing/samplesessionusertrig.sql new file mode 100644 index 0000000..8622d6d --- /dev/null +++ b/informix_auditing/samplesessionusertrig.sql @@ -0,0 +1,8 @@ +CREATE PROCEDURE informix.do_auditing2(sessionusername LVARCHAR) + +EXTERNAL NAME "$INFORMIXDIR/extend/auditing/auditing.bld(do_auditing2)" +LANGUAGE C; + +CREATE TRIGGER informix.access_trigger_insert insert on "informix".access for each row + ( + execute procedure "informix".do_auditing2(USER)); From 561c08e407642adef48b69c369406f70453e9d98 Mon Sep 17 00:00:00 2001 From: informix Date: Thu, 9 Jan 2020 06:31:58 -0500 Subject: [PATCH 039/101] cosmetic changes [skip ci] --- informix_auditing/audit_util.c | 7 +-- informix_auditing/auditing2.c | 4 +- informix_auditing/samplecrtable.sql | 66 +++++++++++++++++++++++++++++ 3 files changed, 72 insertions(+), 5 deletions(-) create mode 100644 informix_auditing/samplecrtable.sql diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 453803a..6f0a5ee 100644 --- 
a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -65,6 +65,7 @@ mi_string *do_castl(MI_CONNECTION *conn, MI_DATUM *datum, MI_TYPE_DESC* dsc = mi_type_typedesc(conn, tid); mi_string* srcType = mi_type_typename(dsc); DPRINTF("logger",95,("-- typeName=%s --",srcType)); + printf("-- typeName=%s --",srcType); if ((strcmp("blob", srcType) == 0) || (strcmp("clob", srcType) == 0) || (strcmp("text", srcType) == 0) || (strcmp("byte", srcType) == 0)) { return("unsupportedtype"); } @@ -495,7 +496,7 @@ int posttopic(char *jsondata, char *posturl) /* char *posturl = getenv("POSTURL"); if (!postinfo) { - printf("no post topic set true or false. defualt it will post topic"); + printf("no post topic set true or false. defualt it will post topic \n"); // return 0; } else @@ -506,7 +507,7 @@ int posttopic(char *jsondata, char *posturl) return 0; } } - printf("posting topic"); + printf("posting topic \n"); if (!posturl) { posturl = fileeventsurl; @@ -515,7 +516,7 @@ int posttopic(char *jsondata, char *posturl) printf("no url provide in environment . So it is taking localurl"); } */ - printf("posting topic to url %s", posturl); + printf("posting topic to url %s \n", posturl); CURL *hnd = curl_easy_init(); curl_easy_setopt(hnd, CURLOPT_CUSTOMREQUEST, "POST"); curl_easy_setopt(hnd, CURLOPT_URL, posturl); diff --git a/informix_auditing/auditing2.c b/informix_auditing/auditing2.c index 7aab476..dc54768 100644 --- a/informix_auditing/auditing2.c +++ b/informix_auditing/auditing2.c @@ -60,10 +60,10 @@ void do_auditing2( mi_lvarchar *sessionusername, MI_FPARAM *fp) mi_string buffer[32], *pdata; DPRINTF("logger", 80, ("connected user %s", mi_lvarchar_to_string(sessionusername))); - printf("operating user %s welcome test",mi_lvarchar_to_string(sessionusername)); + printf("operating user %s welcome test \n",mi_lvarchar_to_string(sessionusername)); if (strcmp(mi_lvarchar_to_string(sessionusername), "ifxsyncuser") == 0) { - printf("automated user. skipping trigger"); + printf("automated user. 
skipping trigger\n"); return; } DPRINTF("logger", 80, ("Entering do_auditing2()")); diff --git a/informix_auditing/samplecrtable.sql b/informix_auditing/samplecrtable.sql new file mode 100644 index 0000000..608130d --- /dev/null +++ b/informix_auditing/samplecrtable.sql @@ -0,0 +1,66 @@ +CREATE TABLE testtable ( + dserial8 serial8, + dchar char(1), + dvarchar varchar(255), + dlvarchar lvarchar(20000), + ddecimal decimal(12,0), + dfloat float, + dint8 int8, + dinteger integer, + dmoney money(16,2), + dnchar nchar, + dnvarchar nvarchar, + dsmallint smallint +); + +CREATE TRIGGER informix.testtable_trigger_insert insert on "informix".testtable +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); + + +CREATE TABLE testtable1 ( + dserial serial, + dlvarchar lvarchar(20000) +); + +CREATE TABLE testtable2 ( + dbigserial bigserial, + dlvarchar lvarchar(20000) +); + + +CREATE TRIGGER informix.testtable1_trigger_insert insert on "informix".testtable1 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); +CREATE TRIGGER informix.testtable12_trigger_insert insert on "informix".testtable2 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); + +CREATE TABLE testtable3 ( + dlvarchar lvarchar(20000), + ddate date +); + +CREATE TABLE testtable4( + dlvarchar lvarchar(20000), + ddatetime datetime +); + +CREATE TRIGGER informix.testtable3_trigger_insert insert on "informix".testtable3 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); +CREATE TRIGGER informix.testtable14_trigger_insert insert on "informix".testtable4 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); + + From 5b65c26ba1eddb43d319a1569fb26c9a9d1bf1a6 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 9 Jan 2020 19:39:10 +0530 Subject: [PATCH 040/101] Update samplecrtable.sql [skip ci] --- informix_auditing/samplecrtable.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/informix_auditing/samplecrtable.sql b/informix_auditing/samplecrtable.sql index 608130d..758b03f 100644 --- a/informix_auditing/samplecrtable.sql +++ b/informix_auditing/samplecrtable.sql @@ -47,10 +47,10 @@ CREATE TABLE testtable3 ( ddate date ); -CREATE TABLE testtable4( - dlvarchar lvarchar(20000), - ddatetime datetime -); +CREATE TABLE testtable4 ( + dlvarchar lvarchar, + ddatetime DATETIME YEAR TO SECOND +); CREATE TRIGGER informix.testtable3_trigger_insert insert on "informix".testtable3 for each row From 87afdb0d221fe2f747c05cd64e34c4ffdc0f289d Mon Sep 17 00:00:00 2001 From: informix Date: Thu, 9 Jan 2020 09:54:32 -0500 Subject: [PATCH 041/101] date and datetime changes [skip ci] --- informix_auditing/audit_util.c | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 6f0a5ee..c0ddb21 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -70,6 +70,12 @@ mi_string *do_castl(MI_CONNECTION *conn, MI_DATUM *datum, return("unsupportedtype"); } else{ + if (strcmp("date", srcType) == 0) { + return (mi_date_to_string((mi_date *)datum)); + } + if (strcmp("datetime", srcType) == 0) { + return (mi_datetime_to_string((mi_datetime *)datum)); + } fn = mi_cast_get(conn, tid, lvar_id, &status); if (NULL == fn) { switch(status) { @@ -96,8 +102,13 @@ mi_string *do_castl(MI_CONNECTION *conn, MI_DATUM *datum, tdesc = mi_type_typedesc(conn, typeid); precision = mi_type_precision(tdesc); + printf("rputine read initiated \n"); + new_datum = 
mi_routine_exec(conn, fn, &ret, datum, collen, precision, fp); + printf("routine read completed \n"); pbuf = mi_lvarchar_to_string(new_datum); + //pbuf = mi_date_to_string((mi_date *)datum); + //printf("\ndate data %s \n",pbuf); mi_routine_end(conn, fn); //return mi_type_typename(mi_type_typedesc(conn, my_type_id)); } From e34d8a21b19aa3b55cd3716b884dc6d00054057b Mon Sep 17 00:00:00 2001 From: informix Date: Fri, 10 Jan 2020 02:39:22 -0500 Subject: [PATCH 042/101] indivitual file creation on log [skip ci] --- informix_auditing/auditing2.c | 11 +++++++++-- informix_auditing/samplecrtable.sql | 27 +++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/informix_auditing/auditing2.c b/informix_auditing/auditing2.c index dc54768..388b946 100644 --- a/informix_auditing/auditing2.c +++ b/informix_auditing/auditing2.c @@ -180,8 +180,15 @@ MI_CALLBACK_STATUS MI_PROC_CALLBACK if (pcur == NULL) { DPRINTF("logger", 80, ("cbfunc(): pcur is null")); } else { - sprintf(buffer, "%s%d_%d.json", LOGGERFILEPREFIX, - pmem->sessionId, pcur->seq); + char filetime_buffer[30]; + struct timeval file_tv; + time_t file_curtime; + gettimeofday(&file_tv, NULL); + file_curtime=file_tv.tv_sec; + strftime(filetime_buffer,30,"%m-%d-%Y_%T.",localtime(&file_curtime)); + printf("%s%ld\n",filetime_buffer,file_tv.tv_usec); + sprintf(buffer, "%s%d_%d_%s%ld.json", LOGGERFILEPREFIX, + pmem->sessionId, pcur->seq,filetime_buffer,file_tv.tv_usec); DPRINTF("logger", 80, ("cbfunc(): about to open file %s", buffer)); fd = mi_file_open(buffer, O_WRONLY | O_APPEND | O_CREAT, 0644); if (pcur->json == NULL) { diff --git a/informix_auditing/samplecrtable.sql b/informix_auditing/samplecrtable.sql index 758b03f..7a7851f 100644 --- a/informix_auditing/samplecrtable.sql +++ b/informix_auditing/samplecrtable.sql @@ -63,4 +63,31 @@ for each row execute procedure "informix".do_auditing2(USER) ); +CREATE TABLE testtable5 ( + dserial8 serial8, + dchar char(1), + dvarchar varchar(255), + dlvarchar lvarchar(20000), + ddecimal decimal(12,0), + dfloat float, + dint8 int8, + dinteger integer, + dmoney money(16,2), + dnchar nchar, + dnvarchar nvarchar, + dsmallint smallint, + ddate date, + ddatetime DATETIME YEAR TO SECOND +); + +CREATE TRIGGER informix.testtable5_trigger_insert insert on "informix".testtable5 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); +CREATE TRIGGER informix.testtable5_trigger_update update on "informix".testtable5 +for each row + ( + execute procedure "informix".do_auditing2(USER) + ); From e5e2b8ad1c4c7190a027dda7cda0ca3114606719 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 17 Jan 2020 16:44:40 +0530 Subject: [PATCH 043/101] audit code changes --- .circleci/config.yml | 8 ++++---- src/api/audit.js | 4 +++- src/consumer.js | 41 ++++++++++++++++++++++++++++++++++++++--- src/models/audit_log.js | 4 +++- src/nodeserver.js | 2 +- 5 files changed, 49 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b6f8ca1..676fbc6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + 
source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - # rm -rf buildenvvar + rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/src/api/audit.js b/src/api/audit.js index f7af7dd..2025331 100644 --- a/src/api/audit.js +++ b/src/api/audit.js @@ -65,7 +65,9 @@ cAuditLog.schema = Joi.object().keys({ SEQ_ID: Joi.number().required(), CONSUMER_DEPLOY_STATUS: Joi.string().valid('success','failure').required(), CONSUMER_FAILURE_LOG: Joi.object(), - CONSUMER_UPDATE_TIME: Joi.date().required() + CONSUMER_UPDATE_TIME: Joi.date().required(), + CONSUMER_RETRY_COUNT: Joi.number(), + PL_SQUENCE_ID: Joi.number() }) //add audit_log diff --git a/src/consumer.js b/src/consumer.js index 09164de..d5cfeae 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -113,11 +113,23 @@ async function dataHandler(messageSet, topic, partition) { } } //audit success log + let retrycountconsumer,pseqid + if (!postgreErr) { + retrycountconsumer = 0 + if (!payload.retryCount) { + pseqid = payload.SEQ_ID + } + else + { + pseqid = payload.parentseqid + } await cAuditLog({ SEQ_ID: payload.SEQ_ID, CONSUMER_DEPLOY_STATUS: 'success', - CONSUMER_UPDATE_TIME: Date.now() + CONSUMER_UPDATE_TIME: Date.now(), + CONSUMER_RETRY_COUNT: retrycountconsumer, + PL_SQUENCE_ID: pseqid }).then(log => console.log('postgres ' + payload.OPERATION + ' success')) .catch(err => console.log(err)) @@ -130,11 +142,30 @@ async function dataHandler(messageSet, topic, partition) { } else { //audit failure log + if (!payload.retryCount) { + retrycountconsumer = 1 + pseqid = payload.SEQ_ID + } + else + { + pseqid = payload.parentseqid + if (payload.retryCount >= config.KAFKA_REPOST_COUNT) + { + retrycountconsumer = 0 + } + else + { + retrycountconsumer = payload.retryCount + 1; + } + } + await cAuditLog({ SEQ_ID: payload.SEQ_ID, CONSUMER_DEPLOY_STATUS: 'failure', CONSUMER_FAILURE_LOG: postgreErr, - CONSUMER_UPDATE_TIME: Date.now() + CONSUMER_UPDATE_TIME: Date.now(), + CONSUMER_RETRY_COUNT: retrycountconsumer, + PL_SQUENCE_ID: pseqid }).then((log) => console.log('postgres ' + payload.OPERATION + ' failure')) .catch(err => console.log(err)) @@ -170,6 +201,10 @@ async function dataHandler(messageSet, topic, partition) { } else { + if (payload.retryCount = 0) + { + payload['parentseqid'] = payload.SEQ_ID + } payload['retryCount'] = payload.retryCount + 1; let seqID = 0 //add producer_log @@ -178,7 +213,7 @@ async function dataHandler(messageSet, topic, partition) { SOURCE: config.SOURCE, SCHEMA_NAME: payload.SCHEMANAME, TABLE_NAME: payload.TABLENAME, - PRODUCER_PAYLOAD: payload.DATA, + PRODUCER_PAYLOAD: payload, OPERATION: payload.OPERATION }).then((log) => seqID = log.SEQ_ID) diff --git a/src/models/audit_log.js b/src/models/audit_log.js index 01eb2ce..68231c1 100644 --- a/src/models/audit_log.js +++ b/src/models/audit_log.js @@ -9,7 +9,9 @@ module.exports = (sequelize, DataTypes) => PRODUCER_PUBLISH_TIME: { type: DataTypes.DATE }, CONSUMER_DEPLOY_STATUS: { type: DataTypes.STRING }, CONSUMER_FAILURE_LOG: { type: DataTypes.JSON }, - CONSUMER_UPDATE_TIME:{ type: DataTypes.DATE } + CONSUMER_UPDATE_TIME:{ type: DataTypes.DATE }, + CONSUMER_RETRY_COUNT:{ type: DataTypes.INTEGER }, + PL_SQUENCE_ID:{ type: DataTypes.INTEGER } }, { tableName: 
'audit_log', paranoid: true, diff --git a/src/nodeserver.js b/src/nodeserver.js index b229dfb..e17f6f8 100644 --- a/src/nodeserver.js +++ b/src/nodeserver.js @@ -30,7 +30,7 @@ app.post('/kafkaevents', async (req, res, next) => { SOURCE: config.SOURCE, SCHEMA_NAME: payload.SCHEMANAME, TABLE_NAME: payload.TABLENAME, - PRODUCER_PAYLOAD: payload.DATA, + PRODUCER_PAYLOAD: payload, OPERATION: payload.OPERATION }).then((log) => seqID = log.SEQ_ID) From 708b2976af429c9560a70e38f4eba01e68c4ceb0 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 17 Jan 2020 17:48:02 +0530 Subject: [PATCH 044/101] audit code changes --- .circleci/config.yml | 8 ++++---- src/consumer.js | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 676fbc6..b6f8ca1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,11 +37,11 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - rm -rf buildenvvar + # rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/src/consumer.js b/src/consumer.js index d5cfeae..e7b9f2a 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -201,7 +201,7 @@ async function dataHandler(messageSet, topic, partition) { } else { - if (payload.retryCount = 0) + if (payload.retryCount === 0) { payload['parentseqid'] = payload.SEQ_ID } From 5f248a74ef014ffc6c42c8d40d7dd52e1ad27ac2 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 22 Jan 2020 23:51:45 +0530 Subject: [PATCH 045/101] datetime relate change --- src/api/migratepg.js | 67 ++++++++++++++++++++++++++++++++++++++++---- src/consumer.js | 6 ++-- 2 files changed, 65 insertions(+), 8 deletions(-) diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 8176c22..634c2de 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -4,8 +4,11 @@ const Joi = require('joi') const config = require('config'); const pg_dbname = config.get('POSTGRES.database') //insert payload -async function migratepgInsert(dbpool, payload, dbname, table) { +async function migratepgInsert(dbpool, payload) { console.log(payload); + const table = payload.TABLENAME + const dbname = payload.SCHEMANAME + payload = payload.DATA try { //const client = await dbpool.connect(); const client = dbpool; @@ -26,19 +29,70 @@ async function migratepgInsert(dbpool, payload, dbname, table) { } } //update payload -async function migratepgUpdate(dbpool, payload, dbname, table) { +async function migratepgUpdate(dbpool, payload) { console.log("-----------------------old update migratepgUpdate----------------"); console.log(payload); + const table = payload.TABLENAME + const dbname = payload.SCHEMANAME + payload = payload.DATA try { //const client = await dbpool.connect(); const client = dbpool; - //console.log("welcome123"); + console.log("welcome123"); const 
columnNames = Object.keys(payload) let schemaname = (dbname == pg_dbname) ? 'public' : dbname; + var datatypeobj = new Object(); + const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatatypevalues = [ schemaname , table ]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { + console.log("datatype fetched---------------------"); + //console.log(res); + + const data = res.rows; + data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); + }) +// console.log(datatypeobj['dmoney']); + console.log("BBuidling condtion") + buffferoldcond = 0 + bufferforsetdatastr = 0 + var setdatastr = "" + var oldconditionstr = "" + columnNames.forEach((colName) => { + console.log(colName); + colobj = payload[colName] + if (buffferoldcond == 1) { + oldconditionstr = oldconditionstr + " and " + } + if (bufferforsetdatastr == 1) { + setdatastr = setdatastr + " , " + } + if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + { + setdatastr = setdatastr + "\"" + colName + "\"= NULL " + } + else + { + setdatastr = setdatastr + "\"" + colName + "\"= '" + colobj.new + "' " + } + if ( datatypeobj[colName] == 'timestamp' && colobj['old'].toUpperCase() == 'NULL' ) + { + oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } + else + { + oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + } + buffferoldcond = 1 + bufferforsetdatastr = 1 + }); + console.log(oldconditionstr); + console.log(setdatastr); sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update :
set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val" +// sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update :
set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val" + sql = `update ${table} set ${setdatastr} where ${oldconditionstr} ;` + console.log("sqlstring .............................."); console.log(sql); //update test5 set id='[object Object].new', cityname='[object Object].new' where id='[object Object].old' AND cityname='[obddject Object].old' ; // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; @@ -51,9 +105,12 @@ async function migratepgUpdate(dbpool, payload, dbname, table) { } //delete payload.id -async function migratepgDelete(dbpool, payload, dbname, table) { +async function migratepgDelete(dbpool, payload) { console.log(payload); + const table = payload.TABLENAME + const dbname = payload.SCHEMANAME + payload = payload.DATA try { //const client = await dbpool.connect(); diff --git a/src/consumer.js b/src/consumer.js index e7b9f2a..c7dd274 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -89,14 +89,14 @@ async function dataHandler(messageSet, topic, partition) { } else { if (payload.OPERATION === 'INSERT') { let entity = payload.DATA - await migratepgInsert(pool, entity, payload.SCHEMANAME, payload.TABLENAME) + await migratepgInsert(pool, payload) .catch(err => { postgreErr = err //console.log(err) }) } else if (payload.OPERATION === 'UPDATE') { - await migratepgUpdate(pool, payload.DATA, payload.SCHEMANAME, payload.TABLENAME) + await migratepgUpdate(pool, payload) .catch(err => { postgreErr = err //console.log(err) @@ -104,7 +104,7 @@ async function dataHandler(messageSet, topic, partition) { } else if (payload.OPERATION === 'DELETE') { let entity = payload.DATA - await migratepgDelete(pool, entity, payload.SCHEMANAME, payload.TABLENAME) + await migratepgDelete(pool, payload) .catch(err => { postgreErr = err //console.log(err) From 1ba26206c6e054273c50e36de45c4de5025e964a Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 23 Jan 2020 07:05:00 +0530 Subject: [PATCH 046/101] datetime relate change --- config/default.js | 7 ++++++- src/api/migratepg.js | 32 +++++++++++++++++++++++++++++--- 2 files changed, 35 insertions(+), 4 deletions(-) diff --git a/config/default.js b/config/default.js index f8391f0..2c056fe 100644 --- a/config/default.js +++ b/config/default.js @@ -51,5 +51,10 @@ module.exports = { URL: process.env.SLACKURL || 'us-east-1', SLACKCHANNEL: process.env.SLACKCHANNEL || 'ifxpg-migrator', SLACKNOTIFY: process.env.SLACKNOTIFY || 'false' - } + }, + EXEMPTIONDATATYPE : { + MONEY: { + testdb_testtable5 : 'dmoney' + } + } } diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 634c2de..37ec1a2 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -11,15 +11,26 @@ async function migratepgInsert(dbpool, payload) { payload = payload.DATA try { //const client = await dbpool.connect(); + console.log("db name : " + dbname); + console.log("table name : " + table); + +if (config.has(`EXEMPTIONDATATYPE.MONEY.${dbname}_${table}`)) +{ +fieldname = config.get(`EXEMPTIONDATATYPE.MONEY.${dbname}_${table}`) +console.log("Exemption File Name : " + fieldname); +//payload[fieldname] = (payload.fieldname.toUpperCase == 'NULL') ? payload.fieldname:payload.fieldname.substr(1); +payload[fieldname] = (payload[fieldname].toUpperCase == 'NULL') ? 
payload[fieldname]:payload[fieldname].substr(1);
+console.log(payload[fieldname])
+}
     const client = dbpool;
-    //console.log("welcome123");
+    console.log("=========== pg insert without unique datatype ==============");
     const columnNames = Object.keys(payload)
     let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
     sql = `SET search_path TO ${schemaname};`;
     console.log(sql);
     await client.query(sql);
     sql = `insert into ${table} (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into <table> (col_1, col_2, ...) values (val_1, val_2, ...)"
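
The exemption block above strips the leading character (the currency symbol) from Informix MONEY values before the row is replayed into Postgres. Note that the guard compares `toUpperCase` without calling it, so as written the 'NULL' branch can never match and substr(1) always runs; a corrected sketch of the intended transformation:

    // Sketch: normalize an Informix MONEY value such as "$123.45" before replay.
    function normalizeMoney(value) {
        // 'NULL' markers pass through untouched; anything else loses the leading "$".
        return value.toUpperCase() === 'NULL' ? value : value.substr(1);
    }
    console.log(normalizeMoney('$123.45')); // -> "123.45"
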
-    console.log(sql);
+    console.log("Executing query : " + sql);
     // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');";
     await client.query(sql);
     //await client.release(true);
@@ -37,8 +48,23 @@ async function migratepgUpdate(dbpool, payload) {
     payload = payload.DATA
     try {
         //const client = await dbpool.connect();
+        console.log("db name : " + dbname);
+        console.log("table name : " + table);
+
+if (config.has(`EXEMPTIONDATATYPE.MONEY.${dbname}_${table}`))
+{
+fieldname = config.get(`EXEMPTIONDATATYPE.MONEY.${dbname}_${table}`)
+console.log("Exemption File Name : " + fieldname);
+//payload[fieldname] = (payload.fieldname.toUpperCase == 'NULL') ? payload.fieldname:payload.fieldname.substr(1);
+//payload[fieldname] = (payload[fieldname].toUpperCase == 'NULL') ? payload[fieldname]:payload[fieldname].substr(1);
+//console.log(payload[fieldname])
+payload[fieldname]['old'] = (payload[fieldname]['old'].toUpperCase == 'NULL') ? payload[fieldname]['old']:payload[fieldname]['old'].substr(1);
+console.log(payload[fieldname]['old'])
+payload[fieldname]['new'] = (payload[fieldname]['new'].toUpperCase == 'NULL') ? payload[fieldname]['new']:payload[fieldname]['new'].substr(1);
+console.log(payload[fieldname]['old'])
+}
     const client = dbpool;
-    console.log("welcome123");
+    console.log("=========== pg update without unique datatype ==============");
     const columnNames = Object.keys(payload)
     let schemaname = (dbname == pg_dbname) ? 'public' : dbname;
     var datatypeobj = new Object();

From ac288f9aacd1498645a5eabfd385fe1944d3e4df Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Thu, 23 Jan 2020 13:16:59 +0530
Subject: [PATCH 047/101] sample table

---
 .circleci/config.yml                  |  6 +++---
 informix_auditing/samplecrtable.sql   | 14 ++++++++++++++
 informix_auditing/samplecrtablepg.sql | 12 ++++++++++++
 3 files changed, 29 insertions(+), 3 deletions(-)
 create mode 100644 informix_auditing/samplecrtablepg.sql

diff --git a/.circleci/config.yml b/.circleci/config.yml
index b6f8ca1..fe78e9d 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -42,9 +42,9 @@ builddeploy_steps: &builddeploy_steps
       # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
       # consumer deployment
       # rm -rf buildenvvar
-      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
-      source buildenvvar
-      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
+      # source buildenvvar
+      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
       # without kafka dynamodb
       # rm -rf buildenvvar
       # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar
diff --git a/informix_auditing/samplecrtable.sql b/informix_auditing/samplecrtable.sql
index 7a7851f..6a93aac 100644
--- a/informix_auditing/samplecrtable.sql
+++ b/informix_auditing/samplecrtable.sql
@@ -91,3 +91,17 @@ for each row
     (
         execute procedure "informix".do_auditing2(USER)
     );
+
+
+CREATE TABLE testtype_text (
+    text_id int PRIMARY KEY,
+    testingtext text
+);
+CREATE TABLE testtype_byte (
+    byte_id int PRIMARY KEY,
+    testingbyte byte
+);
+CREATE TABLE testtype_blob (
+    blob_id int PRIMARY KEY,
+    testingblob blob
+);
\ No newline at end of file
diff --git a/informix_auditing/samplecrtablepg.sql b/informix_auditing/samplecrtablepg.sql
new file mode 100644
index 0000000..31a3af2
--- /dev/null
+++ b/informix_auditing/samplecrtablepg.sql
@@ -0,0 +1,12 @@
+CREATE TABLE testtype_text (
+    text_id int PRIMARY KEY,
+    testingtext bytea
+);
+CREATE TABLE testtype_byte (
+    byte_id int PRIMARY KEY,
+    testingbyte bytea
+);
+CREATE TABLE testtype_blob (
+    blob_id int PRIMARY KEY,
+    testingblob bytea
+);
\ No newline at end of file

From cfea39e444ebf8b699f3c513fbfc921ef9e9f343 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Thu, 23 Jan 2020 17:54:59 +0530
Subject: [PATCH 048/101] unique data type case handling issue

---
 .circleci/config.yml    |  6 ++--
 src/api/migrateifxpg.js | 65 ++++++++++++++++++++++++++++++++++-------
 src/api/migratepg.js    |  1 -
 3 files changed, 57 insertions(+), 15 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index fe78e9d..b6f8ca1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -42,9 +42,9 @@ builddeploy_steps: &builddeploy_steps
       # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
       # consumer deployment
       # rm -rf buildenvvar
-      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
-      # source buildenvvar
-      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
+      source buildenvvar
+      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
       # without kafka dynamodb
       # rm -rf buildenvvar
       # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar
diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js
index d326bed..0f0487a 100755
--- a/src/api/migrateifxpg.js
+++ b/src/api/migrateifxpg.js
@@ -21,7 +21,7 @@ const {
 const pg_dbname = config.get('POSTGRES.database')
 async function migrateifxinsertdata(payload, client) {
     //retrive data and construc query
-    console.log("work1---------------------------------------")
+    console.log("=========== pg insert with unique datatype ==============")
     console.log(payload)
     const columns = payload.DATA
     console.log(columns)
@@ -29,6 +29,16 @@ async function migrateifxinsertdata(payload, client) {
     const tablename = payload.TABLENAME
     console.log("work2---------------------------------------")
     const db_schema = payload.SCHEMANAME
+    console.log("retriving data type ------")
+    var datatypeobj = new Object();
+    const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2';
+    const sqlfetchdatatypevalues = [ db_schema , tablename ];
+    await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => {
+        console.log("datatype fetched---------------------");
+        //console.log(res);
+        const data = res.rows;
+        data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] );
+    })
     var conditionstr = ""
     const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(',');
     const insertSql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`;
@@ -44,7 +54,15 @@
         }
         console.log(columns[colName])
         tempvar = columns[colName]
-        conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' "
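
The new blocks in this patch first map each column to its Postgres type through information_schema so that timestamp columns can be special-cased. That lookup as a standalone sketch, assuming a pg client (schema and table names are parameters):

    // Sketch: map column names to Postgres type names for one table.
    async function fetchColumnTypes(client, schema, table) {
        const res = await client.query(
            'SELECT column_name, udt_name FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2',
            [schema, table]
        );
        const types = {};
        res.rows.forEach((row) => { types[row.column_name] = row.udt_name; });
        return types; // e.g. { ddatetime: 'timestamp' }
    }
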
+ colName + "= '" + columns[colName] + "' " + if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } + else + { + conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + } bufffercond = 1 } }); @@ -94,10 +112,10 @@ async function migrateifxinsertdata(payload, client) { } async function migrateifxupdatedata(payload, client) { - console.log("--------------- Executing fun migrateifxupdatedata --------------------"); + console.log("=========== pg update with unique datatype =============="); console.log(payload); - console.log("work1---------------------------------------") - console.log(payload) + //console.log("work1---------------------------------------") + //console.log(payload) const columns = payload.DATA console.log(columns) const columnNames = Object.keys(columns) @@ -106,6 +124,16 @@ async function migrateifxupdatedata(payload, client) { console.log("work2---------------------------------------") const db_schema = payload.SCHEMANAME console.log(tablename); + console.log("retriving data type ------") + var datatypeobj = new Object(); + const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatatypevalues = [ db_schema , tablename ]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { + console.log("datatype fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); + }) var conditionstr = "" var updatestr = "" bufffercond = 0 @@ -117,22 +145,37 @@ async function migrateifxupdatedata(payload, client) { columnNames.forEach((colName) => { console.log(colName) colobj = columns[colName] - console.log(typeof (colobj)) - console.log(colobj) - console.log(colobj.new) + //console.log(typeof (colobj)) + //console.log(colobj) + //console.log(colobj.new) if (colobj.new != 'unsupportedtype') { if (bufffernewcond == 1) { conditionstr = conditionstr + " and " } - conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } + else + { + conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + } bufffernewcond = 1 } if (colobj['old'] != 'unsupportedtype') { if (buffferoldcond == 1) { oldconditionstr = oldconditionstr + " and " } - console.log(colobj.old); - oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old + "' " + //console.log(colobj.old); + //oldconditionstr = oldconditionstr + tablename + "." 
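// The information_schema lookup added above is repeated in every handler; a
// minimal sketch of the same query as a shared helper (fetchColumnTypes is an
// illustrative name, not an existing function in this repo):
async function fetchColumnTypes(client, schemaname, tablename) {
  // Builds a map of column_name -> udt_name, e.g. { starttime: 'timestamp' }
  const sql = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2';
  const res = await client.query(sql, [schemaname, tablename]);
  const types = {};
  res.rows.forEach(row => { types[row.column_name] = row.udt_name; });
  return types;
}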
+ colName + "= '" + colobj.old + "' " + if ( datatypeobj[colName] == 'timestamp' && colobj['old'].toUpperCase() == 'NULL' ) + { + oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } + else + { + oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + } buffferoldcond = 1 } diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 37ec1a2..a067e0f 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -73,7 +73,6 @@ console.log(payload[fieldname]['old']) await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { console.log("datatype fetched---------------------"); //console.log(res); - const data = res.rows; data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); }) From 54b0ce6f780627170711a53eb4948e7b2e756179 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 23 Jan 2020 18:51:40 +0530 Subject: [PATCH 049/101] unique data type null case handling issue --- src/api/migrateifxpg.js | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 0f0487a..4c6efa6 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -86,6 +86,8 @@ async function migrateifxinsertdata(payload, client) { for (const row of data) { const values = []; columnNames.forEach((colName) => { + if (row[colName]) + { if (isUtf8(row[colName])) { console.log(`utf8 format ${colName}`); // values.push(new Buffer.from(row[colName],'binary')); @@ -94,6 +96,9 @@ async function migrateifxinsertdata(payload, client) { // values.push(row[colName]); values.push(new Buffer.from(row[colName], 'binary')); } + } else { + values.push(row[colName]); + } //values.push(new Buffer.from(row[colName],'binary')); }); let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; @@ -203,6 +208,8 @@ async function migrateifxupdatedata(payload, client) { if (buffferupcond == 1) { updatestr = updatestr + " , " } + if (row[colName]) + { if (isUtf8(row[colName])) { //console.log(`utf8 format ${colName}`); values.push(row[colName]); @@ -215,6 +222,12 @@ async function migrateifxupdatedata(payload, client) { buffferupcond = 1 counter = counter + 1 } + } else { + values.push(row[colName]); + updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " + buffferupcond = 1 + counter = counter + 1 + } }); //logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); From 95733a88fd5cb8cccc0546ba044887bef999c138 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 23 Jan 2020 19:19:15 +0530 Subject: [PATCH 050/101] unique data type null case handling issue --- src/api/migrateifxpg.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 4c6efa6..260514a 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -61,7 +61,7 @@ async function migrateifxinsertdata(payload, client) { } else { - conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " } bufffercond = 1 } From 40674e88d80a33224b1682f776bec985567d70e5 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 23 Jan 2020 20:16:50 +0530 Subject: [PATCH 051/101] unique data type null case handling issue --- src/api/migrateifxpg.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 260514a..06b27a5 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -55,7 +55,7 @@ async function migrateifxinsertdata(payload, client) { console.log(columns[colName]) tempvar = columns[colName] //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + if ( datatypeobj[colName] == 'timestamp' && columns[colName].toUpperCase() == 'NULL' ) { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } @@ -88,8 +88,8 @@ async function migrateifxinsertdata(payload, client) { columnNames.forEach((colName) => { if (row[colName]) { - if (isUtf8(row[colName])) { - console.log(`utf8 format ${colName}`); + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' ) { + console.log(`utf8 or datetime format ${colName}`); // values.push(new Buffer.from(row[colName],'binary')); values.push(row[colName]); } else { @@ -210,7 +210,7 @@ async function migrateifxupdatedata(payload, client) { } if (row[colName]) { - if (isUtf8(row[colName])) { + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' ) { //console.log(`utf8 format ${colName}`); values.push(row[colName]); updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " From 2d5c7441905af949e7220e788e9c89a16657131f Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 24 Jan 2020 16:35:35 +0530 Subject: [PATCH 052/101] decimal data type null case handling issue --- src/api/migrateifxpg.js | 6 +++--- src/api/migratepg.js | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 06b27a5..82ccaa0 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -55,7 +55,7 @@ async function migrateifxinsertdata(payload, client) { console.log(columns[colName]) tempvar = columns[colName] //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ( datatypeobj[colName] == 'timestamp' && columns[colName].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && columns[colName].toUpperCase() == 'NULL' ) { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } @@ -157,7 +157,7 @@ async function migrateifxupdatedata(payload, client) { if (bufffernewcond == 1) { conditionstr = conditionstr + " and " } - if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['new'].toUpperCase() == 'NULL' ) { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } @@ -173,7 +173,7 @@ async function migrateifxupdatedata(payload, client) { } //console.log(colobj.old); //oldconditionstr = oldconditionstr + tablename + "." 
+ colName + "= '" + colobj.old + "' " - if ( datatypeobj[colName] == 'timestamp' && colobj['old'].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } diff --git a/src/api/migratepg.js b/src/api/migratepg.js index a067e0f..b21d9f9 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -91,7 +91,7 @@ console.log(payload[fieldname]['old']) if (bufferforsetdatastr == 1) { setdatastr = setdatastr + " , " } - if ( datatypeobj[colName] == 'timestamp' && colobj['new'].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['new'].toUpperCase() == 'NULL' ) { setdatastr = setdatastr + "\"" + colName + "\"= NULL " } @@ -99,7 +99,7 @@ console.log(payload[fieldname]['old']) { setdatastr = setdatastr + "\"" + colName + "\"= '" + colobj.new + "' " } - if ( datatypeobj[colName] == 'timestamp' && colobj['old'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } From 01b4c201b54df428e3c3cbb2b7099f99d3b25847 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 24 Jan 2020 19:22:20 +0530 Subject: [PATCH 053/101] decimal data type null case handling issuei [skip ci] --- src/api/migrateifxpg.js | 6 +++--- src/api/migratepg.js | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 82ccaa0..7a85710 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -55,7 +55,7 @@ async function migrateifxinsertdata(payload, client) { console.log(columns[colName]) tempvar = columns[colName] //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && columns[colName].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && columns[colName].toUpperCase() == 'NULL' ) { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } @@ -157,7 +157,7 @@ async function migrateifxupdatedata(payload, client) { if (bufffernewcond == 1) { conditionstr = conditionstr + " and " } - if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['new'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['new'].toUpperCase() == 'NULL' ) { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } @@ -173,7 +173,7 @@ async function migrateifxupdatedata(payload, client) { } //console.log(colobj.old); //oldconditionstr = oldconditionstr + tablename + "." 
+ colName + "= '" + colobj.old + "' " - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['old'].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } diff --git a/src/api/migratepg.js b/src/api/migratepg.js index b21d9f9..22ba735 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -91,7 +91,7 @@ console.log(payload[fieldname]['old']) if (bufferforsetdatastr == 1) { setdatastr = setdatastr + " , " } - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['new'].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['new'].toUpperCase() == 'NULL' ) { setdatastr = setdatastr + "\"" + colName + "\"= NULL " } @@ -99,7 +99,7 @@ console.log(payload[fieldname]['old']) { setdatastr = setdatastr + "\"" + colName + "\"= '" + colobj.new + "' " } - if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'decimal' ) && colobj['old'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } From 68133e220de62c5425cbcfc3ef6ea073df298739 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 28 Jan 2020 16:50:57 +0530 Subject: [PATCH 054/101] modifying update condition for primary key usage [skip ci] --- src/api/migratepg.js | 62 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 54 insertions(+), 8 deletions(-) diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 22ba735..6d7e274 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -72,22 +72,34 @@ console.log(payload[fieldname]['old']) const sqlfetchdatatypevalues = [ schemaname , table ]; await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { console.log("datatype fetched---------------------"); - //console.log(res); + // console.log(res); const data = res.rows; data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); }) // console.log(datatypeobj['dmoney']); + //Primary key retrival + var datapk = []; + //const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatapk = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3'; + const sqlfetchdatapkvalues = [ schemaname , table , 'PRIMARY KEY' ]; + await client.query(sqlfetchdatapk, sqlfetchdatapkvalues ).then(res => { + console.log("primary fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datapk.push(row['column_name']) ); + }) + console.log console.log("BBuidling condtion") buffferoldcond = 0 bufferforsetdatastr = 0 var setdatastr = "" var oldconditionstr = "" columnNames.forEach((colName) => { - console.log(colName); + // console.log(colName); colobj = payload[colName] - if (buffferoldcond == 1) { - oldconditionstr = oldconditionstr + " and " - } + // if (buffferoldcond == 1) { + // oldconditionstr = oldconditionstr + " and " + // } if (bufferforsetdatastr == 1) { setdatastr = setdatastr + " , " } @@ 
-98,7 +110,12 @@ console.log(payload[fieldname]['old']) else { setdatastr = setdatastr + "\"" + colName + "\"= '" + colobj.new + "' " - } + } + if (datapk.length == 0) + { + if (buffferoldcond == 1) { + oldconditionstr = oldconditionstr + " and " + } if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " @@ -107,7 +124,36 @@ console.log(payload[fieldname]['old']) { oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " } - buffferoldcond = 1 + buffferoldcond = 1 + } + else + { + if( datapk.includes(colName) ) + { + if (buffferoldcond == 1) { + oldconditionstr = oldconditionstr + " and " + } + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) + { + oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } + else + { + oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + } + buffferoldcond = 1 + } + + } + // if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) + // { + // oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + // } + // else + // { + // oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + // } + // buffferoldcond = 1 bufferforsetdatastr = 1 }); console.log(oldconditionstr); @@ -116,7 +162,7 @@ console.log(payload[fieldname]['old']) console.log(sql); await client.query(sql); // sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update :
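// The primary-key query introduced in this patch could be factored the same
// way; a minimal sketch under the same assumptions (fetchPrimaryKeys is an
// illustrative name only):
async function fetchPrimaryKeys(client, schemaname, tablename) {
  // Returns the PK column names of one table, e.g. ['text_id']
  const sql = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3';
  const res = await client.query(sql, [schemaname, tablename, 'PRIMARY KEY']);
  return res.rows.map(row => row.column_name);
}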
set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val" - sql = `update ${table} set ${setdatastr} where ${oldconditionstr} ;` + sql = `update "${table}" set ${setdatastr} where ${oldconditionstr} ;` console.log("sqlstring .............................."); console.log(sql); //update test5 set id='[object Object].new', cityname='[object Object].new' where id='[object Object].old' AND cityname='[obddject Object].old' ; From 23f2bff87b8c9a04c9e1e224fc303d6b9fac54c4 Mon Sep 17 00:00:00 2001 From: informix Date: Wed, 29 Jan 2020 09:36:29 -0500 Subject: [PATCH 055/101] json esacape char [skip ci] --- informix_auditing/audit_util.c | 61 ++++++++++++++++++++++++++++++++-- informix_auditing/audit_util.h | 5 ++- 2 files changed, 62 insertions(+), 4 deletions(-) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index c0ddb21..365a8c8 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -149,6 +149,7 @@ mi_string *doInsertCN() //fixname(pdbname); sprintf(&buffer[posi], "\"SCHEMANAME\": \"%s\", ", pdbname); posi = strlen(buffer); + printf("\"TABLENAME\": \"%s\", ", tabname); sprintf(&buffer[posi], "\"TABLENAME\": \"%s\", ", tabname); posi = strlen(buffer); sprintf(&buffer[posi], "\"OPERATION\": \"INSERT\", "); @@ -172,7 +173,7 @@ DPRINTF("logger", 90, ("insert: colname: (0x%x) [%s]", pcolname, pcolname)); sprintf(&buffer[posi], ", "); posi = strlen(buffer); } - sprintf(&buffer[posi], "\"%s\" : \"%s\"", pcolname, pcast); + sprintf(&buffer[posi], "\"%s\" : \"%s\"", pcolname, escapecharjson(pcast)); if (strcmp("unsupportedtype", pcast) == 0) { strcpy(uniquedatatype, "true"); } @@ -322,7 +323,11 @@ DPRINTF("logger", 90, ("delete: colname: (0x%x) [%s]", pcolname, pcolname)); sprintf(&buffer[posi], ", "); posi = strlen(buffer); } - sprintf(&buffer[posi], "\"%s\" : \"%s\"", pcolname, pcast); + //printf("%s",pcast); + + //pcast = escapecharjson(pcast); + //printf("%s",pcast); + sprintf(&buffer[posi], "\"%s\" : \"%s\"", pcolname, escapecharjson(pcast)); if (strcmp("unsupportedtype", pcast) == 0) { strcpy(uniquedatatype, "true"); } @@ -434,7 +439,7 @@ mi_string *doUpdateCN() sprintf(&buffer[pbufLen], ", "); pbufLen = strlen(buffer); } - sprintf(&buffer[pbufLen], "\"%s\" : { \"old\" : \"%s\", \"new\" : \"%s\" }", poldcolname, pcast, pcast2); + sprintf(&buffer[pbufLen], "\"%s\" : { \"old\" : \"%s\", \"new\" : \"%s\" }", poldcolname, escapecharjson(pcast), escapecharjson(pcast2)); if (strcmp("unsupportedtype", pcast2) == 0) { strcpy(uniquedatatype, "true"); } @@ -553,4 +558,54 @@ int posttopic(char *jsondata, char *posturl) } /*--------------------------------------------------------------*/ +char * escapecharjson( char *jsonvalue_org) +{ + char *jsonvalue_copy; // first copy the pointer to not change the original + char *escjsonvalue; + int posi = 0; + //char *p = jsonvalue_org; + //for (; *p != '\0'; p++) {} + //printf("length of string : %ld",(p - jsonvalue_org)); + escjsonvalue = (char *)malloc(10000); + for (jsonvalue_copy = jsonvalue_org; *jsonvalue_copy != '\0'; jsonvalue_copy++) { + printf("%c:%d\n", *jsonvalue_copy,*jsonvalue_copy); + if (*jsonvalue_copy == '"') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\\"") ; + } else if (*jsonvalue_copy == '\t') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\t") ; + } else if (*jsonvalue_copy == '\f') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\f") ; + } else if (*jsonvalue_copy == '\n') { + posi = 
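/* An aside on this escaping loop: every append starts from strlen(escjsonvalue),
   so the output buffer must be zero-initialised for the first strlen call to be
   defined - plain malloc leaves it uninitialised, which is what the later
   malloc -> calloc change in this series (audit_util.c) addresses. The fixed
   10000-byte allocation is also an implicit cap on the escaped value length. */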
strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\n") ; + } else if (*jsonvalue_copy == '\r') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\r") ; + } else if (*jsonvalue_copy == '\\') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\\\") ; + } else if (*jsonvalue_copy == '/') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\/") ; + } else if (*jsonvalue_copy == '\b') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%s","\\b") ; + } else if ('\x00' <= *jsonvalue_copy && *jsonvalue_copy <= '\x1f') { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "\\u%4x",(int)*jsonvalue_copy) ; + } else { + posi = strlen(escjsonvalue); + sprintf(&escjsonvalue[posi], "%c",*jsonvalue_copy) ; + } + + + } + //p=NULL; + jsonvalue_copy=NULL; + //printf("%s", escjsonvalue); + return(escjsonvalue); + } diff --git a/informix_auditing/audit_util.h b/informix_auditing/audit_util.h index d222c35..1135736 100644 --- a/informix_auditing/audit_util.h +++ b/informix_auditing/audit_util.h @@ -40,6 +40,8 @@ #include #include #include +#include +#include #define BUFSIZE 29900 @@ -53,4 +55,5 @@ mi_string *doUpdateCN(); mi_integer set_tracing(mi_lvarchar *class, mi_integer lvl, mi_lvarchar *tfile, MI_FPARAM *fparam); char* gettimestamp(); -int posttopic(char *jsondata, char *posturl); +int posttopic(char *jsondata, char *posturl); +char * escapecharjson( char *jsonvalue_org); From 3d539689cfcb3f4b4a8c7c2cbd973b14d5032366 Mon Sep 17 00:00:00 2001 From: informix Date: Wed, 29 Jan 2020 12:13:09 -0500 Subject: [PATCH 056/101] code change on quotes [skip ci] --- informix_auditing/audit_util.c | 2 +- src/api/migratepg.js | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index 365a8c8..f3489d3 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -594,7 +594,7 @@ char * escapecharjson( char *jsonvalue_org) } else if (*jsonvalue_copy == '\b') { posi = strlen(escjsonvalue); sprintf(&escjsonvalue[posi], "%s","\\b") ; - } else if ('\x00' <= *jsonvalue_copy && *jsonvalue_copy <= '\x1f') { + } else if ('\x00' >= *jsonvalue_copy && *jsonvalue_copy <= '\x1f') { posi = strlen(escjsonvalue); sprintf(&escjsonvalue[posi], "\\u%4x",(int)*jsonvalue_copy) ; } else { diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 6d7e274..cc7d5b1 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -29,7 +29,7 @@ console.log(payload[fieldname]) sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - sql = `insert into ${table} (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)"
+    sql = `insert into "${table}" (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into :
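// Why the added quotes around ${table} matter: PostgreSQL folds unquoted
// identifiers to lower case, while double-quoted identifiers are taken
// literally, e.g.
//   insert into Project ...     -- resolves to table "project"
//   insert into "Project" ...   -- resolves to table "Project"
// so quoting preserves mixed-case table names coming from Informix.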
(col_1, col_2, ...) values (val_1, val_2, ...)" console.log("Executing query : " + sql); // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; await client.query(sql); @@ -208,4 +208,4 @@ module.exports = { migratepgDelete, migratepgInsert, migratepgUpdate -} \ No newline at end of file +} From 7b2bd30986a2a485986ffb775555e2f91441c5ab Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 3 Feb 2020 11:12:27 +0530 Subject: [PATCH 057/101] Code changes for delete and unique condition addition --- src/api/migrateifxpg.js | 347 ++++++++++++++++++++++++++++++---------- src/api/migratepg.js | 2 +- src/consumer.js | 11 +- 3 files changed, 276 insertions(+), 84 deletions(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 7a85710..3621a3e 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -29,41 +29,79 @@ async function migrateifxinsertdata(payload, client) { const tablename = payload.TABLENAME console.log("work2---------------------------------------") const db_schema = payload.SCHEMANAME + let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; console.log("retriving data type ------") var datatypeobj = new Object(); const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; - const sqlfetchdatatypevalues = [ db_schema , tablename ]; - await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { + const sqlfetchdatatypevalues = [schemaname, tablename]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues).then(res => { console.log("datatype fetched---------------------"); //console.log(res); - const data = res.rows; - data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); - }) + const data = res.rows; + data.forEach(row => datatypeobj[row['column_name']] = row['udt_name']); + }) + //Primary key retrival + var datapk = []; + //const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatapk = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3'; + const sqlfetchdatapkvalues = [schemaname, tablename, 'PRIMARY KEY']; + await client.query(sqlfetchdatapk, sqlfetchdatapkvalues).then(res => { + console.log("primary fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datapk.push(row['column_name'])); + }) var conditionstr = "" const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(','); const insertSql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`; bufffercond = 0 console.log("work2---------------------------------------") + usepkforcond = 0 + if (datapk.length != 0) { + columnNames.forEach((colName) => { + if (datapk.includes(colName)) { + if (columns[colName] != 'unsupportedtype') { + usepkforcond = usepkforcond + 1 + } + } + + }); + } + columnNames.forEach((colName) => { console.log(colName) - tempvar = columns[colName] - console.log(tempvar) - if (columns[colName] != 'unsupportedtype') { - if (bufffercond == 1) { - conditionstr = conditionstr + " and " + //tempvar = columns[colName] + //console.log(tempvar) + if (usepkforcond == 0) { + if (columns[colName] != 'unsupportedtype') { + if (bufffercond == 1) { 
+ conditionstr = conditionstr + " and " + } + //console.log(columns[colName]) + //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 } - console.log(columns[colName]) - tempvar = columns[colName] - //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && columns[colName].toUpperCase() == 'NULL' ) - { - conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + if (datapk.includes(colName)) { + if (columns[colName] != 'unsupportedtype') { + if (bufffercond == 1) { + conditionstr = conditionstr + " and " + } + //console.log(columns[colName]) + //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 + } } - else - { - conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - } - bufffercond = 1 } }); infsql = `select * from ${tablename} where ${conditionstr};` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" @@ -86,22 +124,21 @@ async function migrateifxinsertdata(payload, client) { for (const row of data) { const values = []; columnNames.forEach((colName) => { - if (row[colName]) - { - if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' ) { - console.log(`utf8 or datetime format ${colName}`); - // values.push(new Buffer.from(row[colName],'binary')); - values.push(row[colName]); + if (row[colName]) { + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp') { + console.log(`utf8 or datetime format ${colName}`); + // values.push(new Buffer.from(row[colName],'binary')); + values.push(row[colName]); + } else { + // values.push(row[colName]); + values.push(new Buffer.from(row[colName], 'binary')); + } } else { - // values.push(row[colName]); - values.push(new Buffer.from(row[colName], 'binary')); + values.push(row[colName]); } - } else { - values.push(row[colName]); - } //values.push(new Buffer.from(row[colName],'binary')); }); - let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; + let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); @@ -128,17 +165,29 @@ async function migrateifxupdatedata(payload, client) { const tablename = payload.TABLENAME console.log("work2---------------------------------------") const db_schema = payload.SCHEMANAME + let schemaname = (db_schema == pg_dbname) ? 'public' : db_schema; console.log(tablename); console.log("retriving data type ------") var datatypeobj = new Object(); const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; - const sqlfetchdatatypevalues = [ db_schema , tablename ]; - await client.query(sqlfetchdatatype, sqlfetchdatatypevalues ).then(res => { + const sqlfetchdatatypevalues = [schemaname, tablename]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues).then(res => { console.log("datatype fetched---------------------"); //console.log(res); - const data = res.rows; - data.forEach(row => datatypeobj[ row['column_name'] ]= row['udt_name'] ); - }) + const data = res.rows; + data.forEach(row => datatypeobj[row['column_name']] = row['udt_name']); + }) + //Primary key retrival + var datapk = []; + //const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatapk = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3'; + const sqlfetchdatapkvalues = [schemaname, tablename, 'PRIMARY KEY']; + await client.query(sqlfetchdatapk, sqlfetchdatapkvalues).then(res => { + console.log("primary fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datapk.push(row['column_name'])); + }) var conditionstr = "" var updatestr = "" bufffercond = 0 @@ -147,43 +196,78 @@ async function migrateifxupdatedata(payload, client) { bufffernewcond = 0 buffferoldcond = 0 var oldconditionstr = "" + usepkforcond = 0 + if (datapk.length != 0) { + columnNames.forEach((colName) => { + if (datapk.includes(colName)) { + colobj = columns[colName] + if (colobj.new != 'unsupportedtype') { + usepkforcond = usepkforcond + 1 + } + } + + }); + } + columnNames.forEach((colName) => { 
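// On the isUtf8 / Buffer.from branch above: values re-read for the "unique
// datatype" path are bound as query parameters, so non-utf8 byte strings are
// wrapped with Buffer.from(value, 'binary') to reach node-postgres as bytea
// input, while utf8 text and timestamp values pass through unchanged; the
// row[colName] truthiness guard also keeps Buffer.from from throwing on null.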
console.log(colName) colobj = columns[colName] //console.log(typeof (colobj)) //console.log(colobj) - //console.log(colobj.new) - if (colobj.new != 'unsupportedtype') { - if (bufffernewcond == 1) { - conditionstr = conditionstr + " and " - } - if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['new'].toUpperCase() == 'NULL' ) - { - conditionstr = conditionstr + tablename + "." + colName + " is NULL " + //console.log(colobj.new) + if (usepkforcond == 0) { + if (colobj.new != 'unsupportedtype') { + if (bufffernewcond == 1) { + conditionstr = conditionstr + " and " + } + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['new'].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + } + bufffernewcond = 1 } - else - { - conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " - } - bufffernewcond = 1 - } - if (colobj['old'] != 'unsupportedtype') { - if (buffferoldcond == 1) { - oldconditionstr = oldconditionstr + " and " + if (colobj['old'] != 'unsupportedtype') { + if (buffferoldcond == 1) { + oldconditionstr = oldconditionstr + " and " + } + //console.log(colobj.old); + //oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['old'].toUpperCase() == 'NULL') { + oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } else { + oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + } + buffferoldcond = 1 } - //console.log(colobj.old); - //oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old + "' " - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) - { - oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } else { + if (datapk.includes(colName)) { + if (colobj.new != 'unsupportedtype') { + if (bufffernewcond == 1) { + conditionstr = conditionstr + " and " + } + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['new'].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " + } + bufffernewcond = 1 + } + if (colobj['old'] != 'unsupportedtype') { + if (buffferoldcond == 1) { + oldconditionstr = oldconditionstr + " and " + } + //console.log(colobj.old); + //oldconditionstr = oldconditionstr + tablename + "." 
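// Two predicates are built in this loop: conditionstr matches the *new* column
// values (used to re-select the freshly updated row), while oldconditionstr
// matches the *old* values and becomes the WHERE clause of the Postgres UPDATE,
// so the not-yet-synced Postgres row can still be located.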
+ colName + "= '" + colobj.old + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['old'].toUpperCase() == 'NULL') { + oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " + } else { + oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " + } + buffferoldcond = 1 + } } - else - { - oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " - } - buffferoldcond = 1 } - }); console.log(conditionstr) @@ -208,25 +292,24 @@ async function migrateifxupdatedata(payload, client) { if (buffferupcond == 1) { updatestr = updatestr + " , " } - if (row[colName]) - { - if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' ) { - //console.log(`utf8 format ${colName}`); - values.push(row[colName]); - updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " - buffferupcond = 1 - counter = counter + 1 - } else { - values.push(new Buffer.from(row[colName], 'binary')); - updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " - buffferupcond = 1 - counter = counter + 1 - } + if (row[colName]) { + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp') { + //console.log(`utf8 format ${colName}`); + values.push(row[colName]); + updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " + buffferupcond = 1 + counter = counter + 1 + } else { + values.push(new Buffer.from(row[colName], 'binary')); + updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " + buffferupcond = 1 + counter = counter + 1 + } } else { values.push(row[colName]); updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " buffferupcond = 1 - counter = counter + 1 + counter = counter + 1 } }); //logger.debug(`postgres insert sql ${insertSql} with values[${JSON.stringify(values)}`); @@ -251,9 +334,109 @@ async function migrateifxupdatedata(payload, client) { // async function migratedeletedata(client, database, tableName, informixTable, postgresTable) { // } +async function migrateifxdeletedata(payload, client) { + console.log(payload); + const table = payload.TABLENAME + const tablename = payload.TABLENAME + const dbname = payload.SCHEMANAME + payload = payload.DATA + try { + //const client = await dbpool.connect(); + //const client = dbpool; + //console.log("welcome123"); + const columnNames = Object.keys(payload) + let schemaname = (dbname == pg_dbname) ? 
'public' : dbname; + console.log("retriving data type ------") + var datatypeobj = new Object(); + const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatatypevalues = [schemaname, tablename]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues).then(res => { + console.log("datatype fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datatypeobj[row['column_name']] = row['udt_name']); + }) + //Primary key retrival + var datapk = []; + //const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatapk = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3'; + const sqlfetchdatapkvalues = [schemaname, tablename, 'PRIMARY KEY']; + await client.query(sqlfetchdatapk, sqlfetchdatapkvalues).then(res => { + console.log("primary fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datapk.push(row['column_name'])); + }) + console.log("work2---------------------------------------") + usepkforcond = 0 + if (datapk.length != 0) { + columnNames.forEach((colName) => { + if (datapk.includes(colName)) { + if (columns[colName] != 'unsupportedtype') { + usepkforcond = usepkforcond + 1 + } + } + + }); + } + var conditionstr = "" + //const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(','); + //const insertSql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`; + bufffercond = 0 + columnNames.forEach((colName) => { + console.log(colName) + //tempvar = columns[colName] + //console.log(tempvar) + if (usepkforcond == 0) { + if (columns[colName] != 'unsupportedtype') { + if (bufffercond == 1) { + conditionstr = conditionstr + " and " + } + //console.log(columns[colName]) + //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 + } + } else { + if (datapk.includes(colName)) { + if (columns[colName] != 'unsupportedtype') { + if (bufffercond == 1) { + conditionstr = conditionstr + " and " + } + //console.log(columns[colName]) + //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 + } + } + } + }); + + sql = `SET search_path TO ${schemaname};`; + console.log(sql); + await client.query(sql); + sql = `delete from "${table}" where ${conditionstr} ;` // "delete query + console.log(sql); + // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; + await client.query(sql); + //await client.release(true); + console.log(`end connection of postgres for database`); + } catch (e) { + throw e; + } +} module.exports = { migrateifxinsertdata, - migrateifxupdatedata + migrateifxupdatedata, + migrateifxdeletedata }; \ No newline at end of file diff --git a/src/api/migratepg.js b/src/api/migratepg.js index cc7d5b1..0d8111e 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -192,7 +192,7 @@ async function migratepgDelete(dbpool, payload) { sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - sql = `delete from ${table} where ${Object.keys(payload).map((key) => `${key}='${payload[key]['new']}'`).join(' AND ')} ;` // "delete query + sql = `delete from "${table}" where ${Object.keys(payload).map((key) => `${key}='${payload[key]}'`).join(' AND ')} ;` // "delete query console.log(sql); // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; await client.query(sql); diff --git a/src/consumer.js b/src/consumer.js index c7dd274..6c7b336 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -20,7 +20,8 @@ const { } = require('./api/migratepg') const { migrateifxinsertdata, - migrateifxupdatedata + migrateifxupdatedata, + migrateifxdeletedata } = require('./api/migrateifxpg') const pushToKafka = require('./api/pushToKafka') const postMessage = require('./api/postslackinfo') @@ -85,6 +86,14 @@ async function dataHandler(messageSet, topic, partition) { //console.log(err) }) } + if (payload.OPERATION === 'DELETE') { + await migrateifxdeletedata(payload, pool) + .catch(err => { + postgreErr = err + //console.log(err) + }) + } + console.log("Different approach") } else { if (payload.OPERATION === 'INSERT') { From 876c70089d7bbd38ccc8d14898adc35c686349a2 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 3 Feb 2020 12:00:10 +0530 Subject: [PATCH 058/101] Code changes for delete and unique condition addition --- src/api/migratepg.js | 59 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 58 insertions(+), 1 deletion(-) diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 0d8111e..67c579c 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -180,6 +180,7 @@ async function migratepgDelete(dbpool, payload) { console.log(payload); const table = payload.TABLENAME + const tablename = payload.TABLENAME const dbname = payload.SCHEMANAME payload = payload.DATA try { @@ -189,10 +190,66 @@ async function migratepgDelete(dbpool, payload) { //console.log("welcome123"); const columnNames = Object.keys(payload) let schemaname = (dbname == pg_dbname) ? 
'public' : dbname; + console.log("retriving data type ------") + var datatypeobj = new Object(); + const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatatypevalues = [schemaname, tablename]; + await client.query(sqlfetchdatatype, sqlfetchdatatypevalues).then(res => { + console.log("datatype fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datatypeobj[row['column_name']] = row['udt_name']); + }) + //Primary key retrival + var datapk = []; + //const sqlfetchdatatype = 'SELECT column_name, udt_name FROM information_schema.COLUMNS WHERE table_schema=$1 and TABLE_NAME = $2'; + const sqlfetchdatapk = 'SELECT c.column_name, c.ordinal_position FROM information_schema.key_column_usage AS c LEFT JOIN information_schema.table_constraints AS t ON t.constraint_name = c.constraint_name WHERE t.constraint_schema=$1 AND t.table_name = $2 AND t.constraint_type = $3'; + const sqlfetchdatapkvalues = [schemaname, tablename, 'PRIMARY KEY']; + await client.query(sqlfetchdatapk, sqlfetchdatapkvalues).then(res => { + console.log("primary fetched---------------------"); + //console.log(res); + const data = res.rows; + data.forEach(row => datapk.push(row['column_name'])); + }) + + var conditionstr = "" + bufffercond = 0 + columnNames.forEach((colName) => { + if (datapk.length == 0) + { + if (bufffercond == 1) { + conditionstr = conditionstr + " and " + } + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 + } else { + if( datapk.includes(colName) ) + { + if (datapk.includes(colName)) { + if (bufffercond == 1) { + conditionstr = conditionstr + " and " + } + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + conditionstr = conditionstr + tablename + "." + colName + " is NULL " + } else { + conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " + } + bufffercond = 1 + } + } + + } + }); sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - sql = `delete from "${table}" where ${Object.keys(payload).map((key) => `${key}='${payload[key]}'`).join(' AND ')} ;` // "delete query + conditionstr + sql = `delete from "${table}" where ${conditionstr} ;` // "delete query + //sql = `delete from "${table}" where ${Object.keys(payload).map((key) => `${key}='${payload[key]}'`).join(' AND ')} ;` // "delete query console.log(sql); // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; await client.query(sql); From ab5d5674a14c89a6ff5786259697297579ffc0c7 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 3 Feb 2020 16:11:23 +0530 Subject: [PATCH 059/101] Code changes for delete and unique condition addition --- informix_auditing/audit_util.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c index f3489d3..3a893bd 100644 --- a/informix_auditing/audit_util.c +++ b/informix_auditing/audit_util.c @@ -566,7 +566,8 @@ char * escapecharjson( char *jsonvalue_org) //char *p = jsonvalue_org; //for (; *p != '\0'; p++) {} //printf("length of string : %ld",(p - jsonvalue_org)); - escjsonvalue = (char *)malloc(10000); + //escjsonvalue = (char *)malloc(10000); + escjsonvalue = (char *)calloc(10000, sizeof(char)); for (jsonvalue_copy = jsonvalue_org; *jsonvalue_copy != '\0'; jsonvalue_copy++) { printf("%c:%d\n", *jsonvalue_copy,*jsonvalue_copy); @@ -606,6 +607,6 @@ char * escapecharjson( char *jsonvalue_org) } //p=NULL; jsonvalue_copy=NULL; - //printf("%s", escjsonvalue); + printf("%s", escjsonvalue); return(escjsonvalue); } From 592b1ad5aac177eed671cf331a7218333c224d13 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 4 Feb 2020 22:30:45 +0530 Subject: [PATCH 060/101] column object fix --- src/api/migrateifxpg.js | 2 ++ src/api/migratepg.js | 9 ++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 3621a3e..30ea692 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -339,12 +339,14 @@ async function migrateifxdeletedata(payload, client) { const table = payload.TABLENAME const tablename = payload.TABLENAME const dbname = payload.SCHEMANAME + columns = payload.DATA payload = payload.DATA try { //const client = await dbpool.connect(); //const client = dbpool; //console.log("welcome123"); + console.log("=========== pg delete with unique datatype =============="); const columnNames = Object.keys(payload) let schemaname = (dbname == pg_dbname) ? 'public' : dbname; console.log("retriving data type ------") diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 67c579c..917ebaa 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -182,12 +182,13 @@ async function migratepgDelete(dbpool, payload) { const table = payload.TABLENAME const tablename = payload.TABLENAME const dbname = payload.SCHEMANAME + columns = payload.DATA payload = payload.DATA try { //const client = await dbpool.connect(); const client = dbpool; - //console.log("welcome123"); + console.log("=========== pg delete without unique datatype =============="); const columnNames = Object.keys(payload) let schemaname = (dbname == pg_dbname) ? 
'public' : dbname; console.log("retriving data type ------") @@ -227,8 +228,7 @@ async function migratepgDelete(dbpool, payload) { } bufffercond = 1 } else { - if( datapk.includes(colName) ) - { + if (datapk.includes(colName)) { if (bufffercond == 1) { conditionstr = conditionstr + " and " @@ -240,14 +240,13 @@ async function migratepgDelete(dbpool, payload) { } bufffercond = 1 } - } + } }); sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - conditionstr sql = `delete from "${table}" where ${conditionstr} ;` // "delete query //sql = `delete from "${table}" where ${Object.keys(payload).map((key) => `${key}='${payload[key]}'`).join(' AND ')} ;` // "delete query console.log(sql); From a1841dcbc9472e9fca240bad7ea7e39fa7c610d8 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 7 Feb 2020 13:13:57 +0530 Subject: [PATCH 061/101] Conciler logic and log implementation --- .circleci/config.yml | 16 +-- src/api/audit.js | 97 ++++++++++++----- src/api/consumer_retry.js | 51 +++++++++ src/api/migratedynamodb.js | 4 +- src/api/postslackinfo.js | 14 ++- src/api/pushToKafka.js | 2 +- src/common/app_log.js | 216 +++++++++++++++++++++++++++++++++++++ src/consumer.js | 203 ++++++++-------------------------- src/models/audit_log.js | 16 ++- src/models/consumer_log.js | 9 +- src/models/index.js | 4 +- src/models/producer_log.js | 9 +- src/nodeserver.js | 60 ++++------- src/reconciler.js | 72 +++++++++++++ 14 files changed, 522 insertions(+), 251 deletions(-) create mode 100644 src/api/consumer_retry.js create mode 100644 src/common/app_log.js create mode 100644 src/reconciler.js diff --git a/.circleci/config.yml b/.circleci/config.yml index b6f8ca1..2d956b1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,19 +37,19 @@ builddeploy_steps: &builddeploy_steps ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # consumer deployment - # rm -rf buildenvvar + rm -rf buildenvvar ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # without kafka dynamodb - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # notify deployment # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar diff --git a/src/api/audit.js b/src/api/audit.js index 2025331..57a63b2 100644 --- a/src/api/audit.js +++ b/src/api/audit.js @@ -1,36 +1,54 @@ const models = require('../models') const model=models.auditlog const Joi = require('joi') + producerLog.schema = 
Joi.object().keys({ - TOPICNAME: Joi.string().required(), - SOURCE: Joi.string().required(), - SCHEMA_NAME: Joi.string().required(), - TABLE_NAME: Joi.string().required(), - PRODUCER_PAYLOAD: Joi.object().required(), - OPERATION: Joi.string().valid('INSERT','UPDATE','DELETE').required() + SEQ_ID: Joi.string().required(), + PODUCER_PUBLISH_RETRY_COUNT: Joi.number(), + PRODUCER_PAYLOAD: Joi.object(), + PRODUCER_FAILURE_LOG: Joi.object() }) -//add producer_log +//add producer_log = used for data update about producer received function producerLog(payload) { const result = Joi.validate(payload, producerLog.schema) if(result.error !== null) { return Promise.resolve().then(function () { - throw new Error('Producer' + result.error) + throw new Error('Producer log create ' + result.error) }) } - return model.producer_log.create(payload) } +//update producer_log = used for failure update in case of success, it will not be executed +function producerLog_update(payload) { + const result = Joi.validate(payload, producerLog.schema) + if(result.error !== null) { + return Promise.resolve().then(function () { + throw new Error('Producer log create ' + result.error) + }) + } + //return model.producer_log.update(payload) + const { SEQ_ID,PODUCER_PUBLISH_RETRY_COUNT, ...change } = payload + return model.producer_log.update(change, { where: { SEQ_ID: payload.SEQ_ID, PODUCER_PUBLISH_RETRY_COUNT : payload.PODUCER_PUBLISH_RETRY_COUNT }}) +} + + + pAuditLog.schema = Joi.object().keys({ - SEQ_ID: Joi.number().required(), - PRODUCER_PUBLISH_STATUS: Joi.string().valid('success','failure').required(), + SEQ_ID: Joi.string().required(), + PAYLOAD_TIME: Joi.date(), + PRODUCER_PAYLOAD: Joi.object(), + PRODUCER_PUBLISH_STATUS: Joi.string().valid('success','failure'), PRODUCER_FAILURE_LOG: Joi.object(), - PRODUCER_PUBLISH_TIME: Joi.date().required() + PRODUCER_PUBLISH_TIME: Joi.date(), + PODUCER_PUBLISH_RETRY_COUNT: Joi.number(), + OVERALL_STATUS: Joi.string(), + RECONCILE_STATUS: Joi.number() }) -//add audit_log +//add audit_log about the producer details function pAuditLog(payload) { const result = Joi.validate(payload, pAuditLog.schema) if(result.error !== null) { @@ -41,15 +59,27 @@ function pAuditLog(payload) { return model.audit_log.create(payload) } +//updated audit_log about the producer status +function pAuditLog_update(payload) { + const result = Joi.validate(payload, pAuditLog.schema) + if(result.error !== null) { + return Promise.resolve().then(function () { + throw new Error('Audit' + result.error) + }) + } + //return model.audit_log.create(payload) + const { SEQ_ID, ...change } = payload + return model.audit_log.update(change, { where: { SEQ_ID: payload.SEQ_ID }}) +} + consumerLog.schema = Joi.object().keys({ - SEQ_ID: Joi.number().required(), - TOPICNAME: Joi.string().required(), - SCHEMA_NAME: Joi.string().required(), - CONSUMAER_QUERY: Joi.object().required(), - DESTINATION: Joi.string().required() + SEQ_ID: Joi.string().required(), + CONSUMER_UPDATE_RETRY_COUNT: Joi.number(), + CONSUMER_PAYLOAD: Joi.object(), + CONSUMER_FAILURE_LOG: Joi.object() }) -//add consumer_log +//add consumer_log = Entering received record function consumerLog(payload) { const result = Joi.validate(payload, consumerLog.schema) if(result.error !== null) { @@ -61,16 +91,30 @@ function consumerLog(payload) { return model.consumer_log.create(payload) } +//update consumer_log = used for failure log update +function consumerLog_update(payload) { + const result = Joi.validate(payload, consumerLog.schema) + if(result.error !== null) { + 
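// The *_update variants here all follow one pattern: rest-destructure the key
// fields out of the payload, e.g.
//   const { SEQ_ID, ...change } = payload
// so only the remaining columns are passed to model.update(), while SEQ_ID
// (plus the retry count for the log tables) feeds the WHERE clause.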
return Promise.resolve().then(function () {
+            throw new Error('Consumer' + result.error)
+        })
+    }
+    //return model.consumer_log.create(payload)
+    const { SEQ_ID,CONSUMER_UPDATE_RETRY_COUNT, ...change } = payload
+    return model.consumer_log.update(change, { where: { SEQ_ID: payload.SEQ_ID, CONSUMER_UPDATE_RETRY_COUNT : payload.CONSUMER_UPDATE_RETRY_COUNT }})
+}
+
 cAuditLog.schema = Joi.object().keys({
-    SEQ_ID: Joi.number().required(),
-    CONSUMER_DEPLOY_STATUS: Joi.string().valid('success','failure').required(),
+    SEQ_ID: Joi.string().required(),
+    CONSUMER_PAYLOAD: Joi.object(),
+    CONSUMER_DEPLOY_STATUS: Joi.string().valid('success','failure'),
     CONSUMER_FAILURE_LOG: Joi.object(),
-    CONSUMER_UPDATE_TIME: Joi.date().required(),
-    CONSUMER_RETRY_COUNT: Joi.number(),
-    PL_SQUENCE_ID: Joi.number()
+    CONSUMER_UPDATE_TIME: Joi.date(),
+    CONSUMER_UPDATE_RETRY_COUNT: Joi.number(),
+    OVERALL_STATUS: Joi.string()
 })
-//add audit_log
+//add audit_log = only update is possible
 function cAuditLog(payload) {
   const result = Joi.validate(payload, cAuditLog.schema)
   if(result.error !== null) {
@@ -85,7 +129,10 @@
 module.exports = {
   producerLog,
+  producerLog_update,
   pAuditLog,
+  pAuditLog_update,
   consumerLog,
+  consumerLog_update,
   cAuditLog
 }
diff --git a/src/api/consumer_retry.js b/src/api/consumer_retry.js
new file mode 100644
index 0000000..f1d2a59
--- /dev/null
+++ b/src/api/consumer_retry.js
@@ -0,0 +1,51 @@
+const config = require('config');
+const {
+    create_producer_app_log,
+    producerpost_success_log,
+    producerpost_failure_log
+  } = require('../common/app_log')
+const pushToKafka = require('./pushToKafka')
+const {
+    postMessage,
+    validateMsgPosted
+  } = require('./postslackinfo')
+// declared async so the awaits below are valid
+async function consumerretry(producer, payload)
+{
+  payload['retryCount'] = payload.retryCount + 1;
+  //add producer_log
+  try {
+    await create_producer_app_log(payload, "ConsumerRetry")
+  } catch (error) {
+    console.log(error)
+  }
+  let kafka_error = await pushToKafka(producer, config.topic.NAME, payload)
+  //add auditlog
+  if (!kafka_error) {
+    await producerpost_success_log(payload, "ConsumerReposted")
+    //res.send('done') - no HTTP response object exists in this module
+  } else {
+    //add auditlog
+    await producerpost_failure_log(payload, kafka_error, 'ConsumerRepostFailed')
+    const msgValue = {
+      ...kafka_error,
+      SEQ_ID: payload.SEQ_ID,
+      recipients: config.topic_error.EMAIL,
+      msgoriginator: "consumer-producer"
+    }
+    //send error message to kafka
+    kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue)
+    if (!kafka_error) {
+      console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
+    } else {
+      if (config.SLACK.SLACKNOTIFY === 'true') {
+        postMessage("consumer repost failed - But unable to post message in kafka error topic due to errors", async (response) => {
+          await validateMsgPosted(response.statusCode, response.statusMessage)
+        });
+      }
+    }
+
+  }
+}
+
+module.exports = consumerretry
\ No newline at end of file
diff --git a/src/api/migratedynamodb.js b/src/api/migratedynamodb.js
index 8c115bd..bea1d1a 100644
--- a/src/api/migratedynamodb.js
+++ b/src/api/migratedynamodb.js
@@ -5,11 +5,13 @@ var AWS = require("aws-sdk");
 async function pushToDynamoDb(payload) {
   try {
     console.log('----Inside DynomoDB code -------');
+    let seqID = payload.TIME + "_" + payload.TABLENAME
     // console.log(payload)
     var params = {
       TableName: config.DYNAMODB.TABLENAME,
       Item: {
-        SequenceID: payload.TIME,
+        SequenceID: seqID,
+        pl_time: payload.TIME,
         pl_document: payload,
         pl_table: payload.TABLENAME,
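// SequenceID becomes TIME + "_" + TABLENAME (e.g. "1580472000000_test5",
// an illustrative value): the timestamp alone can collide when two tables
// change in the same tick, so the table name is folded into the key and the
// raw timestamp is kept separately in the new pl_time attribute.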
pl_schemaname: payload.SCHEMANAME, diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js index fabe519..64bc7bf 100644 --- a/src/api/postslackinfo.js +++ b/src/api/postslackinfo.js @@ -42,5 +42,17 @@ function postMessage(message, callback) { postReq.write(body); postReq.end(); } +function validateMsgPosted(responsecode,responsemsg) { + if (responsecode < 400) { + console.info('Message posted successfully'); + } else if (responsecode < 500) { + console.error(`Error posting message to Slack API: ${responsecode} - ${responsemsg}`); + } else { + console.log(`Server error when processing message: ${responsecode} - ${responsemsg}`); + } +} -module.exports = postMessage \ No newline at end of file +module.exports = { + postMessage, + validateMsgPosted +} \ No newline at end of file diff --git a/src/api/pushToKafka.js b/src/api/pushToKafka.js index e033c43..c665b5c 100644 --- a/src/api/pushToKafka.js +++ b/src/api/pushToKafka.js @@ -27,7 +27,7 @@ async function pushToKafka(producer, topicname, payload) { console.log(kafka_error) return kafka_error } - console.log(result) + //console.log(result) }) return } diff --git a/src/common/app_log.js b/src/common/app_log.js new file mode 100644 index 0000000..9f16c86 --- /dev/null +++ b/src/common/app_log.js @@ -0,0 +1,216 @@ +const { + producerLog, + producerLog_update, + pAuditLog, + pAuditLog_update, + consumerLog, + consumerLog_update, + cAuditLog +} = require('../api/audit') + +function create_producer_app_log(payload,overallstatus) { + let seqID = payload.TIME + "_" + payload.TABLENAME + let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + let producer_retry_count + if (reconcile_flag == 0) { + if (!payload.retryCount) { + producer_retry_count = 0 + } else { + producer_retry_count = payload.retryCount + } + } else { + producer_retry_count = 100 + reconcile_flag + } + + try { + await producerLog({ + SEQ_ID: seqID, + PRODUCER_PAYLOAD: payload, + PODUCER_PUBLISH_RETRY_COUNT: producer_retry_count + }).then(log => console.log('Payload updated to Producer table')) + .catch(err => console.log(err)) + if ((reconcile_flag == 0) && (producer_retry_count == 0)) { + await pAuditLog({ + SEQ_ID: seqID, + PAYLOAD_TIME: payload.TIME, + PRODUCER_PAYLOAD: payload, + PODUCER_PUBLISH_RETRY_COUNT: producer_retry_count, + OVERALL_STATUS: overallstatus, + RECONCILE_STATUS: reconcile_flag + }).then((log) => console.log('updated the auditlog')) + } else { + await pAuditLog_update({ + SEQ_ID: seqID, + PRODUCER_PAYLOAD: payload, + PODUCER_PUBLISH_RETRY_COUNT: producer_retry_count, + OVERALL_STATUS: overallstatus, + RECONCILE_STATUS: reconcile_flag + }).then((log) => console.log('updated the auditlog')) + } + + + } catch (error) { + console.log(error) + } + + console.log('ProducerLog Success') + +} + +function producerpost_success_log(payload , overallstatus) { + let seqID = payload.TIME + "_" + payload.TABLENAME + await pAuditLog_update({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'success', + PRODUCER_PUBLISH_TIME: Date.now(), + OVERALL_STATUS: overallstatus + }).then((log) => console.log('Send Success')) + + const logMessage = `${seqID} ${payload.TABLENAME} ${payload.uniquedatatype} ${payload.OPERATION} ${payload.TIME}` + console.log(`producer : ${logMessage}`); +} + +function producerpost_failure_log(payload, kafka_error, overallstatus) { + let producer_retry_count + let seqID = payload.TIME + "_" + payload.TABLENAME + let reconcile_flag = payload['RECONCILE_STATUS'] ? 
payload['RECONCILE_STATUS'] : 0 + if (reconcile_flag == 0) { + if (!payload.retryCount) { + producer_retry_count = 0 + } else { + producer_retry_count = payload.retryCount + } + } else { + producer_retry_count = 100 + reconcile_flag + } + await pAuditLog_update({ + SEQ_ID: seqID, + PRODUCER_PUBLISH_STATUS: 'failure', + PRODUCER_FAILURE_LOG: kafka_error, + PRODUCER_PUBLISH_TIME: Date.now(), + OVERALL_STATUS: overallstatus + }).then((log) => console.log('Send Failure')) + + await producerLog_update({ + SEQ_ID: seqID, + PRODUCER_FAILURE_LOG: kafka_error, + PODUCER_PUBLISH_RETRY_COUNT: producer_retry_count + }).then(log => console.log('Payload updated to Producer table')) + .catch(err => console.log(err)) + + console.log(`error-sync: producer parse message : "${kafka_error}"`) +} + +// consumerLog.schema = Joi.object().keys({ +// SEQ_ID: Joi.string().required(), +// CONSUMER_UPDATE_RETRY_COUNT: Joi.number(), +// CONSUMER_PAYLOAD: Joi.object(), +// CONSUMER_FAILURE_LOG: Joi.object() +// }) +function create_consumer_app_log(payload) { + let consumer_retry_count + let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + if (reconcile_flag == 0) { + if (!payload.retryCount) { + consumer_retry_count = 0 + } else { + consumer_retry_count = payload.retryCount + } + } else { + consumer_retry_count = 100 + reconcile_flag + } + + + try { + console.log("payload sequece ID : " + payload.SEQ_ID) + await consumerLog({ + SEQ_ID: payload.SEQ_ID, + CONSUMER_UPDATE_RETRY_COUNT: consumer_retry_count, + CONSUMER_PAYLOAD: payload + }).then(log => console.log('Added Consumer Log')) + .catch(err => console.log(err)) + + await cAuditLog({ + SEQ_ID: payload.SEQ_ID, + CONSUMER_PAYLOAD: payload, + CONSUMER_UPDATE_RETRY_COUNT: retrycountconsumer, + OVERALL_STATUS: 'ConsumerReceved' + }).then(log => console.log('Added consumer audit log successfully')) + .catch(err => console.log(err)) + } catch (error) { + console.log(error) + } +} + +function consumerpg_success_log(payload) { + let consumer_retry_count + //let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + // let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + // if (reconcile_flag == 0) { + // if (!payload.retryCount) { + // consumer_retry_count = 0 + // } else { + // consumer_retry_count = payload.retryCount + // } + // } else { + // consumer_retry_count = 100 + // } + // CONSUMER_UPDATE_RETRY_COUNT: consumer_retry_count, + await cAuditLog({ + SEQ_ID: payload.SEQ_ID, + CONSUMER_DEPLOY_STATUS: 'success', + CONSUMER_UPDATE_TIME: Date.now(), + OVERALL_STATUS: 'PostgresUpdated' + }).then(log => console.log('postgres ' + payload.OPERATION + ' success')) + .catch(err => console.log(err)) + const logMessage = `${payload.SEQ_ID} ${payload.TABLENAME} ${payload.uniquedatatype} ${payload.OPERATION} ${payload.TIME}` + console.log(`consumer : ${logMessage}`); +} + +function consumerpg_failure_log(payload, postgreErr) { + //consumser table + let consumer_retry_count + //let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + let reconcile_flag = payload['RECONCILE_STATUS'] ? 
payload['RECONCILE_STATUS'] : 0 + if (reconcile_flag == 0) { + if (!payload.retryCount) { + consumer_retry_count = 0 + } else { + consumer_retry_count = payload.retryCount + } + } else { + consumer_retry_count = 100 + } + await cAuditLog({ + SEQ_ID: payload.SEQ_ID, + CONSUMER_DEPLOY_STATUS: 'failure', + CONSUMER_FAILURE_LOG: postgreErr, + CONSUMER_UPDATE_TIME: Date.now(), + CONSUMER_UPDATE_RETRY_COUNT: consumer_retry_count, + OVERALL_STATUS: 'PostgresUpdateFailed' + + }).then((log) => console.log('postgres ' + payload.OPERATION + ' failure')) + .catch(err => console.log(err)) + await consumerLog_update({ + SEQ_ID: payload.SEQ_ID, + CONSUMER_UPDATE_RETRY_COUNT: consumer_retry_count, + CONSUMER_FAILURE_LOG: postgreErr + }).then(log => console.log('Added Error in Consumer Log Table')) + .catch(err => console.log(err)) + console.log(`error-sync: consumer failed to update : "${postgreErr}"`) + //audit table update +} +// CONSUMER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, +// CONSUMER_DEPLOY_STATUS: { type: DataTypes.STRING }, +// CONSUMER_FAILURE_LOG: { type: DataTypes.JSON }, +// CONSUMER_UPDATE_TIME:{ type: DataTypes.DATE }, +// CONSUMER_UPDATE_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0 }, +// OVERALL_STATUS:{ type: DataTypes.STRING }, +module.exports = { + create_producer_app_log, + producerpost_success_log, + producerpost_failure_log, + create_consumer_app_log, + consumerpg_success_log, + consumerpg_failure_log +} \ No newline at end of file diff --git a/src/consumer.js b/src/consumer.js index 6c7b336..75b9932 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -5,13 +5,10 @@ const logger = require('./common/logger'); const healthcheck = require('topcoder-healthcheck-dropin'); const consumer = new Kafka.GroupConsumer(); const { - producerLog, - pAuditLog -} = require('./api/audit') -const { - consumerLog, - cAuditLog -} = require('./api/audit') + create_consumer_app_log, + consumerpg_success_log, + consumerpg_failure_log +} = require('./common/app_log') //const { migrateDelete, migrateInsert, migrateUpdate } = require('./api/migrate') const { migratepgDelete, @@ -24,7 +21,11 @@ const { migrateifxdeletedata } = require('./api/migrateifxpg') const pushToKafka = require('./api/pushToKafka') -const postMessage = require('./api/postslackinfo') +const { + postMessage, + validateMsgPosted +} = require('./api/postslackinfo') +const consumerretry = require('./api/consumer_retry') //const { migrateinsertdata } = require('./api/migrate-data') const producer = new Kafka.Producer() @@ -50,21 +51,11 @@ async function dataHandler(messageSet, topic, partition) { // insert consumer_log try { - console.log("payload sequece ID : " + payload.SEQ_ID) - consumerLog({ - SEQ_ID: payload.SEQ_ID, - TOPICNAME: topic, - SCHEMA_NAME: payload.SCHEMANAME, - CONSUMAER_QUERY: { - OPERATION: payload.OPERATION, - DATA: payload.DATA - }, - DESTINATION: config.DESTINATION - }).then(log => console.log('Add Consumer Log')) - .catch(err => console.log(err)) + await create_consumer_app_log(payload) } catch (error) { console.log(error) } + //update postgres table let postgreErr if (payload.uniquedatatype === 'true') { @@ -92,8 +83,8 @@ async function dataHandler(messageSet, topic, partition) { postgreErr = err //console.log(err) }) - } - + } + console.log("Different approach") } else { if (payload.OPERATION === 'INSERT') { @@ -122,26 +113,8 @@ async function dataHandler(messageSet, topic, partition) { } } //audit success log - let retrycountconsumer,pseqid - if (!postgreErr) { - retrycountconsumer = 0 - if 
(!payload.retryCount) { - pseqid = payload.SEQ_ID - } - else - { - pseqid = payload.parentseqid - } - await cAuditLog({ - SEQ_ID: payload.SEQ_ID, - CONSUMER_DEPLOY_STATUS: 'success', - CONSUMER_UPDATE_TIME: Date.now(), - CONSUMER_RETRY_COUNT: retrycountconsumer, - PL_SQUENCE_ID: pseqid - }).then(log => console.log('postgres ' + payload.OPERATION + ' success')) - .catch(err => console.log(err)) - + consumerpg_success_log(payload) return consumer.commitOffset({ topic: topic, partition: partition, @@ -151,32 +124,7 @@ async function dataHandler(messageSet, topic, partition) { } else { //audit failure log - if (!payload.retryCount) { - retrycountconsumer = 1 - pseqid = payload.SEQ_ID - } - else - { - pseqid = payload.parentseqid - if (payload.retryCount >= config.KAFKA_REPOST_COUNT) - { - retrycountconsumer = 0 - } - else - { - retrycountconsumer = payload.retryCount + 1; - } - } - - await cAuditLog({ - SEQ_ID: payload.SEQ_ID, - CONSUMER_DEPLOY_STATUS: 'failure', - CONSUMER_FAILURE_LOG: postgreErr, - CONSUMER_UPDATE_TIME: Date.now(), - CONSUMER_RETRY_COUNT: retrycountconsumer, - PL_SQUENCE_ID: pseqid - }).then((log) => console.log('postgres ' + payload.OPERATION + ' failure')) - .catch(err => console.log(err)) + consumerpg_failure_log(payload, postgreErr) let msgValue = { ...postgreErr, @@ -184,118 +132,55 @@ async function dataHandler(messageSet, topic, partition) { payloadposted: JSON.stringify(payload), msgoriginator: "consumer-producer" } - + let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + if (reconcile_flag != 0) { + logger.debug('Reconcile failed, sending it to error queue: ', config.topic_error.NAME); + kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) + if (!kafka_error) { + console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) + } else { + if (config.SLACK.SLACKNOTIFY === 'true') { + postMessage("consumer_reconcile post fails - unable to post the error in kafka failure topic due to some errors", (response) => { + await validateMsgPosted(response.statusCode, response.statusMessage) + }); + } + } + return consumer.commitOffset({ + topic: topic, + partition: partition, + offset: m.offset, + metadata: 'optional' + }) + } if (!payload.retryCount) { payload.retryCount = 0 logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA_REPOST_COUNT); } if (payload.retryCount >= config.KAFKA_REPOST_COUNT) { - logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); + logger.debug('Reached at max retry counter, sending it to error queue: ', config.topic_error.NAME); kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) if (!kafka_error) { console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) } else { if (config.SLACK.SLACKNOTIFY === 'true') { - postMessage("producer - kafka post fails", (response) => { - if (response.statusCode < 400) { - console.info('Message posted successfully'); - } else if (response.statusCode < 500) { - console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); - } else { - console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - } + postMessage("Consumer Retry reached Max- But unable to post kafka due to errors", (response) => { + await validateMsgPosted(response.statusCode, response.statusMessage) }); } } } else { - if (payload.retryCount === 0) - { - 
payload['parentseqid'] = payload.SEQ_ID - } - payload['retryCount'] = payload.retryCount + 1; - let seqID = 0 - //add producer_log - await producerLog({ - TOPICNAME: config.topic.NAME, - SOURCE: config.SOURCE, - SCHEMA_NAME: payload.SCHEMANAME, - TABLE_NAME: payload.TABLENAME, - PRODUCER_PAYLOAD: payload, - OPERATION: payload.OPERATION - }).then((log) => seqID = log.SEQ_ID) - - if (!seqID) { - console.log('ProducerLog Failure') - return - } - console.log('ProducerLog Success') - payload['SEQ_ID'] = seqID; - //SEQ_ID: seqID - kafka_error = await pushToKafka(producer, config.topic.NAME, payload) - //add auditlog - if (!kafka_error) { - await pAuditLog({ - SEQ_ID: seqID, - PRODUCER_PUBLISH_STATUS: 'success', - PRODUCER_PUBLISH_TIME: Date.now() - }).then((log) => console.log('Send Success')) - //res.send('done') - return consumer.commitOffset({ - topic: topic, - partition: partition, - offset: m.offset, - metadata: 'optional' - }) - } else { - //add auditlog - await pAuditLog({ - SEQ_ID: seqID, - PRODUCER_PUBLISH_STATUS: 'failure', - PRODUCER_FAILURE_LOG: kafka_error, - PRODUCER_PUBLISH_TIME: Date.now() - }).then((log) => console.log('Send Failure')) - - msgValue = { - ...kafka_error, - SEQ_ID: seqID, - recipients: config.topic_error.EMAIL, - msgoriginator: "consumer-producer" - } - //send error message to kafka - kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) - if (!kafka_error) { - console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) - } else { - if (config.SLACK.SLACKNOTIFY === 'true') { - postMessage("consumer - kafka post fails", (response) => { - if (response.statusCode < 400) { - console.info('Message posted successfully'); - } else if (response.statusCode < 500) { - console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); - } else { - console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - } - }); - } - } - - } +//moved to consumerretry function + await consumerretry(producer,payload) } - //send postgres_error message - - - // logger.debug('Recached at max retry counter, sending it to error queue: ', config.topic_error.NAME); - // kafka_error = await pushToKafka(producer, config.topic_error.NAME, msgValue) - //=============================================== - // commit offset return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) + } }).catch(err => console.log(err)) @@ -324,9 +209,9 @@ async function setupKafkaConsumer() { subscriptions: [config.topic.NAME], handler: dataHandler }]; - + await consumer.init(strategies); - + logger.info('Initialized kafka consumer') healthcheck.init([check]) } catch (err) { @@ -336,4 +221,4 @@ async function setupKafkaConsumer() { } } -setupKafkaConsumer() +setupKafkaConsumer() \ No newline at end of file diff --git a/src/models/audit_log.js b/src/models/audit_log.js index 68231c1..239a39c 100644 --- a/src/models/audit_log.js +++ b/src/models/audit_log.js @@ -2,18 +2,26 @@ * Schema for audit_log. 
*/ module.exports = (sequelize, DataTypes) => - sequelize.define('audit_log', { - SEQ_ID: { type: DataTypes.INTEGER, primaryKey: true }, + sequelize.define('audit_log', { + SEQ_ID: { type: DataTypes.STRING, primaryKey: true }, + PAYLOAD_TIME: { type: DataTypes.TIMESTAMP }, + PRODUCER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, PRODUCER_PUBLISH_STATUS: { type: DataTypes.STRING }, PRODUCER_FAILURE_LOG: { type: DataTypes.JSON }, PRODUCER_PUBLISH_TIME: { type: DataTypes.DATE }, + PODUCER_PUBLISH_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0 }, + CONSUMER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, CONSUMER_DEPLOY_STATUS: { type: DataTypes.STRING }, CONSUMER_FAILURE_LOG: { type: DataTypes.JSON }, CONSUMER_UPDATE_TIME:{ type: DataTypes.DATE }, - CONSUMER_RETRY_COUNT:{ type: DataTypes.INTEGER }, - PL_SQUENCE_ID:{ type: DataTypes.INTEGER } + CONSUMER_UPDATE_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0 }, + OVERALL_STATUS:{ type: DataTypes.STRING }, + RECONCILE_STATUS:{ type: DataTypes.INTEGER }, + MISC:{ type: DataTypes.STRING } }, { tableName: 'audit_log', paranoid: true, timestamps: false, }); + + diff --git a/src/models/consumer_log.js b/src/models/consumer_log.js index 8cdf401..cf3d600 100644 --- a/src/models/consumer_log.js +++ b/src/models/consumer_log.js @@ -3,11 +3,10 @@ */ module.exports = (sequelize, DataTypes) => sequelize.define('consumer_log', { - SEQ_ID: { type: DataTypes.INTEGER, primaryKey: true }, - TOPICNAME: { type: DataTypes.STRING, allowNull: false }, - SCHEMA_NAME:{ type: DataTypes.STRING, allowNull: false }, - DESTINATION:{ type: DataTypes.STRING, allowNull: false }, - CONSUMAER_QUERY: { type: DataTypes.JSON, allowNull: false } + SEQ_ID: { type: DataTypes.STRING, primaryKey: true }, + CONSUMER_UPDATE_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0, primaryKey: true }, + CONSUMER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, + CONSUMER_FAILURE_LOG: { type: DataTypes.JSON } }, { tableName: 'consumer_log', paranoid: true, diff --git a/src/models/index.js b/src/models/index.js index e437453..7d62456 100644 --- a/src/models/index.js +++ b/src/models/index.js @@ -17,8 +17,8 @@ models['auditlog'].consumer_log = db['auditlog'].import('./consumer_log') //models['testdb'].test = db['testdb'].import('./test') -models['auditlog'].audit_log.belongsTo(models['auditlog'].producer_log, { foreignKey: 'SEQ_ID' }) -models['auditlog'].consumer_log.belongsTo(models['auditlog'].producer_log, { foreignKey: 'SEQ_ID' }) +// models['auditlog'].audit_log.belongsTo(models['auditlog'].producer_log, { foreignKey: 'SEQ_ID' }) +// models['auditlog'].consumer_log.belongsTo(models['auditlog'].producer_log, { foreignKey: 'SEQ_ID' }) config.db.DB_NAME.forEach(dbname =>{ db[dbname].sync({ force:false }) diff --git a/src/models/producer_log.js b/src/models/producer_log.js index 33eb25e..c1d17bb 100644 --- a/src/models/producer_log.js +++ b/src/models/producer_log.js @@ -3,13 +3,10 @@ */ module.exports = (sequelize, DataTypes) => sequelize.define('producer_log', { - SEQ_ID: { type: DataTypes.INTEGER, autoIncrement:true, primaryKey: true }, - TOPICNAME: { type: DataTypes.STRING, allowNull: false }, - SOURCE: { type: DataTypes.STRING, allowNull: false }, - SCHEMA_NAME:{ type: DataTypes.STRING, allowNull: false }, - TABLE_NAME:{ type: DataTypes.STRING, allowNull: false }, + SEQ_ID: { type: DataTypes.STRING, primaryKey: true }, + PODUCER_PUBLISH_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0, primaryKey: true }, PRODUCER_PAYLOAD: { type: DataTypes.JSON, 
allowNull: false },
-    OPERATION: { type: DataTypes.STRING, allowNull: false }
+    PRODUCER_FAILURE_LOG: { type: DataTypes.JSON }
   }, {
     tableName: 'producer_log',
     paranoid: true,
diff --git a/src/nodeserver.js b/src/nodeserver.js
index e17f6f8..b9ea4f3 100644
--- a/src/nodeserver.js
+++ b/src/nodeserver.js
@@ -4,11 +4,15 @@ const Kafka = require('no-kafka')
 const config = require('config')
 const bodyParser = require('body-parser')
 const {
-  producerLog,
-  pAuditLog
-} = require('./api/audit')
+  create_producer_app_log,
+  producerpost_success_log,
+  producerpost_failure_log
+} = require('./common/app_log')
 const pushToKafka = require('./api/pushToKafka')
-const postMessage = require('./api/postslackinfo')
+const {
+  postMessage,
+  validateMsgPosted
+} = require('./api/postslackinfo')

 const app = express()
 app.use(bodyParser.json()); // to support JSON-encoded bodies
@@ -22,23 +26,16 @@ app.get('/', function (req, res) {
 app.post('/kafkaevents', async (req, res, next) => {
   const payload = req.body
-  let seqID = 0
+  let seqID = payload.TIME + "_" + payload.TABLENAME
+  //retry_count = payload['RETRY_COUNT'] ? payload['RETRY_COUNT'] : 0
+  //let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
+  let producer_retry_count

-  //add producer_log
-  await producerLog({
-    TOPICNAME: config.topic.NAME,
-    SOURCE: config.SOURCE,
-    SCHEMA_NAME: payload.SCHEMANAME,
-    TABLE_NAME: payload.TABLENAME,
-    PRODUCER_PAYLOAD: payload,
-    OPERATION: payload.OPERATION
-  }).then((log) => seqID = log.SEQ_ID)
-
-  if (!seqID) {
-    console.log('ProducerLog Failure')
-    return
+  try {
+    await create_producer_app_log(payload,"PayloadReceived")
+  } catch (error) {
+    console.log(error)
   }
-  console.log('ProducerLog Success')

   //send kafka message
   let kafka_error
@@ -49,23 +46,14 @@ app.post('/kafkaevents', async (req, res, next) => {
   kafka_error = await pushToKafka(producer, config.topic.NAME, msgValue)
   //add auditlog
   if (!kafka_error) {
-    await pAuditLog({
-      SEQ_ID: seqID,
-      PRODUCER_PUBLISH_STATUS: 'success',
-      PRODUCER_PUBLISH_TIME: Date.now()
-    }).then((log) => console.log('Send Success'))
+    await producerpost_success_log(payload, "PayloadPosted")
     res.send('done')
     return
   }
   //add auditlog
-  await pAuditLog({
-    SEQ_ID: seqID,
-    PRODUCER_PUBLISH_STATUS: 'failure',
-    PRODUCER_FAILURE_LOG: kafka_error,
-    PRODUCER_PUBLISH_TIME: Date.now()
-  }).then((log) => console.log('Send Failure'))
-
+  await producerpost_failure_log(payload,kafka_error,'PayloadPostFailed')
+
   msgValue = {
     ...kafka_error,
     SEQ_ID: seqID,
@@ -78,14 +66,8 @@ app.post('/kafkaevents', async (req, res, next) => {
     console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
   } else {
     if (config.SLACK.SLACKNOTIFY === 'true') {
-      postMessage("producer - kafka post fails", (response) => {
-        if (response.statusCode < 400) {
-          console.info('Message posted successfully');
-        } else if (response.statusCode < 500) {
-          console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`);
-        } else {
-          console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`);
-        }
+      postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
+        await validateMsgPosted(response.statusCode,response.statusMessage)
       });
     }
   }
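With this change the producer derives its audit key the same way migratedynamodb.js derives the DynamoDB `SequenceID`, so a Postgres audit row and its DynamoDB item can later be matched by id (the reconciler added below depends on exactly that). A small sketch of the shared scheme (payload values hypothetical):

```js
// Both sides build the id as <payload time in ms>_<source table name>.
const payload = { TIME: 1581072000000, TABLENAME: 'project_phase' }
const seqID = payload.TIME + "_" + payload.TABLENAME
console.log(seqID) // '1581072000000_project_phase'
// A bare payload.TIME would collide whenever two tables emit a change in the
// same millisecond; suffixing the table name keeps the key unique per table.
```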
diff --git a/src/reconciler.js b/src/reconciler.js
new file mode 100644
index 0000000..cc558e1
--- /dev/null
+++ b/src/reconciler.js
@@ -0,0 +1,72 @@
+const config = require('config')
+//Establishing connection to postgres
+const pg = require('pg')
+const pgOptions = config.get('POSTGRES')
+const database = 'auditlog'
+const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${database}`
+const pgClient = new pg.Client(pgConnectionString)
+
+//const auditTrail = require('./services/auditTrail');
+//const port = 3000
+
+const logger = require('./common/logger')
+const _ = require('lodash')
+var AWS = require("aws-sdk");
+
+var docClient = new AWS.DynamoDB.DocumentClient({
+    region: config.DYNAMODB.REGION,
+    convertEmptyValues: true
+  });
+
+// SequenceID: seqID,
+// pl_time: payload.TIME,
+// pl_document: payload,
+// NodeSequenceID: Date.now()
+ElapsedTime = 600000
+  var params = {
+    TableName: config.DYNAMODB.TABLENAME,
+    KeyConditionExpression: "pl_time between :time_1 and :time_2",
+    ExpressionAttributeValues: {
+      ":time_1": Date.now() - ElapsedTime,
+      ":time_2": Date.now()
+    }
+  }
+
+docClient.get(params, function(err, data) {
+  if (err) {
+    console.error("Unable to read item. Error JSON:", JSON.stringify(err, null, 2));
+  } else {
+    console.log("GetItem succeeded:", JSON.stringify(data, null, 2));
+
+    //select query for last 10 mins from pg to fetch the seq_id
+    //compare whether the dynamo seqid exists in pg
+    //if not exist, post to the rest api with the payload from dynamodb
+
+  }
+});
+//case 1 Reading sequence ID from dynamo DB
+
+
+// case 1 :
+
+// Exists in Dynamo DB only, but not in pg
+
+// Get the last 10 minutes of sequenceids based on payload time pl_time from dynamodb
+// Check the sequenceid existence in the PG auditlog
+// If it exists, ignore it
+// If not, retrieve the payload for the respective sequenceid from dynamodb
+// Post the payload to the producer over the rest api
+
+// Case 2:
+
+// Fetch the rows matching the conditions below from auditlog
+
+// 1) PAYLOAD_TIME < currenttime -5 and PAYLOAD_TIME > currenttime -25 (this sets the max time window the reconcile logic needs to consider)
+// 2) OVERALL_STATUS should not be PostgresUpdated (this identifies which rows need to be reposted)
+// 3) RECONCILE_STATUS is equal to 0 (this caps the reconcile try count within a particular time schedule)
+
+// Get the payload from PRODUCER_PAYLOAD on the audit log for the above conditions
+// Check whether RECONCILE_STATUS exists on the payload.
+// if not, initiate RECONCILE_STATUS as 1 and embed it in the payload
+// if it exists, increase it by 1 and update it in the payload
+// Repost the payload to the producer
\ No newline at end of file
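The Case 2 plan above is only described in comments and never appears as code later in this series, so here is a minimal sketch of the selection and repost loop it describes (column names are taken from the audit_log model; the http client and the producer URL are assumptions, not project code):

```js
const axios = require('axios') // assumed client; the project itself does not depend on axios

// Case 2 sketch: rows produced in the 5-25 minute window that never reached Postgres.
const caseTwoSql = `
  SELECT "SEQ_ID", "PRODUCER_PAYLOAD" FROM audit_log
  WHERE "PAYLOAD_TIME" < NOW() - INTERVAL '5 minutes'
    AND "PAYLOAD_TIME" > NOW() - INTERVAL '25 minutes'
    AND "OVERALL_STATUS" <> 'PostgresUpdated'
    AND "RECONCILE_STATUS" = 0`

async function repostStale () {
  const res = await pgClient.query(caseTwoSql)
  for (const row of res.rows) {
    const payload = row.PRODUCER_PAYLOAD
    // initiate the reconcile counter at 1, or bump it, and embed it in the payload
    payload.RECONCILE_STATUS = (payload.RECONCILE_STATUS || 0) + 1
    // repost to the producer endpoint defined in nodeserver.js (host/port assumed)
    await axios.post('http://localhost:3000/kafkaevents', payload)
  }
}
```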
From dababa8a4645596c20938a2bd75604b74d7d9847 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 14:28:07 +0530
Subject: [PATCH 062/101] Reconciler logic and log implementation

---
 src/api/postslackinfo.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js
index 64bc7bf..ce0a1f3 100644
--- a/src/api/postslackinfo.js
+++ b/src/api/postslackinfo.js
@@ -42,7 +42,7 @@ function postMessage(message, callback) {
   postReq.write(body);
   postReq.end();
 }
-function validateMsgPosted(responsecode,responsemsg) {
+async function validateMsgPosted(responsecode,responsemsg) {
   if (responsecode < 400) {
     console.info('Message posted successfully');
   } else if (responsecode < 500) {

From c7d3cfca62e111ab1a6abe554fdfb41aaef0604b Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 14:31:54 +0530
Subject: [PATCH 063/101] Reconciler logic and log implementation

---
 src/api/consumer_retry.js |  2 +-
 src/common/app_log.js     | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/api/consumer_retry.js b/src/api/consumer_retry.js
index f1d2a59..53aa65e 100644
--- a/src/api/consumer_retry.js
+++ b/src/api/consumer_retry.js
@@ -9,7 +9,7 @@ const {
     postMessage,
     validateMsgPosted
   } = require('./postslackinfo')
-function consumerretry(producer, payload)
+async function consumerretry(producer, payload)
 {
   payload['retryCount'] = payload.retryCount + 1;
   //add producer_log
diff --git a/src/common/app_log.js b/src/common/app_log.js
index 9f16c86..575bac9 100644
--- a/src/common/app_log.js
+++ b/src/common/app_log.js
@@ -8,7 +8,7 @@ const {
   cAuditLog
 } = require('../api/audit')

-function create_producer_app_log(payload,overallstatus) {
+async function create_producer_app_log(payload,overallstatus) {
   let seqID = payload.TIME + "_" + payload.TABLENAME
   let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
   let producer_retry_count
@@ -57,7 +57,7 @@ async function create_producer_app_log(payload,overallstatus) {

 }

-function producerpost_success_log(payload , overallstatus) {
+async function producerpost_success_log(payload , overallstatus) {
   let seqID = payload.TIME + "_" + payload.TABLENAME
   await pAuditLog_update({
     SEQ_ID: seqID,
@@ -70,7 +70,7 @@ async function producerpost_success_log(payload , overallstatus) {
   console.log(`producer : ${logMessage}`);
 }

-function producerpost_failure_log(payload, kafka_error, overallstatus) {
+async function producerpost_failure_log(payload, kafka_error, overallstatus) {
   let producer_retry_count
   let seqID = payload.TIME + "_" + payload.TABLENAME
   let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
@@ -107,7 +107,7 @@ async function producerpost_failure_log(payload, kafka_error, overallstatus) {
 // CONSUMER_PAYLOAD: Joi.object(),
 // CONSUMER_FAILURE_LOG: Joi.object()
 // })
-function create_consumer_app_log(payload) {
+async function create_consumer_app_log(payload) {
   let consumer_retry_count
   let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
   if (reconcile_flag == 0) {
@@ -142,7 +142,7 @@ async function create_consumer_app_log(payload) {
   }
 }

-function consumerpg_success_log(payload) {
+async function consumerpg_success_log(payload) {
   let consumer_retry_count
   //let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
   // let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
@@ -167,7 +167,7 @@ async function consumerpg_success_log(payload) {
   console.log(`consumer : ${logMessage}`);
 }

-function consumerpg_failure_log(payload, postgreErr) {
+async function consumerpg_failure_log(payload, postgreErr) {
   //consumser table
   let consumer_retry_count
   //let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0
From 6e2a0f0b91494feed8b7876cdf8fe6d58905f083 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 14:52:25 +0530
Subject: [PATCH 064/101] Reconciler logic and log implementation

---
 .circleci/config.yml     | 22 +++++++++++-----------
 src/api/postslackinfo.js |  2 +-
 src/nodeserver.js        |  2 +-
 3 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 2d956b1..216e160 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -40,17 +40,17 @@ builddeploy_steps: &builddeploy_steps
       ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar
       source buildenvvar
       ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
-      # consumer deployment
-      rm -rf buildenvvar
-      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
-      source buildenvvar
-      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
-      # without kafka dynamodb
-      rm -rf buildenvvar
-      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar
-      source buildenvvar
-      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME}
-      # notify deployment
+      # # consumer deployment
+      # rm -rf buildenvvar
+      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
+      # source buildenvvar
+      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+      # # without kafka dynamodb
+      # rm -rf buildenvvar
+      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar
+      # source buildenvvar
+      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME}
+      # # notify deployment
       # rm -rf buildenvvar
       # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar
       # source buildenvvar
diff --git a/src/api/postslackinfo.js b/src/api/postslackinfo.js
index ce0a1f3..f4997e6 100644
--- a/src/api/postslackinfo.js
+++ b/src/api/postslackinfo.js
@@ -5,7 +5,7 @@ const https = require('https');
 hookUrl = config.SLACK.URL
 slackChannel = config.SLACK.SLACKCHANNEL

-function postMessage(message, callback) {
+async function postMessage(message, callback) {

   var slackMessage = {
     channel: `${slackChannel}`,
diff --git a/src/nodeserver.js b/src/nodeserver.js
index b9ea4f3..5b2aadd 100644
--- a/src/nodeserver.js
+++ b/src/nodeserver.js
@@ -66,7 +66,7 @@ app.post('/kafkaevents', async (req, res, next) => {
     console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
   } else {
     if (config.SLACK.SLACKNOTIFY === 'true') {
-      postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
+      await postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
         await validateMsgPosted(response.statusCode,response.statusMessage)
       });
     }
   }
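The next two patches flip the `await` in front of `postMessage` back and forth, and it is worth noting why neither form changes behaviour: `postMessage(message, callback)` reports its result through a Node-style callback, not a returned promise, so `await postMessage(...)` resolves immediately without waiting for Slack. A sketch of a wrapper that would make the await meaningful (an assumption; the series never adds this):

```js
// postMessage(message, callback) invokes callback(response) when the HTTPS
// request completes, so a Promise wrapper lets callers genuinely await it.
const postMessageAsync = (message) =>
  new Promise((resolve) => postMessage(message, resolve))

// usage inside an async handler:
const response = await postMessageAsync('producer post message failed')
await validateMsgPosted(response.statusCode, response.statusMessage)
```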
From 8228b53af3487055d422ced828074aa29725f605 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 15:05:14 +0530
Subject: [PATCH 065/101] Reconciler logic and log implementation

---
 src/nodeserver.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/nodeserver.js b/src/nodeserver.js
index 5b2aadd..cd01fcd 100644
--- a/src/nodeserver.js
+++ b/src/nodeserver.js
@@ -66,8 +66,8 @@ app.post('/kafkaevents', async (req, res, next) => {
     console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
   } else {
     if (config.SLACK.SLACKNOTIFY === 'true') {
-      await postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
-        await validateMsgPosted(response.statusCode,response.statusMessage)
+      postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
+        validateMsgPosted(response.statusCode,response.statusMessage)
       });
     }
   }

From d542b85dda2831b345d27e129e1b125d212020e3 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 15:12:07 +0530
Subject: [PATCH 066/101] Reconciler logic and log implementation

---
 src/nodeserver.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/nodeserver.js b/src/nodeserver.js
index cd01fcd..4ac7068 100644
--- a/src/nodeserver.js
+++ b/src/nodeserver.js
@@ -66,8 +66,8 @@ app.post('/kafkaevents', async (req, res, next) => {
     console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
   } else {
     if (config.SLACK.SLACKNOTIFY === 'true') {
-      postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", (response) => {
-        validateMsgPosted(response.statusCode,response.statusMessage)
+      await postMessage("producer post message failed - but unable to post the error in kafka error topic due to errors", async (response) => {
+        await validateMsgPosted(response.statusCode,response.statusMessage)
       });
     }
   }

From 5478bf7d8b966d5c77f38aa81225762af273d588 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 15:17:23 +0530
Subject: [PATCH 067/101] Reconciler logic and log implementation

---
 src/models/audit_log.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/models/audit_log.js b/src/models/audit_log.js
index 239a39c..3fa5cd2 100644
--- a/src/models/audit_log.js
+++ b/src/models/audit_log.js
@@ -4,7 +4,7 @@
 module.exports = (sequelize, DataTypes) =>
   sequelize.define('audit_log', {
     SEQ_ID: { type: DataTypes.STRING, primaryKey: true },
-    PAYLOAD_TIME: { type: DataTypes.TIMESTAMP },
+    PAYLOAD_TIME: { type: DataTypes.DATE },
     PRODUCER_PAYLOAD: { type: DataTypes.JSON, allowNull: false },
     PRODUCER_PUBLISH_STATUS: { type: DataTypes.STRING },
     PRODUCER_FAILURE_LOG: { type: DataTypes.JSON },

From 7e59c50d372a3bb62808abacfb77f6e0ee749e6a Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 7 Feb 2020 15:21:36 +0530
Subject: [PATCH 068/101] Reconciler logic and log implementation

---
 src/consumer.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index 75b9932..154b188 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -140,7 +140,7 @@ async function dataHandler(messageSet, topic, partition) {
         console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME)
       } else {
         if (config.SLACK.SLACKNOTIFY === 'true') {
-
postMessage("consumer_reconcile post fails - unable to post the error in kafka failure topic due to some errors", (response) => { + await postMessage("consumer_reconcile post fails - unable to post the error in kafka failure topic due to some errors", async (response) => { await validateMsgPosted(response.statusCode, response.statusMessage) }); } @@ -163,7 +163,7 @@ async function dataHandler(messageSet, topic, partition) { console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) } else { if (config.SLACK.SLACKNOTIFY === 'true') { - postMessage("Consumer Retry reached Max- But unable to post kafka due to errors", (response) => { + await postMessage("Consumer Retry reached Max- But unable to post kafka due to errors", async (response) => { await validateMsgPosted(response.statusCode, response.statusMessage) }); } From 0495528ec61983b8e97c587d55febddc4a37bac7 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 7 Feb 2020 15:31:59 +0530 Subject: [PATCH 069/101] Conciler logic and log implementation --- .circleci/config.yml | 34 +++++++++++++++++----------------- src/api/consumer_retry.js | 2 +- src/consumer-slacknotify.js | 19 ++++++------------- 3 files changed, 24 insertions(+), 31 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 216e160..25b8029 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,25 +36,25 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # consumer deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar + # # producer deployment + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # without kafka dynamodb + # # consumer deployment # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # # notify deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # without kafka dynamodb + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # notify deployment + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} 
jobs: diff --git a/src/api/consumer_retry.js b/src/api/consumer_retry.js index 53aa65e..9d1d481 100644 --- a/src/api/consumer_retry.js +++ b/src/api/consumer_retry.js @@ -39,7 +39,7 @@ async function consumerretry(producer, payload) console.log("Kafka Message posted successfully to the topic : " + config.topic_error.NAME) } else { if (config.SLACK.SLACKNOTIFY === 'true') { - postMessage("consumer repost failed - But unable to post message in kafka error topic due to errors", (response) => { + await postMessage("consumer repost failed - But unable to post message in kafka error topic due to errors", async (response) => { await validateMsgPosted(response.statusCode, response.statusMessage) }); } diff --git a/src/consumer-slacknotify.js b/src/consumer-slacknotify.js index df8cbcd..e879dc7 100644 --- a/src/consumer-slacknotify.js +++ b/src/consumer-slacknotify.js @@ -1,7 +1,10 @@ const Kafka = require('no-kafka'); const Promise = require('bluebird'); const config = require('config'); -const postMessage = require('./api/postslackinfo'); +const { + postMessage, + validateMsgPosted + } = require('./api/postslackinfo') const consumer = new Kafka.GroupConsumer(); const dataHandler = function (messageSet, topic, partition) { @@ -9,18 +12,8 @@ const dataHandler = function (messageSet, topic, partition) { const payload = JSON.parse(m.message.value) if(config.SLACK.SLACKNOTIFY === 'true') { console.log(payload) - postMessage(Object.values(payload), (response) => { - if (response.statusCode < 400) { - console.info('Message posted successfully'); - // callback(null); - } else if (response.statusCode < 500) { - console.error(`Error posting message to Slack API: ${response.statusCode} - ${response.statusMessage}`); - // callback(null); // Don't retry because the error is due to a problem with the request - } else { - // Let Lambda retry - console.log(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - //callback(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - } + await postMessage(Object.values(payload), async (response) => { + await validateMsgPosted(response.statusCode, response.statusMessage) }); } From ea0feb1fbb07a16a5c130dafbbe871b1c1efd61c Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 7 Feb 2020 16:40:55 +0530 Subject: [PATCH 070/101] Conciler logic and log implementation --- .circleci/config.yml | 20 ++++++++++---------- src/common/app_log.js | 2 +- src/consumer.js | 7 ++++--- src/models/audit_log.js | 4 ++-- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 25b8029..b28ed2a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -45,16 +45,16 @@ builddeploy_steps: &builddeploy_steps ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # without kafka dynamodb - rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # notify deployment - rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i 
${APPNAME} + # # without kafka dynamodb + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # # notify deployment + # rm -rf buildenvvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/src/common/app_log.js b/src/common/app_log.js index 575bac9..c66fb98 100644 --- a/src/common/app_log.js +++ b/src/common/app_log.js @@ -133,7 +133,7 @@ async function create_consumer_app_log(payload) { await cAuditLog({ SEQ_ID: payload.SEQ_ID, CONSUMER_PAYLOAD: payload, - CONSUMER_UPDATE_RETRY_COUNT: retrycountconsumer, + CONSUMER_UPDATE_RETRY_COUNT: consumer_retry_count, OVERALL_STATUS: 'ConsumerReceved' }).then(log => console.log('Added consumer audit log successfully')) .catch(err => console.log(err)) diff --git a/src/consumer.js b/src/consumer.js index 154b188..d39332c 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -114,7 +114,7 @@ async function dataHandler(messageSet, topic, partition) { } //audit success log if (!postgreErr) { - consumerpg_success_log(payload) + await consumerpg_success_log(payload) return consumer.commitOffset({ topic: topic, partition: partition, @@ -124,8 +124,9 @@ async function dataHandler(messageSet, topic, partition) { } else { //audit failure log - consumerpg_failure_log(payload, postgreErr) - + console.log(postgreErr) + await consumerpg_failure_log(payload, postgreErr) + let msgValue = { ...postgreErr, recipients: config.topic_error.EMAIL, diff --git a/src/models/audit_log.js b/src/models/audit_log.js index 3fa5cd2..b80bad8 100644 --- a/src/models/audit_log.js +++ b/src/models/audit_log.js @@ -5,12 +5,12 @@ module.exports = (sequelize, DataTypes) => sequelize.define('audit_log', { SEQ_ID: { type: DataTypes.STRING, primaryKey: true }, PAYLOAD_TIME: { type: DataTypes.DATE }, - PRODUCER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, + PRODUCER_PAYLOAD: { type: DataTypes.JSON }, PRODUCER_PUBLISH_STATUS: { type: DataTypes.STRING }, PRODUCER_FAILURE_LOG: { type: DataTypes.JSON }, PRODUCER_PUBLISH_TIME: { type: DataTypes.DATE }, PODUCER_PUBLISH_RETRY_COUNT:{ type: DataTypes.INTEGER ,defaultValue: 0 }, - CONSUMER_PAYLOAD: { type: DataTypes.JSON, allowNull: false }, + CONSUMER_PAYLOAD: { type: DataTypes.JSON }, CONSUMER_DEPLOY_STATUS: { type: DataTypes.STRING }, CONSUMER_FAILURE_LOG: { type: DataTypes.JSON }, CONSUMER_UPDATE_TIME:{ type: DataTypes.DATE }, From 4ec333149250fb387ab4d90194e75e4ce5c30e7a Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 7 Feb 2020 17:09:52 +0530 Subject: [PATCH 071/101] Conciler logic and log implementation --- .circleci/config.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b28ed2a..216e160 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,15 +36,15 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # producer 
deployment + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # consumer deployment # rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # without kafka dynamodb # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar From aeb5da3a0a021c796afd3dabafda5b703744989e Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 7 Feb 2020 20:18:05 +0530 Subject: [PATCH 072/101] Conciler logic and log implementation --- .circleci/config.yml | 14 +++++++------- src/consumer-slacknotify.js | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 216e160..00b9730 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,10 +36,10 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # producer deployment + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # consumer deployment # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar @@ -52,9 +52,9 @@ builddeploy_steps: &builddeploy_steps # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # # notify deployment # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/src/consumer-slacknotify.js b/src/consumer-slacknotify.js index e879dc7..6247fa1 100644 --- a/src/consumer-slacknotify.js +++ b/src/consumer-slacknotify.js @@ -18,7 +18,7 @@ const dataHandler = function (messageSet, topic, partition) { } // commit offset - return consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) + consumer.commitOffset({ topic: topic, partition: partition, offset: m.offset, metadata: 'optional' }) }).catch(err => console.log(err)) }; From a21fb1f09a8edecb138e15a79cbb0580e7fcbbba Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 12 Feb 2020 15:21:37 +0530 Subject: [PATCH 073/101] table change --- .circleci/config.yml | 24 ++++++++++++------------ 
package.json | 4 ++-- src/api/audit.js | 2 +- src/common/app_log.js | 2 +- src/models/audit_log.js | 2 +- src/reconciler.js | 14 ++++++++------ 6 files changed, 25 insertions(+), 23 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 00b9730..d47c594 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,15 +36,15 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # consumer deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # producer deployment + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # consumer deployment + rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # without kafka dynamodb # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar @@ -52,9 +52,9 @@ builddeploy_steps: &builddeploy_steps # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # # notify deployment # rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} jobs: diff --git a/package.json b/package.json index a0325dc..31ba7d3 100644 --- a/package.json +++ b/package.json @@ -7,8 +7,8 @@ "test": "echo \"Error: no test specified\" && exit 1", "producer": "nodemon src/nodeserver.js", "consumer": "nodemon src/consumer.js", - "producerwithoutkafka" : "node src/node-server-without-kafka.js", - "ifxpgnotify": "node src/consumer-slacknotify.js" + "producerwithoutkafka" : "nodemon src/node-server-without-kafka.js", + "ifxpgnotify": "nodemon src/consumer-slacknotify.js" }, "author": "", "license": "ISC", diff --git a/src/api/audit.js b/src/api/audit.js index 57a63b2..a91b534 100644 --- a/src/api/audit.js +++ b/src/api/audit.js @@ -38,7 +38,7 @@ function producerLog_update(payload) { pAuditLog.schema = Joi.object().keys({ SEQ_ID: Joi.string().required(), - PAYLOAD_TIME: Joi.date(), + REQUEST_CREATE_TIME: Joi.date(), PRODUCER_PAYLOAD: Joi.object(), PRODUCER_PUBLISH_STATUS: Joi.string().valid('success','failure'), PRODUCER_FAILURE_LOG: Joi.object(), diff --git a/src/common/app_log.js b/src/common/app_log.js index c66fb98..7f94e48 100644 --- a/src/common/app_log.js +++ b/src/common/app_log.js @@ -32,7 +32,7 @@ async function 
create_producer_app_log(payload,overallstatus) {
     if ((reconcile_flag == 0) && (producer_retry_count == 0)) {
       await pAuditLog({
         SEQ_ID: seqID,
-        PAYLOAD_TIME: payload.TIME,
+        REQUEST_CREATE_TIME: Date.now(),
         PRODUCER_PAYLOAD: payload,
         PODUCER_PUBLISH_RETRY_COUNT: producer_retry_count,
         OVERALL_STATUS: overallstatus,
diff --git a/src/models/audit_log.js b/src/models/audit_log.js
index b80bad8..02e9767 100644
--- a/src/models/audit_log.js
+++ b/src/models/audit_log.js
@@ -4,7 +4,7 @@
 module.exports = (sequelize, DataTypes) =>
   sequelize.define('audit_log', {
     SEQ_ID: { type: DataTypes.STRING, primaryKey: true },
-    PAYLOAD_TIME: { type: DataTypes.DATE },
+    REQUEST_CREATE_TIME: { type: DataTypes.DATE },
     PRODUCER_PAYLOAD: { type: DataTypes.JSON },
     PRODUCER_PUBLISH_STATUS: { type: DataTypes.STRING },
     PRODUCER_FAILURE_LOG: { type: DataTypes.JSON },
diff --git a/src/reconciler.js b/src/reconciler.js
index cc558e1..1733b38 100644
--- a/src/reconciler.js
+++ b/src/reconciler.js
@@ -32,16 +32,18 @@ ElapsedTime = 600000
     }
   }

-docClient.get(params, function(err, data) {
+
+docClient.query(params, function(err, data) {
   if (err) {
-    console.error("Unable to read item. Error JSON:", JSON.stringify(err, null, 2));
+    console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
   } else {
-    console.log("GetItem succeeded:", JSON.stringify(data, null, 2));
-
+    console.log("Query succeeded.");
+    data.Items.forEach(function(item) {
+      console.log(" -", item.year + ": " + item.title);
     //select query for last 10 mins from pg to fetch the seq_id
     //compare whether the dynamo seqid exists in pg
-    //if not exist, post to the rest api with the payload from dynamodb
-
+    });
   }
 });
 //case 1 Reading sequence ID from dynamo DB

From 54e9a0ae55e833d9a51746263eed7f78b4a3c7a4 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Thu, 13 Feb 2020 12:55:09 +0530
Subject: [PATCH 074/101] code change

---
 .circleci/config.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index d47c594..70d367c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -36,15 +36,15 @@ builddeploy_steps: &builddeploy_steps
       command: |
         ./awsconfiguration.sh $DEPLOY_ENV
         source awsenvconf
-      # producer deployment
-      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar
-      source buildenvvar
-      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
-      # consumer deployment
-      rm -rf buildenvvar
-      ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
-      source buildenvvar
-      ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+      # # producer deployment
+      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar
+      # source buildenvvar
+      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
+      # # consumer deployment
+      # rm -rf buildenvvar
+      # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar
+      # source buildenvvar
+      # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME}
       # # without kafka dynamodb
       # rm -rf buildenvvar
       # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar
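Patch 073 above and patch 075 below walk the reconciler through three DynamoDB DocumentClient read styles, and the progression matters: `get()` requires the full primary key, `query()` requires a key condition on the partition key (`SequenceID` here), and only `scan()` can filter on a non-key attribute such as the write timestamp. A sketch of the shape patch 075 lands on (the window length is illustrative):

```js
// scan() + FilterExpression works on non-key attributes, at the cost of
// reading the whole table; get()/query() cannot express this time window.
const ElapsedTime = 600000 // 10 minutes, as in the original comment
const params = {
  TableName: config.DYNAMODB.TABLENAME,
  FilterExpression: 'NodeSequenceID between :t1 and :t2',
  ExpressionAttributeValues: {
    ':t1': Date.now() - ElapsedTime,
    ':t2': Date.now()
  }
}
docClient.scan(params, onScan)
```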
From 5634a6aa028c54f426b59a1ce092922d8a80fc1a Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Thu, 13 Feb 2020 12:56:01 +0530
Subject: [PATCH 075/101] code change

---
 src/reconciler.js | 117 +++++++++++++++++++++++++++++++++++++---------
 1 file changed, 95 insertions(+), 22 deletions(-)

diff --git a/src/reconciler.js b/src/reconciler.js
index 1733b38..48d8fd9 100644
--- a/src/reconciler.js
+++ b/src/reconciler.js
@@ -4,48 +4,121 @@ const pg = require('pg') const pgOptions = config.get('POSTGRES') const database = 'auditlog' const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${database}` +console.log(pgConnectionString) const pgClient = new pg.Client(pgConnectionString) - -//const auditTrail = require('./services/auditTrail'); -//const port = 3000 - -const logger = require('./common/logger') +//pgClient.connect();i +async function setupPgClient () { + try { + await pgClient.connect() + logger.debug('Connected to Pg Client2 Audit:') + } + catch (err) { + logger.error('Could not setup postgres client2') + logger.logFullError(err) + process.exit() + } +} +const logger = require('./src/common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); +async function dynamo_pg_validation() +{ var docClient = new AWS.DynamoDB.DocumentClient({ region: config.DYNAMODB.REGION, convertEmptyValues: true }); -// SequenceID: seqID, -// pl_time: payload.TIME, -// pl_document: payload, -// NodeSequenceID: Date.now() -ElapsedTime = 600000 +//ElapsedTime = 600000 +ElapsedTime = 4995999000 var params = { TableName: config.DYNAMODB.TABLENAME, - KeyConditionExpression: "pl_time between :time_1 and :time_2", + FilterExpression: "NodeSequenceID between :time_1 and :time_2", ExpressionAttributeValues: { ":time_1": Date.now() - ElapsedTime, ":time_2": Date.now() } } - - -docClient.query(params, function(err, data) { + console.log("scanning"); + await docClient.scan(params, onScan); +} +async function onScan(err, data) { if (err) { - console.error("Unable to query. Error:", JSON.stringify(err, null, 2)); + console.error("Unable to scan the table. Error JSON:", JSON.stringify(err, null, 2)); } else { - console.log("Query succeeded."); - data.Items.forEach(function(item) { - console.log(" -", item.year + ": " + item.title); + + console.log("Scan succeeded."); + data.Items.forEach(async function(item) { +// console.log(item); + await validate_data_in_pg(item.SequenceID,item.pl_document) + }); + + // continue scanning if we have more movies, because + // scan can retrieve a maximum of 1MB of data + + if (typeof data.LastEvaluatedKey != "undefined") { + console.log("Scanning for more..."); + params.ExclusiveStartKey = data.LastEvaluatedKey; + await docClient.scan(params, onScan); + } } -}); +} + +async function validate_data_in_pg(SequenceID,payload) +{ + console.log(SequenceID); + let schemaname = 'public'; + const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE seq_id=$1'; + const sqlquerytovalidate_values =[SequenceID] + console.log(sqlquerytovalidate); + await pgClient.query(sqlquerytovalidate, sqlquerytovalidate_values,async (err,res) => { + + if (err) { + var errmsg0 = `error-sync: Audit reconsiler query "${err.message}"` + logger.debug (errmsg0) + // await callposttoslack(errmsg0) +} +else +{ + console.log("validating data count---------------------"); + const data = res.rows; + data.forEach(async(row) => { + if (row['count'] == 0 ) + { + //await posttopic(payload,0) + console.log("post the topic"); + } + else + { + console.log(`${SequenceID} is exist in pg`) + } + + + }); +} + }); +} +async function main() +{ +await setupPgClient() +await dynamo_pg_validation() +await pgClient.end() +} +main() +// docClient.query(params, function(err, data) { +// if (err) { +// console.error("Unable to query. Error:", JSON.stringify(err, null, 2)); +// } else { +// console.log("Query succeeded."); +// data.Items.forEach(function(item) { +// console.log(" -", item.year + ": " + item.title); +// //select query from for last 10 mins pg fethte seq_id +// //compare the dynamo seqid exist with pgseqid +// //if not exist , post res api with paylaod from dynamodb +// }); +// } +// }); //case 1 Reading dequence ID from dynamo DB

From f5d53ce114476a8eb63ab446c7c36b696c473a46 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Thu, 13 Feb 2020 14:56:16 +0530
Subject: [PATCH 076/101] code change

---
 src/reconciler.js | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)

diff --git a/src/reconciler.js b/src/reconciler.js
index 48d8fd9..994dc75 100644
--- a/src/reconciler.js
+++ b/src/reconciler.js
@@ -5,7 +5,7 @@ const pgOptions = config.get('POSTGRES') const database = 'auditlog' const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${database}` console.log(pgConnectionString) -const pgClient = new pg.Client(pgConnectionString) +//const pgClient = new pg.Client(pgConnectionString) //pgClient.connect();i async function setupPgClient () { try {
@@ -41,6 +41,7 @@ ElapsedTime = 4995999000 } console.log("scanning"); await docClient.scan(params, onScan); + return } async function onScan(err, data) {
@@ -62,11 +63,26 @@ async function onScan(err, data) { params.ExclusiveStartKey = data.LastEvaluatedKey; await docClient.scan(params, onScan); } + else + { + return + } } } async function validate_data_in_pg(SequenceID,payload) { + const pgClient = new pg.Client(pgConnectionString) + // await setupPgClient() + try { + await pgClient.connect() + logger.debug('Connected to Pg Client2 Audit:') + } + catch (err) { + logger.error('Could not setup postgres client2') + logger.logFullError(err) + process.exit() + } console.log(SequenceID); let schemaname = 'public'; const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE seq_id=$1';
@@ -97,15 +113,18 @@ else }); } +pgClient.end(); }); +return } async function main() { -await setupPgClient() +//await setupPgClient() await dynamo_pg_validation() -await pgClient.end() +//await pgClient.on('end') } main() +//await pgClient.end() // docClient.query(params, function(err, data) { // if (err) { // console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
Error:", JSON.stringify(err, null, 2)); From 4bc936841bdd81eb1ae1cd195241df8e887b0cc7 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 13 Feb 2020 22:44:48 +0530 Subject: [PATCH 077/101] Reconcile code completed --- config/default.js | 9 ++ src/reconciler.js | 263 +++++++++++++++++++++++++--------------------- 2 files changed, 152 insertions(+), 120 deletions(-) diff --git a/config/default.js b/config/default.js index 2c056fe..7d5198b 100644 --- a/config/default.js +++ b/config/default.js @@ -56,5 +56,14 @@ module.exports = { MONEY: { testdb_testtable5 : 'dmoney' } + }, + RECONCILER : + { + RECONCILER_IGNORE_STATUS : 'PostgresUpdated', + RECONCILER_START_ELAPSE_TIME : 1, + RECONCILER_DIFF_PERIOD : 10, + RECONCILER_DURATION_TYPE : 'm', + RECONCILER_RETRY_COUNT : 1, + RECONCILER_POST_URL : 'http://ifxpg-migrator.topcoder-dev.com/kafkaevents' } } diff --git a/src/reconciler.js b/src/reconciler.js index 994dc75..c8ea498 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -1,47 +1,36 @@ const config = require('config') //Establishing connection in postgress const pg = require('pg') +const request = require("request"); const pgOptions = config.get('POSTGRES') const database = 'auditlog' const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${database}` console.log(pgConnectionString) //const pgClient = new pg.Client(pgConnectionString) -//pgClient.connect();i -async function setupPgClient () { - try { - await pgClient.connect() - logger.debug('Connected to Pg Client2 Audit:') - } - catch (err) { - logger.error('Could not setup postgres client2') - logger.logFullError(err) - process.exit() - } -} +//pgClient.connect(); const logger = require('./src/common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); -async function dynamo_pg_validation() -{ -var docClient = new AWS.DynamoDB.DocumentClient({ - region: config.DYNAMODB.REGION, - convertEmptyValues: true - }); - -//ElapsedTime = 600000 -ElapsedTime = 4995999000 - var params = { - TableName: config.DYNAMODB.TABLENAME, - FilterExpression: "NodeSequenceID between :time_1 and :time_2", - ExpressionAttributeValues: { - ":time_1": Date.now() - ElapsedTime, - ":time_2": Date.now() +async function dynamo_pg_validation() { + var docClient = new AWS.DynamoDB.DocumentClient({ + region: config.DYNAMODB.REGION, + convertEmptyValues: true + }); + + //ElapsedTime = 600000 + ElapsedTime = 4995999000 + var params = { + TableName: config.DYNAMODB.TABLENAME, + FilterExpression: "NodeSequenceID between :time_1 and :time_2", + ExpressionAttributeValues: { + ":time_1": Date.now() - ElapsedTime, + ":time_2": Date.now() + } } - } - console.log("scanning"); - await docClient.scan(params, onScan); - return + console.log("scanning"); + await docClient.scan(params, onScan); + return } async function onScan(err, data) { if (err) { @@ -49,9 +38,9 @@ async function onScan(err, data) { } else { console.log("Scan succeeded."); - data.Items.forEach(async function(item) { -// console.log(item); - await validate_data_in_pg(item.SequenceID,item.pl_document) + data.Items.forEach(async function (item) { + // console.log(item); + await validate_data_in_pg(item.SequenceID, item.pl_document) }); @@ -62,105 +51,139 @@ async function onScan(err, data) { console.log("Scanning for more..."); params.ExclusiveStartKey = data.LastEvaluatedKey; await docClient.scan(params, onScan); - } - else - { - return + } else { + return } } } -async function validate_data_in_pg(SequenceID,payload) -{ - const pgClient 
= new pg.Client(pgConnectionString) - // await setupPgClient() - try { - await pgClient.connect() +async function validate_data_in_pg(SequenceID, payload) { + const pgClient = new pg.Client(pgConnectionString) + // await setupPgClient() + try { + await pgClient.connect() logger.debug('Connected to Pg Client2 Audit:') + } catch (err) { + logger.error('Could not setup postgres client2') + logger.logFullError(err) + process.exit() } - catch (err) { - logger.error('Could not setup postgres client2') - logger.logFullError(err) - process.exit() - } console.log(SequenceID); - let schemaname = 'public'; + let schemaname = 'public'; const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE seq_id=$1'; - const sqlquerytovalidate_values =[SequenceID] + const sqlquerytovalidate_values = [SequenceID] console.log(sqlquerytovalidate); - await pgClient.query(sqlquerytovalidate, sqlquerytovalidate_values,async (err,res) => { - - if (err) { - var errmsg0 = `error-sync: Audit reconsiler query "${err.message}"` - logger.debug (errmsg0) - // await callposttoslack(errmsg0) + await pgClient.query(sqlquerytovalidate, sqlquerytovalidate_values, async (err, res) => { + + if (err) { + var errmsg0 = `error-sync: Audit reconsiler query "${err.message}"` + logger.debug(errmsg0) + // await callposttoslack(errmsg0) + } else { + console.log("validating data count---------------------"); + const data = res.rows; + data.forEach(async (row) => { + if (row['count'] == 0) { + await posttopic(payload, 0) + console.log("post the topic"); + } else { + console.log(`${SequenceID} is exist in pg`) + } + }); + } + pgClient.end(); + }); + return } -else -{ - console.log("validating data count---------------------"); - const data = res.rows; - data.forEach(async(row) => { - if (row['count'] == 0 ) - { - //await posttopic(payload,0) - console.log("post the topic"); - } - else - { - console.log(`${SequenceID} is exist in pg`) - } - - + +async function repostfailure() { + const pgClient = new pg.Client(pgConnectionString) + // await setupPgClient() + try { + await pgClient.connect() + logger.debug('Connected to Pg Client2 Audit:') + } catch (err) { + logger.error('Could not setup postgres client2') + logger.logFullError(err) + process.exit() + } + + // select seq_id, producer_payload, overall_status from audit_log where + // overall_status not in ('PostgresUpdated') and + // request_create_time between (timezone('utc',now()) - interval '10m') and (timezone('utc',now()) - interval '1m') and + // reconcile_status < 1 ; + rec_ignore_status = config.RECONCILER.RECONCILER_IGNORE_STATUS + rec_start_elapse = config.RECONCILER.RECONCILER_START_ELAPSE_TIME + rec_diff_period = config.RECONCILER.RECONCILER_DIFF_PERIOD + rec_interval_type = config.RECONCILER.RECONCILER_DURATION_TYPE + rec_retry_count = config.RECONCILER.RECONCILER_RETRY_COUNT + + sql1 = "select seq_id, producer_payload from audit_log where audit_log.overall_status not in ($1)" + sql2 = " and audit_log.request_create_time between (timezone('utc',now()) - interval '1" + rec_interval_type + "' * $2)" + sql3 = " and (timezone('utc',now()) - interval '1" + rec_interval_type + "' * $3)" + sql4 = " and audit_log.reconcile_status < $4 ;" + sqltofetchfailure = sql1 + sql2 + sql3 + sql4 + var sqltofetchfailure_values = [rec_ignore_status, rec_diff_period, rec_start_elapse, rec_retry_count] + console.log('sql : ', sqltofetchfailure) + await pgClient.query(sqltofetchfailure, sqltofetchfailure_values, async (err, res) => { + + if (err) { + var errmsg0 = `error-sync: Audit reconsiler query 
"${err.message}"` + logger.debug(errmsg0) + // await callposttoslack(errmsg0) + } else { + console.log("Reposting Data---------------------\n"); + const data = res.rows; + data.forEach(async (row) => { + console.log("\npost the topic for : " + row['seq_id']); + await posttopic(row['producer_payload'], 1) + }); + } + pgClient.end(); }); + return + } -pgClient.end(); + +async function postpayload_to_restapi(payload) { + let options = { + method: 'POST', + url: config.RECONCILER.RECONCILER_POST_URL, + headers: { + 'cache-control': 'no-cache', + 'Content-Type': 'application/json' + }, + body: JSON.stringify(payload), + json: true + }; + + request(options, function (error, response, body) { + if (error) throw new Error(error); + console.log(body); }); -return + return } -async function main() -{ -//await setupPgClient() -await dynamo_pg_validation() -//await pgClient.on('end') + +async function posttopic(payload, integratereconcileflag) { + console.log(payload + " " + integratereconcileflag); + if (integratereconcileflag == 1) { + //update payload with reconcile status + //post to rest api + let reconcile_flag = payload['RECONCILE_STATUS'] ? payload['RECONCILE_STATUS'] : 0 + reconcile_flag = reconcile_flag + 1 + payload.RECONCILE_STATUS = reconcile_flag + await postpayload_to_restapi(payload) + } else { + //post to rest api + await postpayload_to_restapi(payload) + } + return +} +async function main() { + //await setupPgClient() + await dynamo_pg_validation() + //await pgClient.on('end') + await repostfailure() + } main() -//await pgClient.end() -// docClient.query(params, function(err, data) { -// if (err) { -// console.error("Unable to query. Error:", JSON.stringify(err, null, 2)); -// } else { -// console.log("Query succeeded."); -// data.Items.forEach(function(item) { -// console.log(" -", item.year + ": " + item.title); -// //select query from for last 10 mins pg fethte seq_id -// //compare the dynamo seqid exist with pgseqid -// //if not exist , post res api with paylaod from dynamodb -// }); -// } -// }); -//case 1 Reading dequence ID from dynamo DB - - -// case 1 : - -// Dynamo DB only exist . But not in pg - -// Get last 10 minutes sequenceid based on payload time pl_time from dynamodb -// Check the sequenceid existance in PG audit auditlog -// If exist, please ignore -// If not exist, retrive the payload for the respective sequenceid from dynamodb -// Post the payload to producer with restapi - -// Case 2: - -// Fetch the row with below condition from auditlog - -// 1) PAYLOAD_TIME < currenttime -5 and PAYLOAD_TIME > currenttime -25 (This will max time limit will the reconcile logic need to considered) -// 2) OVERALL_STATUS should not be PostgresUpdated (This will help identify which need to be reposted) -// 3) RECONCILE_STATUS is equal to 0 (This will help to set reconcile try count with in particular time schedule) - -// Get the payload from PRODUCER_PAYLOAD on audit log for the abiove condition -// Check the RECONCILE_STATUS exist on payload. 
-// if not, inititate the RECONCILE_STATUS as 1 and embed in payload -// if exist, increase by 1 and update in payload -// Repost the payload to producer \ No newline at end of file From 2b7594b42416eaf377cfcf86de6adde7f10b7656 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 14 Feb 2020 11:46:48 +0530 Subject: [PATCH 078/101] Reconcile code change --- config/default.js | 1 + src/reconciler.js | 12 +++++------- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/config/default.js b/config/default.js index 7d5198b..cfbf29e 100644 --- a/config/default.js +++ b/config/default.js @@ -59,6 +59,7 @@ module.exports = { }, RECONCILER : { + RECONCILER_ELAPSE_TIME : 60000, RECONCILER_IGNORE_STATUS : 'PostgresUpdated', RECONCILER_START_ELAPSE_TIME : 1, RECONCILER_DIFF_PERIOD : 10, diff --git a/src/reconciler.js b/src/reconciler.js index c8ea498..5e35924 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -11,16 +11,16 @@ console.log(pgConnectionString) const logger = require('./src/common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); - +var params async function dynamo_pg_validation() { var docClient = new AWS.DynamoDB.DocumentClient({ region: config.DYNAMODB.REGION, convertEmptyValues: true }); - //ElapsedTime = 600000 - ElapsedTime = 4995999000 - var params = { + ElapsedTime = config.RECONCILER.RECONCILER_ELAPSE_TIME + //ElapsedTime = 4995999000 + params = { TableName: config.DYNAMODB.TABLENAME, FilterExpression: "NodeSequenceID between :time_1 and :time_2", ExpressionAttributeValues: { @@ -69,7 +69,6 @@ async function validate_data_in_pg(SequenceID, payload) { process.exit() } console.log(SequenceID); - let schemaname = 'public'; const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE seq_id=$1'; const sqlquerytovalidate_values = [SequenceID] console.log(sqlquerytovalidate); @@ -114,7 +113,7 @@ async function repostfailure() { // reconcile_status < 1 ; rec_ignore_status = config.RECONCILER.RECONCILER_IGNORE_STATUS rec_start_elapse = config.RECONCILER.RECONCILER_START_ELAPSE_TIME - rec_diff_period = config.RECONCILER.RECONCILER_DIFF_PERIOD + rec_diff_period = config.RECONCILER.RECONCILER_DIFF_PERIOD //Need to be equal to or greater than scheduler time rec_interval_type = config.RECONCILER.RECONCILER_DURATION_TYPE rec_retry_count = config.RECONCILER.RECONCILER_RETRY_COUNT @@ -126,7 +125,6 @@ async function repostfailure() { var sqltofetchfailure_values = [rec_ignore_status, rec_diff_period, rec_start_elapse, rec_retry_count] console.log('sql : ', sqltofetchfailure) await pgClient.query(sqltofetchfailure, sqltofetchfailure_values, async (err, res) => { - if (err) { var errmsg0 = `error-sync: Audit reconsiler query "${err.message}"` logger.debug(errmsg0) From 999f2305f31178306d574cf8e9b1cb4fcc5dc835 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 14 Feb 2020 22:05:07 +0530 Subject: [PATCH 079/101] Reconcile code change --- src/reconciler.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconciler.js b/src/reconciler.js index 5e35924..a5fa40c 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -11,7 +11,7 @@ console.log(pgConnectionString) const logger = require('./src/common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); -var params +let params async function dynamo_pg_validation() { var docClient = new AWS.DynamoDB.DocumentClient({ region: config.DYNAMODB.REGION, From fa2c61c0d196344b0607bc3a96257edbb92a69a8 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Fri, 14 Feb 2020 23:26:49 +0530 
From fa2c61c0d196344b0607bc3a96257edbb92a69a8 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 14 Feb 2020 23:26:49 +0530
Subject: [PATCH 080/101] Reconcile code change

---
 config/default.js | 2 +-
 src/reconciler.js | 11 +++++------
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/config/default.js b/config/default.js
index cfbf29e..f204984 100644
--- a/config/default.js
+++ b/config/default.js
@@ -59,7 +59,7 @@ module.exports = { }, RECONCILER : { - RECONCILER_ELAPSE_TIME : 60000, + RECONCILER_ELAPSE_TIME : 600000, RECONCILER_IGNORE_STATUS : 'PostgresUpdated', RECONCILER_START_ELAPSE_TIME : 1, RECONCILER_DIFF_PERIOD : 10,

diff --git a/src/reconciler.js b/src/reconciler.js
index a5fa40c..49fa887 100644
--- a/src/reconciler.js
+++ b/src/reconciler.js
@@ -11,13 +11,12 @@ console.log(pgConnectionString) const logger = require('./src/common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); -let params +var params +var docClient = new AWS.DynamoDB.DocumentClient({ + region: config.DYNAMODB.REGION, + convertEmptyValues: true +}); async function dynamo_pg_validation() { - var docClient = new AWS.DynamoDB.DocumentClient({ - region: config.DYNAMODB.REGION, - convertEmptyValues: true - }); - ElapsedTime = config.RECONCILER.RECONCILER_ELAPSE_TIME //ElapsedTime = 4995999000 params = {

From 4265921f6bb82e813ae2d514c9487f807199e925 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Mon, 17 Feb 2020 11:33:48 +0530
Subject: [PATCH 081/101] case sensitive issue

---
 .circleci/config.yml | 8 ++++----
 src/models/index.js | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 70d367c..216e160 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -36,10 +36,10 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # producer deployment + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # consumer deployment # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar

diff --git a/src/models/index.js b/src/models/index.js
index 7d62456..7c5a701 100644
--- a/src/models/index.js
+++ b/src/models/index.js
@@ -23,6 +23,6 @@ models['auditlog'].consumer_log = db['auditlog'].import('./consumer_log') config.db.DB_NAME.forEach(dbname =>{ db[dbname].sync({ force:false }) }) - +models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') models.Sequelize = Sequelize module.exports = models
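Patch 081 above runs its ALTER TABLE with an unquoted column name, and that is the whole "case sensitive issue" this run of patches is chasing: Postgres folds unquoted identifiers to lower case, so ALTER COLUMN REQUEST_CREATE_TIME targets a column named request_create_time and misses the upper-case column Sequelize actually created. Patch 083 below lands the working form by double-quoting the identifier. The same fix, isolated as a sketch (fixRequestCreateTimeColumn is an illustrative wrapper around the model registry object the diffs use):

    // Unquoted, Postgres reads the name as request_create_time and the
    // ALTER cannot find the Sequelize-created upper-case column; quoting
    // the identifier preserves its case exactly.
    async function fixRequestCreateTimeColumn (models) {
      await models['auditlog'].sequelize.query(
        'ALTER TABLE audit_log ALTER COLUMN "REQUEST_CREATE_TIME" TYPE TIMESTAMP without time zone;'
      )
    }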
From 532e404e4cee3a249633201edf846fe223b7d74e Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Mon, 17 Feb 2020 15:40:46 +0530
Subject: [PATCH 082/101] case sensitive issue

---
 src/models/index.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/models/index.js b/src/models/index.js
index 7c5a701..3d69bf0 100644
--- a/src/models/index.js
+++ b/src/models/index.js
@@ -21,8 +21,11 @@ models['auditlog'].consumer_log = db['auditlog'].import('./consumer_log') // models['auditlog'].consumer_log.belongsTo(models['auditlog'].producer_log, { foreignKey: 'SEQ_ID' }) config.db.DB_NAME.forEach(dbname =>{ - db[dbname].sync({ force:false }) + db[dbname].sync({ force:false }).then(() => { + console.log(`Database & ${dbname} tables created!`) + models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') + }) }) -models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') +//models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') models.Sequelize = Sequelize module.exports = models

From d1c19c8a6b6ffb3aea6925797555198e76ca3f28 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Mon, 17 Feb 2020 16:33:26 +0530
Subject: [PATCH 083/101] case sensitive issue

---
 src/models/index.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/models/index.js b/src/models/index.js
index 3d69bf0..046c99d 100644
--- a/src/models/index.js
+++ b/src/models/index.js
@@ -23,7 +23,7 @@ models['auditlog'].consumer_log = db['auditlog'].import('./consumer_log') config.db.DB_NAME.forEach(dbname =>{ db[dbname].sync({ force:false }).then(() => { console.log(`Database & ${dbname} tables created!`) - models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') + models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN "REQUEST_CREATE_TIME" TYPE TIMESTAMP without time zone;') }) }) //models['auditlog'].sequelize.query('ALTER TABLE audit_log ALTER COLUMN REQUEST_CREATE_TIME TYPE TIMESTAMP without time zone;') models.Sequelize = Sequelize module.exports = models

From e5582bcb844561758659981fd34f9b7dda2f423c Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Mon, 17 Feb 2020 20:58:26 +0530
Subject: [PATCH 084/101] case sensitive issue

---
 .circleci/config.yml | 8 ++++----
 src/reconciler.js | 13 +++++++------
 2 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 216e160..70d367c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -36,10 +36,10 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # producer deployment - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # producer deployment + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # consumer deployment # rm -rf buildenvvar # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar

diff --git a/src/reconciler.js b/src/reconciler.js
index 49fa887..ddecbe6 100644
--- a/src/reconciler.js
+++ b/src/reconciler.js
@@ -116,12 +116,13 @@ async function repostfailure() { rec_interval_type = config.RECONCILER.RECONCILER_DURATION_TYPE rec_retry_count = config.RECONCILER.RECONCILER_RETRY_COUNT - sql1 = "select seq_id, producer_payload from audit_log where audit_log.overall_status not in ($1)" - sql2 = " and audit_log.request_create_time between (timezone('utc',now()) - interval '1" + rec_interval_type + "' * $2)" - sql3 = " and (timezone('utc',now()) - interval '1" + rec_interval_type + "' * $3)" - sql4 = " and audit_log.reconcile_status < $4 ;" + sql1 = `select "SEQ_ID", "PRODUCER_PAYLOAD" from audit_log where "OVERALL_STATUS" not in ($1)` + sql2 = ` and 
"REQUEST_CREATE_TIME" between (timezone('utc',now()) - interval '1${rec_interval_type}' * $2)` + sql3 = ` and (timezone('utc',now()) - interval '1${rec_interval_type}' * $3)` + sql4 = ` and "RECONCILE_STATUS" < $4 ;` sqltofetchfailure = sql1 + sql2 + sql3 + sql4 var sqltofetchfailure_values = [rec_ignore_status, rec_diff_period, rec_start_elapse, rec_retry_count] + //var sqltofetchfailure_values = [rec_ignore_status, rec_diff_period, rec_start_elapse, rec_retry_count] console.log('sql : ', sqltofetchfailure) await pgClient.query(sqltofetchfailure, sqltofetchfailure_values, async (err, res) => { if (err) { @@ -132,8 +133,8 @@ async function repostfailure() { console.log("Reposting Data---------------------\n"); const data = res.rows; data.forEach(async (row) => { - console.log("\npost the topic for : " + row['seq_id']); - await posttopic(row['producer_payload'], 1) + console.log("\npost the topic for : " + row['SEQ_ID']); + await posttopic(row['PRODUCER_PAYLOAD'], 1) }); } pgClient.end(); From 71d4ad24a2d774605886bbd43421e5305cc7f9cf Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Tue, 18 Feb 2020 10:42:22 +0530 Subject: [PATCH 085/101] reconcile take update --- .circleci/config.yml | 5 +++++ package.json | 3 ++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 70d367c..f2a0aa0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,6 +55,11 @@ builddeploy_steps: &builddeploy_steps # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # # reconciler deployment + # rm -rf buildenvvar + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} jobs: diff --git a/package.json b/package.json index 31ba7d3..ebc12c3 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,8 @@ "producer": "nodemon src/nodeserver.js", "consumer": "nodemon src/consumer.js", "producerwithoutkafka" : "nodemon src/node-server-without-kafka.js", - "ifxpgnotify": "nodemon src/consumer-slacknotify.js" + "ifxpgnotify": "nodemon src/consumer-slacknotify.js", + "reconciler": "nodemon src/reconciler.js" }, "author": "", "license": "ISC", From 8f2cde0235013dec97abb2384c7e54a5e1aa06fc Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 19 Feb 2020 13:18:04 +0530 Subject: [PATCH 086/101] code changes for unsetenv --- .circleci/config.yml | 49 ++++++++++++++++++++++++-------------------- src/reconciler.js | 13 ++++++++++-- unsetenv.sh | 10 +++++++++ 3 files changed, 48 insertions(+), 24 deletions(-) create mode 100755 unsetenv.sh diff --git a/.circleci/config.yml b/.circleci/config.yml index f2a0aa0..9b536a7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,29 +36,34 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # # producer deployment - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # consumer deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar - # 
./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # without kafka dynamodb - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # # notify deployment - # rm -rf buildenvvar - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} - # # reconciler deployment - # rm -rf buildenvvar - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + # producer deployment + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # consumer deployment + rm -rf buildenvvar + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # without kafka dynamodb + rm -rf buildenvvar + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # notify deployment + rm -rf buildenvvar + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # reconciler deployment + rm -rf buildenvvar + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/src/reconciler.js b/src/reconciler.js index ddecbe6..98c978b 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -156,8 +156,17 @@ async function postpayload_to_restapi(payload) { }; request(options, function (error, response, body) { - if (error) throw new Error(error); - console.log(body); + if (error) { + var errmsg0 = `error-sync: Audit Reconsiler1 query "${error.message}"` + logger.debug (errmsg0) + throw new Error(error); + + } + else + { + console.log("ReconcilerIFXtoPG : " + payload.SEQ_ID + " Success") + console.log(body); + } }); return } diff --git a/unsetenv.sh b/unsetenv.sh new file mode 100755 index 0000000..3b4975c --- /dev/null +++ b/unsetenv.sh @@ -0,0 +1,10 @@ +#!/bin/bash +unset "AWS_REPOSITORY" +unset "AWS_ECS_CLUSTER" +unset "AWS_ECS_SERVICE" +unset "AWS_ECS_TASK_FAMILY" +unset "AWS_ECS_CONTAINER_NAME" +unset "AWS_ECS_PORTS" +unset "AWS_ECS_CONTAINER_CMD" +unset "AWS_ECS_CONTAINER_HEALTH_CMD" +unset "AWS_ECS_TASK_ROLE_ARN" From 476416da194cff068a685aa7f87a5ce49542ec4f Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 19 Feb 2020 14:48:48 +0530 Subject: [PATCH 087/101] code changes for unsetenv --- .circleci/config.yml | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git 
a/.circleci/config.yml b/.circleci/config.yml index 9b536a7..512859f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,23 +36,23 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # producer deployment - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # consumer deployment - rm -rf buildenvvar - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # without kafka dynamodb - rm -rf buildenvvar - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # # producer deployment + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # consumer deployment + # rm -rf buildenvvar + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # without kafka dynamodb + # rm -rf buildenvvar + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # notify deployment rm -rf buildenvvar ./unsetenv.sh From a9dc89f82ee50f27ab4ef1a0236c8a5076fe5732 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 20 Feb 2020 17:11:58 +0530 Subject: [PATCH 088/101] code changes for unsetenv --- .circleci/config.yml | 16 ++++++++-------- src/reconciler.js | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 512859f..fbc1b5a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -53,15 +53,15 @@ builddeploy_steps: &builddeploy_steps # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # notify deployment - rm -rf buildenvvar - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # # notify deployment + # rm -rf buildenvvar + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} # reconciler deployment - rm -rf buildenvvar - ./unsetenv.sh + # rm -rf buildenvvar + # ./unsetenv.sh ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar source 
buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} diff --git a/src/reconciler.js b/src/reconciler.js index 98c978b..8363d4e 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -8,7 +8,7 @@ const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password} console.log(pgConnectionString) //const pgClient = new pg.Client(pgConnectionString) //pgClient.connect(); -const logger = require('./src/common/logger') +const logger = require('./common/logger') const _ = require('lodash') var AWS = require("aws-sdk"); var params From 340d99874a163bdfafaa1028e3331f0673ac675c Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 20 Feb 2020 18:04:01 +0530 Subject: [PATCH 089/101] code changes for unsetenv --- src/reconciler.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconciler.js b/src/reconciler.js index 8363d4e..138155e 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -68,7 +68,7 @@ async function validate_data_in_pg(SequenceID, payload) { process.exit() } console.log(SequenceID); - const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE seq_id=$1'; + const sqlquerytovalidate = 'SELECT COUNT(*) FROM audit_log WHERE "SEQ_ID"=$1'; const sqlquerytovalidate_values = [SequenceID] console.log(sqlquerytovalidate); await pgClient.query(sqlquerytovalidate, sqlquerytovalidate_values, async (err, res) => { From aae7954443c74c11d48f92e2022d1dbb126c86f7 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 20 Feb 2020 20:16:34 +0530 Subject: [PATCH 090/101] code changes for unsetenv --- src/reconciler.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconciler.js b/src/reconciler.js index 138155e..ac9b96c 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -151,7 +151,7 @@ async function postpayload_to_restapi(payload) { 'cache-control': 'no-cache', 'Content-Type': 'application/json' }, - body: JSON.stringify(payload), + body: payload, json: true }; From 7fff6f6b15b4bf32c1005a8e5a6c04d2d052e892 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Thu, 20 Feb 2020 20:54:43 +0530 Subject: [PATCH 091/101] code changes for unsetenv --- package.json | 2 +- src/reconciler.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index ebc12c3..3f6d6d4 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "consumer": "nodemon src/consumer.js", "producerwithoutkafka" : "nodemon src/node-server-without-kafka.js", "ifxpgnotify": "nodemon src/consumer-slacknotify.js", - "reconciler": "nodemon src/reconciler.js" + "reconciler": "node src/reconciler.js" }, "author": "", "license": "ISC", diff --git a/src/reconciler.js b/src/reconciler.js index ac9b96c..a1481bb 100644 --- a/src/reconciler.js +++ b/src/reconciler.js @@ -164,7 +164,7 @@ async function postpayload_to_restapi(payload) { } else { - console.log("ReconcilerIFXtoPG : " + payload.SEQ_ID + " Success") + console.log("ReconcilerIFXtoPG : " + payload['TIME'] + "_" + payload['TABLENAME'] + " Success") console.log(body); } }); From 4537e48d5b1af8917e857159d0555456e8475750 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 2 Mar 2020 12:52:35 +0530 Subject: [PATCH 092/101] producer and consumer deployment --- .circleci/config.yml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fbc1b5a..0fb9e3d 100644 --- a/.circleci/config.yml +++ 
b/.circleci/config.yml @@ -36,17 +36,17 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # # producer deployment - # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # consumer deployment - # rm -rf buildenvvar - # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # producer deployment + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # consumer deployment + rm -rf buildenvvar + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} # # without kafka dynamodb # rm -rf buildenvvar # ./unsetenv.sh @@ -61,10 +61,10 @@ builddeploy_steps: &builddeploy_steps # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} # reconciler deployment # rm -rf buildenvvar - # ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} jobs: From ee04fe1224e16558ed05e36fa6be955146573025 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Mon, 2 Mar 2020 13:10:56 +0530 Subject: [PATCH 093/101] producer and consumer deployment --- .circleci/config.yml | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0fb9e3d..553daef 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,35 +36,35 @@ builddeploy_steps: &builddeploy_steps command: | ./awsconfiguration.sh $DEPLOY_ENV source awsenvconf - # producer deployment - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # consumer deployment + # # producer deployment + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # notify deployment rm -rf buildenvvar - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar source buildenvvar - ./master_deploy.sh -d ECS -e 
$DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # without kafka dynamodb + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # # consumer deployment # rm -rf buildenvvar # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # # notify deployment + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # without kafka dynamodb # rm -rf buildenvvar - # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} # reconciler deployment - # rm -rf buildenvvar - # # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar - # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + rm -rf buildenvvar + # ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} jobs: From 61df77da7e092565ddb0045e2ee858ce369da576 Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 4 Mar 2020 16:28:48 +0530 Subject: [PATCH 094/101] consumer code changes for date issue --- .circleci/config.yml | 38 +++++++++++++++++++------------------- src/api/migrateifxpg.js | 20 ++++++++++---------- src/api/migratepg.js | 10 +++++----- 3 files changed, 34 insertions(+), 34 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 553daef..9c15b8b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -41,30 +41,30 @@ builddeploy_steps: &builddeploy_steps # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-producer-deployvar # source buildenvvar # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # notify deployment - rm -rf buildenvvar - # ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} - # # consumer deployment + # # notify deployment # rm -rf buildenvvar - # ./unsetenv.sh - # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar + # # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-notify-deployvar # source buildenvvar - # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} - # # without kafka dynamodb + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t 
latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-notify-appvar -i ${APPNAME} + # # consumer deployment # rm -rf buildenvvar - ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # ./unsetenv.sh + ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-consumer-deployvar source buildenvvar - ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} - # reconciler deployment - rm -rf buildenvvar - # ./unsetenv.sh - ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar - source buildenvvar ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} + # # without kafka dynamodb + # rm -rf buildenvvar + # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-withoutkafka-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-${APPNAME}-withoutkafka-appvar -i ${APPNAME} + # # reconciler deployment + # rm -rf buildenvvar + # # ./unsetenv.sh + # ./buildenv.sh -e $DEPLOY_ENV -b ${LOGICAL_ENV}-${APPNAME}-reconciler-deployvar + # source buildenvvar + # ./master_deploy.sh -d ECS -e $DEPLOY_ENV -t latest -s ${LOGICAL_ENV}-global-appvar,${LOGICAL_ENV}-${APPNAME}-appvar -i ${APPNAME} jobs: diff --git a/src/api/migrateifxpg.js b/src/api/migrateifxpg.js index 30ea692..8766a72 100755 --- a/src/api/migrateifxpg.js +++ b/src/api/migrateifxpg.js @@ -79,7 +79,7 @@ async function migrateifxinsertdata(payload, client) { } //console.log(columns[colName]) //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " @@ -94,7 +94,7 @@ async function migrateifxinsertdata(payload, client) { } //console.log(columns[colName]) //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " @@ -125,7 +125,7 @@ async function migrateifxinsertdata(payload, client) { const values = []; columnNames.forEach((colName) => { if (row[colName]) { - if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp') { + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'date' ) { console.log(`utf8 or datetime format ${colName}`); // values.push(new Buffer.from(row[colName],'binary')); values.push(row[colName]); @@ -220,7 +220,7 @@ async function migrateifxupdatedata(payload, client) { if (bufffernewcond == 1) { conditionstr = conditionstr + " and " } - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['new'].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " @@ -233,7 +233,7 @@ async function migrateifxupdatedata(payload, client) { } //console.log(colobj.old); //oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['old'].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['old'].toUpperCase() == 'NULL') { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } else { oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " @@ -246,7 +246,7 @@ async function migrateifxupdatedata(payload, client) { if (bufffernewcond == 1) { conditionstr = conditionstr + " and " } - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['new'].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + colobj.new + "' " @@ -259,7 +259,7 @@ async function migrateifxupdatedata(payload, client) { } //console.log(colobj.old); //oldconditionstr = oldconditionstr + tablename + "." + colName + "= '" + colobj.old + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && colobj['old'].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['old'].toUpperCase() == 'NULL') { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } else { oldconditionstr = oldconditionstr + "\"" + colName + "\"= '" + colobj.old + "' " @@ -293,7 +293,7 @@ async function migrateifxupdatedata(payload, client) { updatestr = updatestr + " , " } if (row[colName]) { - if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp') { + if (isUtf8(row[colName]) || datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'date' ) { //console.log(`utf8 format ${colName}`); values.push(row[colName]); updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " " @@ -397,7 +397,7 @@ async function migrateifxdeletedata(payload, client) { } //console.log(columns[colName]) //conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " @@ -412,7 +412,7 @@ async function migrateifxdeletedata(payload, client) { } //console.log(columns[colName]) //conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 917ebaa..039d24a 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -103,7 +103,7 @@ console.log(payload[fieldname]['old']) if (bufferforsetdatastr == 1) { setdatastr = setdatastr + " , " } - if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['new'].toUpperCase() == 'NULL' ) + if ( (datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL' ) { setdatastr = setdatastr + "\"" + colName + "\"= NULL " } @@ -116,7 +116,7 @@ console.log(payload[fieldname]['old']) if (buffferoldcond == 1) { oldconditionstr = oldconditionstr + " and " } - if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } @@ -133,7 +133,7 @@ console.log(payload[fieldname]['old']) if (buffferoldcond == 1) { oldconditionstr = oldconditionstr + " and " } - if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' ) && colobj['old'].toUpperCase() == 'NULL' ) + if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['old'].toUpperCase() == 'NULL' ) { oldconditionstr = oldconditionstr + "\"" + colName + "\" is NULL " } @@ -221,7 +221,7 @@ async function migratepgDelete(dbpool, payload) { if (bufffercond == 1) { conditionstr = conditionstr + " and " } - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." 
+ colName + "= '" + columns[colName] + "' " @@ -233,7 +233,7 @@ async function migratepgDelete(dbpool, payload) { if (bufffercond == 1) { conditionstr = conditionstr + " and " } - if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric') && columns[colName].toUpperCase() == 'NULL') { + if ((datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && columns[colName].toUpperCase() == 'NULL') { conditionstr = conditionstr + tablename + "." + colName + " is NULL " } else { conditionstr = conditionstr + tablename + "." + colName + "= '" + columns[colName] + "' " From 71b41d739bf7576e4c5adb1da4b5ecaba9903d4e Mon Sep 17 00:00:00 2001 From: Gunasekar-K Date: Wed, 11 Mar 2020 16:49:54 +0530 Subject: [PATCH 095/101] special char issue --- src/api/migratepg.js | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/src/api/migratepg.js b/src/api/migratepg.js index 039d24a..aaf8120 100644 --- a/src/api/migratepg.js +++ b/src/api/migratepg.js @@ -7,6 +7,7 @@ const pg_dbname = config.get('POSTGRES.database') async function migratepgInsert(dbpool, payload) { console.log(payload); const table = payload.TABLENAME + const tablename = payload.TABLENAME const dbname = payload.SCHEMANAME payload = payload.DATA try { @@ -29,10 +30,16 @@ console.log(payload[fieldname]) sql = `SET search_path TO ${schemaname};`; console.log(sql); await client.query(sql); - sql = `insert into "${table}" (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" + const paramSql = Array.from(Array(columnNames.length).keys(), x => `$${x + 1}`).join(','); + sql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`; + const values = []; + columnNames.forEach((colName) => { + values.push(payload[colName]); + }); + //sql = `insert into "${table}" (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into :
(col_1, col_2, ...) values (val_1, val_2, ...)" console.log("Executing query : " + sql); - // sql = "insert into test6 (cityname) values ('verygoosdsdsdsd');"; - await client.query(sql); + // await client.query(sql); + await client.query(sql, values); //await client.release(true); console.log(`end connection of postgres for database`); } catch (e) { @@ -162,12 +169,19 @@ console.log(payload[fieldname]['old']) console.log(sql); await client.query(sql); // sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update :
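
Note on the patch above: the actual special-character fix is the switch from string-interpolated values to numbered placeholders ($1..$n) with a separate values array. node-postgres binds the values out of band, so quotes and other special characters in the data can no longer break, or inject into, the SQL text. A minimal standalone sketch of the insert pattern, assuming `client` is a connected pg client and `payload` maps column names to values:

    // Sketch only, under the assumptions stated above.
    const columnNames = Object.keys(payload);
    const placeholders = columnNames.map((_, i) => `$${i + 1}`).join(',');
    const sql = `insert into "${tablename}" (${columnNames.map(c => `"${c}"`).join(',')}) values(${placeholders})`;
    const values = columnNames.map(c => payload[c]);
    await client.query(sql, values); // values are bound, never spliced into the SQL string
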

From 35eb2dfd2ace0584d5af197635a04d7d917bbb65 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Wed, 11 Mar 2020 17:48:52 +0530
Subject: [PATCH 096/101] special char issue

---
 src/api/migratepg.js | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/api/migratepg.js b/src/api/migratepg.js
index aaf8120..93e9d91 100644
--- a/src/api/migratepg.js
+++ b/src/api/migratepg.js
@@ -170,12 +170,23 @@ console.log(payload[fieldname]['old'])
     await client.query(sql);
     // sql = `update ${table} set ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['new']}'`).join(', ')} where ${Object.keys(payload).map((key) => `\"${key}\"='${payload[key]['old']}'`).join(' AND ')} ;` // "update <table> set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val"
-    sql = `update "${table}" set ${columnNames.map(x => `"${x}"=$${x + 1}`).join(',')} where ${oldconditionstr} ;`
+    // sql = `update "${table}" set ${columnNames.map(x => `"${x}"=$${x + 1}`).join(',')} where ${oldconditionstr} ;`
     const values = [];
+    var updatestr = ""
+    counter = 1
+    buffferupcond=0
     columnNames.forEach((colName) => {
       colobj = payload[colName]
       values.push(colobj.new);
+      if (buffferupcond == 1) {
+        updatestr = updatestr + " , "
+      }
+      updatestr = updatestr + "\"" + colName + "\"= \$" + counter + " "
+      buffferupcond = 1
+      counter = counter + 1
     });
+
+    sql = `update "${table}" set ${updatestr} where ${oldconditionstr} ;`
     // sql = `update "${table}" set ${setdatastr} where ${oldconditionstr} ;`
     console.log("sqlstring ..............................");
     console.log(sql);
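
Note on the patch above: it fixes a placeholder bug introduced in PATCH 095. In `columnNames.map(x => `"${x}"=$${x + 1}`)` the callback receives the column name, a string, so `x + 1` is string concatenation and yields placeholders like `$id1` instead of `$1`. The counter loop above is correct; a sketch of the same result using the map index instead of a manual counter:

    // Sketch only: use the array index, not the column name, for the $n number.
    const setClause = columnNames.map((c, i) => `"${c}"=$${i + 1}`).join(' , ');
    const sql = `update "${table}" set ${setClause} where ${oldconditionstr} ;`;
    const values = columnNames.map(c => payload[c].new);
    await client.query(sql, values);
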

From 5a417ed10b83d945c9a3797bceb431a1db513dde Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Wed, 11 Mar 2020 18:46:37 +0530
Subject: [PATCH 097/101] special char issue

---
 src/api/migratepg.js | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/src/api/migratepg.js b/src/api/migratepg.js
index 93e9d91..909ec95 100644
--- a/src/api/migratepg.js
+++ b/src/api/migratepg.js
@@ -177,7 +177,15 @@ console.log(payload[fieldname]['old'])
     buffferupcond=0
     columnNames.forEach((colName) => {
       colobj = payload[colName]
-      values.push(colobj.new);
+      if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL' )
+      {
+        values.push(null);
+      }
+      else
+      {
+        values.push(colobj.new);
+      }
+      //values.push(colobj.new);
       if (buffferupcond == 1) {
         updatestr = updatestr + " , "
       }

From 91d1e7e06ced5d9fb7cc3cea5c4322ecac6e5d40 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 13 Mar 2020 14:36:06 +0530
Subject: [PATCH 098/101] null value issue

---
 src/api/migratepg.js | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/src/api/migratepg.js b/src/api/migratepg.js
index 909ec95..40f52a8 100644
--- a/src/api/migratepg.js
+++ b/src/api/migratepg.js
@@ -34,7 +34,14 @@ console.log(payload[fieldname])
     sql = `insert into "${tablename}" (${columnNames.map(x => `"${x}"`).join(',')}) values(${paramSql})`;
     const values = [];
     columnNames.forEach((colName) => {
+      if ( payload[colName].toUpperCase() == 'NULL' )
+      {
+        values.push(null);
+      }
+      else
+      {
       values.push(payload[colName]);
+      }
     });
     //sql = `insert into "${table}" (\"${columnNames.join('\", \"')}\") values (${columnNames.map((k) => `'${payload[k]}'`).join(', ')});` // "insert into <table> (col_1, col_2, ...) values (val_1, val_2, ...)"
     console.log("Executing query : " + sql);
@@ -177,7 +184,8 @@ console.log(payload[fieldname]['old'])
     buffferupcond=0
     columnNames.forEach((colName) => {
       colobj = payload[colName]
-      if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL' )
+      //if ( ( datatypeobj[colName] == 'timestamp' || datatypeobj[colName] == 'numeric' || datatypeobj[colName] == 'date' ) && colobj['new'].toUpperCase() == 'NULL' )
+      if ( colobj['new'].toUpperCase() == 'NULL' )
       {
         values.push(null);
       }
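
Note on the two patches above: with bound parameters, the audit log's literal string 'NULL' must be converted to a JavaScript null so pg binds SQL NULL rather than a four-character string. PATCH 097 does this for timestamp/numeric/date columns in the update path; PATCH 098 extends it to every column in both the insert and update paths. A hypothetical helper (not in the source) capturing the final behavior:

    // Sketch only: map the audit log's 'NULL' literal to a real null.
    function toPgValue(raw) {
      return (typeof raw === 'string' && raw.toUpperCase() === 'NULL') ? null : raw;
    }
    // usage: values.push(toPgValue(colobj.new));

Like the patches themselves, this cannot distinguish a genuine string value 'NULL' from SQL NULL; that ambiguity is inherited from the audit serialization.
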
 (col_1, col_2, ...) values (val_1, val_2, ...)"
     console.log("Executing query : " + sql);

From 76dc2482410ede8b387fc903c3a9cf0c2de2676c Mon Sep 17 00:00:00 2001
From: informix
Date: Thu, 19 Mar 2020 04:03:36 -0400
Subject: [PATCH 099/101] integer datatype length issue

---
 informix_auditing/audit_util.c | 5 ++++-
 informix_auditing/auditing2.c  | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c
index 3a893bd..59bbbe2 100644
--- a/informix_auditing/audit_util.c
+++ b/informix_auditing/audit_util.c
@@ -76,6 +76,9 @@ mi_string *do_castl(MI_CONNECTION *conn, MI_DATUM *datum,
   if (strcmp("datetime", srcType) == 0) {
     return (mi_datetime_to_string((mi_datetime *)datum));
   }
+  if (strcmp("integer", srcType) == 0) {
+    collen = 30;
+  }
   fn = mi_cast_get(conn, tid, lvar_id, &status);
   if (NULL == fn) {
     switch(status) {
@@ -103,7 +106,7 @@
   precision = mi_type_precision(tdesc);
   printf("rputine read initiated \n");
-
+  printf("rputine read initiated %ld\n",collen);
   new_datum = mi_routine_exec(conn, fn, &ret, datum, collen, precision, fp);
   printf("routine read completed \n");
   pbuf = mi_lvarchar_to_string(new_datum);
diff --git a/informix_auditing/auditing2.c b/informix_auditing/auditing2.c
index 388b946..79c5f62 100644
--- a/informix_auditing/auditing2.c
+++ b/informix_auditing/auditing2.c
@@ -32,7 +32,7 @@
  */
 #include "audit_util.h"

-#define LOGGERFILEPREFIX "/tmp/audit"
+#define LOGGERFILEPREFIX "/mnt/efsifxpg/audit"

 typedef struct chains {
   mi_integer seq;

From c4f04343b32d66e6d39a228abdee1e95e2155516 Mon Sep 17 00:00:00 2001
From: Gunasekar-K
Date: Fri, 20 Mar 2020 12:27:15 +0530
Subject: [PATCH 100/101] length issue for particular data type

---
 informix_auditing/audit_util.c | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/informix_auditing/audit_util.c b/informix_auditing/audit_util.c
index 59bbbe2..e4af24b 100644
--- a/informix_auditing/audit_util.c
+++ b/informix_auditing/audit_util.c
@@ -76,7 +76,13 @@ mi_string *do_castl(MI_CONNECTION *conn, MI_DATUM *datum,
   if (strcmp("datetime", srcType) == 0) {
     return (mi_datetime_to_string((mi_datetime *)datum));
   }
-  if (strcmp("integer", srcType) == 0) {
+  if ((strcmp("integer", srcType) == 0) ||
+      (strcmp("bigint", srcType) == 0) ||
+      (strcmp("int8", srcType) == 0) ||
+      (strcmp("serial", srcType) == 0) ||
+      (strcmp("bigserial", srcType) == 0) ||
+      (strcmp("serial8", srcType) == 0) ||
+      (strcmp("smallint", srcType) == 0)) {
     collen = 30;
   }
"main": "src/nodeserver.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1", - "producer": "nodemon src/nodeserver.js", - "consumer": "nodemon src/consumer.js", - "producerwithoutkafka" : "nodemon src/node-server-without-kafka.js", - "ifxpgnotify": "nodemon src/consumer-slacknotify.js", + "producer": "node src/nodeserver.js", + "consumer": "node src/consumer.js", + "producerwithoutkafka" : "node src/node-server-without-kafka.js", + "ifxpgnotify": "node src/consumer-slacknotify.js", "reconciler": "node src/reconciler.js" }, "author": "", diff --git a/src/api/consumer_retry.js b/src/api/consumer_retry.js index 9d1d481..d4ca5ad 100644 --- a/src/api/consumer_retry.js +++ b/src/api/consumer_retry.js @@ -22,7 +22,7 @@ async function consumerretry(producer, payload) //add auditlog if (!kafka_error) { await producerpost_success_log(payload, "ConsumerReposted") - res.send('done') + //res.send('done') //res.send('done') } else { //add auditlog