diff --git a/.gitignore b/.gitignore index e136e07a7..6b00e9688 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ node_modules .vscode *.sqlite +.nyc_output package-lock.json diff --git a/.travis.yml b/.travis.yml index 396594b0a..9e4fecf82 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,7 @@ stages: - name: dev_deploy if: branch = develop AND type = push - name: package_build - if: (branch = develop AND type = cron) OR (branch = master and type = push) + if: branch = master and type = push jobs: include: @@ -48,7 +48,5 @@ jobs: on: tags: false after_deploy: - - if [ "$TRAVIS_BRANCH" == "develop" ]; then sshpass -p $PRE_PROD_MACHINE_PASSWORD ssh -o StrictHostKeyChecking=no - $PRE_PROD_MACHINE_USERNAME@$PRE_PROD_MACHINE_IP "iofog-controller stop; npm update -g --unsafe-perm iofogcontroller; iofog-controller start"; - else sshpass -p $PROD_MACHINE_PASSWORD ssh -o StrictHostKeyChecking=no - $PROD_MACHINE_USERNAME@$PROD_MACHINE_IP "iofog-controller stop; npm update -g --unsafe-perm iofogcontroller; iofog-controller start"; fi + - sshpass -p $PROD_MACHINE_PASSWORD ssh -o StrictHostKeyChecking=no + $PROD_MACHINE_USERNAME@$PROD_MACHINE_IP "iofog-controller stop; npm update -g --unsafe-perm iofogcontroller; iofog-controller start" diff --git a/package.json b/package.json index bfe5da093..0e292c97f 100644 --- a/package.json +++ b/package.json @@ -1,85 +1,96 @@ { - "name": "iofogcontroller", - "version": "1.0.24", - "description": "ioFog Controller project for Eclipse IoFog @ iofog.org \\nCopyright (c) 2018 Edgeworx, Inc.", - "main": "./src/main.js", - "author": "Saeid Baghbidi", - "contributors": [ - "Kilton Hopkins ", - "Saeid Baghbidi", - "Pavel Kazlou", - "Egor Krylovich", - "Iryna Laryionava", - "Maryna Lipnitskaya", - "Dmitriy Kudasov", - "Dmitry Stolbunov", - "Darya Busel", - "Alexander Shpak", - "Kate Lukashick", - "Eugene Pankov", - "Maksim Chepelev", - "Tetiana Yatsiuk", - "Sergey Valevich" - ], - "license": { - "type": "EPL-2.0", - "url": "https://www.eclipse.org/legal/epl-v20.html" - }, - "bugs": { - "email": "edgemaster@iofog.org" - }, - "homepage": "https://www.iofog.org", - "repository": { - "type": "git", - "url": "https://github.com/ioFog/Controller" - }, - "scripts": { - "start": "NODE_ENV=production node ./src/main.js start", - "start-dev": "NODE_ENV=development node ./src/main.js start", - "build": "export NODE_ENV=production && cd src/sequelize && ../../node_modules/.bin/sequelize db:migrate && ../../node_modules/.bin/sequelize db:seed:all", - "preuninstall": "bash scripts/preuninstall.sh", - "postinstall": "bash scripts/postinstall.sh && NODE_ENV=production node ./src/main.js init", - "lint": "./node_modules/.bin/eslint \"**/*.js\"", - "automatic-release": "automatic-release", - "test": "" - }, - "preferGlobal": true, - "bin": { - "iofog-controller": "src/main.js" - }, - "dependencies": { - "body-parser": "^1.18.3", - "command-line-args": "^5.0.2", - "command-line-usage": "^5.0.5", - "continuation-local-storage": "^3.2.1", - "cookie-parser": "^1.4.3", - "daemonize2": "^0.4.2", - "ejs": "^2.6.1", - "express": "^4.16.3", - "formidable": "^1.2.1", - "fs": "^0.0.1-security", - "ftp": "^0.3.10", - "helmet": "^3.13.0", - "jsonschema": "^1.2.4", - "morgan": "^1.9.1", - "nconf": "^0.10.0", - "nodemailer": "^4.6.8", - "nodemailer-smtp-transport": "^2.7.4", - "path": "^0.12.7", - "portscanner": "^2.2.0", - "retry-as-promised": "^3.1.0", - "sequelize": "^4.39.0", - "sequelize-cli": "^4.1.1", - "sqlite3": "^4.0.2", - "string-format": "^2.0.0", - "umzug": "^2.1.0", - 
"underscore": "^1.9.1", - "winston": "^3.1.0", - "xss-clean": "^0.1.1", - "qs": "^6.5.2" - }, - "devDependencies": { - "automatic-release": "^1.1.1", - "eslint": "^5.6.1" - } + "name": "iofogcontroller", + "version": "1.0.24", + "description": "ioFog Controller project for Eclipse IoFog @ iofog.org \\nCopyright (c) 2018 Edgeworx, Inc.", + "main": "./src/main.js", + "author": "Saeid Baghbidi", + "contributors": [ + "Kilton Hopkins ", + "Saeid Baghbidi", + "Pavel Kazlou", + "Egor Krylovich", + "Iryna Laryionava", + "Maryna Lipnitskaya", + "Dmitriy Kudasov", + "Dmitry Stolbunov", + "Darya Busel", + "Alexander Shpak", + "Kate Lukashick", + "Eugene Pankov", + "Maksim Chepelev", + "Tetiana Yatsiuk", + "Sergey Valevich" + ], + "license": { + "type": "EPL-2.0", + "url": "https://www.eclipse.org/legal/epl-v20.html" + }, + "bugs": { + "email": "edgemaster@iofog.org" + }, + "homepage": "https://www.iofog.org", + "repository": { + "type": "git", + "url": "https://github.com/ioFog/Controller" + }, + "scripts": { + "start": "node scripts/start.js", + "start-dev": "node scripts/start-dev.js", + "build": "node scripts/init.js", + "preuninstall": "node scripts/preuninstall.js", + "postinstall": "node scripts/postinstall.js", + "lint": "./node_modules/.bin/eslint \"**/*.js\"", + "automatic-release": "automatic-release", + "test": "node scripts/test.js", + "coverage": "node scripts/coverage.js" + }, + "preferGlobal": true, + "bin": { + "iofog-controller": "src/main.js" + }, + "dependencies": { + "body-parser": "^1.18.3", + "command-line-args": "^5.0.2", + "command-line-usage": "^5.0.5", + "continuation-local-storage": "^3.2.1", + "cookie-parser": "^1.4.3", + "daemonize2": "^0.4.2", + "ejs": "^2.6.1", + "express": "^4.16.3", + "formidable": "^1.2.1", + "fs": "^0.0.1-security", + "ftp": "^0.3.10", + "helmet": "^3.13.0", + "jsonschema": "^1.2.4", + "morgan": "^1.9.1", + "nconf": "^0.10.0", + "nodemailer": "^4.6.8", + "nodemailer-smtp-transport": "^2.7.4", + "path": "^0.12.7", + "portscanner": "^2.2.0", + "retry-as-promised": "^3.1.0", + "sequelize": "^4.39.0", + "sequelize-cli": "^4.1.1", + "sqlite3": "^4.0.2", + "string-format": "^2.0.0", + "umzug": "^2.1.0", + "underscore": "^1.9.1", + "winston": "^3.1.0", + "xss-clean": "^0.1.1", + "qs": "^6.5.2", + "child_process": "^1.0.2", + "os": "^0.1.1" + }, + "devDependencies": { + "automatic-release": "^1.1.1", + "bdd-lazy-var": "^2.5.0", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "chai-http": "^4.2.0", + "eslint": "^5.6.1", + "mocha": "^5.2.0", + "nyc": "^13.1.0", + "sinon": "^7.1.1", + "sinon-chai": "^3.2.0" + } } diff --git a/scripts/coverage.js b/scripts/coverage.js new file mode 100644 index 000000000..fc23b0ecd --- /dev/null +++ b/scripts/coverage.js @@ -0,0 +1,24 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 
2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const execSync = require('child_process').execSync; + +const options = { + env: { + 'NODE_ENV': 'test', + "PATH": process.env.PATH + }, + stdio: [process.stdin, process.stdout, process.stderr] +}; + +execSync('nyc mocha', options); \ No newline at end of file diff --git a/scripts/init.js b/scripts/init.js new file mode 100644 index 000000000..2c1ee958a --- /dev/null +++ b/scripts/init.js @@ -0,0 +1,24 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const execSync = require('child_process').execSync; + +const options = { + env: { + 'NODE_ENV': 'production', + "PATH": process.env.PATH + }, + stdio: [process.stdin, process.stdout, process.stderr] +}; + +execSync('node ./src/main.js init', options); \ No newline at end of file diff --git a/scripts/postinstall.js b/scripts/postinstall.js new file mode 100644 index 000000000..58d051a50 --- /dev/null +++ b/scripts/postinstall.js @@ -0,0 +1,47 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 
2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + + +const os = require('os'); +const execSync = require('child_process').execSync; +const fs = require('fs'); + +const rootDir = `${__dirname}/../`; +let installation_variables_file_name = 'iofogcontroller_install_variables'; +let installation_variables_file; +let tempDir; + +if (os.type() === 'Linux') { + tempDir = '/tmp/'; +} else if (os.type() === 'Darwin') { + tempDir = '/tmp/'; +} else if (os.type() === 'Windows_NT') { + tempDir = `${process.env.APPDATA}/`; +} else { + throw new Error("Unsupported OS found: " + os.type()); +} + +installation_variables_file = tempDir + installation_variables_file_name; + + +const devDbBackup = `${tempDir}dev_database.sqlite`; +if (fs.existsSync(devDbBackup)) { + fs.renameSync(devDbBackup, `${rootDir}/src/sequelize/dev_database.sqlite`) +} + +const prodDbBackup = `${tempDir}prod_database.sqlite`; +if (fs.existsSync(prodDbBackup)) { + fs.renameSync(prodDbBackup, `${rootDir}/src/sequelize/prod_database.sqlite`) +} + +//TODO: add version migrations diff --git a/scripts/postinstall.sh b/scripts/postinstall.sh index 1b43ac973..c0d4e8555 100644 --- a/scripts/postinstall.sh +++ b/scripts/postinstall.sh @@ -1,5 +1,7 @@ #!/bin/bash +##TODO: remove after js scripts finished + vercomp () { if [[ $1 == $2 ]] then @@ -37,9 +39,8 @@ vercomp () { #START #restore db -IOFOG_CONTROLLER_BIN_DIR=$(whereis iofog-controller | awk -F " " '{print $2}') -IOFOG_CONTROLLER_BIN_DIR=${IOFOG_CONTROLLER_BIN_DIR%"iofog-controller"} -IOFOG_CONTROLLER_SEQUELIZE_DIR=$IOFOG_CONTROLLER_BIN_DIR'../lib/node_modules/iofogcontroller/src/sequelize' +IOFOG_CONTROLLER_NODE_MODULES=$(npm root -g iofog-controller) +IOFOG_CONTROLLER_SEQUELIZE_DIR=$IOFOG_CONTROLLER_NODE_MODULES'/iofogcontroller/src/sequelize' DEV_DB_FILE=$IOFOG_CONTROLLER_SEQUELIZE_DIR'/dev_database.sqlite' DEV_DB_FILE_BACKUP='/tmp/dev_database.sqlite' @@ -54,16 +55,18 @@ if [ -f $PROD_DB_FILE_BACKUP ]; then fi #prev versions migrations -PREV_IOFOG_CONTROLLER_VER=$(grep prev_ver /tmp/iofogcontroller_install_variables | awk '{print $2}') -echo "Prev ver: "${PREV_IOFOG_CONTROLLER_VER} +if [ -f /tmp/iofogcontroller_install_variables ]; then + PREV_IOFOG_CONTROLLER_VER=$(grep prev_ver /tmp/iofogcontroller_install_variables | awk '{print $2}') +fi if [[ -z "${PREV_IOFOG_CONTROLLER_VER// }" ]] then - echo "No prev ver" + echo "No previous version" else + echo "Previous version: "${PREV_IOFOG_CONTROLLER_VER} if [[ $(vercomp $PREV_IOFOG_CONTROLLER_VER 1.0.0) = '<' ]] || [[ $(vercomp $PREV_IOFOG_CONTROLLER_VER 1.0.0) = '=' ]] then - echo "Upgrading from ver 1.0.0" + echo "Upgrading from version 1.0.0" sqlite3 src/sequelize/prod_database.sqlite "insert into SequelizeMeta (name) values ('20180928110125-insert-registry.js');" sqlite3 src/sequelize/prod_database.sqlite "insert into SequelizeMeta (name) values ('20180928111532-insert-catalog-item.js');" sqlite3 src/sequelize/prod_database.sqlite "insert into SequelizeMeta (name) values ('20180928112152-insert-iofog-type.js');" diff --git a/scripts/preinstall.sh b/scripts/preinstall.sh deleted file mode 100644 index a08aff9c5..000000000 --- a/scripts/preinstall.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -#store prev ver number -export PREV_IOFOG_CONTROLLER_VER=$(npm list --depth=0 -g --silent | grep iofogcontroller | awk -F "@" '{print $2}') -printf 'prev_ver: '$PREV_IOFOG_CONTROLLER_VER 
> /tmp/iofogcontroller_install_variables - -#backup db -IOFOG_CONTROLLER_BIN_DIR=$(whereis iofog-controller | awk -F " " '{print $2}') -IOFOG_CONTROLLER_BIN_DIR=${IOFOG_CONTROLLER_BIN_DIR%"iofog-controller"} -IOFOG_CONTROLLER_SEQUELIZE_DIR=$IOFOG_CONTROLLER_BIN_DIR'../lib/node_modules/iofogcontroller/src/sequelize' - -DEV_DB_FILE=$IOFOG_CONTROLLER_SEQUELIZE_DIR'/dev_database.sqlite' -if [ -f $DEV_DB_FILE ]; then - mv $DEV_DB_FILE /tmp/ -fi -PROD_DB_FILE=$IOFOG_CONTROLLER_SEQUELIZE_DIR'/prod_database.sqlite' -if [ -f $PROD_DB_FILE ]; then - mv $PROD_DB_FILE /tmp/ -fi \ No newline at end of file diff --git a/scripts/preuninstall.js b/scripts/preuninstall.js new file mode 100644 index 000000000..0cec350a3 --- /dev/null +++ b/scripts/preuninstall.js @@ -0,0 +1,47 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const os = require('os'); +const execSync = require('child_process').execSync; +const fs = require('fs'); +const version = require('../package').version; + +const rootDir = `${__dirname}/../`; +let installation_variables_file_name = 'iofogcontroller_install_variables'; +let installation_variables_file; +let tempDir; + +if (os.type() === 'Linux') { + tempDir = '/tmp/'; +} else if (os.type() === 'Darwin') { + tempDir = '/tmp/'; +} else if (os.type() === 'Windows_NT') { + tempDir = `${process.env.APPDATA}/`; +} else { + throw new Error("Unsupported OS found: " + os.type()); +} + +installation_variables_file = tempDir + installation_variables_file_name; + +fs.writeFileSync(installation_variables_file, `prev_ver: ${version}`); + +const devDb = `${rootDir}/src/sequelize/dev_database.sqlite`; +if (fs.existsSync(devDb)) { + fs.renameSync(devDb, `${tempDir}dev_database.sqlite`) +} + +const prodDb = `${rootDir}/src/sequelize/prod_database.sqlite`; +if (fs.existsSync(prodDb)) { + fs.renameSync(prodDb, `${tempDir}prod_database.sqlite`) +} + diff --git a/scripts/preuninstall.sh b/scripts/preuninstall.sh deleted file mode 100644 index a08aff9c5..000000000 --- a/scripts/preuninstall.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -#store prev ver number -export PREV_IOFOG_CONTROLLER_VER=$(npm list --depth=0 -g --silent | grep iofogcontroller | awk -F "@" '{print $2}') -printf 'prev_ver: '$PREV_IOFOG_CONTROLLER_VER > /tmp/iofogcontroller_install_variables - -#backup db -IOFOG_CONTROLLER_BIN_DIR=$(whereis iofog-controller | awk -F " " '{print $2}') -IOFOG_CONTROLLER_BIN_DIR=${IOFOG_CONTROLLER_BIN_DIR%"iofog-controller"} -IOFOG_CONTROLLER_SEQUELIZE_DIR=$IOFOG_CONTROLLER_BIN_DIR'../lib/node_modules/iofogcontroller/src/sequelize' - -DEV_DB_FILE=$IOFOG_CONTROLLER_SEQUELIZE_DIR'/dev_database.sqlite' -if [ -f $DEV_DB_FILE ]; then - mv $DEV_DB_FILE /tmp/ -fi -PROD_DB_FILE=$IOFOG_CONTROLLER_SEQUELIZE_DIR'/prod_database.sqlite' -if [ -f $PROD_DB_FILE ]; then - mv $PROD_DB_FILE /tmp/ -fi \ No newline at end of file diff --git a/scripts/start-dev.js b/scripts/start-dev.js new file mode 100644 index 000000000..709f4d371 --- /dev/null +++ b/scripts/start-dev.js @@ -0,0 +1,24 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, 
Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const execSync = require('child_process').execSync; + +const options = { + env: { + 'NODE_ENV': 'development', + "PATH": process.env.PATH + }, + stdio: [process.stdin, process.stdout, process.stderr] +}; + +execSync('node ./src/main.js start', options); \ No newline at end of file diff --git a/scripts/start.js b/scripts/start.js new file mode 100644 index 000000000..f8f2f1f3f --- /dev/null +++ b/scripts/start.js @@ -0,0 +1,24 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const execSync = require('child_process').execSync; + +const options = { + env: { + 'NODE_ENV': 'production', + "PATH": process.env.PATH + }, + stdio: [process.stdin, process.stdout, process.stderr] +}; + +execSync('node ./src/main.js start', options); \ No newline at end of file diff --git a/scripts/test.js b/scripts/test.js new file mode 100644 index 000000000..fc27d1aac --- /dev/null +++ b/scripts/test.js @@ -0,0 +1,24 @@ +/* + * ******************************************************************************* + * * Copyright (c) 2018 Edgeworx, Inc. + * * + * * This program and the accompanying materials are made available under the + * * terms of the Eclipse Public License v. 
2.0 which is available at + * * http://www.eclipse.org/legal/epl-2.0 + * * + * * SPDX-License-Identifier: EPL-2.0 + * ******************************************************************************* + * + */ + +const execSync = require('child_process').execSync; + +const options = { + env: { + 'NODE_ENV': 'test', + "PATH": process.env.PATH + }, + stdio: [process.stdin, process.stdout, process.stderr] +}; + +execSync('mocha', options); \ No newline at end of file diff --git a/src/cli/config.js b/src/cli/config.js index 53bb561df..ce42ea944 100644 --- a/src/cli/config.js +++ b/src/cli/config.js @@ -156,15 +156,15 @@ const _addConfigOption = async function (options) { onSuccess(); }); - if (options.sslKey) { + await updateConfig(options.sslKey, 'ssl-key', 'Server:SslKey', (onSuccess) => { const sslKey = options.sslKey; if (!AppHelper.isFileExists(sslKey)) { logger.error(ErrorMessages.INVALID_FILE_PATH); return; } config.set('Server:SslKey', sslKey); - logger.info('Config option ssl-key has been updated.'); - } + onSuccess(); + }); await updateConfig(options.intermediateCert, 'intermediate-cert', 'Server:IntermediateCert', (onSuccess) => { const intermediateCert = options.intermediateCert; @@ -182,10 +182,6 @@ const _addConfigOption = async function (options) { }); await updateConfig(options.emailAddress, 'email-address', 'Email:Address', (onSuccess) => { - if (options.emailPassword) { - logger.info('When changing email address, password must be provided.'); - return; - } config.set('Email:Address', options.emailAddress); onSuccess(); }); diff --git a/src/cli/microservice.js b/src/cli/microservice.js index 33b982fab..ea3550546 100644 --- a/src/cli/microservice.js +++ b/src/cli/microservice.js @@ -376,7 +376,7 @@ const _removeVolumeMapping = async function (obj, user) { await MicroserviceService.deleteVolumeMapping(obj.microserviceId, obj.mappingId, user, true); logger.info('Volume mapping has been deleted successfully.'); } catch (e) { - logger.error(ErrorMessages.CLI.INVALID_VOLUME_MAPPING); + logger.error(e.message); } }; diff --git a/src/controllers/tunnel-controller.js b/src/controllers/tunnel-controller.js index 1d57dacfb..defb6efa4 100644 --- a/src/controllers/tunnel-controller.js +++ b/src/controllers/tunnel-controller.js @@ -16,12 +16,13 @@ const AuthDecorator = require('../decorators/authorization-decorator'); const TunnelService = require('../services/tunnel-service'); const Errors = require('../helpers/errors'); const ErrorMessages = require('../helpers/error-messages'); +const { isTest } = require('../helpers/app-helper'); const manageTunnelEndPoint = async function (req, user) { logger.info("Parameters:" + JSON.stringify(req.body)); const action = req.body.action; const tunnelData = { - iofogUuid: req.params.id + iofogUuid: req.params.id } switch (action) { case 'open': @@ -44,6 +45,6 @@ const getTunnelEndPoint = async function (req, user) { }; module.exports = { - manageTunnelEndPoint: AuthDecorator.checkAuthToken(manageTunnelEndPoint), - getTunnelEndPoint: AuthDecorator.checkAuthToken(getTunnelEndPoint) + manageTunnelEndPoint: AuthDecorator.checkAuthToken(manageTunnelEndPoint), + getTunnelEndPoint: AuthDecorator.checkAuthToken(getTunnelEndPoint), }; \ No newline at end of file diff --git a/src/decorators/authorization-decorator.js b/src/decorators/authorization-decorator.js index b4a36ba75..a3f78aee4 100644 --- a/src/decorators/authorization-decorator.js +++ b/src/decorators/authorization-decorator.js @@ -17,9 +17,13 @@ const AccessTokenManager = 
require('../sequelize/managers/access-token-manager') const FogManager = require('../sequelize/managers/iofog-manager') const FogAccessTokenManager = require('../sequelize/managers/iofog-access-token-manager') const Errors = require('../helpers/errors') +const { isTest } = require('../helpers/app-helper'); function checkAuthToken(f) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } const fArgs = Array.prototype.slice.call(arguments); const req = fArgs[0]; @@ -46,6 +50,9 @@ function checkAuthToken(f) { function checkFogToken(f) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } const fArgs = Array.prototype.slice.call(arguments); const req = fArgs[0]; diff --git a/src/decorators/cli-decorator.js b/src/decorators/cli-decorator.js index b0a84c9e0..0829bfaf7 100644 --- a/src/decorators/cli-decorator.js +++ b/src/decorators/cli-decorator.js @@ -16,9 +16,13 @@ const config = require('../config'); const UserManager = require('../sequelize/managers/user-manager'); const AccessTokenManager = require('../sequelize/managers/access-token-manager'); const Errors = require('../helpers/errors'); +const { isTest } = require('../helpers/app-helper'); function prepareUserById(f) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } const fArgs = Array.prototype.slice.call(arguments) const obj = fArgs[0] @@ -42,6 +46,9 @@ function prepareUserById(f) { function prepareUserByEmail(f) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } const fArgs = Array.prototype.slice.call(arguments) const obj = fArgs[0] diff --git a/src/decorators/response-decorator.js b/src/decorators/response-decorator.js index 1088391ff..2b2a8b448 100644 --- a/src/decorators/response-decorator.js +++ b/src/decorators/response-decorator.js @@ -11,9 +11,13 @@ * */ const logger = require('../logger'); +const { isTest } = require('../helpers/app-helper'); function handleErrors(f, successCode, errorsCodes) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } let responseObject = {}; try { diff --git a/src/decorators/transaction-decorator.js b/src/decorators/transaction-decorator.js index 580df4d04..c35c6cd1e 100644 --- a/src/decorators/transaction-decorator.js +++ b/src/decorators/transaction-decorator.js @@ -15,9 +15,14 @@ const db = require('./../sequelize/models'); const retry = require('retry-as-promised'); const sequelize = db.sequelize; const Transaction = require('sequelize/lib/transaction'); +const { isTest } = require('../helpers/app-helper'); function transaction(f) { return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } + const fArgs = Array.prototype.slice.call(arguments); //TODO [when transactions concurrency issue fixed]: Remove 'fArgs[fArgs.length - 1].fakeTransaction' if (fArgs.length > 0 && (fArgs[fArgs.length - 1] instanceof Transaction || fArgs[fArgs.length - 1].fakeTransaction)) { @@ -51,6 +56,10 @@ function generateTransaction(f) { function fakeTransaction(f) { const fakeTransactionObject = {fakeTransaction: true} return async function() { + if (isTest()) { + return await f.apply(this, arguments); + } + const fArgs = Array.prototype.slice.call(arguments); if (fArgs.length > 0 && fArgs[fArgs.length - 1] instanceof Transaction) { fArgs[fArgs.length - 1] = fakeTransactionObject; diff --git a/src/helpers/app-helper.js b/src/helpers/app-helper.js index cb7089b90..b0f7d3ad1 100644 --- 
a/src/helpers/app-helper.js +++ b/src/helpers/app-helper.js @@ -105,6 +105,9 @@ function generateAccessToken() { } function checkTransaction(transaction) { + if (isTest()) { + return + } //TODO [when transactions concurrency issue fixed]: Remove '!transaction.fakeTransaction' if (!transaction || (!(transaction instanceof Transaction) && !transaction.fakeTransaction)) { throw new Errors.TransactionError() @@ -247,6 +250,10 @@ function _getPossibleArgsList(command, commandDefinitions) { return possibleArgsList; } +function isTest() { + return process.env.NODE_ENV === 'test' +} + module.exports = { encryptText, @@ -266,5 +273,6 @@ module.exports = { isValidPublicIP, handleCLIError, trimCertificate, - validateParameters + validateParameters, + isTest, }; diff --git a/src/schemas/config.js b/src/schemas/config.js index 17d5f9018..4814b3b5f 100644 --- a/src/schemas/config.js +++ b/src/schemas/config.js @@ -15,25 +15,22 @@ const configUpdate = { "id": "/configUpdate", "type": "object", "properties": { - "port": {"type": "integer", "minimum" : 0, "maximum" : 65535}, + "port": {"type": "integer", "minimum": 0, "maximum": 65535}, "sslCert": {"type": "string"}, "sslKey": {"type": "string"}, "intermediateCert": {"type": "string"}, "emailActivationOn": {"type": "boolean"}, "emailActivationOff": {"type": "boolean"}, "homeUrl": {"type": "string"}, - "emailAddress": { - "type": "string", - "pattern": "^(([^<>()\\[\\]\\\\.,;:\\s@\"]+(\\.[^<>()\\[\\]\\\\.,;:\\s@\"]+)*)|(\".+\"))@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))$" - }, + "emailAddress": {"type": "string"}, "emailPassword": {"type": "string", "minLength": 1}, "emailService": {"type": "string"}, "logDir": {"type": "string"}, "logSize": {"type": "integer"} } -} +}; - module.exports = { +module.exports = { mainSchemas: [configUpdate], innerSchemas: [] }; diff --git a/src/schemas/connector.js b/src/schemas/connector.js index d23b105bb..f62f7bc09 100644 --- a/src/schemas/connector.js +++ b/src/schemas/connector.js @@ -22,7 +22,7 @@ const connectorCreate = { "isSelfSignedCert": {"type": "boolean"}, "devMode": {"type": "boolean"} }, - "required": ["publicIp", "name"], + "required": ["publicIp", "name", "devMode"], "additionalProperties": false }; diff --git a/src/sequelize/config/config.json b/src/sequelize/config/config.json index 52cb1bc9d..ae1a9f086 100644 --- a/src/sequelize/config/config.json +++ b/src/sequelize/config/config.json @@ -2,6 +2,7 @@ "development": { "dialect": "sqlite", "storage": "dev_database.sqlite", + "logging": false, "operatorsAliases": false }, "test": { diff --git a/src/services/connector-service.js b/src/services/connector-service.js index b0e19853b..bd522e6ac 100644 --- a/src/services/connector-service.js +++ b/src/services/connector-service.js @@ -160,7 +160,7 @@ async function openPortsOnConnector(connector, isPublicAccess, transaction) { 'Content-Length': Buffer.byteLength(data) } }; - if (connector.cert && connector.isSelfSignedCert === true) { + if (!connector.devMode && connector.cert && connector.isSelfSignedCert === true) { const ca = fs.readFileSync(connector.cert); options.ca = new Buffer(ca); } @@ -180,8 +180,6 @@ async function _getRandomConnector(transaction) { } async function closePortOnConnector(connector, ports, transaction) { - console.log(ports); - let data = qs.stringify({ mappingid: ports.mappingId }); diff --git a/src/services/controller-service.js b/src/services/controller-service.js index a16cabe7b..906c1cc35 100644 --- 
a/src/services/controller-service.js +++ b/src/services/controller-service.js @@ -14,6 +14,7 @@ const FogTypesManager = require('../sequelize/managers/iofog-type-manager'); const Config = require('../config'); const TransactionDecorator = require('../decorators/transaction-decorator'); +const packageJson = require('../../package'); const getFogTypes = async function (isCLI, transaction) { const fogTypes = await FogTypesManager.findAll({}, transaction); @@ -58,7 +59,7 @@ const statusController = async function (isCLI) { }; const getVersion = async function (isCLI) { - return "Iofog-Controller version: 1.0.0"; + return `Iofog-Controller version: ${packageJson.version}`; }; module.exports = { diff --git a/src/services/microservices-service.js b/src/services/microservices-service.js index 86698f7a2..7b4707c5e 100644 --- a/src/services/microservices-service.js +++ b/src/services/microservices-service.js @@ -11,6 +11,7 @@ * */ +const logger = require('../logger') const TransactionDecorator = require('../decorators/transaction-decorator'); const MicroserviceManager = require('../sequelize/managers/microservice-manager'); const MicroserviceStatusManager = require('../sequelize/managers/microservice-status-manager'); @@ -33,7 +34,7 @@ const RoutingManager = require('../sequelize/managers/routing-manager'); const Op = require('sequelize').Op; const fs = require('fs'); -const _listMicroservices = async function (flowId, user, isCLI, transaction) { +async function _listMicroservices(flowId, user, isCLI, transaction) { if (!isCLI) { await FlowService.getFlow(flowId, user, isCLI, transaction); } @@ -43,9 +44,9 @@ const _listMicroservices = async function (flowId, user, isCLI, transaction) { return { microservices: microservices } -}; +} -const _getMicroservice = async function (microserviceUuid, user, isCLI, transaction) { +async function _getMicroservice(microserviceUuid, user, isCLI, transaction) { if (!isCLI) { await _validateMicroserviceOnGet(user.id, microserviceUuid, transaction); } @@ -58,9 +59,9 @@ const _getMicroservice = async function (microserviceUuid, user, isCLI, transact throw new Errors.NotFoundError(AppHelper.formatMessage(ErrorMessages.INVALID_MICROSERVICE_UUID, microserviceUuid)); } return microservice; -}; +} -const _createMicroserviceOnFog = async function (microserviceData, user, isCLI, transaction) { +async function _createMicroserviceOnFog(microserviceData, user, isCLI, transaction) { await Validation.validate(microserviceData, Validation.schemas.microserviceCreate); const microservice = await _createMicroservice(microserviceData, user, isCLI, transaction); @@ -87,9 +88,9 @@ const _createMicroserviceOnFog = async function (microserviceData, user, isCLI, return { uuid: microservice.uuid } -}; +} -const _createMicroservice = async function (microserviceData, user, isCLI, transaction) { +async function _createMicroservice(microserviceData, user, isCLI, transaction) { let newMicroservice = { uuid: AppHelper.generateRandomString(32), @@ -117,30 +118,30 @@ const _createMicroservice = async function (microserviceData, user, isCLI, trans } return await MicroserviceManager.create(newMicroservice, transaction); -}; +} -const _createMicroserviceStatus = function (uuid, transaction) { - return MicroserviceStatusManager.create({ +async function _createMicroserviceStatus(uuid, transaction) { + return await MicroserviceStatusManager.create({ microserviceUuid: uuid }, transaction); -}; +} -const _createVolumeMappings = async function (volumeMappings, microserviceUuid, transaction) { +async 
function _createVolumeMappings(volumeMappings, microserviceUuid, transaction) { for (let volumeMapping of volumeMappings) { volumeMapping.microserviceUuid = microserviceUuid } await VolumeMappingManager.bulkCreate(volumeMappings, transaction) -}; +} -const _createRoutes = async function (routes, microserviceUuid, user, transaction) { +async function _createRoutes(routes, microserviceUuid, user, transaction) { for (let route of routes) { await _createRoute(microserviceUuid, route, user, false, transaction) } -}; +} -const _updateMicroservice = async function (microserviceUuid, microserviceData, user, isCLI, transaction) { +async function _updateMicroservice(microserviceUuid, microserviceData, user, isCLI, transaction) { await Validation.validate(microserviceData, Validation.schemas.microserviceUpdate); const query = isCLI @@ -167,6 +168,9 @@ const _updateMicroservice = async function (microserviceUuid, microserviceData, const microserviceDataUpdate = AppHelper.deleteUndefinedFields(microserviceToUpdate); const microservice = await MicroserviceManager.findOne(query, transaction); + if (!microservice) { + throw new Errors.NotFoundError(AppHelper.formatMessage(ErrorMessages.INVALID_MICROSERVICE_UUID, microserviceUuid)) + } if (microserviceDataUpdate.name) { const userId = isCLI ? microservice.userId : user.id; @@ -184,7 +188,7 @@ const _updateMicroservice = async function (microserviceUuid, microserviceData, await _updateVolumeMappings(microserviceDataUpdate.volumeMappings, microserviceUuid, transaction); } - if (microserviceDataUpdate.iofogUuid !== microservice.iofogUuid) { + if (microserviceDataUpdate.iofogUuid && microserviceDataUpdate.iofogUuid !== microservice.iofogUuid) { const routes = await _getLogicalNetworkRoutesByFog(microservice.iofogUuid, transaction); for (let route of routes) { await _deleteRoute(route.sourceMicroserviceUuid, route.destMicroserviceUuid, user, isCLI, transaction); @@ -202,9 +206,9 @@ const _updateMicroservice = async function (microserviceUuid, microserviceData, //update change tracking for new fog await _updateChangeTracking(microserviceData.config ? true : false, microserviceDataUpdate.iofogUuid, transaction); -}; +} -const _updateVolumeMappings = async function (volumeMappings, microserviceUuid, transaction) { +async function _updateVolumeMappings(volumeMappings, microserviceUuid, transaction) { for (let volumeMapping of volumeMappings) { await VolumeMappingManager.update({ microserviceUuid: microserviceUuid @@ -212,15 +216,15 @@ const _updateVolumeMappings = async function (volumeMappings, microserviceUuid, } }; -const _updateChangeTracking = async function (configUpdated, fogNodeUuid, transaction) { +async function _updateChangeTracking(configUpdated, fogNodeUuid, transaction) { if (configUpdated) { await ChangeTrackingService.update(fogNodeUuid, ChangeTrackingService.events.microserviceCommon, transaction); } else { await ChangeTrackingService.update(fogNodeUuid, ChangeTrackingService.events.microserviceList, transaction); } -}; +} -const _deleteMicroservice = async function (microserviceUuid, microserviceData, user, isCLI, transaction) { +async function _deleteMicroservice(microserviceUuid, microserviceData, user, isCLI, transaction) { const where = isCLI ? 
@@ -240,9 +244,7 @@ const _deleteMicroservice = async function (microserviceUuid, microserviceData, } if (microservice.microserviceStatus.status === MicroserviceStates.NOT_RUNNING) { - await MicroserviceManager.delete({ - uuid: microserviceUuid - }, transaction); + await _deleteMicroserviceWithRoutes(microserviceUuid, transaction); } else { await MicroserviceManager.update({ uuid: microserviceUuid @@ -254,21 +256,17 @@ const _deleteMicroservice = async function (microserviceUuid, microserviceData, } await _updateChangeTracking(false, microservice.iofogUuid, transaction) -}; +} -const _deleteNotRunningMicroservices = async function (transaction) { +async function _deleteNotRunningMicroservices(transaction) { const microservices = await MicroserviceManager.findAllWithStatuses(transaction); microservices .filter(microservice => microservice.delete) .filter(microservice => microservice.microserviceStatus.status === MicroserviceStates.NOT_RUNNING) - .forEach(microservice => { - MicroserviceManager.delete({ - uuid: microservice.uuid - }, transaction); - }); -}; + .forEach(microservice => _deleteMicroserviceWithRoutes(microservice.uuid, transaction)); +} -const _checkForDuplicateName = async function (name, item, userId, transaction) { +async function _checkForDuplicateName(name, item, userId, transaction) { if (name) { const where = item.id ? @@ -288,15 +286,9 @@ const _checkForDuplicateName = async function (name, item, userId, transaction) throw new Errors.DuplicatePropertyError(AppHelper.formatMessage(ErrorMessages.DUPLICATE_NAME, name)); } } -}; - -const _deleteRoutes = async function (routes, microserviceUuid, user, transaction) { - for (let route of routes) { - await _deleteRoute(microserviceUuid, route, user, transaction) - } -}; +} -const _validateMicroserviceOnGet = async function (userId, microserviceUuid, transaction) { +async function _validateMicroserviceOnGet(userId, microserviceUuid, transaction) { const where = { '$flow.user.id$': userId, uuid: microserviceUuid @@ -305,7 +297,7 @@ const _validateMicroserviceOnGet = async function (userId, microserviceUuid, tra if (!microservice) { throw new Errors.NotFoundError(ErrorMessages.INVALID_MICROSERVICE_USER); } -}; +} async function _createRoute(sourceMicroserviceUuid, destMicroserviceUuid, user, isCLI, transaction) { const sourceWhere = isCLI @@ -395,7 +387,7 @@ async function _createRouteOverConnector(sourceMicroservice, destMicroservice, u const networkCatalogItem = await CatalogService.getNetworkCatalogItem(transaction) let cert; - if (connector.cert) { + if (!connector.devMode && connector.cert) { cert = AppHelper.trimCertificate(fs.readFileSync(connector.cert, "utf-8")) } @@ -533,7 +525,11 @@ async function _deleteRouteOverConnector(route, transaction) { const ports = await ConnectorPortManager.findOne({id: route.connectorPortId}, transaction) const connector = await ConnectorManager.findOne({id: ports.connectorId}, transaction) - await ConnectorService.closePortOnConnector(connector, ports, transaction) + try { + await ConnectorService.closePortOnConnector(connector, ports, transaction); + } catch (e) { + logger.warn(`Can't close ports pair ${ports.mappingId} on connector ${connector.publicIp}. 
Delete manually if necessary`); + } await RoutingManager.delete({id: route.id}, transaction) await ConnectorPortManager.delete({id: ports.id}, transaction) @@ -590,7 +586,7 @@ async function _createSimplePortMapping(microservice, portMappingData, user, tra isPublic: false, portInternal: portMappingData.internal, portExternal: portMappingData.external, - userId: user.id, + userId: microservice.userId, microserviceUuid: microservice.uuid } @@ -622,7 +618,7 @@ async function _createPortMappingOverConnector(microservice, portMappingData, us const networkCatalogItem = await CatalogService.getNetworkCatalogItem(transaction) let cert; - if (connector.cert) { + if (!connector.devMode && connector.cert) { cert = AppHelper.trimCertificate(fs.readFileSync(connector.cert, "utf-8")); } //create netw ms1 @@ -652,7 +648,7 @@ async function _createPortMappingOverConnector(microservice, portMappingData, us isPublic: true, portInternal: portMappingData.internal, portExternal: portMappingData.external, - userId: user.id, + userId: microservice.userId, microserviceUuid: microservice.uuid } @@ -833,6 +829,42 @@ async function _getLogicalNetworkRoutesByFog(iofogUuid, transaction) { return res; } +async function _getLogicalRoutesByMicroservice(microserviceUuid, transaction) { + let res = []; + const query = { + [Op.or]: + [ + { + sourceMicroserviceUuid: microserviceUuid + }, + { + destMicroserviceUuid: microserviceUuid + } + ] + }; + const routes = await RoutingManager.findAll(query, transaction) + for (let route of routes) { + if (route.sourceMicroserviceUuid && route.destMicroserviceUuid) { + res.push(route); + } + } + return res; +} + +async function _deleteMicroserviceWithRoutes(microserviceUuid, transaction) { + const routes = await _getLogicalRoutesByMicroservice(microserviceUuid, transaction); + for (let route of routes) { + //TODO: simplify after splitting all endpoints service functions to validation and request processing part + const user = { + id: route.sourceMicroserviceUuid.userId + }; + await _deleteRoute(route.sourceMicroserviceUuid, route.destMicroserviceUuid, user, false, transaction); + } + await MicroserviceManager.delete({ + uuid: microserviceUuid + }, transaction); +} + async function _buildLink(protocol, ip, port) { return `${protocol}://${ip}:${port}` } diff --git a/src/services/tunnel-service.js b/src/services/tunnel-service.js index 5b2211fc9..ae868228e 100644 --- a/src/services/tunnel-service.js +++ b/src/services/tunnel-service.js @@ -21,29 +21,29 @@ const TransactionDecorator = require('../decorators/transaction-decorator'); const ChangeTrackingService = require('./change-tracking-service'); const openTunnel = async function (tunnelData, user, isCli, transaction) { - const iofog = await FogManager.findOne({uuid : tunnelData.iofogUuid}, transaction); - if (!iofog) { - throw new Errors.NotFoundError('Invalid Fog Id'); - } - let tunnel = tunnelData; - if (isCli){ - tunnel.rport = await AppHelper.findAvailablePort(tunnelData.host); - } else { - const host = Config.get("Tunnel:Host"); - tunnel = { - username: Config.get("Tunnel:Username"), - password: Config.get("Tunnel:Password"), - host: host, - rsakey: Config.get("Tunnel:RsaKey"), - lport: Config.get("Tunnel:Lport"), - iofogUuid: iofog.uuid, - closed: false, - rport: await AppHelper.findAvailablePort(host) - }; - } - await Validator.validate(tunnel, Validator.schemas.tunnelCreate); - await TunnelManager.updateOrCreate(tunnelData, tunnel, transaction); - await ChangeTrackingService.update(tunnelData.iofogUuid, 
ChangeTrackingService.events.tunnel, transaction); + const iofog = await FogManager.findOne({ uuid: tunnelData.iofogUuid }, transaction); + if (!iofog) { + throw new Errors.NotFoundError('Invalid Fog Id'); + } + let tunnel = tunnelData; + if (isCli) { + tunnel.rport = await AppHelper.findAvailablePort(tunnelData.host); + } else { + const host = Config.get("Tunnel:Host"); + tunnel = { + username: Config.get("Tunnel:Username"), + password: Config.get("Tunnel:Password"), + host: host, + rsakey: Config.get("Tunnel:RsaKey"), + lport: Config.get("Tunnel:Lport"), + iofogUuid: iofog.uuid, + closed: false, + rport: await AppHelper.findAvailablePort(host) + }; + } + await Validator.validate(tunnel, Validator.schemas.tunnelCreate); + await TunnelManager.updateOrCreate(tunnelData, tunnel, transaction); + await ChangeTrackingService.update(tunnelData.iofogUuid, ChangeTrackingService.events.tunnel, transaction); }; const findTunnel = async function (tunnelData, user, transaction) { @@ -52,24 +52,24 @@ const findTunnel = async function (tunnelData, user, transaction) { throw new Errors.NotFoundError('Invalid Tunnel Id'); } return { - username: tunnel.username, - host: tunnel.host, - remotePort: tunnel.rport, - localPort: tunnel.lport, - status: tunnel.closed ? "closed" : "open" - }; + username: tunnel.username, + host: tunnel.host, + remotePort: tunnel.rport, + localPort: tunnel.lport, + status: tunnel.closed ? "closed" : "open" + }; }; const findAll = async function (transaction) { - const tunnels = await TunnelManager.findAll({}, transaction); - return { - tunnels : tunnels - }; + const tunnels = await TunnelManager.findAll({}, transaction); + return { + tunnels: tunnels + }; }; const closeTunnel = async function (tunnelData, user, transaction) { - await findTunnel(tunnelData, user, transaction); - await TunnelManager.update(tunnelData, {closed : true}, transaction); + await module.exports.findTunnel(tunnelData, user, transaction); + await TunnelManager.update(tunnelData, { closed: true }, transaction); await ChangeTrackingService.update(tunnelData.iofogUuid, ChangeTrackingService.events.tunnel, transaction); }; diff --git a/tests/Controller Testing.postman_collection.json b/test/Controller Testing.postman_collection.json similarity index 100% rename from tests/Controller Testing.postman_collection.json rename to test/Controller Testing.postman_collection.json diff --git a/test/mocha.opts b/test/mocha.opts new file mode 100644 index 000000000..d64877b5d --- /dev/null +++ b/test/mocha.opts @@ -0,0 +1,3 @@ +--require test/support/setup.js +--ui bdd-lazy-var/global +--recursive \ No newline at end of file diff --git a/test/src/controllers/tunnel-controller.test.js b/test/src/controllers/tunnel-controller.test.js new file mode 100644 index 000000000..ab0cb2dd4 --- /dev/null +++ b/test/src/controllers/tunnel-controller.test.js @@ -0,0 +1,129 @@ +const { expect } = require('chai') +const sinon = require('sinon') + +const TunnelController = require('../../../src/controllers/tunnel-controller') +const TunnelService = require('../../../src/services/tunnel-service'); + +describe('Tunnel Controller', () => { + def('subject', () => TunnelController) + def('sandbox', () => sinon.createSandbox()) + + afterEach(() => $sandbox.restore()) + + describe('.manageTunnelEndPoint()', () => { + def('action', () => 'open') + def('id', () => 1) + def('req', () => ({ + body: { + action: $action, + }, + params: { + id: $id, + }, + })) + def('user', () => 'user!') + def('response', () => Promise.resolve()) + def('subject', () => 
$subject.manageTunnelEndPoint($req, $user)) + + beforeEach(() => { + $sandbox.stub(TunnelService, 'openTunnel').returns($response) + $sandbox.stub(TunnelService, 'closeTunnel').returns($response) + }) + + context('when action is "open"', async () => { + it('calls TunnelService#openTunnel with correct args', async () => { + await $subject + expect(TunnelService.openTunnel).to.have.been.calledWith({ iofogUuid: $id }, $user, false) + }) + + context('when TunnelService#openTunnel fails', () => { + const error = 'Error!' + + def('response', () => Promise.reject(error)) + + it(`fails with "${error}"`, () => { + return expect($subject).to.be.rejectedWith(error) + }) + }) + + context('when TunnelService#openTunnel succeeds', () => { + it(`succeeds`, () => { + return expect($subject).to.eventually.equal(undefined) + }) + }) + }) + + context('when action is "close"', async () => { + def('action', () => 'close') + + it('calls TunnelService#closeTunnel with correct args', async () => { + await $subject + expect(TunnelService.closeTunnel).to.have.been.calledWith({ iofogUuid: $id }, $user) + }) + + context('when TunnelService#closeTunnel fails', () => { + const error = 'Error!' + + def('response', () => Promise.reject(error)) + + it(`fails with "${error}"`, () => { + return expect($subject).to.be.rejectedWith(error) + }) + }) + + context('when TunnelService#closeTunnel succeeds', () => { + it(`succeeds`, () => { + return expect($subject).to.eventually.equal(undefined) + }) + }) + }) + + context('when action is neither "open" nor "close"', () => { + def('action', () => 'invalid-action') + + it('throws an error', () => { + return expect($subject).to.be.rejectedWith('Unknown action property. Action can be "open" or "close"') + }) + }) + }) + + describe('.getTunnelEndPoint()', () => { + def('id', () => 1) + def('req', () => ({ + body: { + action: $action, + }, + params: { + id: $id, + }, + })) + def('user', () => 'user!') + def('response', () => Promise.resolve()) + def('subject', () => $subject.getTunnelEndPoint($req, $user)) + + beforeEach(() => { + $sandbox.stub(TunnelService, 'findTunnel').returns($response) + }) + + it('calls TunnelService#findTunnel with correct args', async () => { + await $subject + expect(TunnelService.findTunnel).to.have.been.calledWith({ iofogUuid: $id }, $user) + }) + + context('when TunnelService#findTunnel fails', () => { + const error = 'Error!' 
+ + def('response', () => Promise.reject(error)) + + it(`fails with "${error}"`, () => { + return expect($subject).to.be.rejectedWith(error) + }) + }) + + context('when TunnelService#findTunnel succeeds', () => { + it(`succeeds`, () => { + return expect($subject).to.eventually.equal(undefined) + }) + }) + }) +}) diff --git a/test/src/helpers/app-helpers.test.js b/test/src/helpers/app-helpers.test.js new file mode 100644 index 000000000..69935d8d2 --- /dev/null +++ b/test/src/helpers/app-helpers.test.js @@ -0,0 +1,302 @@ +const crypto = require('crypto') +const { expect } = require('chai') +const fs = require('fs') +const path = require('path') +const portscanner = require('portscanner') +const sinon = require('sinon') + +const AppHelpers = require('../../../src/helpers/app-helper') +const Config = require('../../../src/config') + +describe('App Helpers', () => { + const text = 'some-text' + const salt = 'kosher-salt' + const encrypted = '17f4faa5c532708c8f' + + def('subject', () => AppHelpers) + def('sandbox', () => sinon.createSandbox()) + def('cipher', () => ({ + update: $sandbox.stub().returns(''), + final: $sandbox.stub().returns(encrypted) + })) + def('decipher', () => ({ + update: $sandbox.stub().returns(''), + final: $sandbox.stub().returns(text) + })) + + afterEach(() => $sandbox.restore()) + + describe('.encryptText()', () => { + def('subject', () => $subject.encryptText(text, salt)) + + beforeEach(() => { + $sandbox.stub(crypto, 'createCipher').returns($cipher) + }) + + it('calls crypto#createCipher() with correct args', () => { + $subject + expect(crypto.createCipher).to.have.been.calledWith('aes-256-ctr', salt) + }) + + it('calls crypto.cipher#update() with correct args', () => { + $subject + expect($cipher.update).to.have.been.calledWith(text, 'utf8', 'hex') + }) + + it('calls crypto.cipher#final() with correct args', () => { + $subject + expect($cipher.final).to.have.been.calledWith('hex') + }) + + it('returns the encrypted text', () => { + expect($subject).to.equal(encrypted) + }) + }) + + describe('.decryptText()', () => { + def('subject', () => $subject.decryptText(encrypted, salt)) + + beforeEach(() => { + $sandbox.stub(crypto, 'createDecipher').returns($decipher) + }) + + it('calls crypto#createDecipher() with correct args', () => { + $subject + expect(crypto.createDecipher).to.have.been.calledWith('aes-256-ctr', salt) + }) + + it('calls crypto.decipher#update() with correct args', () => { + $subject + expect($decipher.update).to.have.been.calledWith(encrypted, 'hex', 'utf8') + }) + + it('calls crypto.decipher#final() with correct args', () => { + $subject + expect($decipher.final).to.have.been.calledWith('utf8') + }) + + it('returns the decrypted text', () => { + expect($subject).to.equal(text) + }) + }) + + describe('.generateRandomString()', () => { + def('size', () => 12) + + context('when size is greater than zero', () => { + it('returns a random string with length of size', () => { + expect(AppHelpers.generateRandomString($size)).to.have.lengthOf($size) + }) + }) + + context('when size is zero', () => { + def('size', () => 0) + + it('returns an empty string', () => { + expect(AppHelpers.generateRandomString($size)).to.have.lengthOf(0) + }) + }) + + context('when size is less than zero', () => { + def('size', () => 0) + + it('returns an empty string', () => { + expect(AppHelpers.generateRandomString($size)).to.have.lengthOf(0) + }) + }) + }) + + describe('.checkPortAvailability()', () => { + const portNumber = 12345 + const portStatus = 'open' + + def('subject', () 
=> $subject.checkPortAvailability(portNumber)) + + beforeEach(() => { + $sandbox.stub(portscanner, 'checkPortStatus').returns(Promise.resolve(portStatus)) + }) + + it('calls portscanner#checkPortStatus() with correct args', async () => { + await $subject + expect(portscanner.checkPortStatus).to.have.been.calledWith(portNumber) + }) + + it('returns a promise', () => { + return expect($subject).to.be.an.instanceOf(Promise) + }) + + it('resolves to port status', () => { + return expect($subject).to.eventually.equal(portStatus) + }) + }) + + describe('.findAvailablePort()', () => { + const portRangeFrom = 12345 + const portRangeTo = 12350 + const availablePort = 12346 + const hostName = 'hostname' + + def('subject', () => $subject.findAvailablePort(hostName)) + + beforeEach(() => { + $sandbox.stub(Config, 'get') + .withArgs('Tunnel:PortRange') + .returns(`${portRangeFrom}-${portRangeTo}`) + + $sandbox.stub(portscanner, 'findAPortNotInUse').returns(Promise.resolve(availablePort)) + }) + + it('calls portscanner#findAPortNotInUse() with correct args', async () => { + await $subject + expect(portscanner.findAPortNotInUse).to.have.been.calledWith(portRangeFrom, portRangeTo, hostName) + }) + + it('returns a promise', () => { + return expect($subject).to.be.an.instanceOf(Promise) + }) + + it('returns the first available port', () => { + return expect($subject).to.eventually.equal(availablePort) + }) + }) + + describe('.isFileExists()', () => { + def('file', () => 'test.tmp') + def('extName', () => '.tmp') + def('exists', () => true) + def('subject', () => $subject.isFileExists($file)) + + beforeEach(() => { + $sandbox.stub(path, 'extname').returns($extName) + $sandbox.stub(fs, 'existsSync').returns($exists) + }) + + context('when does not have extension', () => { + def('extName', () => 'test') + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context('when has extension', () => { + context('when file does not exist', () => { + def('exists', () => false) + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context('when file does not exist', () => { + it('returns true', () => { + expect($subject).to.be.true + }) + }) + }) + }) + + describe('.isValidPort()', () => { + def('port', () => 12345) + def('subject', () => $subject.isValidPort($port)) + + const testPort = (text, string) => { + context(`when the ${text} is not an integer`, () => { + def('port', () => string ? '12345.5' : 12345.5) + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context(`when the ${text} is an integer`, () => { + context(`when the ${text} is less than 0`, () => { + def('port', () => string ? '-1' : -1) + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context(`when the ${text} is greater than 65534`, () => { + def('port', () => string ? '65535' : 65535) + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context(`when the ${text} is valid`, () => { + def('port', () => string ? 
'12345' : 12345) + + it('returns false', () => { + expect($subject).to.be.true + }) + }) + }) + } + + context('when provided value is a number', () => { + testPort('number', false) + }) + + context('when provided value is a string', () => { + context('when the string is not numeric', () => { + def('port', () => 'some-text') + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context('when the string is numeric', () => { + testPort('numeric string', true) + }) + }) + }) + + describe('.isValidDomain()', () => { + def('domain', () => 'www.domain.com') + def('subject', () => $subject.isValidDomain($domain)) + + context('when provided value is null', () => { + def('domain', () => null) + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + + context('when provided value is not null', () => { + context('when domain is valid', () => { + it('returns true', () => { + expect($subject).to.be.true + }) + }) + + context('when domain is not valid', () => { + def('domain', () => 'invalid-domain') + + it('returns false', () => { + expect($subject).to.be.false + }) + }) + }) + }) + + // TODO: + // generateAccessToken + // checkTransaction + // deleteUndefinedFields + // validateBooleanCliOptions + // formatMessage + // stringifyCliJsonSchema + // handleCLIError + // trimCertificate + // validateParameters + // _validateArg + // _getPossibleAliasesList + // _getPossibleArgsList + // isTest + +}) diff --git a/test/src/services/tunnel-service.test.js b/test/src/services/tunnel-service.test.js new file mode 100644 index 000000000..2ebef36d7 --- /dev/null +++ b/test/src/services/tunnel-service.test.js @@ -0,0 +1,331 @@ +const { expect } = require('chai') +const sinon = require('sinon') + +const AppHelper = require('../../../src/helpers/app-helper') +const ChangeTrackingService = require('../../../src/services/change-tracking-service') +const Config = require('../../../src/config') +const FogManager = require('../../../src/sequelize/managers/iofog-manager') +const TunnelManager = require('../../../src/sequelize/managers/tunnel-manager') +const TunnelService = require('../../../src/services/tunnel-service') +const Validator = require('../../../src/schemas') + +describe('Tunnel Service', () => { + def('subject', () => TunnelService) + def('sandbox', () => sinon.createSandbox()) + + afterEach(() => $sandbox.restore()) + + describe('.openTunnel()', () => { + const uuid = 'abcd' + const port = 12345 + const config = 'tunnel-config' + const tunnelHost = 'tunnel-host' + const transaction = {} + const error = 'Error!' 
+
+    def('subject', () => $subject.openTunnel($tunnelData, $user, $cli, transaction))
+    def('tunnelData', () => ({
+      iofogUuid: uuid,
+      host: tunnelHost,
+    }))
+    def('user', () => 'user')
+    def('cli', () => false)
+    def('fog', () => ({ uuid }))
+    def('fogResponse', () => Promise.resolve($fog))
+    def('portResponse', () => Promise.resolve(port))
+    def('validatorResponse', () => Promise.resolve(true))
+    def('tunnelManagerResponse', () => Promise.resolve())
+    def('changeResponse', () => Promise.resolve())
+
+    beforeEach(() => {
+      $sandbox.stub(FogManager, 'findOne').returns($fogResponse)
+      $sandbox.stub(AppHelper, 'findAvailablePort').returns($portResponse)
+      $sandbox.stub(Config, 'get').returns(config)
+      $sandbox.stub(Validator, 'validate').returns($validatorResponse)
+      $sandbox.stub(TunnelManager, 'updateOrCreate').returns($tunnelManagerResponse)
+      $sandbox.stub(ChangeTrackingService, 'update').returns($changeResponse)
+    })
+
+    it('calls FogManager#findOne() with correct args', async () => {
+      await $subject
+      expect(FogManager.findOne).to.have.been.calledWith({ uuid }, transaction)
+    })
+
+    context('when FogManager#findOne() fails', () => {
+      def('fogResponse', () => Promise.reject(error))
+
+      it(`fails with ${error}`, () => {
+        return expect($subject).to.be.rejectedWith(error)
+      })
+    })
+
+    context('when FogManager#findOne() succeeds', () => {
+      context('when FogManager#findOne() does not return a Fog instance', () => {
+        def('fog', () => null)
+
+        it('fails with error', () => {
+          return expect($subject).to.be.rejectedWith('Invalid Fog Id')
+        })
+      })
+
+      context('when FogManager#findOne() returns a Fog instance', () => {
+        const testOpenTunnel = function (cli) {
+          const tunnel = cli ? {
+            host: tunnelHost,
+            iofogUuid: uuid,
+            rport: port,
+          } : {
+            closed: false,
+            host: config,
+            iofogUuid: uuid,
+            lport: config,
+            password: config,
+            rport: port,
+            rsakey: config,
+            username: config,
+          }
+
+          it('calls Validator#validate() with correct args', async () => {
+            await $subject
+            expect(Validator.validate).to.have.been.calledWith(tunnel, Validator.schemas.tunnelCreate)
+          })
+
+          context('when Validator#validate() fails', () => {
+            def('validatorResponse', () => Promise.reject(error))
+
+            it(`fails with ${error}`, () => {
+              return expect($subject).to.be.rejectedWith(error)
+            })
+          })
+
+          context('when Validator#validate() succeeds', () => {
+            it('calls TunnelManager#updateOrCreate() with correct args', async () => {
+              await $subject
+              expect(TunnelManager.updateOrCreate).to.have.been.calledWith($tunnelData, tunnel, transaction)
+            })
+
+            context('when TunnelManager#updateOrCreate() fails', () => {
+              def('tunnelManagerResponse', () => Promise.reject(error))
+
+              it(`fails with ${error}`, () => {
+                return expect($subject).to.be.rejectedWith(error)
+              })
+            })
+
+            context('when TunnelManager#updateOrCreate() succeeds', () => {
+              it('calls ChangeTrackingService#update() with correct args', async () => {
+                await $subject
+                expect(ChangeTrackingService.update).to.have.been.calledWith(uuid, ChangeTrackingService.events.tunnel, transaction)
+              })
+
+              context('when ChangeTrackingService#update() fails', () => {
+                def('changeResponse', () => Promise.reject(error))
+
+                it(`fails with ${error}`, () => {
+                  return expect($subject).to.be.rejectedWith(error)
+                })
+              })
+
+              context('when ChangeTrackingService#update() succeeds', () => {
+                it('fulfills the promise', () => {
+                  return expect($subject).to.eventually.equal(undefined)
+                })
+              })
+            })
+          })
+        }
+
+        context('when running from command line', () => {
+          def('cli', () => true)
+
+          it('calls AppHelper#findAvailablePort() with correct args', async () => {
+            await $subject
+            expect(AppHelper.findAvailablePort).to.have.been.calledWith(tunnelHost)
+          })
+
+          context('when AppHelper#findAvailablePort() fails', () => {
+            def('portResponse', () => Promise.reject(error))
+
+            it(`fails with ${error}`, () => {
+              return expect($subject).to.be.rejectedWith(error)
+            })
+          })
+
+          context('when AppHelper#findAvailablePort() succeeds', () => {
+            testOpenTunnel(true)
+          })
+        })
+
+        context('when running from api', () => {
+          testOpenTunnel(false)
+        })
+      })
+    })
+  })
+
+  describe('.findTunnel()', () => {
+    const uuid = 'abcd'
+    const tunnelHost = 'tunnel-host'
+    const tunnel = {
+      username: 'user',
+      host: tunnelHost,
+      rport: 12345,
+      lport: 54321,
+      closed: false,
+    }
+    const transaction = {}
+    const error = 'Error!'
+
+    def('subject', () => $subject.findTunnel($tunnelData, $user, transaction))
+    def('tunnelData', () => ({
+      iofogUuid: uuid,
+      host: tunnelHost,
+    }))
+    def('user', () => 'user')
+    def('tunnelManagerResponse', () => Promise.resolve(tunnel))
+
+    beforeEach(() => {
+      $sandbox.stub(TunnelManager, 'findOne').returns($tunnelManagerResponse)
+    })
+
+    it('calls TunnelManager#findOne() with correct args', async () => {
+      await $subject
+      expect(TunnelManager.findOne).to.have.been.calledWith($tunnelData, transaction)
+    })
+
+    context('when TunnelManager#findOne() fails', () => {
+      def('tunnelManagerResponse', () => Promise.reject(error))
+
+      it(`fails with ${error}`, () => {
+        return expect($subject).to.be.rejectedWith(error)
+      })
+    })
+
+    context('when TunnelManager#findOne() succeeds', () => {
+      context('when TunnelManager#findOne() does not return tunnel info', () => {
+        def('tunnelManagerResponse', () => null)
+
+        it('fails with error', () => {
+          return expect($subject).to.be.rejectedWith('Invalid Tunnel Id')
+        })
+      })
+
+      context('when TunnelManager#findOne() returns tunnel info', () => {
+        it('resolves with tunnel info', () => {
+          return expect($subject).to.eventually.eql({
+            username: tunnel.username,
+            host: tunnel.host,
+            remotePort: tunnel.rport,
+            localPort: tunnel.lport,
+            status: 'open',
+          })
+        })
+      })
+    })
+  })
+
+  describe('.findAll()', () => {
+    const tunnel = [{}]
+    const transaction = {}
+    const error = 'Error!'
+
+    def('subject', () => $subject.findAll(transaction))
+    def('tunnelManagerResponse', () => Promise.resolve(tunnel))
+
+    beforeEach(() => {
+      $sandbox.stub(TunnelManager, 'findAll').returns($tunnelManagerResponse)
+    })
+
+    it('calls TunnelManager#findAll() with correct args', async () => {
+      await $subject
+      expect(TunnelManager.findAll).to.have.been.calledWith({}, transaction)
+    })
+
+    context('when TunnelManager#findAll() fails', () => {
+      def('tunnelManagerResponse', () => Promise.reject(error))
+
+      it(`fails with ${error}`, () => {
+        return expect($subject).to.be.rejectedWith(error)
+      })
+    })
+
+    context('when TunnelManager#findAll() succeeds', () => {
+      it('resolves with tunnel info', () => {
+        return expect($subject).to.eventually.eql({ tunnels: tunnel })
+      })
+    })
+  })
+
+  describe('.closeTunnel()', () => {
+    const tunnel = {}
+    const uuid = 'abcd'
+    const tunnelHost = 'tunnel-host'
+    const transaction = {}
+    const error = 'Error!'
+
+    def('subject', () => $subject.closeTunnel($tunnelData, $user, transaction))
+    def('tunnelData', () => ({
+      iofogUuid: uuid,
+      host: tunnelHost,
+    }))
+    def('user', () => 'user')
+    def('findTunnelResponse', () => Promise.resolve(tunnel))
+    def('tunnelManagerResponse', () => Promise.resolve())
+    def('changeResponse', () => Promise.resolve())
+
+    beforeEach(() => {
+      $sandbox.stub(TunnelService, 'findTunnel').returns($findTunnelResponse)
+      $sandbox.stub(TunnelManager, 'update').returns($tunnelManagerResponse)
+      $sandbox.stub(ChangeTrackingService, 'update').returns($changeResponse)
+    })
+
+    it('calls TunnelService#findTunnel() with correct args', async () => {
+      await $subject
+      expect(TunnelService.findTunnel).to.have.been.calledWith($tunnelData, $user, transaction)
+    })
+
+    context('when TunnelService#findTunnel() fails', () => {
+      def('findTunnelResponse', () => Promise.reject(error))
+
+      it(`fails with ${error}`, () => {
+        return expect($subject).to.be.rejectedWith(error)
+      })
+    })
+
+    context('when TunnelService#findTunnel() succeeds', () => {
+      it('calls TunnelManager#update() with correct args', async () => {
+        await $subject
+        expect(TunnelManager.update).to.have.been.calledWith($tunnelData, { closed: true }, transaction)
+      })
+
+      context('when TunnelManager#update() fails', () => {
+        def('tunnelManagerResponse', () => Promise.reject(error))
+
+        it(`fails with ${error}`, () => {
+          return expect($subject).to.be.rejectedWith(error)
+        })
+      })
+
+      context('when TunnelManager#update() succeeds', () => {
+        it('calls ChangeTrackingService#update() with correct args', async () => {
+          await $subject
+          expect(ChangeTrackingService.update).to.have.been.calledWith(uuid, ChangeTrackingService.events.tunnel, transaction)
+        })
+
+        context('when ChangeTrackingService#update() fails', () => {
+          def('changeResponse', () => Promise.reject(error))
+
+          it(`fails with ${error}`, () => {
+            return expect($subject).to.be.rejectedWith(error)
+          })
+        })
+
+        context('when ChangeTrackingService#update() succeeds', () => {
+          it('fulfills the promise', () => {
+            return expect($subject).to.eventually.equal(undefined)
+          })
+        })
+      })
+    })
+  })
+})
\ No newline at end of file
diff --git a/test/support/setup.js b/test/support/setup.js
new file mode 100644
index 000000000..114cd9089
--- /dev/null
+++ b/test/support/setup.js
@@ -0,0 +1,15 @@
+const chai = require('chai')
+const chaiAsPromised = require('chai-as-promised')
+const chaiHttp = require('chai-http')
+const sinonChai = require('sinon-chai')
+
+process.env.NODE_ENV = 'test'
+process.on('unhandledRejection', () => {})
+process.on('rejectionHandled', () => {})
+
+MAIN: {
+  chai.should()
+  chai.use(chaiAsPromised)
+  chai.use(chaiHttp)
+  chai.use(sinonChai)
+}