Skip to content

Commit

Permalink
Add status.js and its tests
Browse files Browse the repository at this point in the history
  • Loading branch information
AnkshitJain committed Nov 24, 2017
1 parent 8cf9990 commit 511867c
Show file tree
Hide file tree
Showing 13 changed files with 331 additions and 114 deletions.
1 change: 1 addition & 0 deletions .gitignore
Expand Up @@ -5,6 +5,7 @@ main_server/lab_backups/
main_server/test/npm-debug.log
main_server/test/node_modules/
tests/functional_tests/node_modules/
util/node_modules/
.vagrant
docker-images/*.tar
ubuntu-xenial-16.04-cloudimg-console.log
Expand Down
9 changes: 8 additions & 1 deletion deploy/configs/load_balancer/nodes_data_conf.json
@@ -1,22 +1,27 @@
{
"Nodes": [
{
"role": "execution_node",
"hostname": "localhost",
"port": "8082"
},
{
"role": "execution_node",
"hostname": "localhost",
"port": "8083"
},
{
"role": "execution_node",
"hostname": "localhost",
"port": "8084"
},
{
"role": "execution_node",
"hostname": "localhost",
"port": "8085"
},
{
"role": "execution_node",
"hostname": "localhost",
"port": "8086"
}
Expand All @@ -35,7 +40,9 @@
"hostname": "localhost",
"port": "80"
},
"host_port": {
"load_balancer": {
"role": "load_balancer",
"hostname": "localhost",
"port": "8081"
}
}
1 change: 1 addition & 0 deletions deploy/playbook-single.yml
Expand Up @@ -150,6 +150,7 @@
- "../log/load_balancer:/log"
env:
LOGGERCONFIG: "/etc/util/logger.json"
LBCONFIG: "/etc/load_balancer/nodes_data_conf.json"
GITLAB_IP: "{{ gitlab_hostname }}"

- hosts: mainserver
Expand Down
1 change: 1 addition & 0 deletions deploy/playbook.yml
Expand Up @@ -150,6 +150,7 @@
- "../log/load_balancer:/log"
env:
LOGGERCONFIG: "/etc/util/logger.json"
LBCONFIG: "/etc/load_balancer/nodes_data_conf.json"
GITLAB_IP: "{{ gitlab_hostname }}"

- hosts: mainserver
Expand Down
119 changes: 24 additions & 95 deletions load_balancer/load_balancer.js
Expand Up @@ -13,13 +13,16 @@ var http = require('http');
var bodyParser = require('body-parser');
var fs = require('fs');
var sys = require('sys');
var exec = require('child_process').exec;
var { exec } = require('child_process');
// Both the TESTING and production branches loaded the exact same file, so
// the process.env.mode check (which also used loose ==) was dead code.
// Load the config unconditionally.
// NOTE(review): if a separate test-mode config path was intended here,
// restore the branch with the real test path — confirm with the author.
var nodes_data = require('/etc/load_balancer/nodes_data_conf.json');

var mysql = require('mysql');
const { Status } = require('./status.js');

const status = new Status(nodes_data.Nodes);
const node_queue = [];

app.use(express.static(__dirname + '/public'));
app.use(bodyParser.urlencoded({extended: true}));
Expand All @@ -30,56 +33,19 @@ app.get('/userCheck', function (req,res) {
res.send(true);
});

app.get('/connectionCheck', function (req,res) {
console.log('connectionCheck requested');
var result = 'Load Balancer Working\n';
var numOfNodes = nodes_data.Nodes.length;
function checkNodeConn(node){
var options = {
host: node.hostname,
port: node.port,
path: '/connectionCheck',
key : fs.readFileSync('./ssl/key.pem'),
cert: fs.readFileSync('./ssl/cert.pem'),
rejectUnauthorized:false,
};
//send a get request and capture the response
var req = https.request(options, function(res){
// Buffer the body entirely for processing as a whole.
var bodyChunks = [];
res.on('data', function(chunk){
bodyChunks.push(chunk);
}).on('end', function(){
var body = Buffer.concat(bodyChunks);
result = result.concat('<br/>Execution Node at '+node.hostname+':'+node.port+' working: ' + body);
console.log("nodeing");
//return if all requets processed
if(--numOfNodes === 0){
console.log("DispRes");
dispResult();
}
});
});
req.on('error', function(e) {
result = result.concat('<br/>Execution Node at '+node.hostname+':'+node.port+' Error: ' + e.message);
//return if all requets processed
if(--numOfNodes === 0){
console.log("DispRes");
dispResult();
}
});
req.end();
} //checkNodeConnection ends

function dispResult(){
res.send(result);
}
//Check connection of all nodes
for(var i=0;i<nodes_data.Nodes.length;i++)
{
console.log(numOfNodes);
checkNodeConn(nodes_data.Nodes[i]);
}
// GET /connectionCheck — report the health of the load balancer and every
// configured execution node, plus the current job queue length.
app.get('/connectionCheck', (req, res) => {
  console.log('Connection check requested');
  // Copy the load_balancer entry instead of mutating the shared nodes_data
  // config object: the original wrote `status` directly onto it, leaking a
  // request-time bookkeeping field into everything else that reads the config
  // (e.g. server.listen below).
  const lbStatus = Object.assign({}, nodes_data.load_balancer, { status: 'up' });
  status.checkStatus((result) => {
    // Since this request is being processed, the load balancer itself is up.
    const statusJson = result;
    statusJson.components.push(lbStatus);
    statusJson.job_queue_length = job_queue.length;
    statusJson.timestamp = (new Date()).toString();
    console.log("Connection check request completed");
    res.send(statusJson);
  });
});

app.post('/submit', function(req, res){
Expand Down Expand Up @@ -258,7 +224,7 @@ app.post('/addNode', function(req, res){
console.log(req.body)
node_queue.push(req.body);
console.log("Added "+req.body.hostname+":"+req.body.port+" to queue");

if(job_queue.length!==0)
{
var assigned_node = node_queue.pop();
Expand Down Expand Up @@ -320,57 +286,20 @@ try {
} finally {

}
var node_queue=[];
for(var i=0;i<nodes_data.Nodes.length;i++)
{
checkNodeConn(nodes_data.Nodes[i]);
function checkNodeConn(node) {

var https_checkConn ={
hostname : node.hostname,
port : node.port,
path : '/connectionCheck',
key : fs.readFileSync('./ssl/key.pem'),
cert: fs.readFileSync('./ssl/cert.pem'),
rejectUnauthorized:false,
};

var checkConnRequest = https.request(https_checkConn,function(res)
{
var bodyChunks =[];
res.on('data',function(chunk)
{
bodyChunks.push(chunk);
}).on('end',function()
{
var body = Buffer.concat(bodyChunks);
if(body.toString()=='true')
{
console.log("Added "+node.hostname+":"+node.port+" to queue");
node_queue.push(node);
}
});
});

checkConnRequest.on('error',function(err)
{
console.log("Error connecting to "+node.hostname+":"+node.port);

/* This will update the node queue and working nodes will be added.
The logger level would be info, when logger.js is integrated. */
status.selectActiveNodes((workingNodes) => {
node_queue.push(...workingNodes);
});
checkConnRequest.end();

}
}

var job_queue = [];
if(process.env.mode !== "TESTING")
{
server.listen(nodes_data.host_port.port);
console.log("Listening at "+nodes_data.host_port.port);
server.listen(nodes_data.load_balancer.port);
console.log("Listening at "+nodes_data.load_balancer.port);
}



setInterval(function () {
connection.query('SELECT 1' ,function(err, rows, fields) {
console.log("keep alive query");
Expand Down
14 changes: 12 additions & 2 deletions load_balancer/package.json
Expand Up @@ -4,7 +4,8 @@
"description": "load balancer component of autolab evaluation software",
"main": "load_balancer.js",
"scripts": {
"start": "node load_balancer.js"
"start": "node load_balancer.js",
"test": "mocha -u bdd -R spec ./tests/"
},
"repository": {
"type": "git",
Expand All @@ -17,7 +18,9 @@
],
"author": "Rajat Agarwal",
"contributors": [
"Tejas Sangol", "Gaurav Narula"
"Tejas Sangol",
"Gaurav Narula",
"Ankshit Jain"
],
"license": "GPL-2.0",
"bugs": {
Expand All @@ -29,5 +32,12 @@
"express": "^4.13.4",
"httpolyglot": "^0.1.1",
"mysql": ""
},
"devDependencies": {
"chai": "^4.1.1",
"mocha": "^3.5.0",
"chai-as-promised": "^7.1.1",
"rewire": "2.5.2",
"dirty-chai": "^2.0.1"
}
}
102 changes: 102 additions & 0 deletions load_balancer/status.js
@@ -0,0 +1,102 @@
/* eslint import/no-dynamic-require: 0 */
const https = require('https');
const { Logger } = require('../util/logger.js');
const { check } = require('../util/environmentCheck.js');
/* The environment variable LBCONFIG will contain the path to the config file.
The actual path for the config is "../deploy/configs/load_balancer/nodes_data_conf.json"
For the docker containers, the path is /etc/load_balancer/nodes_data_conf.json */
check('LBCONFIG');

const lbConfig = require(process.env.LBCONFIG);

const loggerConfig = lbConfig.load_balancer;
loggerConfig.cmd = 'log';

const logger = new Logger(loggerConfig);

// Probe a single node's /connectionCheck endpoint over https and report
// 'up' or 'down' through the resolve callback. Exactly one resolve call is
// guaranteed for every outcome: success body 'true' -> 'up', any other
// body -> 'down', connection error -> 'down'.
const getComponentResponse = function getComponentResponse(node, resolve) {
  const httpsCheckConn = Object.assign({}, node);
  httpsCheckConn.path = '/connectionCheck';
  httpsCheckConn.rejectUnauthorized = false;

  // send a get request and capture the response
  const checkConnRequest = https.request(httpsCheckConn, (res) => {
    // Buffer the body entirely for processing as a whole.
    const bodyChunks = [];
    res.on('data', (chunk) => {
      bodyChunks.push(chunk);
    }).on('end', () => {
      const body = Buffer.concat(bodyChunks);
      if (body.toString() === 'true') {
        logger.debug(`Node at ${node.hostname}:${node.port} is up and running.`);
        resolve('up');
      } else {
        // Bug fix: the original resolved only on a literal 'true' body, so
        // any other successful response left the caller's promise pending
        // forever (hanging Promise.all in Status). Treat it as 'down'.
        logger.debug(`Node at ${node.hostname}:${node.port} sent an unexpected response.`);
        resolve('down');
      }
    });
  });

  checkConnRequest.on('error', (err) => {
    logger.error(`Error connecting to ${node.hostname}:${node.port}`);
    logger.error(err);
    resolve('down');
  });

  checkConnRequest.end();
};

// Resolve with a copy of the node annotated with its 'up'/'down' status, or
// with undefined when the node entry lacks any of hostname, port, or role.
const getComponentStatus = function getComponentStatus(node) {
  return new Promise((resolve) => {
    const hasRequiredFields = node.hostname !== undefined
      && node.port !== undefined
      && node.role !== undefined;
    if (!hasRequiredFields) {
      resolve(undefined);
      return;
    }
    getComponentResponse(node, (nodeStatus) => {
      const annotated = Object.assign({}, node, { status: nodeStatus });
      resolve(annotated);
    });
  });
};

// Aggregates the health of all configured nodes.
// NOTE(review): the probes are started once, at construction time, so every
// later checkStatus/selectActiveNodes call reuses that initial snapshot —
// confirm a live re-check per request is not expected here.
class Status {
  constructor(nodes) {
    this.nodes = nodes;
    this.promises = [];
    for (let i = 0; i < this.nodes.length; i += 1) {
      this.promises.push(getComponentStatus(this.nodes[i]));
    }
  }

  // Invoke response with an array of { hostname, port } for every node that
  // reported 'up'.
  selectActiveNodes(response) {
    const nodeQueue = [];
    // Check connection of all nodes
    Promise.all(this.promises).then((data) => {
      for (let i = 0; i < data.length; i += 1) {
        // getComponentStatus resolves undefined for malformed node entries;
        // the original dereferenced data[i].status unconditionally, so one
        // bad entry threw, was swallowed by the catch below, and response()
        // was never called. Skip undefined entries instead.
        if (data[i] !== undefined && data[i].status === 'up') {
          nodeQueue.push({
            hostname: data[i].hostname,
            port: data[i].port,
          });
          logger.debug(`Added node ${data[i].hostname}:${data[i].port} to the node queue.`);
        }
      }
      response(nodeQueue);
    }).catch((error) => {
      logger.error(error);
    });
  }

  // Invoke response with { components: [...] }, one status object per
  // well-formed node.
  checkStatus(response) {
    const result = {};
    result.components = [];
    Promise.all(this.promises).then((data) => {
      // Drop the undefined placeholders produced by malformed node configs
      // so the reported component list only contains real status objects.
      result.components = data.filter((component) => component !== undefined);
      response(result);
    }).catch((error) => {
      logger.error(error);
    });
  }
}

// Expose the Status class to load_balancer.js and the mocha test suite.
module.exports = {
Status,
};

0 comments on commit 511867c

Please sign in to comment.