
Commit 52a7910

Merge pull request #2010 from hapijs/heavy

Heavy

geek committed Oct 6, 2014
2 parents 3d0b093 + bd0f046, commit 52a7910
Showing 5 changed files with 10 additions and 182 deletions.
9 changes: 0 additions & 9 deletions lib/defaults.js
@@ -82,15 +82,6 @@ exports.server = {
        server: false                       // Determines how long to wait for server request processing. Disabled by default
    },

    // Load

    load: {
        maxHeapUsedBytes: 0,                // Reject requests when V8 heap is over size in bytes (zero is no max)
        maxRssBytes: 0,                     // Reject requests when process RSS is over size in bytes (zero is no max)
        maxEventLoopDelay: 0,               // Milliseconds of delay after which requests are rejected (zero is no max)
        sampleInterval: 0                   // Frequency of load sampling in milliseconds (zero is no sampling)
    },

    // Debug

    debug: {
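For context, a minimal sketch (not part of this commit) of how these load defaults are overridden when constructing a server; the option names mirror the defaults above and the tests later in this diff, while the values are illustrative only:

var Hapi = require('hapi');

// Zero disables a limit, per the comments in defaults.js above
var server = new Hapi.Server(0, {
    load: {
        sampleInterval: 1000,       // sample process load once per second
        maxHeapUsedBytes: 0,        // no V8 heap limit
        maxRssBytes: 0,             // no RSS limit
        maxEventLoopDelay: 0        // no event loop delay limit
    }
});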
7 changes: 1 addition & 6 deletions lib/schema.js
@@ -90,12 +90,7 @@ internals.serverBase = Joi.object({
        suffix: Joi.string().allow(null)
    }),
    labels: internals.labels,
    load: {
        maxHeapUsedBytes: Joi.number().min(0),
        maxEventLoopDelay: Joi.number().min(0),
        maxRssBytes: Joi.number().min(0),
        sampleInterval: Joi.number().min(0)
    },
    load: Joi.object(),
    location: Joi.string().allow(''),
    cacheControlStatus: Joi.array().min(1).includes(Joi.number().integer().min(200)),
    payload: Joi.object({
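A note on the schema change above: replacing the per-option load schema with Joi.object() means hapi itself no longer validates the individual limits; any object is accepted and handed through to the heavy module. A rough sketch of the difference, assuming Joi's callback-style validate API of that era:

var Joi = require('joi');

// Old behaviour: each limit validated explicitly, so a bad value is rejected
var oldLoad = Joi.object({
    maxHeapUsedBytes: Joi.number().min(0),
    maxEventLoopDelay: Joi.number().min(0),
    maxRssBytes: Joi.number().min(0),
    sampleInterval: Joi.number().min(0)
});

Joi.validate({ maxRssBytes: -1 }, oldLoad, function (err) {

    console.log(!!err);                 // true - negative value rejected
});

// New behaviour: any object passes the schema; validation is left to heavy
Joi.validate({ maxRssBytes: -1 }, Joi.object(), function (err) {

    console.log(!!err);                 // false - accepted
});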
52 changes: 8 additions & 44 deletions lib/server.js
@@ -6,6 +6,7 @@ var Https = require('https');
var Os = require('os');
var Path = require('path');
var Boom = require('boom');
var Heavy = require('heavy');
var Hoek = require('hoek');
var LruCache = require('lru-cache');
var Shot = require('shot');
@@ -102,17 +103,8 @@ exports = module.exports = internals.Server = function (/* host, port, options */) {
    this._etags = (this.settings.files.etagsCacheMaxSize ? LruCache({ max: this.settings.files.etagsCacheMaxSize }) : null);
    Router.create(this);        // Sets this._router

    // Server load

    Hoek.assert(this.settings.load.sampleInterval || (!this.settings.load.maxEventLoopDelay && !this.settings.load.maxHeapUsedBytes && !this.settings.load.maxRssBytes), 'Load sample interval must be set in enable load limits');

    this._eventLoopTimer = null;
    this._loadBench = new Hoek.Bench();
    this.load = {
        eventLoopDelay: 0,
        heapUsed: 0,
        rss: 0
    };
    this._heavy = new Heavy(this.settings.load);
    this.load = this._heavy.load;

    /*
        onRequest: New request, before handing over to the router (allows changes to the request method, url, etc.)
@@ -267,8 +259,6 @@ internals.Server.prototype._dispatch = function (options) {
    var self = this;

    options = options || {};
    var load = this.load;
    var limits = this.settings.load;

    return function (req, res) {

@@ -278,13 +268,9 @@

        // Check load

        if (limits.sampleInterval &&
            ((limits.maxEventLoopDelay && (load.eventLoopDelay > limits.maxEventLoopDelay || self._loadBench.elapsed() > limits.maxEventLoopDelay)) ||
            (limits.maxHeapUsedBytes && load.heapUsed > limits.maxHeapUsedBytes) ||
            (limits.maxRssBytes && load.rss > limits.maxRssBytes))) {

            self.log(['hapi', 'load'], load);
            request._reply(Boom.serverTimeout('Server under heavy load', load));
        if (!self._heavy.check()) {
            self.log(['hapi', 'load'], self.load);
            request._reply(Boom.serverTimeout('Server under heavy load', self.load));
        }
        else {

@@ -338,26 +324,7 @@ internals.Server.prototype._init = function (callback) {

    // Load measurements

    if (this.settings.load.sampleInterval) {
        var loopSample = function () {

            self._loadBench.reset();
            var measure = function () {

                var mem = process.memoryUsage();

                self.load.eventLoopDelay = (self._loadBench.elapsed() - self.settings.load.sampleInterval);
                self.load.heapUsed = mem.heapUsed;
                self.load.rss = mem.rss;

                loopSample();
            };

            self._eventLoopTimer = setTimeout(measure, self.settings.load.sampleInterval);
        };

        loopSample();
    }
    this._heavy.start();

    // Setup listener

@@ -415,10 +382,7 @@ internals.Server.prototype._stop = function (options, callback) {
    }

    this._started = false;

    if (this._eventLoopTimer) {
        clearTimeout(this._eventLoopTimer);
    }
    this._heavy.stop();

    var timeoutId = setTimeout(function () {

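For reference, a rough sketch of the heavy API as this change exercises it (constructor options, start/stop, check, and the load snapshot). This is inferred from the diff above rather than from the module's documentation, so treat names and return values as assumptions:

var Heavy = require('heavy');

// Options mirror hapi's former load settings; values are illustrative
var heavy = new Heavy({ sampleInterval: 1000, maxEventLoopDelay: 0, maxHeapUsedBytes: 0, maxRssBytes: 0 });

heavy.start();                                  // begin sampling event loop delay and memory usage

if (!heavy.check()) {                           // falsy once a configured limit is exceeded
    console.log('over load:', heavy.load);      // { eventLoopDelay, heapUsed, rss }
}

heavy.stop();                                   // clear the sampling timer on shutdown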
1 change: 1 addition & 0 deletions package.json
@@ -26,6 +26,7 @@
"catbox-memory": "1.x.x",
"cryptiles": "2.x.x",
"h2o2": "1.x.x",
"heavy": "1.x.x",
"hoek": "^2.4.x",
"iron": "2.x.x",
"items": "1.x.x",
123 changes: 0 additions & 123 deletions test/server.js
@@ -256,33 +256,6 @@ describe('Server', function () {

    describe('Load', { parallel: false }, function () {

        it('requires load interval when maxEventLoopDelay is set', function (done) {

            expect(function () {

                var server = new Hapi.Server({ load: { sampleInterval: 0, maxEventLoopDelay: 10, maxHeapUsedBytes: 0, maxRssBytes: 0 } });
            }).to.throw('Load sample interval must be set in enable load limits');
            done();
        });

        it('requires load interval when maxHeapUsedBytes is set', function (done) {

            expect(function () {

                var server = new Hapi.Server({ load: { sampleInterval: 0, maxEventLoopDelay: 0, maxHeapUsedBytes: 10, maxRssBytes: 0 } });
            }).to.throw('Load sample interval must be set in enable load limits');
            done();
        });

        it('requires load interval when maxRssBytes is set', function (done) {

            expect(function () {

                var server = new Hapi.Server({ load: { sampleInterval: 0, maxEventLoopDelay: 0, maxHeapUsedBytes: 0, maxRssBytes: 10 } });
            }).to.throw('Load sample interval must be set in enable load limits');
            done();
        });

        it('measures loop delay', function (done) {

            var server = new Hapi.Server(0, { load: { sampleInterval: 4 } });
@@ -361,102 +334,6 @@ describe('Server', function () {
                });
            });
        });

        it('rejects request due to high heap load', function (done) {

            var server = new Hapi.Server(0, { load: { sampleInterval: 5, maxHeapUsedBytes: 1 } });
            var handler = function (request, reply) {

                var start = Date.now();
                while (Date.now() - start < 10);
                reply('ok');
            };

            server.route({ method: 'GET', path: '/', handler: handler });
            server.start(function (err) {

                server.inject('/', function (res) {

                    expect(res.statusCode).to.equal(200);

                    setImmediate(function () {

                        server.inject('/', function (res) {

                            expect(res.statusCode).to.equal(503);
                            server.stop(function () {

                                done();
                            });
                        });
                    });
                });
            });
        });

        it('rejects request due to high event loop delay load', function (done) {

            var server = new Hapi.Server(0, { load: { sampleInterval: 5, maxEventLoopDelay: 5 } });
            var handler = function (request, reply) {

                var start = Date.now();
                while (Date.now() - start < 10);
                reply('ok');
            };

            server.route({ method: 'GET', path: '/', handler: handler });
            server.start(function (err) {

                server.inject('/', function (res) {

                    expect(res.statusCode).to.equal(200);

                    setImmediate(function () {

                        server.inject('/', function (res) {

                            expect(res.statusCode).to.equal(503);
                            server.stop(function () {

                                done();
                            });
                        });
                    });
                });
            });
        });

        it('rejects request due to high event loop delay load before next sample', function (done) {

            var server = new Hapi.Server(0, { load: { sampleInterval: 500, maxEventLoopDelay: 1 } });
            var handler = function (request, reply) {

                var start = Date.now();
                while (Date.now() - start < 10);
                reply('ok');
            };

            server.route({ method: 'GET', path: '/', handler: handler });
            server.start(function (err) {

                server.inject('/', function (res) {

                    expect(res.statusCode).to.equal(200);

                    setImmediate(function () {

                        server.inject('/', function (res) {

                            expect(res.statusCode).to.equal(503);
                            server.stop(function () {

                                done();
                            });
                        });
                    });
                });
            });
        });
    });

it('reuses the same cache segment', function (done) {
