Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Initial commit

  • Loading branch information...
commit 68d1f1b5ba9ddb03255bbd24f7457242a90b119b 0 parents
@hmalphettes authored
13 .gitignore
@@ -0,0 +1,13 @@
+node_modules
+npm-debug.log
+.DS_Store
+meta.json
+.idea
+coverage.html
+lib-cov
+.coverage_data
+reports
+html-report
+build
+cobertura-coverage.xml
+metadata
36 .jshintrc
@@ -0,0 +1,36 @@
+{
+ "proto": true,
+ "browser": true,
+ "curly": true,
+ "devel": true,
+ "eqeqeq": true,
+ "eqnull": true,
+ "es5": false,
+ "evil": false,
+ "immed": false,
+ "jquery": true,
+ "latedef": false,
+ "laxcomma": true,
+ "newcap": true,
+ "node": true,
+ "noempty": true,
+ "nonew": true,
+ "predef":
+ [
+ "after",
+ "afterEach",
+ "before",
+ "beforeEach",
+ "describe",
+ "it",
+ "unescape",
+ "par",
+ "each",
+ "setImmediate"
+ ],
+ "smarttabs": true,
+ "trailing": false,
+ "undef": true,
+ "strict": false,
+ "expr": true
+}
16 .tm_properties
@@ -0,0 +1,16 @@
+fontName = "Monaco"
+fontSize = 12
+
+myExtraExcludes = "log,vendor,tmp,node_modules"
+excludeInFileChooser = "{$excludeInFileChooser,$myExtraExcludes}"
+excludeInFolderSearch = "{$excludeInFolderSearch,$myExtraExcludes}"
+excludeInBrowser = "{$excludeInBrowser,log,vendor,tmp,node_modules}"
+
+showInvisibles = true
+softTabs = true
+tabSize = 2
+wrapColumn = 120
+showWrapColumn = true
+
+projectDirectory = "$CWD"
+windowTitle = "$TM_DISPLAYNAME — ${CWD/^.*\///} ($TM_SCM_BRANCH)"
48 Makefile
@@ -0,0 +1,48 @@
+# Build / test targets for log4js-elasticsearch.
+BIN = ./node_modules/.bin
+MOCHA_OPTS = --timeout 2000
+REPORTER = spec
+TEST_FILES = test/*.js
+TEST_INTEGRATION_FILES = test/integration/*.js
+S3_STOIC=s3cmd -c ~/.s3cmd/.stoic
+S3_NPM_REPO=s3://npm-repo
+
+lint:
+ jshint lib/* test/* --config .jshintrc
+
+# NOTE(review): this target expands $(TEST), which is never defined here,
+# so mocha falls back to its default test/ lookup; TEST_FILES is defined
+# above but unused -- confirm which was intended.
+test: lint
+ ./node_modules/.bin/mocha \
+ $(MOCHA_OPTS) \
+ --reporter $(REPORTER) \
+ $(TEST)
+
+test-integration: lint
+ ./node_modules/.bin/mocha \
+ $(MOCHA_OPTS) \
+ --reporter $(REPORTER) \
+ $(TEST_INTEGRATION_FILES)
+
+# continuous test run (watch mode, minimal reporter)
+test-ci:
+ $(MAKE) -k test MOCHA_OPTS="$(MOCHA_OPTS) --watch --growl" REPORTER="min"
+
+# instrument lib/ into lib-cov/ for coverage runs
+lib-cov:
+ [ -d "lib-cov" ] && rm -rf lib-cov || true
+ $(BIN)/istanbul instrument --output lib-cov --no-compact --variable global.__coverage__ lib
+
+test-cov: lib-cov
+ @LOG4JS_COV=1 $(MAKE) test "REPORTER=mocha-istanbul" ISTANBUL_REPORTERS=text-summary,html
+
+clean:
+ [ -d "lib-cov" ] && rm -rf lib-cov || true
+ [ -d "reports" ] && rm -rf reports || true
+ [ -d "build" ] && rm -rf build || true
+
+install-local:
+ @npm install
+ @npm link ../mapperjs || true
+
+# pack and push the tarball to the private S3 npm repo
+deploy:
+ @npm pack
+ $(S3_STOIC) put *.tgz $(S3_NPM_REPO)
+ rm *.tgz
+
+.PHONY: test
157 README.md
@@ -0,0 +1,157 @@
+log4js-elasticsearch
+====================
+
+log4js-elasticsearch is a log4js log appender to push log messages into [elasticsearch](http://elasticsearch.org).
+[Kibana](http://kibana.org) is the awesome tool to view the logs.
+
+The logs produced are compatible with [logstash's elasticsearch_http output](http://logstash.net/docs/1.1.12/outputs/elasticsearch_http).
+
+Installation
+------------
+
+You can install log4js-elasticsearch via npm:
+
+ npm install log4js-elasticsearch
+
+Usage: basic
+------------
+
+ var log4js = require('log4js');
+ var esAppenderConfig = {
+ url: 'http://user:password@myelasticsearch.com:9200'
+ };
+ var log4jsESAppender = require('log4js-elasticsearch').configure(esAppenderConfig);
+ log4js.addAppender(log4jsESAppender, 'tests');
+
+The default url of the ES server is http://localhost:9200
+
+Usage: log4js configuration
+---------------------------
+
+ var log4js = require('log4js');
+ log4js.configure({
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "logLevelFilter",
+ "level": "WARN",
+ "appender": {
+ "type": "log4js-elasticsearch",
+ "url": "http://127.0.0.1:9200"
+ }
+ },
+ {
+ "category": "tests",
+ "type": "console",
+ }
+ ],
+ "levels": {
+ "tests": "DEBUG"
+ }
+ });
+
+Usage: advanced
+---------------
+
+ var log4js = require('log4js');
+ log4js.configure({
+ "appenders": [
+ {
+ "type": "log4js-elasticsearch",
+ "indexName": function(loggingEvent) {
+ return loggingEvent.categoryName;
+ },
+ "typeName": function(loggingEvent) {
+ return loggingEvent.level.levelStr;
+ },
+ "url": "http://127.0.0.1:9200",
+ "layout": {
+ "type": "logstash",
+ "tags": [ "mytag" ],
+ "sourceHost": function(loggingEvent) {
+ return "it-depends";
+ }
+ }
+ }
+ ],
+ "levels": {
+ "tests": "DEBUG"
+ }
+ });
+
+
+Appender configuration parameters
+=================================
+- `url`: the URL of the elasticsearch server.
+Basic authentication is supported.
+Default: http://localhost:9200
+
+- `indexName`: the name of the elasticsearch index in which the logs are stored.
+Either a static string or a function that is passed the logging event.
+Default: undefined; the indexNamePrefix is used by default.
+
+- `indexNamePrefix`: the prefix of the index name in which the logs are stored.
+The name of the actual index is suffixed with the date: `%{+YYYY.MM.dd}` and changes every day, UTC time.
+Defaults: 'logstash-'.
+
+- `typeName`: the name of the elasticsearch object in which the logs are posted.
+Either a string or a function that is passed the logging event.
+Default: 'nodejs'.
+
+- `wrapper`: function that wraps the serialization of a logging event into the data stored in Elasticsearch.
+A function that is passed the output of the layout and returns what gets posted into elasticsearch.
+When the value is the string 'logstash' it uses the built-in logstash wrapper.
+When the value is 'simple' or 'passthrough' it passes along the output of the layout.
+Default: the built-in logstashWrapper.
+
+- `layout`: object descriptor for the layout.
+By default the layout is logstash.
+
+Additional Built-in layouts
+============================
+
+The following layouts are added to the log4js builtin layouts:
+- logstash
+- simpleJson
+
+The following parameters are the children of the `layout` parameter in the appender's configuration for those new built-in layouts.
+
+Default: Logstash layout
+------------------------
+The logstash layout posts logs in the same structure as [logstash's elasticsearch_http output](http://logstash.net/docs/1.1.12/outputs/elasticsearch_http).
+
+- `tags`: output as the value of the `@tags` property.
+A static array or a function that is passed the logging event.
+Default: empty array.
+
+- `sourceHost`: output as the value of the `@source_host` property.
+A static string or a function that is passed the logging event.
+Default: OS's hostname.
+
+- `source`: output as the value of the `@source` property.
+A string.
+Default: 'log4js'.
+
+- `sourcePath`: output as the value of the `@source_path` property.
+A string.
+Default: working directory of the current process.
+
+- `logId`: outputs the value of the `_id` field.
+A function, or undefined to let elasticsearch generate it.
+Default: undefined.
+
+- `template`: the elasticsearch template to define.
+Only used if no template with the same name is defined.
+Default: from [untergeek's using-templates-to-improve-elasticsearch-caching-with-logstash](http://untergeek.com/2012/09/20/using-templates-to-improve-elasticsearch-caching-with-logstash/).
+
+simpleJson Layout
+-----------------
+A simple message pass through of the loggingEvent.
+
+License
+=======
+MIT
+
+Copyright
+=========
+(c) 2013 Sutoiku, Inc.
33 lib/elasticsearch-client.js
@@ -0,0 +1,33 @@
+var parseUrl = require('url').parse;
+var ElasticSearchClient = require('elasticsearchclient');
+
+/**
+ * Parses a URL with optional login / password
+ * returns the expected json options to configure the connection to ES.
+ *
+ * Attached as a static helper on the third-party elasticsearchclient
+ * constructor so callers only need to require this one module.
+ *
+ * @param url e.g. 'https://user:password@es.example.com:9200'
+ * @return { host, port, secure, [auth: { username, password }] }
+ */
+ElasticSearchClient.makeOptions = function(url) {
+ var urlP = parseUrl(url);
+ var options = {
+ host: urlP.hostname
+ };
+ var secure = urlP.protocol === 'https:';
+ if (urlP.port !== null && urlP.port !== undefined) {
+ options.port = urlP.port;
+ } else if (secure) {
+ // no explicit port: fall back to the protocol default (not 9200)
+ options.port = '443';
+ } else {
+ options.port = '80';
+ }
+ options.secure = secure;
+ if (urlP.auth) {
+ var toks = urlP.auth.split(':');
+ if (toks.length === 2) {
+ options.auth = { username: toks[0], password: toks[1] };
+ } else {
+ // 'user' with no ':password' part -> empty password
+ options.auth = { username: toks[0], password: '' };
+ }
+ }
+ return options;
+};
+
+module.exports = ElasticSearchClient;
196 lib/log4js-elasticsearch-layouts.js
@@ -0,0 +1,196 @@
+/**
+ some idea; maybe not so good:
+ support the pattern layout but make it as a json serialization ?
+*/
+var layouts = require('log4js').layouts;
+module.exports = layouts;
+
+var messagePassThroughLayout = layouts.messagePassThroughLayout;
+
+/*
+log4js logging event:
+startTime: date,
+categoryName: string,
+level: { level: int, levelStr: levelStr },
+data: [ args of logger ],
+logger: ... circular ...
+*/
+
+
+/**
+ * Outputs a JSON object
+ * with startTime / category / level / levelStr / message and, when the
+ * last logged argument was an Error, 'error' (its message) and 'stack'
+ * (the stack split into an array of lines).
+ *
+ * @param loggingEvent the log4js logging event
+ * @return a plain object ready to be indexed into ES
+ */
+function simpleJsonLayout(loggingEvent) {
+ var data = __formatData(loggingEvent);
+ var message = data[0], errorMsg = data[1], stack = data[2];
+ var base = {
+ startTime: loggingEvent.startTime,
+ category: loggingEvent.categoryName,
+ level: loggingEvent.level.level,
+ levelStr: loggingEvent.level.levelStr,
+ message: message
+ };
+ if (errorMsg !== undefined) {
+ base.error = errorMsg;
+ base.stack = stack;
+ }
+ return base;
+}
+
+/**
+ * Formats a logging event into the logstash document structure:
+ * '@timestamp', '@message' and an '@fields' object carrying
+ * level / levelStr / category (plus error / stack when an Error was logged).
+ *
+ * @param the logging event
+ * @return The JSON
+ */
+function logstashLayout(loggingEvent) {
+ var data = __formatData(loggingEvent);
+ var message = data[0], errorMsg = data[1], stack = data[2];
+ // NOTE(review): eslogger is unused below -- candidate for removal
+ var eslogger = loggingEvent.logger;
+ var base = {
+ '@timestamp': loggingEvent.startTime,
+ '@message': message,
+ '@fields': {
+ level: loggingEvent.level.level,
+ levelStr: loggingEvent.level.levelStr,
+ category: loggingEvent.categoryName
+ }
+ };
+ if (errorMsg) {
+ base['@fields'].error = errorMsg;
+ base['@fields'].stack = stack;
+ }
+ return base;
+}
+
+/**
+ * Extracts the message, error-message and stack track.
+ */
+function __formatData(loggingEvent) {
+ var data = loggingEvent.data;
+ var message, errorMsg, stack;
+ if (data[data.length -1] instanceof Error) {
+ var error = data[data.length - 1];
+ errorMsg = error.message;
+ if (typeof error.stack === 'string') {
+ stack = error.stack.split('\n');
+ } else {
+ stack = error.stack;
+ }
+ data = data.splice(0, data.length -1);
+ message = messagePassThroughLayout({data: data});
+ } else {
+ message = messagePassThroughLayout(loggingEvent);
+ }
+ return [ message, errorMsg, stack ];
+}
+
+// expose the new layouts on the shared log4js layouts module
+layouts.logstashLayout = logstashLayout;
+layouts.simpleJsonLayout = simpleJsonLayout;
+
+
+var defaultHostname = require('os').hostname();
+/**
+ * Builds a configured logstash layout: wraps logstashLayout and decorates
+ * its output with '@type', '@source', '@source_host', '@source_path'
+ * and '@tags' taken from the layout configuration.
+ *
+ * typeName / sourceHost / tags may each be a static value or a function
+ * of the logging event; static values are normalized into functions here.
+ *
+ * @param layoutConfig the 'layout' section of the appender configuration
+ * @return function(loggingEvent) -> logstash document
+ */
+function logstashLayoutMaker(layoutConfig) {
+ var typeName = layoutConfig.typeName;
+ var source = layoutConfig.source ? layoutConfig.source : 'log4js';
+
+ var sourceHost;
+ if (typeof layoutConfig.sourceHost === 'function') {
+ sourceHost = layoutConfig.sourceHost;
+ } else {
+ sourceHost = function() {
+ return layoutConfig.sourceHost || defaultHostname;
+ };
+ }
+ var tags;
+ if (typeof layoutConfig.tags === 'function') {
+ tags = layoutConfig.tags;
+ } else {
+ tags = function() {
+ return layoutConfig.tags || [];
+ };
+ }
+ var sourcePath = layoutConfig.sourcePath ? layoutConfig.sourcePath : process.cwd();
+ return function(loggingEvent) {
+ var layoutOutput = logstashLayout(loggingEvent);
+ layoutOutput['@type'] = typeName(loggingEvent);
+ layoutOutput['@source'] = source;
+ layoutOutput['@source_host'] = sourceHost(loggingEvent);
+ layoutOutput['@source_path'] = sourcePath;
+ layoutOutput['@tags'] = tags(loggingEvent);
+ return layoutOutput;
+ };
+}
+
+// add the new layouts:
+var oriLayoutMaker = layouts.layout;
+if (oriLayoutMaker.name !== 'layoutEs') {
+ // really sure we don't double monky patch or yagni ?
+ layouts.layout = function layoutEs(name, config) {
+ if (name === 'logstash') {
+ return logstashLayoutMaker(config);
+ } else if (name === 'simpleJson') {
+ return layouts.simpleJson;
+ } else {
+ return oriLayoutMaker(name, config);
+ }
+ };
+}
+
+// factories producing the ES index-template to define for each layout
+layouts.esTemplateMakers = {};
+
+// Template tuned for logstash-style documents ('@message' analyzed,
+// everything else not_analyzed). Source:
+// http://untergeek.com/2012/09/20/using-templates-to-improve-elasticsearch-caching-with-logstash/
+// Shard count is overridable via the ES_DEFAULT_SHARDS_NUMBER env var.
+layouts.esTemplateMakers.logstash = function(templateName) {
+ return {
+ "template" : templateName || "logstash-*",
+ "settings" : {
+ "number_of_shards" : parseInt(process.env.ES_DEFAULT_SHARDS_NUMBER, 10) || 4,
+ "index.cache.field.type" : "soft",
+ "index.refresh_interval" : "5s",
+ "index.store.compress.stored" : true,
+ "index.query.default_field" : "@message",
+ "index.routing.allocation.total_shards_per_node" : 2
+ },
+ "mappings" : {
+ "_default_" : {
+ "_all" : {"enabled" : false},
+ "properties" : {
+ "@fields" : { "type" : "object", "dynamic": true, "path": "full" },
+ "@message": { "type": "string", "index": "analyzed" },
+ "@source": { "type": "string", "index": "not_analyzed" },
+ "@source_host": { "type": "string", "index": "not_analyzed" },
+ "@source_path": { "type": "string", "index": "not_analyzed" },
+ "@tags": { "type": "string", "index": "not_analyzed" },
+ "@timestamp": { "type": "date", "index": "not_analyzed" },
+ "@type": { "type": "string", "index": "not_analyzed" }
+ }
+ }
+ }
+ };
+};
+
+// Template matching the flat documents produced by simpleJsonLayout
+// ('message'/'error' analyzed, identifiers not_analyzed).
+layouts.esTemplateMakers.simpleJson = function(templateName) {
+ return {
+ "template" : templateName || "log4js*",
+ "settings" : {
+ "number_of_shards" : parseInt(process.env.ES_DEFAULT_SHARDS_NUMBER, 10) || 4,
+ "index.cache.field.type" : "soft",
+ "index.refresh_interval" : "5s",
+ "index.store.compress.stored" : true,
+ "index.query.default_field" : "message",
+ "index.routing.allocation.total_shards_per_node" : 2
+ },
+ "mappings" : {
+ "_default_" : {
+ "_all" : {"enabled" : false},
+ "properties" : {
+ "category": { "type": "string", "index": "not_analyzed" },
+ "level": { "type": "integer" },
+ "levelStr": { "type": "string", "index": "not_analyzed" },
+ "startTime": { "type": "date" },
+ "message": { "type": "string", "index": "analyzed" },
+ "error": { "type": "string", "index": "analyzed" },
+ "stack": { "type": "object", "dynamic": true }
+ }
+ }
+ }
+ };
+};
131 lib/log4js-elasticsearch.js
@@ -0,0 +1,131 @@
+var ElasticsearchClient = require('./elasticsearch-client');
+var layouts = require('./log4js-elasticsearch-layouts');
+
+var defaultHostname = require('os').hostname();
+
+function createAppender(layout, config, options, done) {
+ var layoutES = makeESHelper(layout, config);
+ var esclient = initESClient(config, options, layoutES.template, done);
+ return function(loggingEvent) {
+ var cc = esclient.index(layoutES.indexName(), layoutES.typeName()
+ , layout(loggingEvent)//layoutES.wrapper(layout(loggingEvent), loggingEvent)
+ , layoutES.logId());
+ cc.exec(function() {
+ //emit an error?
+ });
+ };
+}
+
+/**
+ * log4js entry point: builds an elasticsearch appender from its config.
+ * Normalizes the configuration, resolves the layout by name
+ * (defaulting to 'logstash'), then creates the appender.
+ *
+ * @param config appender configuration (may be undefined)
+ * @param options log4js options, passed through
+ * @param done optional callback fired once the ES template setup completed
+ * @return the appender function
+ */
+function configure(config, options, done) {
+ var layout;
+ config = loadAppenderConfig(config);
+ layout = layouts.layout(config.layout.type, config.layout);
+ if (typeof layout !== 'function') {
+ // logged only; createAppender is still called with the broken layout
+ console.error('Unable to find a layout named ' + config.layout.type);
+ }
+ return createAppender(layout, config, options, done);
+}
+
+/**
+ * Normalizes the appender configuration in place:
+ * - ensures config exists,
+ * - coerces typeName into a function of the logging event ('nodejs' default),
+ * - defaults the layout to { type: 'logstash' },
+ * - copies typeName onto the layout config.
+ *
+ * @param config raw appender configuration or undefined
+ * @return the normalized configuration
+ */
+function loadAppenderConfig(config) {
+ if (!config) {
+ config = {};
+ }
+ if (typeof config.typeName !== 'function') {
+ var value = config.typeName || 'nodejs';
+ config.typeName = function(loggingEvent) {
+ return value;
+ };
+ }
+ if (!config.layout) {
+ config.layout = { type: 'logstash' };
+ }
+
+ //we need to pass the typeName to the layout config.
+ //it is used both by the logstash layout and by the ES client.
+ config.layout.typeName = config.typeName;
+
+ return config;
+}
+
+/**
+ * Builds (or reuses) the elasticsearch client and, when a template is
+ * provided, defines it on the server unless one with the same name
+ * already exists (or config.forceDefineTemplate is set).
+ *
+ * Client selection precedence: config.url > config.esOptions >
+ * http://localhost:9200; config.esclient overrides everything (used by tests).
+ *
+ * @param config appender configuration
+ * @param options unused here
+ * @param template index-template object to define, or falsy
+ * @param done optional callback fired once the template check/definition completed
+ * @return the ES client
+ */
+function initESClient(config, options, template, done) {
+ var esOptions;
+ if (config.url) {
+ esOptions = ElasticsearchClient.makeOptions(config.url);
+ } else if (config.esOptions) {
+ esOptions = config.esOptions;
+ } else {
+ esOptions = ElasticsearchClient.makeOptions('http://localhost:9200');
+ }
+ var esclient = config.esclient || new ElasticsearchClient(esOptions);
+ if (template) {
+ var templateName = template.template;
+ esclient.getTemplate(templateName).exec(function(err, res) {
+ // '{}' response body means the template does not exist yet
+ if (res === '{}' || config.forceDefineTemplate) {
+ esclient.defineTemplate(templateName, template).exec(function() {
+ //let it be or plug an event emitter in there
+ if (typeof done === 'function') {
+ done();
+ }
+ });
+ } else if (typeof done === 'function') {
+ done();
+ }
+ });
+ } else if (typeof done === 'function') {
+ done();
+ }
+ return esclient;
+}
+
+/**
+ * Derives the ES-side helpers from the configuration:
+ * - logId: function returning the document _id (undefined = let ES generate it)
+ * - typeName: function returning the document type
+ * - indexName: function returning the index name; when neither a string nor
+ *   a function is configured, a daily rolling 'prefix-YYYY.MM.dd' name
+ *   (UTC) is generated from config.indexNamePrefix ('logstash-' default)
+ * - template: the index template to define, only in the rolling-index case
+ *
+ * @param layout the layout function (currently unused here)
+ * @param config the appender configuration
+ * @return { indexName, typeName, logId, template }
+ */
+function makeESHelper(layout, config) {
+ var logId = config.logId || function() {};
+ var typeName;
+ if (typeof config.typeName === 'function') {
+ typeName = config.typeName;
+ } else {
+ typeName = function(loggingEvent) {
+ return config.typeName || 'nodejs';
+ };
+ }
+
+ var indexName;
+ var templateName;
+ var template;
+ if (typeof config.indexName === 'function') {
+ indexName = config.indexName;
+ } else if (typeof config.indexName === 'string') {
+ indexName = function() {
+ return config.indexName;
+ };
+ } else {
+ var prefix = config.indexNamePrefix || 'logstash-';
+ templateName = prefix + '*';
+ indexName = function() {
+ // zero-pad day/month to two digits
+ function pad(n){
+ return n<10 ? '0'+n : n;
+ }
+ var date = new Date();
+ var vDay = pad(date.getUTCDate());
+ var vMonth = pad(date.getUTCMonth()+1);
+ var vYearLong = pad(date.getUTCFullYear());
+ //'logstash-%{+YYYY.MM.dd}';
+ return prefix + vYearLong + '.' + vMonth + '.' + vDay;
+ };
+ if (config.layout) {
+ if (config.layout.type === 'logstash') {
+ template = layouts.esTemplateMakers.logstash(templateName);
+ } else if (config.layout.type === 'simpleJson') {
+ template = layouts.esTemplateMakers.simpleJson(templateName);
+ }
+ }
+ }
+ return {
+ indexName: indexName,
+ typeName: typeName,
+ logId: logId,
+ template: template
+ };
+}
+
+exports.appender = createAppender;
+exports.configure = configure;
31 package.json
@@ -0,0 +1,31 @@
+{
+ "name": "log4js-elasticsearch",
+ "version": "0.0.0",
+  "description": "log4js Appender for node that targets elasticsearch.\nCompatible with logstash's elasticsearch_http output; Viewable in Kibana.",
+ "main": "lib/log4js-elasticsearch.js",
+ "scripts": {
+ "test": "make test"
+ },
+ "repository": "",
+ "keywords": [
+ "log4js",
+ "log4js-node",
+ "elasticsearch",
+ "kibana"
+ ],
+ "author": "Hugues Malphettes",
+ "license": "MIT",
+ "dependencies": {
+ "elasticsearchclient": "*",
+ "log4js": "*"
+ },
+ "devDependencies": {
+ "chai": "*",
+ "mocha": "*",
+ "istanbul": "*",
+ "mocha-istanbul": "*",
+ "sandboxed-module": "*"
+ },
+ "optionalDependencies": {}
+}
114 test/integration/test-real-eslogger.js
@@ -0,0 +1,114 @@
+// run this for some real life logging
+// Integration tests: require a reachable elasticsearch on 127.0.0.1:9200.
+var expect = require('chai').expect;
+var sandbox = require('sandboxed-module');
+var libpath = process.env.COVERAGE ? '../../lib-cov' : '../../lib';
+var log4jsElasticSearch = require(libpath + '/log4js-elasticsearch');
+
+// a single shard for testing is enough.
+if (!process.env.ES_DEFAULT_SHARDS_NUMBER) {
+ process.env.ES_DEFAULT_SHARDS_NUMBER = 1;
+}
+
+// Smoke test: posts a handful of events (incl. Errors) to a live ES
+// through the logstash layout; success = no crash before the timeout.
+describe('When configuring a logger posting events to elasticsearch', function() {
+ var log4js = require('log4js');
+ before(function(done) {
+ var config = {
+ typeName: 'log4js',
+ layout: {
+ type: 'logstash'
+ }
+ };
+ log4js.clearAppenders();
+ log4js.addAppender(log4jsElasticSearch.configure(config, null, done), 'unittest');
+ });
+
+ describe("When logging", function() {
+ it('Must send events to elasticsearch', function(done) {
+ var log = log4js.getLogger('unittest');
+ var nolog = log4js.getLogger('notunittest');
+ nolog.error('nono');
+ log.error('aha');
+ log.info('huhu', 'hehe');
+ log.warn('ohoho', new Error('pants on fire'));
+ log.error('ohoho %s', 'a param', new Error('pants on fire'));
+ setTimeout(done, 700);
+ });
+ });
+});
+
+// Exercises the declarative log4js.configure() path against a live ES,
+// with static layout parameters and a forced template re-definition.
+describe('When configuring an es logger', function() {
+ var log4js = sandbox.require('log4js', {
+ requires: {
+ 'log4js-elasticsearch': log4jsElasticSearch
+ }
+ });
+ before(function(done) {
+ var config = {
+ "appenders": [
+ {
+ "type": "log4js-elasticsearch",
+ "url": "http://127.0.0.1:9200",
+ "forceDefineTemplate": true,
+ "layout": {
+ "type": "logstash",
+ "tags": [ "goodie" ],
+ "sourceHost": "aspecialhost"
+ }
+ }
+ ]
+ };
+ log4js.clearAppenders();
+ log4js.configure(config);
+ setTimeout(done, 1000);
+ });
+ it('Must log where expected', function(done) {
+ log4js.getLogger('tests').warn('and one for ES and the console');
+ log4js.getLogger('tests').debug('and one for the console alone');
+ setTimeout(done, 700);
+ });
+});
+
+// if someone knows how to setup the sandbox to make it load right
+// it would be nice!
+// Skipped: logLevelFilter wrapping the ES appender does not resolve the
+// sandboxed 'log4js-elasticsearch' module.
+describe.skip('When configuring a filtered es logger', function() {
+ var log4js = sandbox.require('log4js', {
+ requires: {
+ 'log4js-elasticsearch': log4jsElasticSearch
+ }
+ });
+ before(function(done) {
+ var config = {
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "logLevelFilter",
+ "level": "WARN",
+ "appender": {
+ "type": "log4js-elasticsearch",
+ "layout": {
+ "type": "logstash"
+ }
+ }
+ },
+ {
+ "category": "tests",
+ "type": "console",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ],
+ "levels": {
+ "tests": "DEBUG"
+ }
+ };
+ log4js.clearAppenders();
+ log4js.configure(config);
+ setTimeout(done, 1000);
+ });
+ it('Must log where expected', function(done) {
+ log4js.getLogger('tests').warn('and one for ES and the console');
+ log4js.getLogger('tests').debug('and one for the console alone');
+ setTimeout(done, 700);
+ });
+});
232 test/test-eslogger.js
@@ -0,0 +1,232 @@
+var expect = require('chai').expect;
+var sandbox = require('sandboxed-module');
+var libpath = process.env.COVERAGE ? '../lib-cov' : '../lib';
+var log4jsElasticSearch = require(libpath + '/log4js-elasticsearch');
+
+// Unit test with a mocked ES client. Protocol: the test sets
+// currentMsg / currentErrorMsg / currentLevelStr before logging; the mock's
+// index() asserts them against the logstash document and nulls them, so
+// 'expect(currentMsg).to.be.null' afterwards proves index() was called.
+describe('When configuring a logger posting events to elasticsearch', function() {
+ var log4js = require('log4js');
+ var mockElasticsearchClient = {
+ index: function(indexName, typeName, logObj, newLogId) {
+ expect(indexName).to.match(/^logstash-/);
+ expect(typeName).to.equal('nodejs');
+ expect(newLogId).to.not.exist;
+ expect(logObj['@fields'].category).to.equal('unittest');
+ expect(logObj['@source']).to.equal('log4js');
+ expect(logObj['@source_host']).to.equal(require('os').hostname());
+ expect(logObj['@source_path']).to.equal(process.cwd());
+ expect(logObj['@tags'].length).to.equal(0);
+ if (currentMsg) {
+ expect(logObj['@message']).to.equal(currentMsg);
+ currentMsg = null;
+ } else {
+ expect(logObj['@message']).to.exist;
+ console.log('hereis the message', logObj['@message']);
+ }
+ if (currentErrorMsg) {
+ expect(currentErrorMsg).to.equal(logObj['@fields'].error);
+ expect(logObj['@fields'].stack).to.be['instanceof'](Array);
+ currentErrorMsg = null;
+ }
+ if (currentLevelStr) {
+ expect(logObj['@fields'].levelStr).to.equal(currentLevelStr);
+ currentLevelStr = null;
+ }
+ if (currentCallback) {
+ return { exec: function() {
+ currentCallback();
+ currentCallback = null;
+ } };
+ } else {
+ return { exec: function() {
+
+ }};
+ }
+ }, defineTemplate: function(templateName, template, done) {
+ expect(templateName).to.equal('logstash-*');
+ defineTemplateWasCalled = true;
+ if (typeof done === 'function') {
+ done();
+ }
+ return { exec: function(cb) {
+ cb();
+ }};
+ }, getTemplate: function(templateName) {
+ // '{}' means "template not defined yet", triggering defineTemplate
+ return { exec: function(cb) {
+ cb(null, '{}');
+ }};
+ }
+ };
+ var currentMsg;
+ var currentCallback;
+ var currentErrorMsg;
+ var currentLevelStr;
+ var defineTemplateWasCalled = false;
+ before(function(done) {
+ var config = { esclient: mockElasticsearchClient };
+ log4js.clearAppenders();
+ log4js.addAppender(log4jsElasticSearch.configure(config, null, done), 'unittest');
+ });
+ it("Must have created the template", function() {
+ expect(defineTemplateWasCalled).to.equal(true);
+ });
+
+ describe("When logging", function() {
+ it('Must send events to elasticsearch', function(done) {
+ var log = log4js.getLogger('unittest');
+ var nolog = log4js.getLogger('notunittest');
+ currentErrorMsg = 'I should not be called at all';
+ nolog.error('nono');
+ currentErrorMsg = null;
+
+ currentLevelStr = 'ERROR';
+ currentMsg = 'aha';
+ log.error('aha');
+ expect(currentMsg).to.be['null'];
+
+ currentLevelStr = 'INFO';
+ currentMsg = 'huhu \'hehe\'';
+ log.info('huhu', 'hehe');
+ expect(currentMsg).to.be['null'];
+
+ currentLevelStr = 'WARN';
+ currentMsg = 'ohoho';
+ currentErrorMsg = 'pants on fire';
+ log.warn('ohoho', new Error('pants on fire'));
+
+ currentCallback = done;
+ currentMsg = 'ohoho a param';
+ currentErrorMsg = 'pants on fire';
+ log.error('ohoho %s', 'a param', new Error('pants on fire'));
+ });
+ });
+});
+
+// Verifies the declarative configure() path wires the appender to the
+// mocked ES client (same currentMsg set/null protocol as above).
+describe('When configuring an elasticsearch appender', function() {
+ var log4js = sandbox.require('log4js', {
+ requires: {
+ 'log4js-elasticsearch': log4jsElasticSearch
+ }
+ });
+
+ var currentMsg;
+ var defineTemplateWasCalled = false;
+ var mockElasticsearchClient = {
+ index: function(indexName, typeName, logObj) {
+ expect(logObj['@message']).to.equal(currentMsg);
+ currentMsg = null;
+ return { exec: function() {
+ }};
+ }, defineTemplate: function() {
+ defineTemplateWasCalled = true;
+ return { exec: function(cb) {
+ cb(null, '{}');
+ }};
+ }, getTemplate: function(templateName) {
+ return { exec: function(cb) {
+ cb(null, '{}');
+ }};
+ }
+ };
+ before(function() {
+ log4js.configure({
+ "appenders": [
+ {
+ "type": "log4js-elasticsearch",
+ "esclient": mockElasticsearchClient,
+ "layout": { type: 'logstash' }
+ }
+ ]
+ });
+ expect(defineTemplateWasCalled).to.be['true'];
+ });
+ it('Must have configured the appender', function() {
+ currentMsg = 'hello';
+ log4js.getLogger('unittest').info('hello');
+ expect(currentMsg).to.be['null'];
+ });
+});
+
+// Verifies layout parameters (tags, sourceHost, typeName) are honored,
+// both as static values and as functions of the logging event.
+describe('When configuring an elasticsearch logstash appender layout', function() {
+ var log4js = sandbox.require('log4js', {
+ requires: {
+ 'log4js-elasticsearch': log4jsElasticSearch
+ }
+ });
+
+ var currentMsg;
+ var defineTemplateWasCalled = false;
+ var mockElasticsearchClient = {
+ index: function(indexName, typeName, logObj) {
+ expect(logObj['@message']).to.equal(currentMsg);
+ expect(logObj['@tags'][0]).to.equal('goodie');
+ expect(logObj['@source_host']).to.equal('aspecialhost');
+ expect(typeName).to.equal('customType');
+ currentMsg = null;
+ return { exec: function() {
+ }};
+ }, defineTemplate: function() {
+ defineTemplateWasCalled = true;
+ return { exec: function(cb) {
+ cb(null, '{}');
+ }};
+ }, getTemplate: function(templateName) {
+ return { exec: function(cb) {
+ cb(null, '{}');
+ }};
+ }
+ };
+ it('Must have configured the appender with static params', function() {
+ log4js.configure({
+ "appenders": [
+ {
+ "type": "log4js-elasticsearch",
+ "esclient": mockElasticsearchClient,
+ "typeName": "customType",
+ "layout": {
+ "type": "logstash",
+ "tags": [ "goodie" ],
+ "sourceHost": "aspecialhost"
+ }
+ }
+ ]
+ });
+ expect(defineTemplateWasCalled).to.be['true'];
+ defineTemplateWasCalled = undefined;
+
+ currentMsg = 'hello';
+ log4js.getLogger('unittest').info('hello');
+ expect(currentMsg).to.be['null'];
+ });
+
+ it('Must have configured the appender with dynamic params', function() {
+ log4js.configure({
+ "appenders": [
+ {
+ "type": "log4js-elasticsearch",
+ "esclient": mockElasticsearchClient,
+ "typeName": function(loggingEvent) {
+ return 'customType';
+ },
+ "layout": {
+ "type": "logstash",
+ "tags": function(loggingEvent) {
+ return [ 'goodie' ];
+ },
+ "sourceHost": function(loggingEvent) {
+ return "aspecialhost";
+ }
+ }
+ }
+ ]
+ });
+ expect(defineTemplateWasCalled).to.be['true'];
+ defineTemplateWasCalled = undefined;
+
+ currentMsg = 'hello';
+ log4js.getLogger('unittest').info('hello');
+ expect(currentMsg).to.be['null'];
+ });
+});
+
+
+
Please sign in to comment.
Something went wrong with that request. Please try again.