diff --git a/.gitignore b/.gitignore
index cf41a9c..00f1605 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,14 @@
-node_modules
coverage
+node_modules
+build
+dist
+out
+*.log
+*.dump
+.DS_Store
+.nyc_output
+.test
+.tmp
+
npm-debug.log
-gumbo.js
+gumbo.js
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 7bbbb0e..b4d6839 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,10 +1,11 @@
language: node_js
node_js:
- - 6.9.1
+ - 6.9.2
sudo: false
branches:
only:
- master
+ - /^greenkeeper-.*$/
cache:
directories:
- node_modules
@@ -18,8 +19,10 @@ before_script:
- npm prune
- npm install -g codecov
script:
- - npm run-script cover
+ - npm run cover
after_script:
- codecov
+ - cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js
+ - npm run docs
after_success:
- npm run semantic-release
diff --git a/LICENSE b/LICENSE
index 90ba552..c447882 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2016, darlenya & arlac77
+Copyright (c) 2016 darlenya & arlac77
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/README.adoc b/README.adoc
deleted file mode 100644
index de20993..0000000
--- a/README.adoc
+++ /dev/null
@@ -1,9 +0,0 @@
-image:https://img.shields.io/npm/v/stream-line-parser.svg[npm,link=https://www.npmjs.com/package/stream-line-parser]
-image:https://secure.travis-ci.org/darlenya/stream-line-parser.png[Build Status,link=http://travis-ci.org/darlenya/stream-line-parser]
-image:https://david-dm.org/darlenya/stream-line-parser.svg[Dependency Status,link=https://david-dm.org/darlenya/stream-line-parser]
-image:https://david-dm.org/darlenya/stream-line-parser/dev-status.svg[devDependency Status,link=https://david-dm.org/darlenya/stream-line-parser#info=devDependencies]
-
-include::./doc/index.adoc[]
-
-=== License
-BSD-2-Clause
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ccc1068
--- /dev/null
+++ b/README.md
@@ -0,0 +1,106 @@
+[npm](https://www.npmjs.com/package/kronos-interceptor-object-data-processor-chunk)
+[GitHub](https://github.com/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Build Status](http://travis-ci.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[bitHound](https://www.bithound.io/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[codecov](http://codecov.io/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk?branch=master)
+[Coveralls](https://coveralls.io/r/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Code Climate](https://codeclimate.com/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Known Vulnerabilities](https://snyk.io/test/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[GitHub Issues](https://github.com/Kronos-integration/kronos-interceptor-object-data-processor-chunk/issues)
+[Stories in Ready](http://waffle.io/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Dependency Status](https://david-dm.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[devDependency Status](https://david-dm.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk#info=devDependencies)
+[docs](http://inch-ci.org/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[downloads](https://npmjs.org/package/kronos-interceptor-object-data-processor-chunk)
+[Commitizen friendly](http://commitizen.github.io/cz-cli/)
+
+
+kronos-interceptor-object-data-processor-chunk
+=====
+Splits a line by a separator into tokens
+
+# API Reference
+
+*
+
+## createFunctions(chunkDefinition)
+Creates the hash functions for the given chunk definition
+
+**Kind**: global function
+
+| Param | Description |
+| --- | --- |
+| chunkDefinition | The chunk definition for these records. |
+
+
+*
+
+## createHashFunction(hashFields, hashName)
+Creates a function that generates a hash from the given fields and stores it under the given name in the record
+
+**Kind**: global function
+
+| Param | Description |
+| --- | --- |
+| hashFields | All the field names used to create the hash |
+| hashName | The name to be used to store the hash value back in the record |
+
+
+*
+
+## addError(data, error)
+Adds an error to the stream data
+
+**Kind**: global function
+
+| Param | Description |
+| --- | --- |
+| data | The current stream data |
+| error | The error to be added. |
+
+
+*
+
+## createTmpHashAction()
+Creates a hash function to compute a content hash without the multirow fields
+
+**Kind**: global function
+
+*
+
+## dataProcessorChunk.addRowActions()
+Adds custom actions to the rowActions.
+These actions will be executed per row.
+
+**Kind**: instance method of [DataProcessorChunk](#DataProcessorChunk)
+
+*
+
+## dataProcessorChunk._transform()
+Reads the stream data and splits it into lines.
+
+**Kind**: instance method of [DataProcessorChunk](#DataProcessorChunk)
+
+*
+
+## dataProcessorChunk._flush()
+Flushes the data.
+Only needed if multi-row fields are used.
+
+**Kind**: instance method of [DataProcessorChunk](#DataProcessorChunk)
+
+* * *
+
+install
+=======
+
+With [npm](http://npmjs.org) do:
+
+```shell
+npm install kronos-interceptor-object-data-processor-chunk
+```
+
+license
+=======
+
+BSD-2-Clause
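
Since the generated README documents `createHashFunction` without a usage example, here is a minimal sketch of calling it through the package's public entry point (the record fields and the `__key` hash name are illustrative, not part of the project):

```js
// Minimal sketch, assuming the package has been built so that "main"
// (dist/module.js) exists; field names and '__key' are illustrative.
const { createHashFunction } = require('kronos-interceptor-object-data-processor-chunk');

// Hash the 'first' and 'last' fields and store the md5 result
// under '__key' in the record.
const keyHash = createHashFunction(['first', 'last'], '__key');

const record = { first: 'Ada', last: 'Lovelace' };
keyHash(record);
console.log(record.__key); // md5 of 'Ada|Lovelace'
```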
diff --git a/doc/README.hbs b/doc/README.hbs
new file mode 100644
index 0000000..82cecb2
--- /dev/null
+++ b/doc/README.hbs
@@ -0,0 +1,43 @@
+[npm](https://www.npmjs.com/package/kronos-interceptor-object-data-processor-chunk)
+[GitHub](https://github.com/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Build Status](http://travis-ci.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[bitHound](https://www.bithound.io/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[codecov](http://codecov.io/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk?branch=master)
+[Coveralls](https://coveralls.io/r/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Code Climate](https://codeclimate.com/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Known Vulnerabilities](https://snyk.io/test/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[GitHub Issues](https://github.com/Kronos-integration/kronos-interceptor-object-data-processor-chunk/issues)
+[Stories in Ready](http://waffle.io/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[Dependency Status](https://david-dm.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[devDependency Status](https://david-dm.org/Kronos-integration/kronos-interceptor-object-data-processor-chunk#info=devDependencies)
+[docs](http://inch-ci.org/github/Kronos-integration/kronos-interceptor-object-data-processor-chunk)
+[downloads](https://npmjs.org/package/kronos-interceptor-object-data-processor-chunk)
+[Commitizen friendly](http://commitizen.github.io/cz-cli/)
+
+
+kronos-interceptor-object-data-processor-chunk
+=====
+Splits a line by a separator into tokens
+
+# API Reference
+{{#modules~}}
+- {{name}}
+{{/modules}}
+
+{{#functions}}* {{>docs}}
+{{/functions}}
+* * *
+
+install
+=======
+
+With [npm](http://npmjs.org) do:
+
+```shell
+npm install kronos-interceptor-object-data-processor-chunk
+```
+
+license
+=======
+
+BSD-2-Clause
diff --git a/index.js b/index.js
deleted file mode 100644
index 53a0078..0000000
--- a/index.js
+++ /dev/null
@@ -1,8 +0,0 @@
-/* jslint node: true, esnext: true */
-'use strict';
-
-const Interceptor = require('./lib/interceptor').Interceptor;
-
-exports.Interceptor = Interceptor;
-
-exports.registerWithManager = manager => manager.registerInterceptor(Interceptor);
diff --git a/lib/data-hash.js b/lib/data-hash.js
deleted file mode 100644
index 1d85234..0000000
--- a/lib/data-hash.js
+++ /dev/null
@@ -1,97 +0,0 @@
-/* jslint node: true, esnext: true */
-'use strict';
-
-const md5 = require('md5');
-
-module.exports = {
-
- /**
- * Creates the checks for checking boolean values
- * @param chunkDefinition The chunk definition for these records.
- */
- createFunctions: function (chunkDefinition) {
-
- // all the field names used to build the key for a record and a name to store the hash
- const keyHashFields = chunkDefinition.keyHashFields;
- const keyHashName = chunkDefinition.keyHashName;
-
- // all the field names used to build the content hash for a record and a name to store the hash
- const contentHashFields = chunkDefinition.contentHashFields;
- const contentHashName = chunkDefinition.contentHashName;
-
- // all the field names used to build the content hash for a record and a name to store the hash
- const scopeHashFields = chunkDefinition.scopeHashFields;
- const scopeHashName = chunkDefinition.scopeHashName;
-
- if (keyHashName === undefined || keyHashName === null || keyHashFields === undefined) {
- throw ("No 'keyHashFields' and 'keyHashName' are given.");
- }
-
- if (contentHashFields) {
- if (contentHashName === undefined || contentHashName === null) {
- throw ("No 'contentHashName' given in the chunk definition, but the 'contentHashFields' are defined");
- }
- }
-
- if (scopeHashFields) {
- if (scopeHashName === undefined || scopeHashName === null) {
- throw ("No 'scopeHashName' given in the chunk definition, but the 'scopeHashFields' are defined");
- }
- }
-
- let functions = [];
- // create the key hash functions
- functions.push(createHashFunction(keyHashFields, keyHashName));
-
- // create the content hash functions
- if (contentHashFields && contentHashFields.length > 0) {
- functions.push(createHashFunction(contentHashFields, contentHashName));
- }
-
- // create the scope hash functions
- if (scopeHashFields && scopeHashFields.length > 0) {
- functions.push(createHashFunction(scopeHashFields, scopeHashName));
- }
-
- return functions;
- },
-
- createHashFunction: function (hashFields, hashName) {
- return createHashFunction(hashFields, hashName);
- }
-
-};
-
-/**
- * Creates a function which generates a hash from the given content and stores the generated Hash under the given name in the record
- * @param hashFields All the field names used to create the hash
- * @param hashName The name to be used to store the hash value back in the record
- */
-function createHashFunction(hashFields, hashName) {
-
- /**
- * The function gerates the hash for the given record
- * @param record The record to create the hash for
- */
- return function (record) {
- // an array to store all the key field values
-
- const hashFieldValues = [];
-
- for (let i = 0; i < hashFields.length; i++) {
- let val = record[hashFields[i]];
- if (val === undefined) {
- val = '';
- }
- hashFieldValues.push(val);
- }
-
- const valueString = hashFieldValues.join('|');
- const hash = md5(valueString);
-
- record[hashName] = hash;
-
- return;
-
- };
-}
diff --git a/package.json b/package.json
index a368c05..2a84a24 100644
--- a/package.json
+++ b/package.json
@@ -2,12 +2,14 @@
"name": "kronos-interceptor-object-data-processor-chunk",
"version": "0.0.0-semantic-release",
"description": "Splits a line by a sparator into tokens.",
- "main": "index.js",
+ "module": "src/module.js",
+ "main": "dist/module.js",
"scripts": {
- "cover": "./node_modules/istanbul/lib/cli.js cover --hook-run-in-context ./node_modules/mocha/bin/_mocha -- --R spec --U exports tests/*_test.js",
- "doc": "./node_modules/.bin/jsdoc lib/*.js",
- "test": "./node_modules/.bin/mocha tests/*_test.js",
- "semantic-release": "semantic-release pre && npm publish && semantic-release post"
+ "cover": "npm run build && node_modules/.bin/istanbul cover node_modules/.bin/_mocha -- -u exports tests/*_test.js",
+ "test": "npm run build && node_modules/.bin/mocha tests/*_test.js && markdown-doctest",
+ "semantic-release": "semantic-release pre && npm publish && semantic-release post",
+ "build": "node_modules/rollup/bin/rollup --output=dist/module.js -c -- src/module.js",
+ "docs": "jsdoc2md -l off -t doc/README.hbs -f src/*.js > README.md"
},
"repository": {
"type": "git",
@@ -23,29 +25,28 @@
"z-schema": "3.18.2"
},
"devDependencies": {
- "chai": "3.5.0",
+ "chai": "^3.5.0",
"clone-deep": "0.2.4",
- "cz-conventional-changelog": "1.2.0",
- "istanbul": "0.4.5",
- "jsdoc": "3.4.2",
+ "cz-conventional-changelog": "^1.2.0",
+ "istanbul": "^0.4.5",
"kronos-test-interceptor": "2.0.5",
- "mocha": "3.2.0",
- "semantic-release": "6.3.2",
- "underscore": "1.8.3"
+ "mocha": "^3.2.0",
+ "semantic-release": "^6.3.5",
+ "underscore": "1.8.3",
+ "jsdoc-to-markdown": "^2.0.1",
+ "markdown-doctest": "^0.9.1",
+ "rollup": "^0.38.2"
},
- "contributors": [
- {
- "name": "Torsten Link",
- "email": "torstenlink@gmx.de"
- },
- {
- "name": "Markus Felten",
- "email": "markus.felten@gmx.de"
- }
- ],
+ "contributors": [{
+ "name": "Torsten Link",
+ "email": "torstenlink@gmx.de"
+ }, {
+ "name": "Markus Felten",
+ "email": "markus.felten@gmx.de"
+ }],
"license": "BSD-2-Clause",
"engines": {
- "node": ">=6.9.1"
+ "node": ">=6.9.2"
},
"keywords": [
"stream",
diff --git a/rollup.config.js b/rollup.config.js
new file mode 100644
index 0000000..5106472
--- /dev/null
+++ b/rollup.config.js
@@ -0,0 +1,7 @@
+/* jslint node: true, esnext: true */
+'use strict';
+
+export default {
+ format: 'cjs',
+ plugins: []
+};
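
The config deliberately leaves out the entry and output, which the `build` script passes on the command line. An equivalent self-contained config, as a sketch using the `entry`/`dest` option names of the rollup 0.x line pinned in devDependencies:

```js
/* jslint node: true, esnext: true */
'use strict';

// Equivalent sketch with entry and destination inlined instead of
// passed via CLI flags (rollup 0.x option names).
export default {
  entry: 'src/module.js',
  dest: 'dist/module.js',
  format: 'cjs',
  plugins: []
};
```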
diff --git a/lib/interceptor.js b/src/ChunkProcessorInterceptor.js
similarity index 62%
rename from lib/interceptor.js
rename to src/ChunkProcessorInterceptor.js
index bde31b8..6566ae1 100644
--- a/lib/interceptor.js
+++ b/src/ChunkProcessorInterceptor.js
@@ -1,20 +1,27 @@
/* jslint node: true, esnext: true */
'use strict';
-const Interceptor = require('kronos-interceptor').Interceptor;
-const parserFactory = require('./data-processor-chunk');
+import {
+ Interceptor
+}
+from 'kronos-interceptor';
+
+import {
+ DataProcessorChunkFactory
+}
+from './data-processor-chunk';
/**
* This interceptor cares about the handling of the messages.
* It will add the hops and copies the messages
*/
-class ChunkProcessorInterceptor extends Interceptor {
+export default class ChunkProcessorInterceptor extends Interceptor {
constructor(config, endpoint) {
super(config, endpoint);
// just validate the config once
- parserFactory(config.config, true);
+ DataProcessorChunkFactory(config.config, true);
}
static get name() {
@@ -23,11 +30,10 @@ class ChunkProcessorInterceptor extends Interceptor {
receive(request, oldRequest) {
if (request.payload) {
- const streamFilter = parserFactory(this.config.config);
+ const streamFilter = DataProcessorChunkFactory(this.config.config);
const stream = request.payload;
request.payload = stream.pipe(streamFilter);
}
return this.connected.receive(request, oldRequest);
}
}
-exports.Interceptor = ChunkProcessorInterceptor;
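
`receive` only touches requests that carry a payload: the payload stream is piped through a freshly built `DataProcessorChunk` before the request is passed on. A sketch of that flow, with hypothetical stand-ins for the endpoint, the connected downstream interceptor, and the input stream:

```js
// Sketch only; 'endpoint', the connected stub, and 'someObjectStream'
// are hypothetical stand-ins.
const interceptor = new ChunkProcessorInterceptor({
  config: {
    keyHashFields: ['first', 'last'],
    keyHashName: '__key'
  }
}, endpoint);

interceptor.connected = {
  receive(request, oldRequest) {
    // request.payload is now the original stream piped through
    // the DataProcessorChunk transform
    return request;
  }
};

interceptor.receive({ payload: someObjectStream });
```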
diff --git a/src/data-hash.js b/src/data-hash.js
new file mode 100644
index 0000000..ebbe122
--- /dev/null
+++ b/src/data-hash.js
@@ -0,0 +1,94 @@
+/* jslint node: true, esnext: true */
+'use strict';
+
+const md5 = require('md5');
+
+/**
+ * Creates the hash functions for the given chunk definition
+ * @param chunkDefinition The chunk definition for these records.
+ */
+function createFunctions(chunkDefinition) {
+
+ // all the field names used to build the key for a record and a name to store the hash
+ const keyHashFields = chunkDefinition.keyHashFields;
+ const keyHashName = chunkDefinition.keyHashName;
+
+ // all the field names used to build the content hash for a record and a name to store the hash
+ const contentHashFields = chunkDefinition.contentHashFields;
+ const contentHashName = chunkDefinition.contentHashName;
+
+ // all the field names used to build the content hash for a record and a name to store the hash
+ const scopeHashFields = chunkDefinition.scopeHashFields;
+ const scopeHashName = chunkDefinition.scopeHashName;
+
+ if (keyHashName === undefined || keyHashName === null || keyHashFields === undefined) {
+ throw ("No 'keyHashFields' and 'keyHashName' are given.");
+ }
+
+ if (contentHashFields) {
+ if (contentHashName === undefined || contentHashName === null) {
+ throw ("No 'contentHashName' given in the chunk definition, but the 'contentHashFields' are defined");
+ }
+ }
+
+ if (scopeHashFields) {
+ if (scopeHashName === undefined || scopeHashName === null) {
+ throw ("No 'scopeHashName' given in the chunk definition, but the 'scopeHashFields' are defined");
+ }
+ }
+
+ let functions = [];
+ // create the key hash functions
+ functions.push(createHashFunction(keyHashFields, keyHashName));
+
+ // create the content hash functions
+ if (contentHashFields && contentHashFields.length > 0) {
+ functions.push(createHashFunction(contentHashFields, contentHashName));
+ }
+
+ // create the scope hash functions
+ if (scopeHashFields && scopeHashFields.length > 0) {
+ functions.push(createHashFunction(scopeHashFields, scopeHashName));
+ }
+
+ return functions;
+}
+
+/**
+ * Creates a function that generates a hash from the given fields and stores it under the given name in the record
+ * @param hashFields All the field names used to create the hash
+ * @param hashName The name to be used to store the hash value back in the record
+ */
+function createHashFunction(hashFields, hashName) {
+
+ /**
+ * The function generates the hash for the given record
+ * @param record The record to create the hash for
+ */
+ return function (record) {
+ // an array to store all the key field values
+
+ const hashFieldValues = [];
+
+ for (let i = 0; i < hashFields.length; i++) {
+ let val = record[hashFields[i]];
+ if (val === undefined) {
+ val = '';
+ }
+ hashFieldValues.push(val);
+ }
+
+ const valueString = hashFieldValues.join('|');
+ const hash = md5(valueString);
+
+ record[hashName] = hash;
+
+ return;
+
+ };
+}
+
+export {
+ createFunctions,
+ createHashFunction
+};
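
`createFunctions` returns one hash function per configured group (key, content, scope), and each function mutates the record in place. A small sketch with an illustrative chunk definition:

```js
import { createFunctions } from './data-hash';

// Illustrative chunk definition; only the key hash is mandatory.
const rowActions = createFunctions({
  keyHashFields: ['first', 'last'],
  keyHashName: '__key',
  contentHashFields: ['first', 'last', 'street'],
  contentHashName: '__content'
});

const record = { first: 'Ada', last: 'Lovelace', street: 'Main St' };
rowActions.forEach(action => action(record));
// record.__key     === md5('Ada|Lovelace')
// record.__content === md5('Ada|Lovelace|Main St')
```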
diff --git a/lib/data-processor-chunk.js b/src/data-processor-chunk.js
similarity index 97%
rename from lib/data-processor-chunk.js
rename to src/data-processor-chunk.js
index 4f0f754..583f1d8 100644
--- a/lib/data-processor-chunk.js
+++ b/src/data-processor-chunk.js
@@ -11,13 +11,15 @@ const ZSchema = require('z-schema');
const validator = new ZSchema({});
const schema = require('../schema/chunk.json');
-const hashFactory = require('./data-hash');
-
const ERR_DOUBLE_KEY = 'DOUBLE_KEY';
const ERR_DOUBLE_KEY_SAME = 'DOUBLE_KEY_SAME_DATA';
const ERR_DOUBLE_KEY_DIFF = 'DOUBLE_KEY_DIFFERENT_DATA';
-
+import {
+ createFunctions,
+ createHashFunction
+}
+from './data-hash';
// The name used to store the has for the content without the
// multi row fields
@@ -65,7 +67,7 @@ class DataProcessorChunk extends stream.Transform {
}
// Stores all the checks/action to be executed
- this.rowActions = hashFactory.createFunctions(opts);
+ this.rowActions = createFunctions(opts);
// if there are multirows we need a second content hash without the multi row fields
if (this.multiRowFields && opts.contentHashFields !== undefined) {
@@ -333,13 +335,17 @@ function createTmpHashAction(contentHashFields, multiRowFields) {
// now we have a new array
if (fieldClash) {
// only in this case we need a separate hash
- return hashFactory.createHashFunction(tmpHashFields, TMP_HASH_FIELD_NAME);
+ return createHashFunction(tmpHashFields, TMP_HASH_FIELD_NAME);
} else {
// In this case we could use the normal content hash as the muti row fields where not included
return;
}
}
-module.exports = function (opts, validate) {
+function DataProcessorChunkFactory(opts, validate) {
return new DataProcessorChunk(opts, validate);
+}
+
+export {
+ DataProcessorChunkFactory
};
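
The factory simply constructs the Transform stream; passing `true` as the second argument validates the chunk definition against `schema/chunk.json`, which is what the interceptor's constructor does once up front. A sketch of direct use, with hypothetical source and destination streams:

```js
import { DataProcessorChunkFactory } from './data-processor-chunk';

// Chunk definition shaped like the one in the tests below;
// the second argument turns on schema validation.
const processor = DataProcessorChunkFactory({
  keyHashFields: ['first', 'last'],
  keyHashName: '__key',
  contentHashFields: ['first', 'last', 'friends'],
  contentHashName: '__content',
  multiRowFields: ['friends']
}, true);

// 'sourceObjectStream' and 'destinationStream' are hypothetical;
// each record that passes through gains the configured hash fields.
sourceObjectStream.pipe(processor).pipe(destinationStream);
```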
diff --git a/src/module.js b/src/module.js
new file mode 100644
index 0000000..2e2c069
--- /dev/null
+++ b/src/module.js
@@ -0,0 +1,26 @@
+/* jslint node: true, esnext: true */
+'use strict';
+
+import ChunkProcessorInterceptor from './ChunkProcessorInterceptor';
+import {
+ DataProcessorChunkFactory
+}
+from './data-processor-chunk';
+
+import {
+ createFunctions,
+ createHashFunction
+}
+from './data-hash';
+
+function registerWithManager(manager) {
+ return manager.registerInterceptor(ChunkProcessorInterceptor);
+}
+
+export {
+ DataProcessorChunkFactory,
+ ChunkProcessorInterceptor,
+ registerWithManager,
+ createFunctions,
+ createHashFunction
+};
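
`registerWithManager` preserves the contract of the deleted index.js: the manager is handed the interceptor class to register. A sketch with a hypothetical manager object:

```js
import { registerWithManager } from './module';

// Hypothetical manager exposing the registerInterceptor hook
// that registerWithManager expects.
const manager = {
  interceptors: new Map(),
  registerInterceptor(interceptor) {
    this.interceptors.set(interceptor.name, interceptor);
    return interceptor;
  }
};

registerWithManager(manager);
```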
diff --git a/tests/data-hash_test.js b/tests/data-hash_test.js
index cc0cd9d..c778d26 100644
--- a/tests/data-hash_test.js
+++ b/tests/data-hash_test.js
@@ -9,7 +9,7 @@ const should = chai.should();
const _ = require('underscore');
-const dataHashFactory = require('../lib/data-hash').createFunctions;
+const dataHashFactory = require('../dist/module').createFunctions;
const distinctData = require('./fixtures/distinct_data.json');
diff --git a/tests/data-processor-chunk_test.js b/tests/data-processor-chunk_test.js
index f6cfd37..6604026 100644
--- a/tests/data-processor-chunk_test.js
+++ b/tests/data-processor-chunk_test.js
@@ -9,9 +9,8 @@ const should = chai.should();
const cloneDeep = require('clone-deep');
const mockReadStream = require('kronos-test-interceptor').mockReadStreamFactory;
-const chunkProcessor = require('../lib/data-processor-chunk.js');
-
const objData = require('./fixtures/data.json');
+const chunkProcessor = require('../dist/module').DataProcessorChunkFactory;
const chunkDefinitionGroupFrinds = {
"keyHashFields": ["first", "last"],
diff --git a/tests/interceptor_test.js b/tests/interceptor_test.js
index 78e6bb3..7bd013d 100644
--- a/tests/interceptor_test.js
+++ b/tests/interceptor_test.js
@@ -11,24 +11,24 @@ const chai = require('chai'),
assert = chai.assert,
expect = chai.expect,
should = chai.should(),
- InterceptorUnderTest = require('../index').Interceptor,
+ InterceptorUnderTest = require('../dist/module').ChunkProcessorInterceptor,
MockReceiveInterceptor = require('kronos-test-interceptor').MockReceiveInterceptor;
const stepMock = {
- "name": "dummy step name",
- "type": "dummy step type"
+ name: 'dummy step name',
+ type: 'dummy step type'
};
const checkProperties = {
- "config": {
- "keyHashFields": ["first", "last"],
- "keyHashName": "__key",
- "contentHashFields": ["first", "last", "friends"],
- "contentHashName": "__content",
- "scopeHashFields": ["street"],
- "scopeHashName": "__scope",
- "multiRowFields": ["friends"]
+ config: {
+ keyHashFields: ["first", "last"],
+ keyHashName: "__key",
+ contentHashFields: ["first", "last", "friends"],
+ contentHashName: "__content",
+ scopeHashFields: ["street"],
+ scopeHashName: "__scope",
+ multiRowFields: ["friends"]
}
};
@@ -37,8 +37,8 @@ describe('Interceptor test', function () {
it('Create', function () {
const endpoint = {
- "owner": stepMock,
- "name": "gumboIn"
+ owner: stepMock,
+ name: "gumboIn"
};
const messageHandler = new InterceptorUnderTest(checkProperties, endpoint);
assert.ok(messageHandler);
@@ -46,12 +46,12 @@ describe('Interceptor test', function () {
it('Send message', function (done) {
const endpoint = {
- "owner": stepMock,
- "name": "gumboIn"
+ owner: stepMock,
+ name: "gumboIn"
};
const sendMessage = {
- "info": "first message"
+ info: "first message"
};
const messageHandler = new InterceptorUnderTest(checkProperties, endpoint);
@@ -60,7 +60,7 @@ describe('Interceptor test', function () {
assert.ok(request);
assert.deepEqual(request, {
- "info": "first message"
+ info: "first message"
});
done();
});
@@ -71,6 +71,4 @@ describe('Interceptor test', function () {
});
-
-
});