@@ -1,21 +1,21 @@
 /* jslint node: true, esnext: true */
-"use strict";
+'use strict';

 /**
  * This module will turn an array of tokens into an object.
  */

 const stream = require('stream');

-const ZSchema = require("z-schema");
+const ZSchema = require('z-schema');
 const validator = new ZSchema({});
-const schema = require("../schema/chunk.json");
+const schema = require('../schema/chunk.json');

 const hashFactory = require('./data-hash');

-const ERR_DOUBLE_KEY = "DOUBLE_KEY";
-const ERR_DOUBLE_KEY_SAME = "DOUBLE_KEY_SAME_DATA";
-const ERR_DOUBLE_KEY_DIFF = "DOUBLE_KEY_DIFFERENT_DATA";
+const ERR_DOUBLE_KEY = 'DOUBLE_KEY';
+const ERR_DOUBLE_KEY_SAME = 'DOUBLE_KEY_SAME_DATA';
+const ERR_DOUBLE_KEY_DIFF = 'DOUBLE_KEY_DIFFERENT_DATA';


@@ -64,8 +64,6 @@ class DataProcessorChunk extends stream.Transform {
       }
     }

-
-
     // Stores all the checks/actions to be executed
     this.rowActions = hashFactory.createFunctions(opts);

@@ -197,24 +195,24 @@ class DataProcessorChunk extends stream.Transform {
         if (row[this.contentHashName] === lastHash) {
           // error double key but same content
           addError(data, {
-            "errorCode": ERR_DOUBLE_KEY_SAME,
-            "severity": this.doubleKeySameContent,
-            "doubleLines": this.keyStore[key].lineNumber
+            errorCode: ERR_DOUBLE_KEY_SAME,
+            severity: this.doubleKeySameContent,
+            doubleLines: this.keyStore[key].lineNumber
           });
         } else {
           // error double key with different content
           addError(data, {
-            "errorCode": ERR_DOUBLE_KEY_DIFF,
-            "severity": this.doubleKeyDifferenContent,
-            "doubleLines": this.keyStore[key].lineNumber
+            errorCode: ERR_DOUBLE_KEY_DIFF,
+            severity: this.doubleKeyDifferenContent,
+            doubleLines: this.keyStore[key].lineNumber
           });
         }
       } else {
         // error double key
         addError(data, {
-          "errorCode": ERR_DOUBLE_KEY,
-          "severity": this.doubleKeySameContent,
-          "doubleLines": this.keyStore[key].lineNumber
+          errorCode: ERR_DOUBLE_KEY,
+          severity: this.doubleKeySameContent,
+          doubleLines: this.keyStore[key].lineNumber
         });
       }
     }
@@ -261,9 +259,9 @@ class DataProcessorChunk extends stream.Transform {
       if (row1.data[TMP_HASH_FIELD_NAME] !== rows[rowNum].data[TMP_HASH_FIELD_NAME]) {
         // the hash for the other field is different. This should not be the case
         addError(rows[rowNum], {
-          "errorCode": ERR_DOUBLE_KEY_DIFF,
-          "severity": this.doubleKeyDifferenContent,
-          "doubleLines": [row1.lineNumber, rows[rowNum].lineNumber]
+          errorCode: ERR_DOUBLE_KEY_DIFF,
+          severity: this.doubleKeyDifferenContent,
+          doubleLines: [row1.lineNumber, rows[rowNum].lineNumber]
         });
       }

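For orientation, here is a minimal usage sketch (not part of this commit) of how a chunk processor like this might sit in a stream pipeline. The require path, the assumption that the class is exported directly and runs in object mode, and all option names and severity values below are illustrative guesses, not taken from the repository.

// Minimal sketch, assuming the module exports DataProcessorChunk directly
// and that it operates as an object-mode Transform over parsed row objects.
const DataProcessorChunk = require('./lib/data-processor-chunk'); // path assumed

const processor = new DataProcessorChunk({
  // hypothetical options: severities used for the duplicate-key checks above
  doubleKeySameContent: 'warning',
  doubleKeyDifferenContent: 'error'
});

rowSource                      // some readable object stream of row objects
  .pipe(processor)
  .on('data', row => {
    // duplicate-key findings added via addError() (errorCode, severity,
    // doubleLines) would surface on the processed row here; the exact
    // property they are attached under is not shown in this diff
    console.log(row);
  });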
|
|