Skip to content

Commit

Permalink
Merge pull request #348 from Pchelolo/dep_updates
Browse files Browse the repository at this point in the history
Update dependencies and drop node 6 support.
  • Loading branch information
Pchelolo committed Jun 15, 2020
2 parents 70ed676 + 9090d0d commit 15a1c12
Show file tree
Hide file tree
Showing 24 changed files with 112 additions and 109 deletions.
3 changes: 3 additions & 0 deletions .eslintrc.yml
Expand Up @@ -27,3 +27,6 @@ rules:
- off
no-multi-spaces:
- off
jsdoc/no-undefined-types:
- off

5 changes: 2 additions & 3 deletions .pipeline/blubber.yaml
@@ -1,18 +1,17 @@
version: v4
base: docker-registry.wikimedia.org/nodejs10-slim
apt: { packages: [librdkafka++1, librdkafka1] }
lives:
in: /srv/service
runs:
environment: { APP_BASE_PATH: /srv/service }

variants:
build:
apt: { packages: [librdkafka-dev, build-essential, python-dev, git, libsasl2-dev] }
apt: { packages: [build-essential, python-dev, git, libsasl2-dev] }
base: docker-registry.wikimedia.org/nodejs10-devel
copies: [local]
node: { requirements: [package.json]}
runs: { environment: { LINK: g++, BUILD_LIBRDKAFKA: "0" }}
runs: { environment: { LINK: g++ }}
development:
includes: [build]
entrypoint: [node, server.js]
Expand Down
3 changes: 1 addition & 2 deletions .travis.yml
Expand Up @@ -2,14 +2,13 @@ language: node_js
sudo: false

node_js:
- "6"
- "10"

env:
- KAFKA_HOME=../kafka KAFKA_VERSION=1.1.0 CXX=g++-4.8

services:
- redis-server
- redis

addons:
apt:
Expand Down
1 change: 1 addition & 0 deletions app.js
@@ -1 +1,2 @@
'use strict';
module.exports = require('hyperswitch');
2 changes: 1 addition & 1 deletion config.test.yaml
Expand Up @@ -96,7 +96,7 @@ spec: &spec
sample_testing_rule:
topic: sample_test_rule
sample:
rate: 0.2
rate: 0.5
hash_template: '{{message.meta.domain}}-{{message.page_title}}'
match:
meta:
Expand Down
7 changes: 5 additions & 2 deletions lib/base_executor.js
@@ -1,7 +1,7 @@
'use strict';

const P = require('bluebird');
const uuidv1 = require('uuid/v1');
const uuidv1 = require('uuid').v1;
const HTTPError = require('hyperswitch').HTTPError;
const URI = require('hyperswitch').URI;
const kafka = require('node-rdkafka');
Expand Down Expand Up @@ -63,6 +63,7 @@ class BaseExecutor {

/**
* Creates a new instance of a rule executor
*
* @param {Rule} rule
* @param {KafkaFactory} kafkaFactory
* @param {Object} hyper
Expand Down Expand Up @@ -186,7 +187,6 @@ class BaseExecutor {
.catch((e) => {
// These errors must come from the KafkaConsumer
// since the actual handler must never throw errors
/* eslint-disable indent */
switch (e.code) {
case kafka.CODES.ERRORS.ERR__PARTITION_EOF:
case kafka.CODES.ERRORS.ERR__TIMED_OUT:
Expand Down Expand Up @@ -221,6 +221,7 @@ class BaseExecutor {

/**
* Checks whether a message should be rate-limited
*
* @param {Object} expander the limiter key expander
* @return {Promise<boolean>}
* @private
Expand Down Expand Up @@ -567,6 +568,7 @@ class BaseExecutor {

/**
* Checks whether retry limit for this rule is exceeded.
*
* @param {Object} message a retry message to check
* @param {Error} [e] optional Error that caused a retry
* @return {boolean}
Expand Down Expand Up @@ -641,6 +643,7 @@ class BaseExecutor {

/**
* Create an error message for a special Kafka topic
*
* @param {Error} e an exception that caused a failure
* @param {string|Object} event an original event. If JSON parsing failed, it's a string.
* @return {Object} error message object
Expand Down
5 changes: 5 additions & 0 deletions lib/kafka_factory.js
Expand Up @@ -178,6 +178,7 @@ class KafkaFactory {
* Contains the kafka consumer/producer configuration. The configuration options
* are directly passed to librdkafka. For options see librdkafka docs:
* https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
*
* @param {Object} kafkaConf
* @param {Object} kafkaConf.metadata_broker_list a list of kafka brokers
* @param {string} [kafkaConf.consume_dc] a DC name to consume from
Expand Down Expand Up @@ -216,6 +217,7 @@ class KafkaFactory {

/**
* Returns a DC name to consume from
*
* @return {string}
*/
get consumeDC() {
Expand All @@ -224,6 +226,7 @@ class KafkaFactory {

/**
* Returns a DC name to produce to
*
* @return {string}
*/
get produceDC() {
Expand All @@ -232,6 +235,7 @@ class KafkaFactory {

/**
* Create new KafkaConsumer and connect it.
*
* @param {string} groupId Consumer group ID to use
* @param {Array} topics Topics to subscribe to
* @param {Object} [metrics] metrics reporter
Expand Down Expand Up @@ -303,6 +307,7 @@ module.exports = {
/**
* A way to replace the KafkaFactory for the mocked unit tests.
* Not to be used in production code.
*
* @param {KafkaFactory} factory a new KafkaFactory
*/
setFactory: (factory) => {
Expand Down
5 changes: 5 additions & 0 deletions lib/rule.js
Expand Up @@ -13,6 +13,7 @@ const DEFAULT_REQUEST_TIMEOUT = 7 * 60 * 1000;

/**
* Creates a JS function that verifies property equality
*
* @param {Object} retryDefinition the condition in the format of 'retry_on' stanza
* @return {Function} a function that verifies the condition
*/
Expand Down Expand Up @@ -204,6 +205,7 @@ class Rule {

/**
* Whether the claim_ttl or root_claim_ttl has passed and the event should be abandoned
*
* @param {Object} message the event to check
* @return {boolean} whether to abandon the event or not
*/
Expand Down Expand Up @@ -237,6 +239,7 @@ class Rule {
/**
* Tests the message against the compiled evaluation test. In case the rule contains
* multiple options, the first one that's matched is chosen.
*
* @param {Object} message the message to test
* @return {Integer} index of the matched option or -1 of nothing matched
*/
Expand All @@ -248,6 +251,7 @@ class Rule {
/**
* Returns a rule handler that consists of a set of exec templates
* and an expander function
*
* @param {Integer} index an index of the switch option
* @return {Object}
*/
Expand All @@ -264,6 +268,7 @@ class Rule {

/**
* Returns the key to use for a rate-limiter of the certain type
*
* @param {string} type limiter type
* @param {Object} expander the expander containing the message and match
* @return {string|null}
Expand Down
1 change: 1 addition & 0 deletions lib/rule_executor.js
Expand Up @@ -18,6 +18,7 @@ class RuleExecutor extends BaseExecutor {

/**
* Returns a handler to be used or undefined.
*
* @param {Object} message the message to process
* @return {Function|boolean}
*/
Expand Down
2 changes: 2 additions & 0 deletions lib/rule_subscriber.js
Expand Up @@ -91,6 +91,7 @@ class RegexTopicSubscription {

/**
* Filters out which topic names are ok to subscribe to.
*
* @param {Array} proposedTopicNames a set of available topic names
* to check which to subscribe to
* @return {Array}
Expand Down Expand Up @@ -178,6 +179,7 @@ class Subscriber {

/**
* Subscribe a rule spec under a certain rule name
*
* @param {HyperSwitch} hyper the request dispatcher
* @param {string} ruleName the name of the rule
* @param {Object} ruleSpec the rule specification
Expand Down
14 changes: 5 additions & 9 deletions lib/sampler.js
@@ -1,9 +1,8 @@
'use strict';

/* eslint no-bitwise: ["error", { "allow": ["~", "<<"] }] */
/* jshint bitwise: false */

const murmur = require('murmur-32');
const murmur = require('murmurhash');
const Template = require('hyperswitch').Template;

class Sampler {
Expand All @@ -21,17 +20,13 @@ class Sampler {
}

this._hashSourceTemplate = new Template(this._options.hash_template);

const percent = Math.round(this._options.rate * 100);
const minMurmur = 1 << 31;
const maxMurmur = ~minMurmur;
const step = Math.round((Math.abs(minMurmur) + maxMurmur) / 100);
this._maxHash = minMurmur + (step * percent);
this._maxHash = Math.round(0xFFFFFFFF * this._options.rate);
}

/**
* Returns true if this request should be sampled, false if it should
* be ignored.
*
* @param {Object} context
* @return {boolean}
*/
Expand All @@ -42,11 +37,12 @@ class Sampler {

/**
* Returns a numeric representation of the value's murmur32 hash.
*
* @param {string} value
* @return {number}
*/
static hash(value) {
return new DataView(murmur(value)).getInt32(0);
return murmur(value);
}
}

Expand Down
5 changes: 4 additions & 1 deletion lib/utils.js
@@ -1,11 +1,12 @@
'use strict';

const uuidv1 = require('uuid/v1');
const uuidv1 = require('uuid').v1;

const utils = {};

/**
* Computes the x-triggered-by header
*
* @param {Object} event the event
* @return {string}
*/
Expand All @@ -23,6 +24,7 @@ utils.requestId = () => uuidv1();

/**
* Safely stringifies the event to JSON string.
*
* @param {Object} event the event to stringify
* @return {string|undefined} stringified event or undefined if failed.
*/
Expand All @@ -37,6 +39,7 @@ utils.stringify = (event) => {
/**
* From a list of regexes and strings, constructs a regex that
* matches any item in the list
*
* @param {Array} list the list of regexes and strings to unify
* @return {RegExp|undefined} the compiled regex or undefined
*/
Expand Down
66 changes: 23 additions & 43 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "change-propagation",
"version": "0.9.6",
"version": "0.10.0",
"description": "Listens to events from Kafka and delivers them",
"main": "server.js",
"repository": {
Expand Down Expand Up @@ -37,56 +37,36 @@
},
"homepage": "https://github.com/wikimedia/change-propagation",
"dependencies": {
"bluebird": "^3.5.3",
"bluebird": "^3.7.2",
"extend": "^3.0.2",
"fast-json-stable-stringify": "^2.0.0",
"htcp-purge": "^0.3.0",
"hyperswitch": "^0.12.4",
"mediawiki-title": "^0.6.5",
"murmur-32": "^0.1.0",
"node-rdkafka": "2.6.1",
"fast-json-stable-stringify": "^2.1.0",
"htcp-purge": "^0.3.1",
"hyperswitch": "^0.13.0",
"mediawiki-title": "^0.7.2",
"murmurhash": "^1.0.0",
"node-rdkafka": "2.8.1",
"ratelimit.js": "^1.8.0",
"redis": "^2.8.0",
"service-runner": "^2.7.7",
"uuid": "^3.3.2"
"redis": "^3.0.2",
"service-runner": "^2.7.8",
"uuid": "^8.1.0"
},
"devDependencies": {
"@wikimedia/jsonschema-tools": "^0.6.0",
"ajv": "^6.10.0",
"coveralls": "^3.0.3",
"eslint-config-wikimedia": "^0.11.0",
"eslint-plugin-jsdoc": "^4.4.3",
"eslint-plugin-json": "^1.4.0",
"js-yaml": "^3.12.2",
"@wikimedia/jsonschema-tools": "^0.7.5",
"ajv": "^6.12.2",
"coveralls": "^3.1.0",
"eslint-config-wikimedia": "^0.16.1",
"eslint-plugin-jsdoc": "^27.0.6",
"eslint-plugin-json": "^2.1.1",
"js-yaml": "^3.14.0",
"kafka-test-tools": "^0.1.13",
"mocha": "^6.0.2",
"mocha": "^8.0.1",
"mock-require": "^3.0.3",
"nock": "^10.0.6",
"nyc": "^13.3.0",
"nock": "^12.0.3",
"nyc": "^15.1.0",
"preq": "^0.5.14",
"redis-mock": "^0.43.0"
"redis-mock": "^0.49.0"
},
"engines": {
"node": ">=6"
},
"deploy": {
"node": "6.11.1",
"target": "debian",
"env": {
"BUILD_LIBRDKAFKA": "0"
},
"dependencies": {
"debian": [
{
"repo_url": "https://apt.wikimedia.org/wikimedia",
"release": "jessie-wikimedia",
"pool": "main",
"packages": [
"librdkafka-dev"
]
},
"libsasl2-dev"
]
}
"node": ">=10"
}
}
2 changes: 1 addition & 1 deletion server.js
Expand Up @@ -3,5 +3,5 @@
'use strict';

// B/C wrapper to make the old init script work with service-runner.
var ServiceRunner = require('service-runner');
const ServiceRunner = require('service-runner');
new ServiceRunner().start();
1 change: 1 addition & 0 deletions sys/deduplicator.js
Expand Up @@ -17,6 +17,7 @@ class Deduplicator extends mixins.mix(Object).with(mixins.Redis) {

/**
* Checks whether the message is a duplicate
*
* @param {HyperSwitch} hyper
* @param {Object} req
* @return {Promise} response status shows whether it's a duplicate or not.
Expand Down
2 changes: 1 addition & 1 deletion sys/kafka.js
Expand Up @@ -7,7 +7,7 @@
const P = require('bluebird');
const HyperSwitch = require('hyperswitch');
const HTTPError = HyperSwitch.HTTPError;
const uuidv1 = require('uuid/v1');
const uuidv1 = require('uuid').v1;

const utils = require('../lib/utils');
const kafkaFactory = require('../lib/kafka_factory');
Expand Down

0 comments on commit 15a1c12

Please sign in to comment.