Merge pull request #4 from Pchelolo/travis
Set up infrastructure for testing with kafka locally and in travis
gwicke committed Apr 15, 2016
2 parents 2a2f014 + ad73d7a commit 1a9ccdd
Showing 10 changed files with 298 additions and 6 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -3,3 +3,6 @@ coverage
node_modules
npm-debug.log
*~

.idea/*
config.yaml
12 changes: 9 additions & 3 deletions .travis.yml
@@ -1,11 +1,18 @@
language: node_js
sudo: false

node_js:
- "4.3"
- "5"

env:
- CXX=g++-4.8
- KAFKA_HOME=../kafka CXX=g++-4.8

before_install:
- wget http://www.us.apache.org/dist/kafka/0.8.2.2/kafka_2.10-0.8.2.2.tgz -O kafka.tgz
- mkdir -p ${KAFKA_HOME} && tar xzf kafka.tgz -C ${KAFKA_HOME} --strip-components 1
- echo 'delete.topic.enable=true' >> ${KAFKA_HOME}/config/server.properties
- sh test/utils/start_kafka.sh start

addons:
  apt:
@@ -14,5 +21,4 @@ addons:
    packages:
      - g++-4.8

sudo: false

script: npm run coverage && (npm run coveralls || exit 0)
26 changes: 26 additions & 0 deletions README.md
@@ -8,3 +8,29 @@
A [RESTBase](https://github.com/wikimedia/restbase) queuing module for
[Apache Kafka](http://kafka.apache.org/)


## Testing

For testing locally you need to set up and start Apache Kafka and set the
`KAFKA_HOME` environment variable to point to the Kafka home directory.
Here's a sample script that downloads Kafka and sets up the environment:

```bash
export KAFKA_HOME=<your desired kafka install path>
wget http://www.us.apache.org/dist/kafka/0.8.2.2/kafka_2.10-0.8.2.2.tgz -O kafka.tgz
mkdir -p $KAFKA_HOME && tar xzf kafka.tgz -C $KAFKA_HOME --strip-components 1
echo "KAFKA_HOME=$KAFKA_HOME" >> ~/.bash_profile
echo "PATH=\$PATH:\$KAFKA_HOME/bin" >> ~/.bash_profile
```

Also, you need to enable topic deletion so that the test scripts can clean up
Kafka state before each test run:

```bash
echo 'delete.topic.enable=true' >> $KAFKA_HOME/config/server.properties
```

Before starting the development version of change propagation or running the
tests, you need to start Zookeeper and Kafka with the `start-kafka` npm script
(`npm run start-kafka`). To stop Kafka and Zookeeper, run the `stop-kafka` npm
script (`npm run stop-kafka`).

29 changes: 29 additions & 0 deletions config.test.yaml
@@ -0,0 +1,29 @@
num_workers: 0
logging:
  name: changeprop
  level: info
services:
  - name: changeprop
    module: hyperswitch
    conf:
      port: 7273
      spec:
        title: The Change Propagation root
        paths:
          /{domain:a}/sys/queue:
            x-modules:
              - path: sys/kafka.js
                options:
                  uri: 127.0.0.1:2181
                  templates:
                    simple_test_rule:
                      topic: test_topic_simple_test_rule
                      exec:
                        method: post
                        uri: 'http://mock.com'
                        headers:
                          test_header_name: test_header_value
                          content-type: application/json
                        body:
                          test_field_name: test_field_value
                          derived_field: '{{message.message}}'
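
To make the intent of `simple_test_rule` explicit (the feature test added below in `test/feature/static_rules.js` exercises exactly this path), here is a sketch of the expected template expansion. The variable names are illustrative, and the expansion is described behaviourally rather than as the module's actual implementation.

```js
// A message produced to the rule's topic...
var message = { message: 'test' };

// ...is expected to make change propagation issue roughly this request,
// with '{{message.message}}' substituted from the consumed message:
var expectedRequest = {
    method: 'post',
    uri: 'http://mock.com',
    headers: {
        test_header_name: 'test_header_value',
        'content-type': 'application/json'
    },
    body: {
        test_field_name: 'test_field_value',
        derived_field: message.message   // -> 'test'
    }
};
```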
73 changes: 73 additions & 0 deletions lib/kafka_factory.js
@@ -0,0 +1,73 @@
"use strict";

var kafka = require('wmf-kafka-node');
var uuid = require('cassandra-uuid');
var P = require('bluebird');

/**
 * Utility class providing high-level interfaces to kafka modules.
 *
 * @param {Object} kafkaConf Kafka connection configuration
 * @param {string} kafkaConf.uri Zookeeper URI with host and port
 * @param {string} kafkaConf.clientId Client identification string
 * @constructor
 */
function KafkaFactory(kafkaConf) {
    this.kafkaConf = kafkaConf;
}

/**
 * Creates a new kafka client.
 *
 * @returns {Client}
 */
KafkaFactory.prototype.newClient = function() {
    return new kafka.Client(this.kafkaConf.uri,
        this.kafkaConf.clientId + '-' + uuid.TimeUuid.now() + '-' + uuid.Uuid.random(),
        {}
    );
};

/**
 * Creates and initializes a new kafka producer.
 *
 * @param {Client} client a kafka client to use.
 * @param {Object} [options] producer options
 *
 * @returns {Promise<HighLevelProducer>}
 */
KafkaFactory.prototype.newProducer = function(client, options) {
    return new P(function(resolve, reject) {
        var producer = new kafka.HighLevelProducer(client, options || {});
        producer.once('ready', function() {
            resolve(P.promisifyAll(producer));
        });
        producer.once('error', reject);
    });
};

/**
 * Creates a kafka consumer.
 *
 * @param {Client} client a kafka client to use
 * @param {string} topic a topic name to consume
 * @param {string} groupId consumer group ID
 * @param {Number} [offset] the offset from which to start consuming
 *
 * @returns {Promise} a promise that's resolved when a consumer is ready
 */
KafkaFactory.prototype.newConsumer = function(client, topic, groupId, offset) {
    var topicConf = { topic: topic };
    if (offset !== undefined) {
        topicConf.offset = offset;
    }
    return new P(function(resolve, reject) {
        var consumer = new kafka.HighLevelConsumer(client, [ topicConf ], { groupId: groupId });
        consumer.once('error', reject);
        consumer.once('rebalanced', function() {
            resolve(consumer);
        });
    });
};

module.exports = KafkaFactory;
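
To make the new factory's API concrete, here is a minimal usage sketch. The Zookeeper URI, client id, topic name and consumer group are illustrative placeholders, and the `sendAsync` payload shape and `message` event follow kafka-node conventions, which `wmf-kafka-node` is assumed to inherit.

```js
var KafkaFactory = require('./lib/kafka_factory');

var factory = new KafkaFactory({
    uri: 'localhost:2181/',            // Zookeeper host:port (assumed local instance)
    clientId: 'kafka-factory-example'  // illustrative client id
});

// Produce a message: newProducer resolves with a promisified HighLevelProducer,
// so the *Async variants (e.g. sendAsync) are available.
factory.newProducer(factory.newClient())
.then(function(producer) {
    return producer.sendAsync([{
        topic: 'example_topic',
        messages: [ JSON.stringify({ message: 'hello' }) ]
    }]);
});

// Consume from the same topic: newConsumer resolves once the consumer
// has joined its group and been rebalanced.
factory.newConsumer(factory.newClient(), 'example_topic', 'example-group')
.then(function(consumer) {
    consumer.on('message', function(message) {
        console.log('received', message.value);
    });
});
```

Note that each `newClient()` call creates a separate kafka client backed by its own Zookeeper connection, so callers reuse clients where one connection is enough.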
13 changes: 10 additions & 3 deletions package.json
@@ -9,8 +9,12 @@
  },
  "scripts": {
    "start": "service-runner",
    "test": "mocha",
    "coverage": "istanbul cover _mocha -- -R spec"
    "cleanup": "sh test/utils/clean_kafka.sh",
    "start-kafka": "sh test/utils/start_kafka.sh start",
    "stop-kafka": "sh test/utils/start_kafka.sh stop",
    "test": "npm run cleanup && mocha",
    "coverage": "npm run cleanup && istanbul cover _mocha -- -R spec",
    "coveralls": "cat ./coverage/lcov.info | coveralls"
  },
  "keywords": [
    "REST",
@@ -41,7 +45,10 @@
    "mocha": "^2.4.5",
    "mocha-jscs": "^4.2.0",
    "mocha-jshint": "^2.3.1",
    "mocha-lcov-reporter": "^1.2.0"
    "mocha-lcov-reporter": "^1.2.0",
    "coveralls": "^2.11.6",
    "js-yaml": "^3.5.2",
    "nock": "^8.0.0"
  },
  "deploy": {
    "node": "4.3.0",
49 changes: 49 additions & 0 deletions test/feature/static_rules.js
@@ -0,0 +1,49 @@
"use strict";

var ChangeProp = require('../utils/changeProp');
var KafkaFactory = require('../../lib/kafka_factory');
var nock = require('nock');

describe('Basic rule management', function() {
    var changeProp = new ChangeProp('config.test.yaml');
    var kafkaFactory = new KafkaFactory({
        uri: 'localhost:2181/', // TODO: find out from the config
        clientId: 'change-prop-test-suite'
    });
    var producer;

    before(function() {
        return kafkaFactory.newProducer(kafkaFactory.newClient())
        .then(function(newProducer) {
            producer = newProducer;
            return producer.createTopicsAsync([ 'test_topic_simple_test_rule' ], false);
        })
        .then(function() {
            return changeProp.start();
        });
    });

    it('Should call simple executor', function() {
        var service = nock('http://mock.com', {
            reqheaders: {
                test_header_name: 'test_header_value',
                'content-type': 'application/json'
            }
        })
        .post('/', {
            'test_field_name': 'test_field_value',
            'derived_field': 'test'
        }).reply({});

        return producer.sendAsync([{
            topic: 'test_topic_simple_test_rule',
            messages: [ JSON.stringify({ message: 'test' }) ]
        }])
        .delay(100)
        .then(function() { service.done(); })
        .finally(function() { nock.cleanAll(); });
    });

    after(function() { return changeProp.stop(); });
});
49 changes: 49 additions & 0 deletions test/utils/changeProp.js
@@ -0,0 +1,49 @@
'use strict';

var ServiceRunner = require('service-runner');
var fs = require('fs');
var yaml = require('js-yaml');
var P = require('bluebird');

var ChangeProp = function(configPath) {
    this._configPath = configPath;
    this._config = this._loadConfig();
    this._config.num_workers = 0;
    this._config.logging = {
        name: 'change-prop',
        level: 'fatal',
        streams: [{ type: 'stdout'}]
    };
    this._runner = new ServiceRunner();
};

ChangeProp.prototype._loadConfig = function() {
    return yaml.safeLoad(fs.readFileSync(this._configPath).toString());
};

ChangeProp.prototype.start = function() {
    var self = this;
    self.port = self._config.services[0].conf.port;
    self.hostPort = 'http://localhost:' + self.port;
    return self._runner.run(self._config)
    .then(function(servers) {
        self._servers = servers;
        return true;
    });
};

ChangeProp.prototype.stop = function() {
    var self = this;
    if (self._servers) {
        return P.each(self._servers, function(server) {
            return server.close();
        })
        .then(function() {
            self._servers = undefined;
        });
    } else {
        return P.resolve();
    }
};

module.exports = ChangeProp;
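
As a quick reference, here is a minimal sketch of driving this helper from a mocha suite. The suite name and the logged output are illustrative, the require path assumes the suite lives next to `test/feature/static_rules.js`, and `config.test.yaml` is the config file added in this commit.

```js
var ChangeProp = require('../utils/changeProp');

describe('example suite', function() {
    var changeProp = new ChangeProp('config.test.yaml');

    // start() boots the service via service-runner and records its base URI.
    before(function() { return changeProp.start(); });

    it('exposes the base URI of the running service', function() {
        // e.g. 'http://localhost:7273' with the port from config.test.yaml
        console.log(changeProp.hostPort);
    });

    // stop() closes every server started by service-runner.
    after(function() { return changeProp.stop(); });
});
```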
19 changes: 19 additions & 0 deletions test/utils/clean_kafka.sh
@@ -0,0 +1,19 @@
#!/bin/bash

dropTopics ( ) {
    if [ "$#" -eq 1 ]
    then
        PATTERN=$1
        echo "looking for topics named '*${PATTERN}*'..."
        TOPICS=`${KAFKA_HOME}/bin/kafka-topics.sh --zookeeper localhost:2181 --list \
            | grep ${PATTERN} \
            | grep -v 'marked for deletion$'`
        for TOPIC in ${TOPICS}
        do
            echo "dropping topic ${TOPIC}"
            ${KAFKA_HOME}/bin/kafka-topics.sh --zookeeper localhost:2181 --delete --topic ${TOPIC} > /dev/null
        done
    fi
}

dropTopics "test_topic"
31 changes: 31 additions & 0 deletions test/utils/start_kafka.sh
@@ -0,0 +1,31 @@
#!/bin/bash

if [ "x$KAFKA_HOME" = "x" ]; then
echo "Please set KAFKA_HOME env variable to the kafka install directory"
exit 1
fi

if [ "$1" = "start" ]; then
if [ `nc localhost 2181 < /dev/null; echo $?` != 0 ]; then
sh $KAFKA_HOME/bin/zookeeper-server-start.sh $KAFKA_HOME/config/zookeeper.properties > /dev/null &
while [ `nc localhost 2181 < /dev/null; echo $?` != 0 ]; do
echo "waiting for Zookeeper..."
sleep 1 ;
done
else
echo "Zookeper already running"
fi

if [ `nc localhost 9092 < /dev/null; echo $?` != 0 ]; then
sh $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties > /dev/null &
while [ `nc localhost 9092 < /dev/null; echo $?` != 0 ]; do
echo "waiting for Kafka..." ;
sleep 1 ;
done
else
echo "Kafka already running";
fi
elif [ "$1" = "stop" ]; then
sh $KAFKA_HOME/bin/kafka-server-stop.sh &
sh $KAFKA_HOME/bin/zookeeper-server-stop.sh &
fi
