
Added support for Snappy compression to the Consumer, and added compression (Snappy and GZip) to the Producer and MultiProducer.

To enable Snappy compression, include the following in your Gemfile (this branch is necessary until a pending pull request is accepted):

    gem "snappy", "0.0.4", :git => "git://github.com/watersofoblivion/snappy.git", :branch => "snappy-streaming"
commit f4eda48eac00d2b2c20b52701486ecda7033f56c (1 parent: 7bd4bf3)
Authored by Bob Cotton, committed by watersofoblivion
Gemfile (6 lines changed)
@@ -0,0 +1,6 @@
+source 'https://rubygems.org'
+
+# Specify your gem's dependencies in foo.gemspec
+gemspec
+
+gem "rake"
README.md (5 lines changed)
@@ -7,6 +7,11 @@ and is used in production at wooga.
You need to have access to your Kafka instance and be able to connect through TCP.
You can obtain a copy and instructions on how to setup kafka at http://incubator.apache.org/kafka/
+To make Snappy compression available, add
+
+ gem "snappy", "0.0.4", :git => "git://github.com/watersofoblivion/snappy.git", :branch => "snappy-streaming"
+
+to your Gemfile.
## Installation
Rakefile (36 lines changed)
@@ -19,35 +19,7 @@ require 'rubygems/specification'
require 'date'
require 'rspec/core/rake_task'
-spec = Gem::Specification.new do |s|
- s.name = %q{kafka-rb}
- s.version = "0.0.11"
-
- s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
- s.authors = ["Alejandro Crosa", "Stefan Mees", "Tim Lossen", "Liam Stewart"]
- s.autorequire = %q{kafka-rb}
- s.date = Time.now.strftime("%Y-%m-%d")
- s.description = %q{kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.}
- s.extra_rdoc_files = ["LICENSE"]
- s.files = ["LICENSE", "README.md", "Rakefile"] + Dir.glob("lib/**/*.rb")
- s.test_files = Dir.glob("spec/**/*.rb")
- s.homepage = %q{http://github.com/acrosa/kafka-rb}
- s.require_paths = ["lib"]
- s.summary = %q{A Ruby client for the Kafka distributed publish/subscribe messaging service}
-
- if s.respond_to? :specification_version then
- current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
- s.specification_version = 3
-
- if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
- s.add_development_dependency(%q<rspec>, [">= 0"])
- else
- s.add_dependency(%q<rspec>, [">= 0"])
- end
- else
- s.add_dependency(%q<rspec>, [">= 0"])
- end
-end
+spec = eval(File.open("kafka-rb.gemspec", "r").read)
Gem::PackageTask.new(spec) do |pkg|
pkg.gem_spec = spec
@@ -58,12 +30,6 @@ task :install => [:package] do
sh %{sudo gem install pkg/#{GEM}-#{GEM_VERSION}}
end
-desc "Run all examples with RCov"
-RSpec::Core::RakeTask.new(:rcov) do |t|
- t.pattern = FileList['spec/**/*_spec.rb']
- t.rcov = true
-end
-
desc "Run specs"
RSpec::Core::RakeTask.new do |t|
t.pattern = FileList['spec/**/*_spec.rb']
kafka-rb.gemspec (29 lines changed)
@@ -0,0 +1,29 @@
+Gem::Specification.new do |s|
+ s.name = %q{kafka-rb}
+ s.version = "0.0.12"
+
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
+ s.authors = ["Alejandro Crosa", "Stefan Mees", "Tim Lossen", "Liam Stewart"]
+ s.autorequire = %q{kafka-rb}
+ s.date = Time.now.strftime("%Y-%m-%d")
+ s.description = %q{kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.}
+ s.extra_rdoc_files = ["LICENSE"]
+ s.files = ["LICENSE", "README.md", "Rakefile"] + Dir.glob("lib/**/*.rb")
+ s.test_files = Dir.glob("spec/**/*.rb")
+ s.homepage = %q{http://github.com/acrosa/kafka-rb}
+ s.require_paths = ["lib"]
+ s.summary = %q{A Ruby client for the Kafka distributed publish/subscribe messaging service}
+
+ if s.respond_to? :specification_version then
+ current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
+ s.specification_version = 3
+
+ if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
+ s.add_development_dependency(%q<rspec>, [">= 0"])
+ else
+ s.add_dependency(%q<rspec>, [">= 0"])
+ end
+ else
+ s.add_dependency(%q<rspec>, [">= 0"])
+ end
+end
lib/kafka.rb (8 lines changed)
@@ -14,8 +14,12 @@
# limitations under the License.
require 'socket'
require 'zlib'
-if RUBY_VERSION[0,3] == "1.8"
- require 'iconv'
+require "stringio"
+
+begin
+ require 'snappy'
+rescue LoadError
+ nil
end
require File.join(File.dirname(__FILE__), "kafka", "io")
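
With the snappy require wrapped in a begin/rescue, the library still loads when the gem is missing; only the Snappy codec becomes unavailable. A hedged sketch of choosing a codec at runtime, mirroring the `Object.const_defined?` check that `Message.ensure_snappy!` (below) relies on:

    # Prefer Snappy when the optional gem is loaded; otherwise fall back to
    # GZip, which is always available through Ruby's bundled zlib.
    codec = if Object.const_defined?("Snappy")
              Kafka::Message::SNAPPY_COMPRESSION
            else
              Kafka::Message::GZIP_COMPRESSION
            end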
lib/kafka/encoder.rb (34 lines changed)
@@ -15,22 +15,12 @@
module Kafka
module Encoder
- def self.message(message)
- payload = \
- if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
- Iconv.new('UTF-8//IGNORE', 'UTF-8').iconv(message.payload.to_s)
- else
- message.payload.to_s.force_encoding(Encoding::ASCII_8BIT)
- end
- data = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + payload
-
- [data.length].pack("N") + data
+ def self.message(message, compression = Message::NO_COMPRESSION)
+ message.encode(compression)
end
- def self.message_block(topic, partition, messages)
- message_set = Array(messages).collect { |message|
- self.message(message)
- }.join("")
+ def self.message_block(topic, partition, messages, compression)
+ message_set = message_set(messages, compression)
topic = [topic.length].pack("n") + topic
partition = [partition].pack("N")
@@ -39,16 +29,24 @@ def self.message_block(topic, partition, messages)
return topic + partition + messages
end
- def self.produce(topic, partition, messages)
+ def self.message_set(messages, compression)
+ message_set = Array(messages).collect { |message|
+ self.message(message)
+ }.join("")
+ message_set = self.message(Message.new(message_set), compression) unless compression == Message::NO_COMPRESSION
+ message_set
+ end
+
+ def self.produce(topic, partition, messages, compression = Message::NO_COMPRESSION)
request = [RequestType::PRODUCE].pack("n")
- data = request + self.message_block(topic, partition, messages)
+ data = request + self.message_block(topic, partition, messages, compression)
return [data.length].pack("N") + data
end
- def self.multiproduce(producer_requests)
+ def self.multiproduce(producer_requests, compression = Message::NO_COMPRESSION)
part_set = Array(producer_requests).map { |req|
- self.message_block(req.topic, req.partition, req.messages)
+ self.message_block(req.topic, req.partition, req.messages, compression)
}
request = [RequestType::MULTIPRODUCE].pack("n")
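
In effect, `message_set` encodes and concatenates the individual messages and, when a codec is requested, wraps the result in a single magic-1 carrier message whose attributes byte records the codec. A minimal round-trip sketch using the same calls the encoder spec below exercises:

    messages = [Kafka::Message.new("foo"), Kafka::Message.new("bar")]

    # Encode each message, concatenate, compress, and wrap in one carrier message.
    bytes = Kafka::Encoder.message_set(messages, Kafka::Message::GZIP_COMPRESSION)

    # parse_from detects the codec and recursively decodes the inner set.
    set = Kafka::Message.parse_from(bytes)
    set.messages.map(&:payload)   # => ["foo", "bar"]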
lib/kafka/io.rb (2 lines changed)
@@ -14,7 +14,7 @@
# limitations under the License.
module Kafka
module IO
- attr_accessor :socket, :host, :port
+ attr_accessor :socket, :host, :port, :compression
HOST = "localhost"
PORT = 9092
lib/kafka/message.rb (122 lines changed)
@@ -33,6 +33,10 @@ module Kafka
class Message
MAGIC_IDENTIFIER_DEFAULT = 0
+ MAGIC_IDENTIFIER_COMPRESSION = 1
+ NO_COMPRESSION = 0
+ GZIP_COMPRESSION = 1
+ SNAPPY_COMPRESSION = 2
BASIC_MESSAGE_HEADER = 'NC'.freeze
VERSION_0_HEADER = 'N'.freeze
VERSION_1_HEADER = 'CN'.freeze
@@ -41,9 +45,10 @@ class Message
attr_accessor :magic, :checksum, :payload
def initialize(payload = nil, magic = MAGIC_IDENTIFIER_DEFAULT, checksum = nil)
- self.magic = magic
- self.payload = payload || ""
- self.checksum = checksum || self.calculate_checksum
+ self.magic = magic
+ self.payload = payload || ""
+ self.checksum = checksum || self.calculate_checksum
+ @compression = NO_COMPRESSION
end
def calculate_checksum
@@ -66,7 +71,7 @@ def self.parse_from(data)
break if bytes_processed + message_size + 4 > data.length # message is truncated
case magic
- when 0
+ when MAGIC_IDENTIFIER_DEFAULT
# | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 ...
# | | | |
# | message_size |magic| checksum | payload ...
@@ -75,7 +80,7 @@ def self.parse_from(data)
payload = data[bytes_processed + 9, payload_size]
messages << Kafka::Message.new(payload, magic, checksum)
- when 1
+ when MAGIC_IDENTIFIER_COMPRESSION
# | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 ...
# | | | | |
# | size |magic|attrs| checksum | payload ...
@@ -84,18 +89,22 @@ def self.parse_from(data)
payload = data[bytes_processed + 10, payload_size]
case attributes & COMPRESSION_CODEC_MASK
- when 0 # a single uncompressed message
+ when NO_COMPRESSION # a single uncompressed message
messages << Kafka::Message.new(payload, magic, checksum)
- when 1 # a gzip-compressed message set -- parse recursively
+ when GZIP_COMPRESSION # a gzip-compressed message set -- parse recursively
uncompressed = Zlib::GzipReader.new(StringIO.new(payload)).read
message_set = parse_from(uncompressed)
raise 'malformed compressed message' if message_set.size != uncompressed.size
messages.concat(message_set.messages)
+ when SNAPPY_COMPRESSION # a snappy-compressed message set -- parse recursively
+ ensure_snappy! do
+ uncompressed = Snappy::Reader.new(StringIO.new(payload)).read
+ message_set = parse_from(uncompressed)
+ raise 'malformed compressed message' if message_set.size != uncompressed.size
+ messages.concat(message_set.messages)
+ end
else
# https://cwiki.apache.org/confluence/display/KAFKA/Compression
- # claims that 2 is for Snappy compression, but Kafka's Scala client
- # implementation doesn't seem to support it yet, so I don't have
- # a reference implementation to test against.
raise "Unsupported Kafka compression codec: #{attributes & COMPRESSION_CODEC_MASK}"
end
@@ -108,10 +117,93 @@ def self.parse_from(data)
MessageSet.new(bytes_processed, messages)
end
- end
- # Encapsulates a list of Kafka messages (as Kafka::Message objects in the
- # +messages+ attribute) and their total serialized size in bytes (the +size+
- # attribute).
- class MessageSet < Struct.new(:size, :messages); end
+ def encode(compression = NO_COMPRESSION)
+ @compression = compression
+
+ self.payload = asciify_payload
+ self.payload = compress_payload if compression?
+
+ data = magic_and_compression + [calculate_checksum].pack("N") + payload
+ [data.length].pack("N") + data
+ end
+
+
+ # Encapsulates a list of Kafka messages (as Kafka::Message objects in the
+ # +messages+ attribute) and their total serialized size in bytes (the +size+
+ # attribute).
+ class MessageSet < Struct.new(:size, :messages); end
+
+ def self.ensure_snappy!
+ if Object.const_defined? "Snappy"
+ yield
+ else
+ fail "Snappy not available!"
+ end
+ end
+
+ def ensure_snappy! &block
+ self.class.ensure_snappy! &block
+ end
+
+ private
+
+ attr_reader :compression
+
+ def compression?
+ compression != NO_COMPRESSION
+ end
+
+ def magic_and_compression
+ if compression?
+ [MAGIC_IDENTIFIER_COMPRESSION, compression].pack("CC")
+ else
+ [MAGIC_IDENTIFIER_DEFAULT].pack("C")
+ end
+ end
+
+ def asciify_payload
+ if RUBY_VERSION[0, 3] == "1.8"
+ payload
+ else
+ payload.to_s.force_encoding(Encoding::ASCII_8BIT)
+ end
+ end
+
+ def compress_payload
+ case compression
+ when GZIP_COMPRESSION
+ gzip
+ when SNAPPY_COMPRESSION
+ snappy
+ end
+ end
+
+ def gzip
+ with_buffer do |buffer|
+ gz = Zlib::GzipWriter.new buffer, nil, nil
+ gz.write payload
+ gz.close
+ end
+ end
+
+ def snappy
+ ensure_snappy! do
+ with_buffer do |buffer|
+ Snappy::Writer.new buffer do |w|
+ w << payload
+ end
+ end
+ end
+ end
+
+ def with_buffer
+ buffer = StringIO.new
+ buffer.set_encoding Encoding::ASCII_8BIT unless RUBY_VERSION =~ /^1\.8/
+ yield buffer if block_given?
+ buffer.rewind
+ buffer.string
+ end
+ end
end
+
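
A short, illustrative look at the framing `Message#encode` produces for the default (uncompressed) case:

    msg  = Kafka::Message.new("hello")
    data = msg.encode   # defaults to Message::NO_COMPRESSION

    # Framing: <4-byte length><1-byte magic = 0><4-byte CRC32><payload>
    data[0, 4].unpack("N").first   # => 10  (1 + 4 + "hello".bytesize)
    data[4, 1].unpack("C").first   # => 0   (MAGIC_IDENTIFIER_DEFAULT)

Compressed frames (magic 1 plus an attributes byte) are normally built through `Encoder.message_set`, so that the decompressed payload is itself a parseable message set rather than a bare string.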
lib/kafka/multi_producer.rb (5 lines changed)
@@ -19,16 +19,17 @@ class MultiProducer
def initialize(options={})
self.host = options[:host] || HOST
self.port = options[:port] || PORT
+ self.compression = options[:compression] || Message::NO_COMPRESSION
self.connect(self.host, self.port)
end
def send(topic, messages, options={})
partition = options[:partition] || 0
- self.write(Encoder.produce(topic, partition, messages))
+ self.write(Encoder.produce(topic, partition, messages, compression))
end
def multi_send(producer_requests)
- self.write(Encoder.multiproduce(producer_requests))
+ self.write(Encoder.multiproduce(producer_requests, compression))
end
end
end
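
Putting the MultiProducer changes together, a hedged usage sketch (topic names are placeholders; the broker defaults to localhost:9092):

    producer = Kafka::MultiProducer.new(
      :compression => Kafka::Message::GZIP_COMPRESSION
    )

    # Single-topic send; the producer's codec is passed through to the encoder.
    producer.send("events", Kafka::Message.new("hello"), :partition => 0)

    # Several topic/partition requests in one MULTIPRODUCE call.
    producer.multi_send([
      Kafka::ProducerRequest.new("events", Kafka::Message.new("one")),
      Kafka::ProducerRequest.new("clicks", Kafka::Message.new("two"), :partition => 1)
    ])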
lib/kafka/producer.rb (11 lines changed)
@@ -20,15 +20,16 @@ class Producer
attr_accessor :topic, :partition
def initialize(options = {})
- self.topic = options[:topic] || "test"
- self.partition = options[:partition] || 0
- self.host = options[:host] || HOST
- self.port = options[:port] || PORT
+ self.topic = options[:topic] || "test"
+ self.partition = options[:partition] || 0
+ self.host = options[:host] || HOST
+ self.port = options[:port] || PORT
+ self.compression = options[:compression] || Message::NO_COMPRESSION
self.connect(self.host, self.port)
end
def send(messages)
- self.write(Encoder.produce(self.topic, self.partition, messages))
+ self.write(Encoder.produce(self.topic, self.partition, messages, compression))
end
def batch(&block)
spec/encoder_spec.rb (84 lines changed)
@@ -39,12 +39,73 @@ def check_message(bytes, message)
encoded = described_class.message(message)
message = Kafka::Message.parse_from(encoded).messages.first
if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
- ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
- ic.iconv(message.payload).should eql("ümlaut")
+ #ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
+ #ic.iconv(message.payload).should eql("ümlaut")
+ message.payload.should eql("ümlaut")
else
message.payload.force_encoding(Encoding::UTF_8).should eql("ümlaut")
end
end
+
+ it "should encode strings containing non-ASCII characters" do
+ message = Kafka::Message.new("\214")
+ encoded = described_class.message(message)
+ message = Kafka::Message.parse_from(encoded).messages.first
+ if RUBY_VERSION[0,3] == "1.8"
+ message.payload.should eql("\214")
+ else
+ message.payload.force_encoding(Encoding::UTF_8).should eql("\214")
+ end
+ end
+ end
+
+ describe :compression do
+ before do
+ @message = Kafka::Message.new "foo"
+ end
+
+ it "should default to no compression" do
+ msg = "foo"
+ checksum = Zlib.crc32 msg
+ magic = 0
+ msg_size = 5 + msg.size
+ raw = [msg_size, magic, checksum, msg].pack "NCNa#{msg.size}"
+
+ Encoder.message(@message).should == raw
+ end
+
+ it "should support GZip compression" do
+ buffer = StringIO.new
+ gz = Zlib::GzipWriter.new buffer, nil, nil
+ gz.write "foo"
+ gz.close
+ buffer.rewind
+ msg = buffer.string
+ checksum = Zlib.crc32 msg
+ magic = 1
+ attrs = 1
+ msg_size = 6 + msg.size
+ raw = [msg_size, magic, attrs, checksum, msg].pack "NCCNa#{msg.size}"
+ Encoder.message(@message, 1).should == raw
+ end
+
+ if Object.const_defined? "Snappy"
+ it "should support Snappy compression" do
+ buffer = StringIO.new
+ Snappy::Writer.new buffer do |w|
+ w << "foo"
+ end
+ buffer.rewind
+ msg = buffer.string
+ checksum = Zlib.crc32 msg
+ magic = 1
+ attrs = 2
+ msg_size = 6 + msg.size
+ raw = [msg_size, magic, attrs, checksum, msg].pack "NCCNa#{msg.size}"
+
+ Encoder.message(@message, 2).should == raw
+ end
+ end
end
describe "produce" do
@@ -75,6 +136,23 @@ def check_message(bytes, message)
end
end
+ describe "message_set" do
+ it "should compress messages into a message set" do
+ message_one = Kafka::Message.new "foo"
+ message_two = Kafka::Message.new "bar"
+ bytes = described_class.message_set [message_one, message_two], Kafka::Message::GZIP_COMPRESSION
+
+ messages = Kafka::Message.parse_from bytes
+ messages.should be_a Kafka::Message::MessageSet
+ messages.messages.size.should == 2
+
+ messages.messages[0].should be_a Kafka::Message
+ messages.messages[0].payload.should == "foo"
+ messages.messages[1].should be_a Kafka::Message
+ messages.messages[1].payload.should == "bar"
+ end
+ end
+
describe "multiproduce" do
it "encodes an empty request" do
bytes = described_class.multiproduce([])
@@ -135,7 +213,7 @@ def check_message(bytes, message)
messages = [Kafka::Message.new("ale"), Kafka::Message.new("beer")]
bytes = described_class.multiproduce([
Kafka::ProducerRequest.new("test", messages[0]),
- Kafka::ProducerRequest.new("topic", messages[1], partition: 1),
+ Kafka::ProducerRequest.new("topic", messages[1], :partition => 1),
])
req_length = bytes[0, 4].unpack("N").shift
spec/kafka_spec.rb (1 line changed)
@@ -15,7 +15,6 @@
require File.dirname(__FILE__) + '/spec_helper'
describe Kafka do
-
before(:each) do
end
end
spec/message_spec.rb (82 lines changed)
@@ -16,6 +16,10 @@
describe Message do
+ def pack_v1_message bytes, attributes
+ [6 + bytes.length, 1, attributes, Zlib.crc32(bytes), bytes].pack "NCCNa*"
+ end
+
before(:each) do
@message = Message.new
end
@@ -120,7 +124,36 @@
message.payload.should == 'abracadabra'
end
- it "should recursively parse nested compressed messages" do
+ if Object.const_defined? "Snappy"
+ it "should parse a snappy-compressed message" do
+ cleartext = "abracadabra"
+ bytes = pack_v1_message cleartext, 0
+ compressed = Snappy.deflate(bytes)
+ bytes = pack_v1_message compressed, 2
+ message = Message.parse_from(bytes).messages.first
+ message.should be_valid
+ message.payload.should == cleartext
+ end
+
+ it "should recursively parse nested snappy compressed messages" do
+ uncompressed = pack_v1_message('abracadabra', 0)
+ uncompressed << pack_v1_message('foobar', 0)
+ compressed = pack_v1_message(Snappy.deflate(uncompressed), 2)
+ messages = Message.parse_from(compressed).messages
+ messages.map(&:payload).should == ['abracadabra', 'foobar']
+ messages.map(&:valid?).should == [true, true]
+ end
+
+ it "should support a mixture of snappy compressed and uncompressed messages" do
+ bytes = pack_v1_message(Snappy.deflate(pack_v1_message("compressed", 0)), 2)
+ bytes << pack_v1_message('uncompressed', 0)
+ messages = Message.parse_from(bytes).messages
+ messages.map(&:payload).should == ["compressed", "uncompressed"]
+ messages.map(&:valid?).should == [true, true]
+ end
+ end
+
+ it "should recursively parse nested gzip compressed messages" do
uncompressed = [17, 1, 0, 401275319, 'abracadabra'].pack('NCCNa*')
uncompressed << [12, 1, 0, 2666930069, 'foobar'].pack('NCCNa*')
compressed_io = StringIO.new('')
@@ -132,7 +165,7 @@
messages.map(&:valid?).should == [true, true]
end
- it "should support a mixture of compressed and uncompressed messages" do
+ it "should support a mixture of gzip compressed and uncompressed messages" do
compressed = 'H4sIAG0LI1AAA2NgYBBkZBB/9XN7YlJRYnJiCogCAH9lueQVAAAA'.unpack('m*').shift
bytes = [45, 1, 1, 1303540914, compressed].pack('NCCNa*')
bytes << [11, 1, 0, 907060870, 'hello'].pack('NCCNa*')
@@ -142,10 +175,53 @@
end
it "should raise an error if the compression codec is not supported" do
- bytes = [6, 1, 2, 0, ''].pack('NCCNa*') # 2 = Snappy codec
+ bytes = [6, 1, 3, 0, ''].pack('NCCNa*') # 3 = some unknown future compression codec
lambda {
Kafka::Message.parse_from(bytes)
}.should raise_error(RuntimeError, /Unsupported Kafka compression codec/)
end
end
+
+ describe "#ensure_snappy!" do
+ let(:message) { Kafka::Message.new }
+ before { Kafka::Message.instance_variable_set :@snappy, nil }
+
+ subject { message.ensure_snappy! { 42 } }
+
+ if Object.const_defined? "Snappy"
+ context "when snappy is available" do
+ before { Object.stub! :const_defined? => true }
+ it { should == 42 }
+ end
+ end
+
+ context "when snappy is not available" do
+ before { Object.stub! :const_defined? => false }
+
+ it "raises an error" do
+ expect { message.ensure_snappy! { 42 } }.to raise_error
+ end
+ end
+ end
+
+ describe ".ensure_snappy!" do
+ before { Kafka::Message.instance_variable_set :@snappy, nil }
+
+ subject { Kafka::Message.ensure_snappy! { 42 } }
+
+ if Object.const_defined? "Snappy"
+ context "when snappy is available" do
+ before { Object.stub! :const_defined? => true }
+ it { should == 42 }
+ end
+ end
+
+ context "when snappy is not available" do
+ before { Object.stub! :const_defined? => false }
+
+ it "raises an error" do
+ expect { Kafka::Message.ensure_snappy! { 42 } }.to raise_error
+ end
+ end
+ end
end
spec/multi_producer_spec.rb (26 lines changed)
@@ -27,12 +27,18 @@
subject.port.should eql(9092)
end
+ it "should have compression" do
+ subject.should respond_to :compression
+ described_class.new(:compression => Kafka::Message::SNAPPY_COMPRESSION).compression.should == Kafka::Message::SNAPPY_COMPRESSION
+ described_class.new.compression.should == Kafka::Message::NO_COMPRESSION
+ end
+
it "sends single messages" do
message = Kafka::Message.new("ale")
encoded = Kafka::Encoder.produce("test", 0, message)
subject.should_receive(:write).with(encoded).and_return(encoded.length)
- subject.send("test", message, partition: 0).should == encoded.length
+ subject.send("test", message, :partition => 0).should == encoded.length
end
it "sends multiple messages" do
@@ -46,5 +52,23 @@
subject.should_receive(:write).with(encoded).and_return(encoded.length)
subject.multi_send(reqs).should == encoded.length
end
+
+ it "should compress messages" do
+ subject.compression = Kafka::Message::SNAPPY_COMPRESSION
+ @mocked_socket.stub! :write => 0
+ messages = [Kafka::Message.new("ale"), Kafka::Message.new("beer")]
+
+ encoded = Encoder.produce("test", 0, messages[0])
+ Encoder.should_receive(:produce).with("test", 0, messages[0], subject.compression).and_return encoded
+ subject.send("test", messages[0], :partition => 0)
+
+ reqs = [
+ Kafka::ProducerRequest.new("topic", messages[0]),
+ Kafka::ProducerRequest.new("topic", messages[1]),
+ ]
+ encoded = Encoder.multiproduce(reqs)
+ Encoder.should_receive(:multiproduce).with(reqs, subject.compression)
+ subject.multi_send(reqs)
+ end
end
end
spec/producer_request_spec.rb (2 lines changed)
@@ -32,7 +32,7 @@
end
it "can use a user-specified partition" do
- req = described_class.new("topic", message, partition: 42)
+ req = described_class.new("topic", message, :partition => 42)
req.partition.should == 42
end
end
spec/producer_spec.rb (6 lines changed)
@@ -30,6 +30,12 @@
@producer.should respond_to(:partition)
end
+ it "should have compression" do
+ @producer.should respond_to :compression
+ Producer.new(:compression => 1).compression.should == 1
+ Producer.new.compression.should == 0
+ end
+
it "should set a topic and partition on initialize" do
@producer = Producer.new({ :host => "localhost", :port => 9092, :topic => "testing" })
@producer.topic.should eql("testing")