Skip to content

Commit

Permalink
Merge 88940b8 into ae11102
Browse files Browse the repository at this point in the history
  • Loading branch information
eagletmt committed Nov 20, 2013
2 parents ae11102 + 88940b8 commit 530426f
Show file tree
Hide file tree
Showing 4 changed files with 48 additions and 2 deletions.
22 changes: 22 additions & 0 deletions lib/twitter/streaming/buffered_tokenizer.rb
@@ -0,0 +1,22 @@
module Twitter
  module Streaming
    # Splits a stream of incoming data chunks into tokens separated by a
    # fixed delimiter, buffering any trailing partial token between calls
    # so a delimiter split across two chunks is still recognized.
    class BufferedTokenizer
      # @param delimiter [String] the token separator (e.g. "\r\n")
      def initialize(delimiter)
        @delimiter = delimiter
        @buffer = ""
      end

      # Appends +data+ to the internal buffer and returns every complete
      # token accumulated so far. Data after the last delimiter is retained
      # and prepended to the next call's input.
      #
      # @param data [String] the next chunk of raw input
      # @return [Array<String>] complete tokens; consecutive delimiters
      #   yield empty-string tokens (callers that treat blank lines as
      #   keep-alives should skip them — presumably Response does; verify)
      def extract(data)
        @buffer << data
        # Negative limit keeps trailing empty fields, so the last element
        # is always the (possibly empty) partial token after the final
        # delimiter. This also makes empty-token handling consistent:
        # without it, split drops trailing empty fields but keeps interior
        # ones.
        items = @buffer.split(@delimiter, -1)
        # split on an empty buffer yields []; fall back to "" so @buffer
        # never becomes nil (the previous version set @buffer = nil after
        # extract(""), crashing on the following call's @buffer << data).
        @buffer = items.pop || ""
        items
      end
    end
  end
end
2 changes: 1 addition & 1 deletion lib/twitter/streaming/response.rb
@@ -1,4 +1,4 @@
require 'buftok'
require 'twitter/streaming/buffered_tokenizer'

module Twitter
module Streaming
Expand Down
25 changes: 25 additions & 0 deletions spec/twitter/streaming/buffered_tokenizer_spec.rb
@@ -0,0 +1,25 @@
require 'helper'

# Specs for the streaming tokenizer: data arrives in arbitrary chunks and
# tokens are delimited by CRLF, as in Twitter's streaming HTTP responses.
describe Twitter::Streaming::BufferedTokenizer do
# Fresh tokenizer per example, using the CRLF delimiter the streaming API uses.
let(:tokenizer) { described_class.new("\r\n") }

describe '#extract' do
# Input with no delimiter is buffered internally; nothing is emitted yet.
it 'returns an empty array when no delimiter is given' do
expect(tokenizer.extract("foo bar")).to be_empty
end

# A chunk terminated by the delimiter yields the complete token.
it 'returns a token' do
expect(tokenizer.extract("foo\r\n")).to eq ["foo"]
end

# Buffered data from an earlier call is joined with the next chunk:
# "foo" + "bar\r\n" becomes the single token "foobar".
it 'returns multiple tokens' do
expect(tokenizer.extract("foo")).to be_empty
expect(tokenizer.extract("bar\r\nbaz\r\n")).to eq ["foobar", "baz"]
end

# The two-byte delimiter may itself be split across chunk boundaries
# ("\r" at the end of one chunk, "\n" at the start of the next).
it 'handles splitted delimiter' do
expect(tokenizer.extract("foo\r")).to be_empty
expect(tokenizer.extract("\n")).to eq ["foo"]
end
end
end
1 change: 0 additions & 1 deletion twitter.gemspec
Expand Up @@ -3,7 +3,6 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'twitter/version'

Gem::Specification.new do |spec|
spec.add_dependency 'buftok', '~> 0.1.0'
spec.add_dependency 'descendants_tracker', '~> 0.0.1'
spec.add_dependency 'equalizer', '~> 0.0.7'
spec.add_dependency 'faraday', ['>= 0.8', '< 0.10']
Expand Down

0 comments on commit 530426f

Please sign in to comment.