fix to support gzip usage

1 parent a08fdb7 commit 62071f8c1f026184f057889c9b7f17096ca2ed11 @wandenberg committed Jan 6, 2013
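
What the spec changes below do, in short: the statistics response is now fetched as a raw stream (:decoding => false), the chunks are accumulated into actual_response, and when the server runs with gzip on the body is gunzipped with Zlib::GzipReader before being parsed as JSON. A minimal standalone sketch of that round trip, outside the spec harness (the sample payload here is made up for illustration; the specs use the real /channels-stats response):

  require 'zlib'
  require 'stringio'

  # Compress a sample stats payload, roughly what nginx does with gzip enabled.
  raw = '{"channels": 1, "published_messages": 1}'
  io = StringIO.new
  gz = Zlib::GzipWriter.new(io)
  gz.write(raw)
  gz.close
  compressed = io.string

  # Decompress it the same way the updated specs do when conf.gzip == "on".
  decoded = Zlib::GzipReader.new(StringIO.new(compressed)).read
  puts decoded == raw  # => true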
@@ -1,5 +1,6 @@
h1(#changelog). Changelog
+* Fix to support gzip usage
* Added the feature to send a custom 'channel delete message' on the body of the DELETE request
* Changed push_stream_channel_id variable to directive, and make possible set it inside an if block
* Changed push_stream_channels_path variable to directive, and make possible set it inside an if block
@@ -5,6 +5,7 @@
{}
end
+shared_examples_for "statistics location" do
it "should return 404 for a nonexistent channel" do
channel = 'ch_test_get_channel_statistics_whithout_created_channel'
@@ -22,16 +23,26 @@
it "should return channels statistics for an existent channel" do
channel = 'ch_test_get_channel_statistics_to_existing_channel'
body = 'body'
+ actual_response = ''
nginx_run_server(config) do |conf|
#create channel
publish_message(channel, headers, body)
EventMachine.run do
- pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=' + channel.to_s).get :head => headers
+ pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=' + channel.to_s).get :head => headers, :decoding => false
+ pub_2.stream do |chunk|
+ actual_response << chunk
+ end
pub_2.callback do
- pub_2.should be_http_status(200).with_body
- response = JSON.parse(pub_2.response)
+ pub_2.should be_http_status(200)
+
+ if (conf.gzip == "on")
+ pub_2.response_header["CONTENT_ENCODING"].should eql("gzip")
+ actual_response = Zlib::GzipReader.new(StringIO.new(actual_response)).read
+ end
+
+ response = JSON.parse(actual_response)
response["channel"].to_s.should eql(channel)
response["published_messages"].to_i.should eql(1)
response["stored_messages"].to_i.should eql(1)
@@ -50,7 +61,7 @@
create_channel_by_subscribe(channel, headers) do
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=' + channel.to_s).get :head => headers
pub_1.callback do
- pub_1.should be_http_status(200).with_body
+ pub_1.should be_http_status(200)
response = JSON.parse(pub_1.response)
response["channel"].to_s.should eql(channel)
response["published_messages"].to_i.should eql(0)
@@ -67,7 +78,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(0)
EventMachine.stop
@@ -79,16 +90,26 @@
it "should return detailed channels statistics for an existent channel" do
channel = 'ch_test_get_detailed_channels_statistics_to_existing_channel'
body = 'body'
+ actual_response = ''
nginx_run_server(config) do |conf|
#create channel
publish_message(channel, headers, body)
EventMachine.run do
- pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
+ pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers, :decoding => false
+ pub_2.stream do |chunk|
+ actual_response << chunk
+ end
pub_2.callback do
- pub_2.should be_http_status(200).with_body
- response = JSON.parse(pub_2.response)
+ pub_2.should be_http_status(200)
+
+ if (conf.gzip == "on")
+ pub_2.response_header["CONTENT_ENCODING"].should eql("gzip")
+ actual_response = Zlib::GzipReader.new(StringIO.new(actual_response)).read
+ end
+
+ response = JSON.parse(actual_response)
response["infos"].length.should eql(1)
response["infos"][0]["channel"].to_s.should eql(channel)
response["infos"][0]["published_messages"].to_i.should eql(1)
@@ -111,7 +132,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(1)
response["channels"].to_i.should eql(0)
@@ -134,7 +155,7 @@
create_channel_by_subscribe(channel, headers) do
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_1.callback do
- pub_1.should be_http_status(200).with_body
+ pub_1.should be_http_status(200)
response = JSON.parse(pub_1.response)
response["infos"].length.should eql(1)
response["infos"][0]["channel"].to_s.should eql(channel)
@@ -152,7 +173,7 @@
EventMachine.run do
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_1.callback do
- pub_1.should be_http_status(200).with_body
+ pub_1.should be_http_status(200)
response = JSON.parse(pub_1.response)
response.has_key?("channels").should be_true
response["channels"].to_i.should eql(0)
@@ -165,16 +186,26 @@
it "should return summarized channels statistics for an existent channel" do
channel = 'ch_test_get_summarized_channels_statistics_to_existing_channel'
body = 'body'
+ actual_response = ''
nginx_run_server(config) do |conf|
#create channel
publish_message(channel, headers, body)
EventMachine.run do
- pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
+ pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :decoding => false
+ pub_2.stream do |chunk|
+ actual_response << chunk
+ end
pub_2.callback do
- pub_2.should be_http_status(200).with_body
- response = JSON.parse(pub_2.response)
+ pub_2.should be_http_status(200)
+
+ if (conf.gzip == "on")
+ pub_2.response_header["CONTENT_ENCODING"].should eql("gzip")
+ actual_response = Zlib::GzipReader.new(StringIO.new(actual_response)).read
+ end
+
+ response = JSON.parse(actual_response)
response.has_key?("channels").should be_true
response["channels"].to_i.should eql(1)
response["published_messages"].to_i.should eql(1)
@@ -196,7 +227,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response.has_key?("channels").should be_true
response["channels"].to_i.should eql(0)
@@ -217,7 +248,7 @@
create_channel_by_subscribe(channel, headers) do
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_1.callback do
- pub_1.should be_http_status(200).with_body
+ pub_1.should be_http_status(200)
response = JSON.parse(pub_1.response)
response.has_key?("channels").should be_true
response["channels"].to_i.should eql(1)
@@ -325,7 +356,7 @@
body = 'body'
number_of_channels = 20000
- nginx_run_server(config.merge(:shared_memory_size => '200m', :keepalive => "on"), :timeout => 10) do |conf|
+ nginx_run_server(config.merge(:shared_memory_size => '200m', :keepalive => "on"), :timeout => 15) do |conf|
#create channels
0.step(number_of_channels - 1, 1000) do |i|
socket = open_socket(nginx_host, nginx_port)
@@ -339,7 +370,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(number_of_channels)
EventMachine.stop
@@ -353,7 +384,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=prefix_*').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(0)
EventMachine.stop
@@ -375,7 +406,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ch_test_*').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(1)
response["infos"][0]["channel"].to_s.should eql(channel)
@@ -401,7 +432,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=*').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(2)
response["infos"][0]["channel"].to_s.should eql(channel)
@@ -429,7 +460,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=bd_test_*').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(1)
response["channels"].to_i.should eql(0)
@@ -452,7 +483,7 @@
create_channel_by_subscribe(channel, headers) do
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ch_test_*').get :head => headers
pub_1.callback do
- pub_1.should be_http_status(200).with_body
+ pub_1.should be_http_status(200)
response = JSON.parse(pub_1.response)
response["infos"].length.should eql(1)
response["infos"][0]["channel"].to_s.should eql(channel)
@@ -470,7 +501,7 @@
body = 'body'
number_of_channels = 20000
- nginx_run_server(config.merge(:shared_memory_size => '200m', :keepalive => "on"), :timeout => 10) do |conf|
+ nginx_run_server(config.merge(:shared_memory_size => '200m', :keepalive => "on"), :timeout => 15) do |conf|
#create channels
0.step(number_of_channels - 1, 1000) do |i|
socket = open_socket(nginx_host, nginx_port)
@@ -484,7 +515,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ch_test_get_detailed_channels_statistics_to_many_channels_using_prefix_10*').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["infos"].length.should eql(1111)
EventMachine.stop
@@ -504,7 +535,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["hostname"].to_s.should_not be_empty
response["time"].to_s.should_not be_empty
@@ -516,7 +547,7 @@
sleep(2)
pub_3 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=ALL').get :head => headers
pub_3.callback do
- pub_3.should be_http_status(200).with_body
+ pub_3.should be_http_status(200)
response = JSON.parse(pub_3.response)
response["uptime"].to_i.should be_in_the_interval(2, 3)
EventMachine.stop
@@ -537,7 +568,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["hostname"].to_s.should_not be_empty
response["time"].to_s.should_not be_empty
@@ -553,7 +584,7 @@
sleep(2)
pub_3 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_3.callback do
- pub_3.should be_http_status(200).with_body
+ pub_3.should be_http_status(200)
response = JSON.parse(pub_3.response)
response["uptime"].to_i.should be_in_the_interval(2, 3)
response["by_worker"][0]["uptime"].to_i.should be_in_the_interval(2, 3)
@@ -575,15 +606,15 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["stored_messages"].to_i.should eql(1)
response["messages_in_trash"].to_i.should eql(0)
sleep(5)
pub_3 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_3.callback do
- pub_3.should be_http_status(200).with_body
+ pub_3.should be_http_status(200)
response = JSON.parse(pub_3.response)
response["stored_messages"].to_i.should eql(0)
response["messages_in_trash"].to_i.should eql(1)
@@ -607,7 +638,7 @@
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
- pub_2.should be_http_status(200).with_body
+ pub_2.should be_http_status(200)
response = JSON.parse(pub_2.response)
response["channels"].to_i.should eql(2)
response["broadcast_channels"].to_i.should eql(1)
@@ -621,7 +652,7 @@
pub_3 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_3.callback do
- pub_3.should be_http_status(200).with_body
+ pub_3.should be_http_status(200)
response = JSON.parse(pub_3.response)
response["channels"].to_i.should eql(1)
response["broadcast_channels"].to_i.should eql(1)
@@ -649,3 +680,30 @@
end
end
end
+
+ context "when getting statistics" do
+ context "without gzip" do
+ let(:config) do
+ {:gzip => "off"}
+ end
+
+ let(:headers) do
+ {'accept' => 'application/json'}
+ end
+
+ it_should_behave_like "statistics location"
+ end
+
+ context "with gzip" do
+ let(:config) do
+ {:gzip => "on"}
+ end
+
+ let(:headers) do
+ {'accept' => 'application/json', 'accept-encoding' => 'gzip, compressed'}
+ end
+
+ it_should_behave_like "statistics location"
+ end
+ end
+end
@@ -5,7 +5,9 @@ def self.default_configuration
:master_process => 'on',
:daemon => 'on',
- :content_type => 'text/html; charset=utf-8',
+ :gzip => 'off',
+
+ :content_type => 'text/html',
:keepalive => 'off',
:ping_message_interval => '10s',
@@ -83,6 +85,14 @@ def self.template_configuration
access_log <%= access_log %>;
+
+ gzip <%= gzip %>;
+ gzip_buffers 16 4k;
+ gzip_proxied any;
+ gzip_types text/plain text/css application/x-javascript text/xml application/xml application/xml+rss text/javascript application/json;
+ gzip_comp_level 9;
+ gzip_http_version 1.0;
+
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 100;