
Commit

Update with new Event APIs
suyograo committed May 3, 2016
1 parent 7b6010e commit 2024c44
Showing 5 changed files with 46 additions and 38 deletions.
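
The substance of the change is the move from the hash-style event accessors to the explicit get/set methods of the 2.x plugin API (see the logstash-core-plugin-api bump in the gemspec below). A minimal before/after sketch of the pattern, assuming a Logstash environment that provides LogStash::Event on the load path; the values are illustrative only:

# Sketch of the accessor change applied throughout this commit.
require "logstash/event"

event = LogStash::Event.new

# Old accessor style, removed in this commit:
#   event["message"] = Socket.gethostname
#   hostname = event["message"]

# New accessor style used by the updated filter and specs:
event.set("message", "example-host")
event.get("message")   # => "example-host"
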
10 changes: 7 additions & 3 deletions .travis.yml
@@ -1,7 +1,11 @@
---
+sudo: false
language: ruby
cache: bundler
rvm:
-- jruby-1.7.23
-script:
-- bundle exec rspec spec
+- jruby-1.7.25
+script:
+- bundle exec rspec spec
+jdk: oraclejdk8
+before_install:
+- git clone -b feature/event_interface https://github.com/elastic/logstash
6 changes: 5 additions & 1 deletion Gemfile
@@ -1,2 +1,6 @@
source 'https://rubygems.org'
-gemspec
+gemspec
+gem "logstash-core", :path => "./logstash/logstash-core"
+gem "logstash-core-plugin-api", :path => "./logstash/logstash-core-plugin-api"
+gem "logstash-core-event-java", :path => "./logstash/logstash-core-event-java"
+gem "logstash-devutils", :github => "elastic/logstash-devutils", :branch => "feature/plugin-api-2_0"
20 changes: 10 additions & 10 deletions lib/logstash/filters/metrics.rb
@@ -190,7 +190,7 @@ def flush(options = {})
return unless should_flush?

event = LogStash::Event.new
event["message"] = Socket.gethostname
event.set("message", Socket.gethostname)
@metric_meters.each_pair do |name, metric|
flush_rates event, name, metric
metric.clear if should_clear?
@@ -199,14 +199,14 @@ def flush(options = {})
@metric_timers.each_pair do |name, metric|
flush_rates event, name, metric
# These 4 values are not sliding, so they probably are not useful.
event["[#{name}][min]"] = metric.min
event["[#{name}][max]"] = metric.max
event.set("[#{name}][min]", metric.min)
event.set("[#{name}][max]", metric.max)
# timer's stddev currently returns variance, fix it.
event["[#{name}][stddev]"] = metric.stddev ** 0.5
event["[#{name}][mean]"] = metric.mean
event.set("[#{name}][stddev]", metric.stddev ** 0.5)
event.set("[#{name}][mean]", metric.mean)

@percentiles.each do |percentile|
event["[#{name}][p#{percentile}]"] = metric.snapshot.value(percentile / 100.0)
event.set("[#{name}][p#{percentile}]", metric.snapshot.value(percentile / 100.0))
end
metric.clear if should_clear?
end
@@ -237,10 +237,10 @@ def periodic_flush
private

def flush_rates(event, name, metric)
event["[#{name}][count]"] = metric.count
event["[#{name}][rate_1m]"] = metric.one_minute_rate if @rates.include? 1
event["[#{name}][rate_5m]"] = metric.five_minute_rate if @rates.include? 5
event["[#{name}][rate_15m]"] = metric.fifteen_minute_rate if @rates.include? 15
event.set("[#{name}][count]", metric.count)
event.set("[#{name}][rate_1m]", metric.one_minute_rate) if @rates.include? 1
event.set("[#{name}][rate_5m]", metric.five_minute_rate) if @rates.include? 5
event.set("[#{name}][rate_15m]", metric.fifteen_minute_rate) if @rates.include? 15
end

def metric_key(key)
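
A note on the values set above: flush and flush_rates write through Logstash field-reference strings such as "[#{name}][count]", so a single set call produces a nested structure that the specs further down read back with get("http_200")["count"]. A small sketch of that round trip, again assuming LogStash::Event is available and using an illustrative metric name:

# Sketch of the field-reference behavior relied on by the flush path and the specs.
require "logstash/event"

event = LogStash::Event.new

# Writing through a field reference creates the nested structure...
event.set("[http_200][count]", 2)
event.set("[http_200][rate_1m]", 0.5)

# ...which can be read back either whole or per sub-field.
event.get("http_200")            # => hash-like map: {"count"=>2, "rate_1m"=>0.5}
event.get("[http_200][count]")   # => 2
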
2 changes: 1 addition & 1 deletion logstash-filter-metrics.gemspec
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }

# Gem dependencies
-s.add_runtime_dependency "logstash-core-plugin-api", "~> 1.0"
+s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"
s.add_runtime_dependency "metriks" #(MIT license)
s.add_runtime_dependency "thread_safe"

46 changes: 23 additions & 23 deletions spec/filters/metrics_spec.rb
@@ -29,16 +29,16 @@

it "should flush counts" do
insist { subject.length } == 1
insist { subject.first["http_200"]["count"] } == 2
insist { subject.first["http_404"]["count"] } == 1
insist { subject.first.get("http_200")["count"] } == 2
insist { subject.first.get("http_404")["count"] } == 1
end

it "should include rates and percentiles" do
meters = [ "http_200", "http_404" ]
rates = [ "rate_1m", "rate_5m", "rate_15m" ]
meters.each do |meter|
rates.each do |rate|
-insist { subject.first[meter] }.include? rate
+insist { subject.first.get(meter) }.include? rate
end
end
end
@@ -56,8 +56,8 @@
events = filter.flush
events = filter.flush
insist { events.length } == 1
insist { events.first["http_200"]["count"] } == 2
insist { events.first["http_404"]["count"] } == 1
insist { events.first.get("http_200")["count"] } == 2
insist { events.first.get("http_404")["count"] } == 1
end
end
end
@@ -78,7 +78,7 @@
}

it "should include only the requested rates" do
rate_fields = subject.first["http_200"].to_hash.keys.select {|field| field.start_with?("rate") }
rate_fields = subject.first.get("http_200").to_hash.keys.select {|field| field.start_with?("rate") }
insist { rate_fields.length } == 1
insist { rate_fields }.include? "rate_1m"
end
@@ -114,10 +114,10 @@
events1 = filter1.flush
events2 = filter2.flush

insist { events1.first["http_200"]["count"] } == 1
insist { events2.first["http_200"]["count"] } == 2
insist { events1.first["http_404"]["count"] } == 1
insist { events2.first["http_404"] } == nil
insist { events1.first.get("http_200")["count"] } == 1
insist { events2.first.get("http_200")["count"] } == 2
insist { events1.first.get("http_404")["count"] } == 1
insist { events2.first.get("http_404") } == nil
end
end

@@ -135,34 +135,34 @@

it "should flush counts" do
insist { subject.length } == 1
insist { subject.first["http_request_time"]["count"] } == 3
insist { subject.first.get("http_request_time")["count"] } == 3
end

it "should include rates and percentiles keys" do
metrics = ["rate_1m", "rate_5m", "rate_15m", "p1", "p5", "p10", "p90", "p95", "p99"]
metrics.each do |metric|
insist { subject.first["http_request_time"] }.include? metric
insist { subject.first.get("http_request_time") }.include? metric
end
end

it "should include min value" do
-insist { subject.first['http_request_time']['min'] } == 10.0
+insist { subject.first.get("http_request_time")['min'] } == 10.0
end

it "should include mean value" do
-insist { subject.first['http_request_time']['mean'] } == 20.0
+insist { subject.first.get("http_request_time")['mean'] } == 20.0
end

it "should include stddev value" do
-insist { subject.first['http_request_time']['stddev'] } == Math.sqrt(10.0)
+insist { subject.first.get("http_request_time")['stddev'] } == Math.sqrt(10.0)
end

it "should include max value" do
-insist { subject.first['http_request_time']['max'] } == 30.0
+insist { subject.first.get("http_request_time")['max'] } == 30.0
end

it "should include percentile value" do
-insist { subject.first['http_request_time']['p99'] } == 30.0
+insist { subject.first.get("http_request_time")['p99'] } == 30.0
end
end
end
@@ -183,17 +183,17 @@

it "should flush counts" do
insist { subject.length } == 1
insist { subject.first["http_request_time"]["count"] } == 1
insist { subject.first.get("http_request_time")["count"] } == 1
end

it "should include only the requested rates" do
rate_fields = subject.first["http_request_time"].to_hash.keys.select {|field| field.start_with?("rate") }
rate_fields = subject.first.get("http_request_time").to_hash.keys.select {|field| field.start_with?("rate") }
insist { rate_fields.length } == 1
insist { rate_fields }.include? "rate_1m"
end

it "should include only the requested percentiles" do
percentile_fields = subject.first["http_request_time"].to_hash.keys.select {|field| field.start_with?("p") }
percentile_fields = subject.first.get("http_request_time").to_hash.keys.select {|field| field.start_with?("p") }
insist { percentile_fields.length } == 2
insist { percentile_fields }.include? "p1"
insist { percentile_fields }.include? "p2"
@@ -225,9 +225,9 @@
filter.register
filter.filter LogStash::Event.new({"response" => 200})

insist { filter.flush.first["http_200"]["count"] } == 1 # 5s
insist { filter.flush.first["http_200"]["count"] } == 1 # 10s
insist { filter.flush.first["http_200"]["count"] } == 1 # 15s
insist { filter.flush.first.get("http_200")["count"] } == 1 # 5s
insist { filter.flush.first.get("http_200")["count"] } == 1 # 10s
insist { filter.flush.first.get("http_200")["count"] } == 1 # 15s
insist { filter.flush }.nil? # 20s
end
end
