use single quotes where possible #4

Merged
merged 1 commit

2 participants

Jason Dixon, Michael Gorsuch
Jason Dixon
Owner

/cc @gorsuch

Michael Gorsuch
Collaborator

:metal:

Michael Gorsuch (gorsuch) merged commit 120a181
Commits on Sep 25, 2012
2  config.ru
@@ -1,4 +1,4 @@
$:.unshift File.dirname(__FILE__) + '/lib'
-require "backstop/web"
+require 'backstop/web'
run Backstop::Application
4 config/newrelic.yml
@@ -8,8 +8,8 @@ production:
apdex_t: 0.5
ssl: true
monitor_mode: true
- license_key: <%= ENV["NEW_RELIC_LICENSE_KEY"] %>
+ license_key: <%= ENV['NEW_RELIC_LICENSE_KEY'] %>
developer_mode: false
- app_name: <%= ENV["NEW_RELIC_APP_NAME"] %>
+ app_name: <%= ENV['NEW_RELIC_APP_NAME'] %>
capture_params: false
log_level: info
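
Note that the quotes here sit inside ERB tags, so the change is to the embedded Ruby, not to the YAML; the rendered config is identical either way. A minimal sketch of how one of these tags evaluates (the template string and ENV value below are hypothetical):

    require 'erb'

    ENV['NEW_RELIC_APP_NAME'] ||= 'backstop'                 # hypothetical value
    template = "app_name: <%= ENV['NEW_RELIC_APP_NAME'] %>"  # same shape as the line above
    puts ERB.new(template).result                            # prints: app_name: backstop
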
2  lib/backstop/collectd/parser.rb
@@ -4,7 +4,7 @@ class CollectdData
# ALL PLUGIN CHECKS ARE EXPECTED TO RETURN AN ARRAY OF HASHES OR AN EMPTY ARRAY
Dir[File.dirname(__FILE__) + '/plugins/*.rb'].each do |file|
- f = File.basename(file).gsub(/\.rb/, "")
+ f = File.basename(file).gsub(/\.rb/, '')
require "backstop/collectd/plugins/#{f}"
end
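
The hunk above also marks the boundary of this change: plain literals such as the gsub replacement move to single quotes, while the require path keeps double quotes because #{f} only interpolates inside them. A minimal standalone illustration of that rule, with a hypothetical value:

    name = 'conntrack'         # hypothetical plugin name
    puts 'plugins/#{name}'     # prints plugins/#{name} -- single quotes do not interpolate
    puts "plugins/#{name}"     # prints plugins/conntrack -- interpolation needs double quotes
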
2  lib/backstop/collectd/plugins/conntrack.rb
@@ -2,7 +2,7 @@ class CollectdData
# conntrack stats
def parse_plugin_conntrack
[{
- metric: "conntrack.connections",
+ metric: 'conntrack.connections',
value: data['values'][0]
}]
end
10 lib/backstop/collectd/plugins/processes.rb
@@ -3,11 +3,11 @@ def parse_plugin_processes
# matches specific proceses
if !data['plugin_instance'].empty?
ps_value_map = {
- "ps_count" => ["num_proc", "num_thread"],
- "ps_disk_ops" => ["read", "write"],
- "ps_disk_octets" => ["read", "write"],
- "ps_pagefaults" => ["minor", "major"],
- "ps_cputime" => ["user", "system"]
+ 'ps_count' => ['num_proc', 'num_thread'],
+ 'ps_disk_ops' => ['read', 'write'],
+ 'ps_disk_octets' => ['read', 'write'],
+ 'ps_pagefaults' => ['minor', 'major'],
+ 'ps_cputime' => ['user', 'system']
}
if (map = ps_value_map[data['type']])
8 lib/backstop/config.rb
@@ -4,9 +4,9 @@ def self.env!(key)
ENV[key] || raise("missing #{key}")
end
- def self.deploy; env!("DEPLOY"); end
- def self.port; env!("PORT").to_i; end
- def self.carbon_urls; env!("CARBON_URLS").split(","); end
- def self.prefixes; env!("PREFIXES").split(","); end
+ def self.deploy; env!('DEPLOY'); end
+ def self.port; env!('PORT').to_i; end
+ def self.carbon_urls; env!('CARBON_URLS').split(','); end
+ def self.prefixes; env!('PREFIXES').split(','); end
end
end
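
For reference, a hedged sketch of how these helpers behave, assuming the module path is Backstop::Config and the lib directory is on the load path (the ENV values below are made up):

    require 'backstop/config'

    ENV['PORT'] = '5000'
    ENV['CARBON_URLS'] = 'carbon://10.0.0.1:2003,carbon://10.0.0.2:2003'

    Backstop::Config.port         # => 5000
    Backstop::Config.carbon_urls  # => ["carbon://10.0.0.1:2003", "carbon://10.0.0.2:2003"]
    Backstop::Config.deploy       # raises "missing DEPLOY" while DEPLOY is unset
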
42 lib/backstop/web.rb
@@ -20,66 +20,66 @@ def publisher
end
get '/health' do
- {"health" => "ok"}.to_json
+ {'health' => 'ok'}.to_json
end
post '/collectd' do
begin
data = JSON.parse(request.body.read)
rescue JSON::ParserError
- halt 400, "JSON is required"
+ halt 400, 'JSON is required'
end
data.each do |item|
results = CollectdData.new(item).parse
results.each do |r|
- r["source"] = "collectd"
- halt 400, "missing fields" unless (r[:cloud] && r[:slot] && r[:id] && r[:metric] && r[:value] && r[:measure_time])
- r[:cloud].gsub!(/\./, "-")
+ r['source'] = 'collectd'
+ halt 400, 'missing fields' unless (r[:cloud] && r[:slot] && r[:id] && r[:metric] && r[:value] && r[:measure_time])
+ r[:cloud].gsub!(/\./, '-')
publisher.publish("mitt.#{r[:cloud]}.#{r[:slot]}.#{r[:id]}.#{r[:metric]}", r[:value], r[:measure_time])
end
end
- "ok"
+ 'ok'
end
post '/github' do
begin
data = JSON.parse(params[:payload])
rescue JSON::ParserError
- halt 400, "JSON is required"
+ halt 400, 'JSON is required'
end
- halt 400, "missing fields" unless (data['repository'] && data['commits'])
- data["source"] = "github"
- data["ref"].gsub!(/\//, ".")
- data["commits"].each do |commit|
+ halt 400, 'missing fields' unless (data['repository'] && data['commits'])
+ data['source'] = 'github'
+ data['ref'].gsub!(/\//, '.')
+ data['commits'].each do |commit|
repo = data['repository']['name']
- author = commit['author']['email'].gsub(/[\.@]/, "-")
- measure_time = DateTime.parse(commit["timestamp"]).strftime("%s")
+ author = commit['author']['email'].gsub(/[\.@]/, '-')
+ measure_time = DateTime.parse(commit['timestamp']).strftime('%s')
publisher.publish("#{data['source']}.#{repo}.#{data['ref']}.#{author}.#{commit['id']}", 1, measure_time)
end
- "ok"
+ 'ok'
end
post '/publish/:name' do
begin
data = JSON.parse(request.body.read)
rescue JSON::ParserError
- halt 400, "JSON is required"
+ halt 400, 'JSON is required'
end
if Config.prefixes.include?(params[:name])
if data.kind_of? Array
data.each do |item|
- item["source"] = params[:name]
- halt 400, "missing fields" unless (item['metric'] && item['value'] && item['measure_time'])
+ item['source'] = params[:name]
+ halt 400, 'missing fields' unless (item['metric'] && item['value'] && item['measure_time'])
publisher.publish("#{item['source']}.#{item['metric']}", item['value'], item['measure_time'])
end
else
- data["source"] = params[:name]
- halt 400, "missing fields" unless (data['metric'] && data['value'] && data['measure_time'])
+ data['source'] = params[:name]
+ halt 400, 'missing fields' unless (data['metric'] && data['value'] && data['measure_time'])
publisher.publish("#{data['source']}.#{data['metric']}", data['value'], data['measure_time'])
end
- "ok"
+ 'ok'
else
- halt 404, "unknown prefix"
+ halt 404, 'unknown prefix'
end
end
end
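
For context, a hedged example of exercising the /publish/:name route above with net/http; the host, port, and the 'custom' prefix are assumptions, and the route answers "ok" only when the prefix is listed in PREFIXES and metric, value, and measure_time are all present:

    require 'json'
    require 'net/http'

    uri = URI('http://localhost:5000/publish/custom')   # hypothetical host, port, and prefix
    payload = { 'metric' => 'deploys', 'value' => 1, 'measure_time' => Time.now.to_i }

    res = Net::HTTP.post(uri, payload.to_json, 'Content-Type' => 'application/json')
    puts res.body   # "ok" on success; 400/404 otherwise with the messages shown above
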
4 spec/backstop/publisher_spec.rb
@@ -15,7 +15,7 @@
TCPSocket.should_receive(:new).with('10.0.0.1', 5000) { socket_double }
b = Backstop::Publisher.new(urls)
- socket_double.should_receive(:puts).with("foo 1 1")
+ socket_double.should_receive(:puts).with('foo 1 1')
b.publish('foo', 1, 1)
end
@@ -26,7 +26,7 @@
b = Backstop::Publisher.new(urls)
Time.should_receive(:now) { 12345 }
- socket_double.should_receive(:puts).with("foo 1 12345")
+ socket_double.should_receive(:puts).with('foo 1 12345')
b.publish('foo', 1)
end
end
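
For reference, a hedged usage sketch matching the expectations above: the publisher writes plaintext "name value timestamp" lines to each carbon socket and fills in Time.now when the timestamp is omitted (the URL format is an assumption based on CARBON_URLS):

    require 'backstop/publisher'

    publisher = Backstop::Publisher.new(['carbon://10.0.0.1:5000'])  # hypothetical carbon URL
    publisher.publish('foo', 1, 1)   # writes "foo 1 1"
    publisher.publish('foo', 1)      # writes "foo 1 <now>"
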