
Merge pull request #3 from bcantin/master

Stresser updated to Ruby 1.9.x
2 parents 72f862b + 41b3b29 · commit 6b95db6e88dd247c6b724b68de052a7dbee3bd98 · @jayniz committed on Mar 21, 2012
Showing with 28 additions and 22 deletions.
  1. +18 −18 lib/grapher.rb
  2. +10 −4 lib/mp_perf.rb
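
In short, the port swaps Ruport's table helpers for the csv library that ships with Ruby 1.9. A minimal before/after sketch of the shift, mirroring the diff below (the results.csv file name is illustrative only, not part of this repo):

    # Ruby 1.8 + Ruport
    table  = Table('results.csv')            # Ruport's Table() helper
    labels = table.column "rate"

    # Ruby 1.9 stdlib
    require 'csv'
    table  = CSV.table('results.csv')        # headers become symbols, values numeric
    labels = table.values_at(:rate).flatten  # column access returns per-row arrays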
lib/grapher.rb
@@ -1,6 +1,7 @@
require 'ruport'
require 'gruff'
require 'yaml'
+require 'csv'
$LOAD_PATH.unshift File.join(File.dirname(__FILE__), '..', 'lib')
@@ -44,15 +45,15 @@ def generate_reports(options)
def generate_report(report_type, csv_file, outfile)
puts "Generating #{report_type} to #{outfile}..."
columns = (reports[report_type] or reports[reports.keys.first])
- save_graph(csv_file, columns, outfile, :title => report_type)
+ save_graph(csv_file, columns, outfile, title: report_type)
end
#
# Creates and saves a graph
#
def save_graph(csv_file, columns, outfile, options = {})
# Draw graph
- g = graph(csv_file, columns, :title => options[:title] )
+ g = graph(csv_file, columns, title: options[:title] )
# Save graph
g.write(outfile)
@@ -61,41 +62,42 @@ def save_graph(csv_file, columns, outfile, options = {})
#
# Creates a graph from a csv file
#
+ # The headers are converted to symbols in the Ruby 1.9.X CSV library
def graph(csv_file, columns, options = {})
- table = Table(csv_file)
+ table = CSV.table(csv_file, headers: true)
# Prepare data structure
data = Hash.new
- labels = table.column "rate"
+
+ labels = table.values_at(:rate).flatten
columns.each_index do |i|
next unless i%2==0
- data[columns[i]] = table.column columns[i+1]
+ col_name = columns[i+1].gsub(' ','_').gsub('/','')
+ data[columns[i]] = table.values_at(col_name.to_sym).flatten
end
# Draw graph
- g = line_graph( options[:title], data, labels )
+ line_graph( options[:title], data, labels )
end
#
# Reads a YAML file that defines how reports are built
#
def reports(report = nil, yaml_file = File.join(File.dirname(__FILE__), "reports.yml"))
- y = YAML.load(File.read(yaml_file))
+ YAML.load(File.read(yaml_file))
end
protected
-
def line_graph(title, data, labels)
-
# Prepare line graph
g = Gruff::Line.new
g.title = title
set_defaults(g)
# Add datas
data.each do |name, values|
- g.data name, values.map(&:to_i)
+ g.data(name, values.map(&:to_i))
end
# Add labels
@@ -106,9 +108,9 @@ def line_graph(title, data, labels)
end
def to_hash(array)
- return array if array.class==Hash
+ return array if array.class == Hash
hash = Hash.new
- array.each_with_index{ |v, i| hash[i] = v }
+ array.each_with_index {|v, i| hash[i] = v.to_s }
hash
end
@@ -123,14 +125,12 @@ def set_defaults(g)
colors = %w{EFD279 95CBE9 024769 AFD775 2C5700 DE9D7F B6212D 7F5417}.map{|c| "\##{c}"}
g.theme = {
- :colors => colors,
- :marker_color => "#cdcdcd",
- :font_color => 'black',
- :background_colors => ['#fefeee', '#ffffff']
+ colors: colors,
+ marker_color: "#cdcdcd",
+ font_color: 'black',
+ background_colors: ['#fefeee', '#ffffff']
}
-
end
-
end
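
The column-name munging in graph (gsub(' ','_').gsub('/','')) is needed because CSV.table applies the :symbol header converter, which downcases headers, strips punctuation such as "/", and turns spaces into underscores. A runnable sketch of that behavior, using made-up header names and an in-memory string parsed with the same options CSV.table uses:

    require 'csv'

    data  = "rate,reply rate,conn/s\n10,9.8,100\n20,19.5,180\n"
    table = CSV.parse(data, headers: true,
                            converters: :numeric,
                            header_converters: :symbol)

    table.headers                        #=> [:rate, :reply_rate, :conns]
    table.values_at(:rate).flatten       #=> [10, 20]
    table.values_at(:reply_rate).flatten #=> [9.8, 19.5]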
lib/mp_perf.rb
@@ -2,7 +2,7 @@
require 'ruport'
require 'httperf'
require 'trollop'
-
+require 'csv'
#
# Takes command line options and attempts to make a benchmark.
@@ -87,7 +87,8 @@ def single_benchmark(conf)
def run_suite
results = {}
- report = nil
+ # report = nil
+ report = CSV::Table.new([])
(@conf['low_rate']..@conf['high_rate']).step(@conf['rate_step']) do |rate|
# Run httperf
@@ -98,10 +99,15 @@ def run_suite
puts "~"*80
# Init table unless it's there already
- report ||= Table(:column_names => ['rate'] + results[rate].keys.sort)
+ # report ||= CSV::Table.new(:column_names => ['rate'] + results[rate].keys.sort)
+ # table_headers ||= ['rate'] + results[rate].keys
+ table_headers ||= results[rate].keys + ['rate']
+ report[0] ||= CSV::Row.new(table_headers, [], true)
# Save results of this run
- report << results[rate].merge({'rate' => rate})
+ # report << results[rate].merge({'rate' => rate})
+ report_hash = results[rate].merge({'rate' => rate})
+ report << report_hash.values
# Try to keep old pending requests from influencing the next round
sleep(@conf['sleep_time'] || 0)
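
run_suite now accumulates results in a CSV::Table seeded with a CSV::Row header row instead of a Ruport table. A minimal sketch of that pattern in isolation (header names and values are made up for illustration):

    require 'csv'

    headers = ['status 200', 'rate']                 # illustrative headers
    report  = CSV::Table.new([])
    report[0] ||= CSV::Row.new(headers, [], true)    # true marks it as a header row

    # Appending a plain Array wraps it in a CSV::Row using the table's headers.
    report << [42, 10]
    report << [40, 20]

    puts report.to_csv
    # status 200,rate
    # 42,10
    # 40,20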
