Fixing config / labels

1 parent c39b74e commit f847a3a4253b96ef19434efb9c83a349d7c9c063 @tomav tomav committed
Showing with 14 additions and 11 deletions.
  1. +6 −5 plugins/thin/thin_memory
  2. +6 −5 plugins/thin/thin_threads
  3. +2 −1 plugins/thin/thins_peak_memory
11 plugins/thin/thin_memory
@@ -81,11 +81,12 @@ when "config"
puts "graph_args --base 1024 -l 0"
puts "graph_scale yes"
puts "graph_info Tracks the size of individual thin processes"
- mpm.get_pids.sort.each do |pid|
- puts "thin_#{pid}.label thin_#{pid}"
- puts "thin_#{pid}.info Process memory"
- puts "thin_#{pid}.type GAUGE"
- puts "thin_#{pid}.min 0"
+ mpm.get_pids.sort.each do |instance|
+ pid, port = instance.split("|")
+ puts "thin_#{port}.label thin_#{port}"
+ puts "thin_#{port}.info Process memory"
+ puts "thin_#{port}.type GAUGE"
+ puts "thin_#{port}.min 0"
end
when "autoconf"
if mpm.autoconf
11 plugins/thin/thin_threads
@@ -85,11 +85,12 @@ when "config"
puts "graph_args -l 0"
puts "graph_scale yes"
puts "graph_info Tracks how many threads per thin processes"
- mpm.get_pids.sort.each do |pid|
- puts "thin_#{pid}.label thin_#{pid}"
- puts "thin_#{pid}.info Threads per Thin process"
- puts "thin_#{pid}.type GAUGE"
- puts "thin_#{pid}.min 0"
+ mpm.get_pids.sort.each do |instance|
+ pid, port = instance.split("|")
+ puts "thin_#{port}.label thin_#{port}"
+ puts "thin_#{port}.info Threads per Thin process"
+ puts "thin_#{port}.type GAUGE"
+ puts "thin_#{port}.min 0"
end
when "autoconf"
if mpm.autoconf
3 plugins/thin/thins_peak_memory
@@ -83,7 +83,8 @@ when "config"
puts "graph_args -l 0"
puts "graph_scale yes"
puts "graph_info Tracks the peak memory of thin processes, aka High Water Mark."
- mpm.get_pids.sort.each do |pid,port|
+ mpm.get_pids.sort.each do |instance|
+ pid, port = instance.split("|")
puts "thin_#{port}.label thin_#{port}"
puts "thin_#{port}.info Peak Memory"
puts "thin_#{port}.type GAUGE"
