Introducing MetricsAgent, which allows you to gather data during a request
commit 94e3e9f6bad3036cbf9c41d188cc6c4fffff76e7 (1 parent: beb176a), committed by @paukul on Nov 8, 2010
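
A rough usage sketch of the new agent, based on the code added below; the wiring and logger choices here are assumptions, not something shipped in this commit:

    require 'amqp_logging'

    # Sketch only: collect loglines and custom metrics for one unit of work,
    # then flush everything as a single JSON document.
    agent = AMQPLogging::MetricsAgent.new

    app_logger = agent.wrap_logger(Logger.new($stdout))   # loglines are now buffered by the agent
    app_logger.info "Processing FooController#index"

    agent[:response_code] = 200                           # arbitrary custom fields
    agent.flush                                           # writes one JSON document and resets the fields

    # agent.logger defaults to a plain Logger on $stdout; point it at an
    # AMQPLogging::Logger instead to ship the JSON document over AMQP.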
@@ -2,4 +2,5 @@
.rvmrc
Gemfile.lock
*.gem
-.DS_Store
+.DS_Store
+.#*
@@ -40,51 +40,6 @@ Example:
AMQPLogging::Logger.new($stdout, :routing_key => AMQPRoutingKeyGenerator)
-==Buffered JSON Logging
-
-You can also use the BufferedJSONLogger, which buffers all the loglines of a request and
-sends one JSON string once the request has been dispatched and handled.
-This will happen right after the dispatcher has passed the request to the controller and the
-controller has finished processing the request. All before, after and around filters
-will have run, and even if the filter chain was halted, the logger will be flushed correctly.
-
-The output will basically look like the following example:
-
-  {
-    "page":"FooController#index",    // Controller and action
-    "severity":1,                    // the highest severity over all logged lines
-    "lines":[                        // the list of loglines in the format 'severity', 'timestamp', 'logline'
-      [1,"02.11.2010T17:49:42.475422","Processing FooController#index (for 127.0.0.1 at 2010-11-02 17:49:42) [GET]"],
-      [1,"02.11.2010T17:49:42.477404","Completed in 2ms (View: 0, DB: 0) | 200 OK [http://localhost/foo]"]
-    ],
-    "process":24785,                 // the process id
-    "host":"pc-pfriederich",         // the hostname
-    "custom_field_1":"foobar",       // whatever custom fields you add
-    "benchmarks": {                  // another custom field, for example added by timebandits (http://github.com/skaes/time_bandits)
-      "sql_queries":197,
-      "total":2120,
-      "gc_time":22.4,
-      "memcache_hits":4
-    }
-  }
-
-The BufferedJSONLogger is especially useful when you plan to process the logs afterwards.
-Say you have 20 or more application servers that all log via AMQP and you want some aggregated,
-request-wise processing of the logs; it would be a pain in the ass to identify which logline
-belongs to which request. You can also add all kinds of data to the JSON object that gets logged
-in the end, so you don't have to figure that out afterwards by regexp-parsing the loglines.
-For most users, this feature probably won't be that interesting.
-
-If you want to use the buffered logger, require 'amqp_logging/buffered_json_logger/rails' in your environment.rb
-and set the logger to an AMQPLogging::BufferedJSONLogger instance. That's all.
-
- require 'amqp_logging/buffered_json_logger/rails'
-
- Rails::Initializer.run do |config|
- logger = AMQPLogging::BufferedJSONLogger.new(config.log_path)
- config.logger = logger
- # ...whatever
-
==License
Copyright © 2010 Pascal Friederich
@@ -9,7 +9,7 @@
end
module AMQPLogging
- autoload :BufferedJSONLogger, 'amqp_logging/buffered_json_logger'
+ autoload :MetricsAgent, 'amqp_logging/metrics_agent'
end
require 'logger'
@@ -1,71 +0,0 @@
-require 'json'
-
-
-# in the Rails app
-module AMQPLogging
- module SimpleFormatter
- def self.call(severity, time, progname, msg)
- msg
- end
- end
-
- class BufferedJSONLogger < Logger
- attr_reader :buffer
- attr_reader :fields
-
- def initialize(logdev, *args)
- super
- @default_formatter = SimpleFormatter
- @default_fields = {
- :host => Socket.gethostname.split('.').first,
- :process => Process.pid
- }
- @fields = {}
- @buffer = []
- end
-
- def add_field(name, value)
- @fields[name] = value
- end
-
- def add_fields(extra_fields)
- @fields.merge!(extra_fields)
- end
-
- def add(severity, message = nil, progname = nil, &block)
- severity ||= UNKNOWN
- if severity < @level
- return true
- end
- progname ||= @progname
- if message.nil?
- if block_given?
- message = yield
- else
- message = progname
- progname = @progname
- end
- end
- t = Time.now
- formatted_message = format_message(format_severity(severity), t, progname, message).strip
- @buffer << [severity, t.strftime("%d.%m.%YT%H:%M:%S.#{t.usec}"), formatted_message]
- true
- end
-
- def flush
- @logdev.write(format_json)
- @buffer = []
- @fields = {}
- true
- end
-
- private
- def format_json
- @default_fields.merge({
- :lines => @buffer.reverse,
- :severity => @buffer.map {|l| l[0]}.max
- }).merge(@fields).to_json + "\n"
- end
-
- end
-end
@@ -1,15 +0,0 @@
-require 'amqp_logging'
-require 'amqp_logging/buffered_json_logger/rails/after_dispatch_callback_handler'
-
-
-ActionController::Dispatcher.after_dispatch do |dispatcher|
- AMQPLogging::Rails::AfterDispatchCallbackHandler.run(dispatcher)
-end
-
-class ActionController::Base
- def log_processing_for_request_id_with_json_logger_preparation
- logger.add_field :page, "#{self.class.name}\##{action_name}"
- log_processing_for_request_id_without_json_logger_preparation
- end
- alias_method_chain :log_processing_for_request_id, :json_logger_preparation
-end
@@ -0,0 +1,92 @@
+require 'json'
+require 'socket'
+
+module AMQPLogging
+ class MetricsAgent
+ attr_reader :fields
+
+ def initialize
+ @default_fields = {
+ :host => Socket.gethostname.split('.').first,
+ :pid => Process.pid,
+ :loglines => {
+ :default => []
+ }
+ }
+ @logger_types = {}
+ reset_fields
+ end
+
+ def logger
+ @logger || (self.logger = ::Logger.new($stdout))
+ end
+
+ def logger=(logger)
+ @logger = logger
+    @logger.formatter = Proc.new {|_severity, _time, progname, msg| msg || progname}
+ @logger
+ end
+
+ def flush
+ logger.info(@fields.to_json + "\n")
+ reset_fields
+ end
+
+ def [](fieldname)
+ @fields[fieldname]
+ end
+
+ def []=(fieldname, value)
+ @fields[fieldname] = value
+ end
+
+ def add_logline(severity, message, progname, logger)
+ t = Time.now
+ msg = (message || progname).strip
+    @fields[:loglines][@logger_types[logger]] << [severity, t.strftime("%Y-%m-%dT%H:%M:%S.#{t.usec}"), msg]
+ end
+
+ def wrap_logger(logger, type = :default)
+ agent = self
+ register_logger(logger, type)
+ logger.instance_eval do
+ @agent = agent
+ class << self
+ include MetricsAgentSupport
+ end
+ end
+ logger
+ end
+
+ private
+ def register_logger(logger, type)
+ @logger_types[logger] = type
+ @fields[:loglines][type] = []
+ end
+
+ def reset_fields
+    @fields = {}
+    @fields.merge!(@default_fields)
+ @logger_types.values.each {|logtype| @fields[:loglines][logtype] = []}
+ end
+
+ module MetricsAgentSupport
+ def self.included(base)
+ base.class_eval do
+ alias :add_without_proxy :add
+ alias :add :add_with_proxy
+ end
+ end
+
+ def agent
+ @agent
+ end
+
+    def add_with_proxy(severity, message = nil, progname = nil)
+      message = yield if message.nil? && block_given?
+      @agent.add_logline(severity, message, progname, self) unless severity < @level
+      add_without_proxy(severity, message, progname)
+    end
+ end
+ end
+end
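
Several loggers can be wrapped under different type keys; the flushed document then keeps their loglines in separate buckets per type. A quick sketch (the :sql key and the log messages are made up for illustration):

    require 'amqp_logging'

    agent        = AMQPLogging::MetricsAgent.new
    rails_logger = agent.wrap_logger(Logger.new($stdout))        # buffered under :default
    sql_logger   = agent.wrap_logger(Logger.new($stdout), :sql)  # buffered under :sql

    rails_logger.info "Completed in 2ms | 200 OK"
    sql_logger.debug  "SELECT * FROM foos"

    agent[:page] = "FooController#index"
    agent.flush   # loglines come out grouped as {:default => [...], :sql => [...]}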
@@ -0,0 +1,16 @@
+require 'amqp_logging'
+require 'amqp_logging/metrics_agent/rails/after_dispatch_callback_handler'
+
+ActionController::Dispatcher.after_dispatch do |dispatcher|
+ AMQPLogging::Rails::AfterDispatchCallbackHandler.run(dispatcher)
+end
+
+class ActionController::Base
+ def log_processing_for_request_id_with_metrics_agent
+ logger.agent[:page] = "#{self.class.name}\##{action_name}"
+ t = Time.now
+    logger.agent[:started_at] = t.strftime("%Y-%m-%dT%H:%M:%S.#{t.usec}")
+ log_processing_for_request_id_without_metrics_agent
+ end
+ alias_method_chain :log_processing_for_request_id, :metrics_agent
+end
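
The hooks above call logger.agent, so the Rails logger has to be wrapped by a MetricsAgent before the first request. A hypothetical environment.rb wiring (assuming this file is required as 'amqp_logging/metrics_agent/rails', mirroring the old buffered_json_logger path; none of it is part of this commit):

    require 'amqp_logging/metrics_agent/rails'

    Rails::Initializer.run do |config|
      agent = AMQPLogging::MetricsAgent.new
      agent.logger = AMQPLogging::Logger.new(config.log_path)         # receives the per-request JSON
      config.logger = agent.wrap_logger(Logger.new(config.log_path))
    end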
@@ -13,14 +13,15 @@ def self.run(dispatcher)
request_headers[k] = "#<#{v.class.name}>"
end
end
-
- ActionController::Base.logger.add_fields({
+ agent = ActionController::Base.logger.agent
+ agent.fields.merge!({
:env => RAILS_ENV,
:response_code => response.status,
:request_params => request.request_parameters,
:request_headers => request_headers,
:response_headers => response.headers
})
+ agent.flush
end
end
end
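
With this change every dispatched request ends in agent.flush, so one JSON document per request reaches the agent's logger. Roughly, and with purely illustrative values, the hash that gets serialized looks like this (field names taken from the code in this commit):

    # What agent.fields roughly contains right before flush; all values are made up.
    fields = {
      :host             => "app-server-01",
      :pid              => 24785,
      :page             => "FooController#index",               # set before the controller runs
      :started_at       => "2010-11-08T17:49:42.475422",
      :env              => "production",                        # RAILS_ENV
      :response_code    => 200,
      :request_params   => { "id" => "1" },
      :request_headers  => { "HTTP_USER_AGENT" => "curl/7.21.0",
                             "rack.input"      => "#<StringIO>" },  # non-string header values become their class name
      :response_headers => { "Content-Type" => "text/html" },
      :loglines         => {
        :default => [[1, "2010-11-08T17:49:42.477404", "Completed in 2ms | 200 OK"]]
      }
    }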
@@ -1,73 +0,0 @@
-require File.dirname(__FILE__) + '/test_helper.rb'
-
-module AMQPLogging
- class TheBufferedJSONLoggerTest < Test::Unit::TestCase
- def setup
- @output = StringIO.new
- @logger = BufferedJSONLogger.new(@output)
- end
-
- test "should have all the convenience log methods of a regular logger" do
- ::Logger::Severity.constants.map(&:downcase).each do |logmethod|
- assert_nothing_raised do
- @logger.send(logmethod, "foo")
- end
- end
- end
-
- test "should not write the logs immediately" do
- assert_equal "", @output.string
- @logger.debug "foo"
- assert_equal "", @output.string
- end
-
- test "should write to the log when flush is called eventually" do
- assert_equal "", @output.string
- @logger.debug "foo"
- @logger.flush
- assert_match /foo/, @output.string
- end
-
- test "should empty the buffer when flush is called" do
- @logger.debug "foo"
- @logger.flush
- assert_equal [], @logger.buffer
- end
- end
-
- class BufferedLoggerJSONOutputTest < Test::Unit::TestCase
- def setup
- @output = StringIO.new
- @logger = BufferedJSONLogger.new(@output)
- @logger.debug "foo"
- @logger.warn "bar"
- @logger.info "baz"
- @logger.flush
- @json = JSON.parse(@output.string)
- end
-
- test "should have the loglines in a array called lines" do
- assert @json["lines"].instance_of?(Array)
- assert_equal 3, @json["lines"].size
- end
-
- test "should have each logline with severity, a timestamp and the message" do
- severity, timestamp, message = @json["lines"][2]
- assert_equal Logger::DEBUG, severity
- assert_nothing_raised { Time.parse(timestamp) }
- assert_equal "foo", message
- end
-
- test "should have a field with the highest severity" do
- assert_equal Logger::WARN, @json["severity"]
- end
-
- test "should have a field with the process id" do
- assert_equal Process.pid, @json["process"]
- end
-
- test "should have a field with the hostname" do
- assert_equal Socket.gethostname.split('.').first, @json["host"]
- end
- end
-end