Skip to content
Browse files

Adding limits to deduplication

This adds an optional pre-read in ETS to find out whether
the currently deduplicated log message has been seen more than
a given number of times (dictated by the 'duplicate_limit'
env variable). If it's been seen too often, the entire
negotiation between lager_deduper and its children is skipped to
avoid undesirable message sending that may take a lot of space
or processing time.

This measure is best-effort -- since the ETS reads are not performed
under any lock, it is possible for more matching messages than the
limit states to get through.
  • Loading branch information...
1 parent a950d24 commit 5049634679db1b060d0b9d19cdf06e7e84cc5837 @ferd committed
Showing with 39 additions and 5 deletions.
  1. +8 −2 src/lager.app.src
  2. +5 −0 src/lager_app.erl
  3. +26 −3 src/lager_deduper.erl
View
10 src/lager.app.src
@@ -41,7 +41,13 @@
%% How sensitive lager should be to near-duplicates. 0 means no
%% special handling is done
{duplicate_treshold, 0},
- %% How frequently lager should dump its duplicate entries, in milliseconds
- {duplicate_dump, 1000}
+ %% How frequently lager should dump its duplicate entries, in milliseconds.
+ %% only used if duplicate_treshold > 0
+ {duplicate_dump, 1000},
+ %% How many similar messages can be logged per dump. If 150 log
+ %% messages are identical but this limit is set to 5, the log handler
+ %% will only receive 5 of them and show (5 times+) instead of (N times).
+ %% A value of 0 or 'undefined' disables limits.
+ {duplicate_limit, undefined}
]}
]}.
View
5 src/lager_app.erl
@@ -62,6 +62,11 @@ start(_StartType, _StartArgs) ->
_ -> 1000
end,
lager_mochiglobal:put(duplicate_dump, DumpDelay),
+ DupLimit = case application:get_env(lager, duplicate_limit) of
+ {ok, DupLim} -> DupLim;
+ _ -> undefined
+ end,
+ lager_mochiglobal:put(duplicate_limit, DupLimit),
SavedHandlers = case application:get_env(lager, error_logger_redirect) of
{ok, false} ->
View
29 src/lager_deduper.erl
@@ -5,6 +5,7 @@
code_change/3, terminate/2]).
-define(SERVER, ?MODULE).
+-define(TABLE, ?MODULE).
-define(DEFAULT_TIMEOUT, 1000).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
@@ -19,6 +20,21 @@ start_link() ->
%% Forward a log message for deduplication unless its key has already
%% been seen at least 'duplicate_limit' times in the current dump
%% period. The ETS pre-read is best-effort: the table is read without
%% any lock, so slightly more than Limit messages may slip through.
dedup_notify(Dest, Level, Timestamp, Msg) ->
    Key = {Level, hash(Msg)},
    case over_limit(Key, limit()) of
        true -> ok;
        false -> ask_seen(Dest, Level, Timestamp, Msg, Key)
    end.

%% Returns true only when limiting is enabled (Limit is a positive
%% integer) and the entry's counter has already reached the limit.
%% 'undefined' and 0 both mean "no limit configured".
over_limit(_Key, undefined) ->
    false;
over_limit(_Key, 0) ->
    false;
over_limit(Key, Limit) ->
    case ets:lookup(?TABLE, Key) of
        [] -> false;                              % never seen
        [{_, Seen, _}] when Seen < Limit -> false; % seen, still under limit
        [_] -> true                                % seen too many times
    end.
+
+ask_seen(Dest, Level, Timestamp, Msg, Key) ->
case gen_server:call(?SERVER, {seen, Key}) of
yes ->
ok;
@@ -84,8 +100,9 @@ terminate(_, _) -> ok.
%% Milliseconds to wait between duplicate dumps.
delay() ->
    lager_mochiglobal:get(duplicate_dump, ?DEFAULT_TIMEOUT).

%% How sensitive near-duplicate detection is; 0 disables special
%% handling. NOTE: 'treshold' spelling matches the config key name.
treshold() ->
    lager_mochiglobal:get(duplicate_treshold, 1).

%% Per-dump cap on identical messages; 'undefined' or 0 disables it.
limit() ->
    lager_mochiglobal:get(duplicate_limit, undefined).
-empty() -> ets:new(?MODULE, [private]).
+empty() -> ets:new(?TABLE, [protected,named_table]).
lookup(Key, Tab) ->
case ets:lookup(Tab, Key) of
@@ -140,17 +157,23 @@ dump(Tab, Current) ->
ets:delete(Tab,Key),
dump(Tab, Next);
[{Key, Ct, {log, Lvl, Ts, [LvlStr, Loc, Msg] }}] ->
- safe_notify({log, Lvl, Ts, [LvlStr, Loc, [Msg, io_lib:format(" (~b times)", [Ct])]]}),
+ safe_notify({log, Lvl, Ts, [LvlStr, Loc, [Msg, io_lib:format(" (~b times~s)", [Ct,plus(Ct)])]]}),
Next = ets:next(Tab, Current),
ets:delete(Tab,Key),
dump(Tab, Next);
[{Key, Ct, {log, Dest, Lvl, Ts, [LvlStr, Loc, Msg]}}] ->
- safe_notify({log, Dest, Lvl, Ts, [LvlStr, Loc, [Msg, io_lib:format(" (~b times)", [Ct])]]}),
+ safe_notify({log, Dest, Lvl, Ts, [LvlStr, Loc, [Msg, io_lib:format(" (~b times~s)", [Ct,plus(Ct)])]]}),
Next = ets:next(Tab, Current),
ets:delete(Tab,Key),
dump(Tab, Next)
end.
%% Suffix for the displayed repeat count: "+" when the count was capped
%% by the duplicate limit (more copies may have been dropped), "" when
%% no limit applies or the count stayed below it. The 'undefined' check
%% must come first: in Erlang term order an atom compares greater than
%% any integer, so 'undefined > 0' alone would be true.
plus(Ct) ->
    case limit() of
        Limit when Limit =/= undefined, Limit > 0, Ct >= Limit -> "+";
        _ -> ""
    end.
safe_notify(Event) ->
case whereis(lager_event) of

0 comments on commit 5049634

Please sign in to comment.
Something went wrong with that request. Please try again.