Commit 9a928d8

Merge pull request #109 from dannaaduna/master

Clean up and add api eunit tests

dannaaduna committed Sep 3, 2012
2 parents 6fc983d + ccc469a
Showing 5 changed files with 147 additions and 84 deletions.
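
The new API eunit tests stub out collaborators with meck. A minimal sketch of the pattern, assuming route_list/0 is exported from agent_manager (the test module name below is hypothetical, not part of this commit):

%% api_test_sketch.erl -- illustrative only
-module(api_test_sketch).
-include_lib("eunit/include/eunit.hrl").

route_list_empty_test() ->
    %% Mock gen_leader so route_list/0 can run without a live leader process.
    meck:new(gen_leader),
    meck:expect(gen_leader, call, fun(agent_manager, route_list_agents) -> [] end),
    ?assertEqual([], agent_manager:route_list()),
    ?assert(meck:validate(gen_leader)),
    meck:unload(gen_leader).
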
5 changes: 5 additions & 0 deletions include/call.hrl
@@ -130,3 +130,8 @@
timestamp = util:now() :: cpx_time(),
nodes = [] :: [atom()]
}).

-record(queue_info, {
call :: #queued_call{} | 'undefined',
qpid :: pid()
}).
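
The new #queue_info record pairs a dispatcher's queued call (or 'undefined') with the pid of the queue holding it. An illustrative helper, not part of this diff, showing how the record is built, written in the codebase's own spec style:

%% Wrap a dispatcher's queued call and queue pid into the new record.
-spec(to_queue_info/2 :: (Call :: #queued_call{} | 'undefined', QPid :: pid()) -> #queue_info{}).
to_queue_info(Call, QPid) ->
    #queue_info{call = Call, qpid = QPid}.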
4 changes: 2 additions & 2 deletions src/agent_channel.erl
@@ -692,7 +692,7 @@ public_api_test_() ->

fun(_) -> {"start/4, simple_sucess", fun() ->
meck:expect(gen_fsm, start, fun(?MODULE, [agentrecord, callrecord,
endpointdata, initstate]) ->
endpointdata, initstate], []) ->
?assert(true)
end),

@@ -723,7 +723,7 @@ public_api_test_() ->
end} end,

fun(_) -> {"stop/1, simple_sucess", fun() ->
meck:expect(gen_fsm, send_all_state_event, fun(pid, stop) ->
meck:expect(gen_fsm, sync_send_event, fun(pid, stop) ->
?assert(true)
end),

153 changes: 84 additions & 69 deletions src/agent_manager.erl
@@ -1070,6 +1070,22 @@ external_api_test_() ->
ok
end),
?assertEqual(ok, set_ends("testagent", [dummy_media]))
end},

{"route_list/0", fun() ->
AgentList = [
{"key1", #agent_cache{skills = [], channels = [], endpoints = [], id = "skip1"}},
{"key2", #agent_cache{skills = ['_all'], channels = [], endpoints = [], id = "skip2"}},
{"key3", #agent_cache{skills = [english], channels = [], endpoints = [], id = "skip3"}},
{"key4", #agent_cache{skills = [], channels = [voice], endpoints = [], id = "skip4"}},
{"key5", #agent_cache{skills = [], channels = [], endpoints = [dummy_media], id = "skip5"}},
{"key6", #agent_cache{time_avail = 1, skills = ['_all'], channels = [voice], endpoints = [dummy_media], id = "take6"}},
{"key7", #agent_cache{time_avail = 1, skills = [english], channels = [voice], endpoints = [dummy_media], id = "take7"}}
],
meck:expect(gen_leader, call, fun(?MODULE, route_list_agents) ->
AgentList
end),
?assertEqual(AgentList, route_list())
end}

] end}.
@@ -1401,7 +1417,7 @@ internal_state_test_() -> [
{#agent_key{rotations = 1, has_all = z, skill_count = 0, idle_time = 1}, "agent1"}
]),
State = #state{route_list = Route1},
ExpectedState = #state{route_list = Route2, lists_requested = 1},
% ExpectedState = #state{route_list = Route2, lists_requested = 1},
{reply, OotList, OotState} = handle_call(route_list_agents, "from", State, "election"),
?assertEqual(InitList, OotList),
?assertEqual(gb_trees:to_list(Route2), gb_trees:to_list(OotState#state.route_list))
@@ -1427,7 +1443,7 @@
end),
InAgent = #agent{id = "agent1", login = "testagent"},
State = #state{},
{reply, Preply, State0} = handle_call({start_agent, InAgent}, "from", State, "election"),
{reply, _Preply, State0} = handle_call({start_agent, InAgent}, "from", State, "election"),
?assertEqual(1, gb_trees:size(State0#state.route_list)),
?assertEqual(1, dict:size(State0#state.agents)),
[begin
@@ -1449,7 +1465,6 @@
end},

{"{exists, Login}, not found", fun() ->
Zombie = util:zombie(),
meck:new(gen_leader),
meck:expect(gen_leader, leader_node, fun(_) -> node() end),
meck:expect(gen_leader, leader_call, fun(?MODULE, {full_data, "testagent"}) ->
@@ -1603,7 +1618,7 @@ internal_state_test_() -> [
State = #state{agents = dict:from_list([{"testagent", OldCache}])},
meck:new(gen_leader),
meck:expect(gen_leader, leader_node, fun(_) -> notus end),
meck:expect(gen_leader, leader_cast, fun(?MODULE, {update_notify, Nom, Out}) ->
meck:expect(gen_leader, leader_cast, fun(?MODULE, {update_notify, _Nom, Out}) ->
?assertNotEqual(OldCache, Out)
end),
{noreply, TestState} = handle_cast({set_ends, "testagent", [new_endpoint]}, State, "election"),
@@ -1635,7 +1650,7 @@ internal_state_test_() -> [
State = #state{agents = dict:from_list([{"testagent", OldCache}])},
meck:new(gen_leader),
meck:expect(gen_leader, leader_node, fun(_) -> notus end),
meck:expect(gen_leader, leader_cast, fun(?MODULE, {update_notify, Nom, Out}) ->
meck:expect(gen_leader, leader_cast, fun(?MODULE, {update_notify, _Nom, Out}) ->
?assertNotEqual(OldCache, Out)
end),
{noreply, TestState} = handle_cast({update_skill_list, "testagent", [new_skill]}, State, "election"),
@@ -1674,70 +1689,70 @@

].

-record(multi_node_test_state, {
master_node,
slave_node,
master_am,
slave_am
}).

multi_node_test_() ->
util:start_testnode(),
Master = util:start_testnode(agent_manager_master),
Slave = util:start_testnode(agent_manager_slave),
mnesia:change_config(extra_db_nodes, [Master, Slave]),
cover:start([Master, Slave]),
{inorder, {foreach, fun() ->
rpc:call(Master, mnesia, stop, []),
rpc:call(Slave, mnesia, stop, []),
mnesia:delete_schema([Master, Slave]),
mnesia:create_schema([Master, Slave]),
rpc:call(Master, mnesia, start, []),
rpc:call(Slave, mnesia, start, []),
mnesia:change_table_copy_type(schema, Master, disc_copies),
mnesia:change_table_copy_type(schema, Slave, disc_copies),
{ok, AMMaster} = rpc:call(Master, ?MODULE, start, [[Master, Slave]]),
{ok, AMSlave} = rpc:call(Slave, ?MODULE, start, [[Master, Slave]]),
#multi_node_test_state{
master_node = Master,
slave_node = Slave,
master_am = AMMaster,
slave_am = AMSlave
}
end,
fun(MultinodeState) ->
rpc:call(Master, ?MODULE, stop, []),
rpc:call(Slave, ?MODULE, stop, []),
rpc:call(Master, mnesia, stop, []),
rpc:call(Slave, mnesia, stop, [])
end,
[fun(TestState) -> {"Slave skips added agent", fun() ->
% only the leader knows about every agent, it seems
% the reason not every manager needs to know about every
% agent is the cook will ask each dispatcher, which will ask
% the local manager. The resulting lists are combined.
Agent = #agent{id = "agent", login = "agent"},
{ok, Apid} = rpc:call(Master, ?MODULE, start_agent, [Agent]),
List = rpc:call(Slave, ?MODULE, list, []),
?assertEqual([], List)
end} end,
fun(TestState) -> {"Master is informed of agent on slave", fun() ->
Agent = #agent{id = "agent", login = "agent", skills = []},
{ok, Apid} = rpc:call(Slave, ?MODULE, start_agent, [Agent]),
receive after 100 -> ok end,
List = rpc:call(Master, ?MODULE, list, []),
?assertMatch([{"agent", #agent_cache{pid = Apid, id="agent", time_avail = {_T1, _T2, _T3}, skills = [], channels = _ChanList, endpoints = []}}], List)
end} end,
fun(TestState) -> {"Master removes agents from dead node", fun() ->
Agent = #agent{id = "agent", login = "agent", skills = []},
{ok, Apid} = rpc:call(Slave, ?MODULE, start_agent, [Agent]),
List = rpc:call(Master, ?MODULE, list, []),
?assertMatch([{"agent", #agent_cache{pid = Apid, id = "agent", time_avail = {_T1, _T2, _T3}, skills = [], channels = _ChanList, endpoints = []}}], List),
rpc:call(Slave, erlang, exit, [TestState#multi_node_test_state.slave_am, kill]),
receive after 100 -> ok end
% TODO enable this at some point.
%?assertEqual([], rpc:call(Master, ?MODULE, list, []))
end} end]}}.
% -record(multi_node_test_state, {
% master_node,
% slave_node,
% master_am,
% slave_am
% }).

% multi_node_test_() ->
% util:start_testnode(),
% Master = util:start_testnode(agent_manager_master),
% Slave = util:start_testnode(agent_manager_slave),
% mnesia:change_config(extra_db_nodes, [Master, Slave]),
% cover:start([Master, Slave]),
% {inorder, {foreach, fun() ->
% rpc:call(Master, mnesia, stop, []),
% rpc:call(Slave, mnesia, stop, []),
% mnesia:delete_schema([Master, Slave]),
% mnesia:create_schema([Master, Slave]),
% rpc:call(Master, mnesia, start, []),
% rpc:call(Slave, mnesia, start, []),
% mnesia:change_table_copy_type(schema, Master, disc_copies),
% mnesia:change_table_copy_type(schema, Slave, disc_copies),
% {ok, AMMaster} = rpc:call(Master, ?MODULE, start, [[Master, Slave]]),
% {ok, AMSlave} = rpc:call(Slave, ?MODULE, start, [[Master, Slave]]),
% #multi_node_test_state{
% master_node = Master,
% slave_node = Slave,
% master_am = AMMaster,
% slave_am = AMSlave
% }
% end,
% fun(_) ->
% rpc:call(Master, ?MODULE, stop, []),
% rpc:call(Slave, ?MODULE, stop, []),
% rpc:call(Master, mnesia, stop, []),
% rpc:call(Slave, mnesia, stop, [])
% end,
% [fun(_TestState) -> {"Slave skips added agent", fun() ->
% % only the leader knows about every agent, it seems
% % the reason not every manager needs to know about every
% % agent is the cook will ask each dispatcher, which will ask
% % the local manager. The resulting lists are combined.
% Agent = #agent{id = "agent", login = "agent"},
% {ok, _Apid} = rpc:call(Master, ?MODULE, start_agent, [Agent]),
% List = rpc:call(Slave, ?MODULE, list, []),
% ?assertEqual([], List)
% end} end,
% fun(_TestState) -> {"Master is informed of agent on slave", fun() ->
% Agent = #agent{id = "agent", login = "agent", skills = []},
% {ok, Apid} = rpc:call(Slave, ?MODULE, start_agent, [Agent]),
% receive after 100 -> ok end,
% List = rpc:call(Master, ?MODULE, list, []),
% ?assertMatch([{"agent", #agent_cache{pid = Apid, id="agent", time_avail = {_T1, _T2, _T3}, skills = [], channels = _ChanList, endpoints = []}}], List)
% end} end,
% fun(TestState) -> {"Master removes agents from dead node", fun() ->
% Agent = #agent{id = "agent", login = "agent", skills = []},
% {ok, Apid} = rpc:call(Slave, ?MODULE, start_agent, [Agent]),
% List = rpc:call(Master, ?MODULE, list, []),
% ?assertMatch([{"agent", #agent_cache{pid = Apid, id = "agent", time_avail = {_T1, _T2, _T3}, skills = [], channels = _ChanList, endpoints = []}}], List),
% rpc:call(Slave, erlang, exit, [TestState#multi_node_test_state.slave_am, kill]),
% receive after 100 -> ok end
% % TODO enable this at some point.
% %?assertEqual([], rpc:call(Master, ?MODULE, list, []))
% end} end]}}.


% [
61 changes: 49 additions & 12 deletions src/dispatch_manager.erl
@@ -121,9 +121,9 @@ init([]) ->
?DEBUG("Spawn waking up with agents ~p", [Agents]),
[case A#agent_cache.channels of
[] ->
gen_server:cast(dispatch_manager, {end_avail, A#agent_cache.pid});
dispatch_manager:end_avail(A#agent_cache.pid);
_ ->
gen_server:cast(dispatch_manager, {now_avail, A#agent_cache.pid, A#agent_cache.channels})
dispatch_manager:now_avail(A#agent_cache.pid, A#agent_cache.channels)
end || {_Id, A} <- Agents],
?DEBUG("Spawn done.", [])
end),
@@ -180,9 +180,10 @@ handle_cast({end_avail, AgentPid}, State) ->
{noreply, balance(State2)};
handle_cast(deep_inspect, #state{dispatchers = Disps} = State) ->
Fun = fun(Pid) ->
{ok, Dispstate} = gen_server:call(Pid, dump_state),
Queued = element(2, Dispstate),
QueueRef = element(4, Dispstate),
% {ok, Dispstate} = gen_server:call(Pid, dump_state),
% Queued = element(2, Dispstate),
% QueueRef = element(4, Dispstate),
{ok, #queue_info{call = Queued, qpid = QueueRef}} = dispatcher:get_queue_info(Pid),
[Pid, Queued, QueueRef]
end,
Mapped = lists:map(Fun, Disps),
@@ -334,15 +335,15 @@ monitor_test_() ->
{noreply, S1} = handle_cast({now_avail, FakeAgent, []}, State0),
{noreply, S2} = handle_cast({end_avail, FakeAgent}, S1),
{noreply, S3} = handle_cast({now_avail, FakeAgent, []}, S2),
{noreply, S4} = handle_cast({end_avail, FakeAgent}, S3),
{noreply, _S4} = handle_cast({end_avail, FakeAgent}, S3),
FakeAgent ! headshot,
Count = count_downs(FakeAgent, 0),
?assertEqual(0, Count)
end},

{"An agent gets monitored, period", fun() ->
FakeAgent = spawn(fun zombie/0),
{noreply, S1} = handle_cast({now_avail, FakeAgent, []}, State0),
{noreply, _S1} = handle_cast({now_avail, FakeAgent, []}, State0),
FakeAgent ! headshot,
Count = count_downs(FakeAgent, 0),
?assertEqual(1, Count)
@@ -351,7 +352,7 @@
{"An agent gets monitored once, channel difference", fun() ->
FakeAgent = spawn(fun zombie/0),
{noreply, S1} = handle_cast({now_avail, FakeAgent, []}, State0),
{noreply, S2} = handle_cast({now_avail, FakeAgent, [skill]}, S1),
{noreply, _S2} = handle_cast({now_avail, FakeAgent, [skill]}, S1),
FakeAgent ! headshot,
Count = count_downs(FakeAgent, 0),
?assertEqual(1, Count)
@@ -387,14 +388,16 @@ balance_test_() ->
Zombie = spawn(fun zombie/0),
{noreply, State1} = handle_cast({now_avail, Zombie, [skill]}, State0),
?assertMatch([{Zombie, _}], dict:to_list(State1#state.agents)),
?assertEqual(1, length(State1#state.dispatchers))
?assertEqual(1, length(State1#state.dispatchers)),
?assertEqual({reply, 1, State1}, handle_call(count_dispatchers, self(), State1))
end},

{"New agent multiple channels, dispatchers start", fun() ->
Zombie = spawn(fun zombie/0),
{noreply, State1} = handle_cast({now_avail, Zombie, [voice, dummy]}, State0),
?assertMatch([{Zombie, _}], dict:to_list(State1#state.agents)),
?assertEqual(2, length(State1#state.dispatchers))
?assertEqual(2, length(State1#state.dispatchers)),
?assertEqual({reply, 2, State1}, handle_call(count_dispatchers, self(), State1))
end},

{"agent dies, but dispatchers don't die with it", fun() ->
@@ -406,7 +409,8 @@
after 10 -> ?assert(nodown) end,
{noreply, State2} = handle_info(Down, State1),
?assertEqual(dict:new(), State2#state.agents),
?assertEqual(1, length(State2#state.dispatchers))
?assertEqual(1, length(State2#state.dispatchers)),
?assertEqual({reply, 1, State2}, handle_call(count_dispatchers, self(), State2))
end},

{"unexpected dispatcher death", fun() ->
@@ -415,9 +419,42 @@
{noreply, State1} = handle_cast({now_avail, Zombie, [skill]}, State0),
[Dispatcher] = State1#state.dispatchers,
Exit = {'EXIT', Dispatcher, headshot},
{noreply, State2} = handle_info(Exit, State1),
?assertEqual(1, length(State2#state.dispatchers))
?assertEqual(1, length(State2#state.dispatchers)),
?assertEqual({reply, 1, State2}, handle_call(count_dispatchers, self(), State2))
end}

] end}.

external_api_test_() ->
{foreach, fun() ->
FakeDispatcher = util:zombie(),
FakeAgent = util:zombie(),

meck:new(dispatcher),
meck:expect(dispatcher, start_link, fun() -> {ok, FakeDispatcher} end),
meck:expect(dispatcher, get_queue_info, fun(_) -> {ok, #queue_info{}} end),

dispatch_manager:start(),
dispatch_manager:now_avail(FakeAgent, [skill]),

FakeAgent
end,
fun(_) ->
meck:unload(dispatcher),
dispatch_manager:stop()
end,
[fun(_) ->
{"count_dispatchers/0", fun() ->
?assertEqual(1, dispatch_manager:count_dispatchers())
end}
end, fun(_) ->
{"deep_inspect/2", fun() ->
?assertEqual(ok, dispatch_manager:deep_inspect())
end}
end, fun(FakeAgent) ->
{"end_avail/2", fun() ->
?assertEqual(ok, dispatch_manager:end_avail(FakeAgent))
end}
end]}.

-endif.
8 changes: 7 additions & 1 deletion src/dispatcher.erl
@@ -45,7 +45,7 @@
-behaviour(gen_server).

%% API
-export([start_link/0, start/0, stop/2, get_agents/1, bound_call/1, regrab/1]).
-export([start_link/0, start/0, stop/2, get_agents/1, bound_call/1, regrab/1, get_queue_info/1]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -77,6 +77,12 @@ start_link() ->
start() ->
gen_server:start(?MODULE, [], []).

%% @doc Gets the queued calls and queue pid from the dispatcher state
-spec(get_queue_info/1 :: (Pid :: pid()) -> {ok, #queue_info{}}).
get_queue_info(Pid) ->
State = gen_server:call(Pid, dump_state),
{ok, #queue_info{call = State#state.call, qpid = State#state.qpid}}.
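
An illustrative call site, mirroring the deep_inspect change in dispatch_manager above (the wrapper function name here is hypothetical):

%% Fetch a dispatcher's queued call and queue pid via the new API.
inspect_dispatcher(DispatcherPid) ->
    {ok, #queue_info{call = Queued, qpid = QueueRef}} =
        dispatcher:get_queue_info(DispatcherPid),
    [DispatcherPid, Queued, QueueRef].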

%%====================================================================
%% gen_server callbacks
%%====================================================================
