First public release.

commit ea44911c20a96565dba24de68dce2b1a2a363ca6 0 parents
Knut Nesheim authored
21 LICENSE
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright (c) 2011 wooga GmbH
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
119 README.md
@@ -0,0 +1,119 @@
+# eredis
+
+An Erlang Redis client with a focus on performance. Eredis also
+supports AUTH and SELECT.
+
+## Example
+
+If you have Redis running on localhost, with default settings, you may
+copy and paste the following into a shell to try out Eredis:
+
+ git clone git://github.com/wooga/eredis.git
+ cd eredis
+ ./rebar compile
+ erl -pa ebin/
+ {ok, C} = eredis:start_link().
+ {ok, <<"OK">>} = eredis:q(C, ["SET", "foo", "bar"]).
+ {ok, <<"bar">>} = eredis:q(C, ["GET", "foo"]).
+
+MSET and MGET:
+
+ KeyValuePairs = ["key1", "value1", "key2", "value2", "key3", "value3"].
+ {ok, <<"OK">>} = eredis:q(C, ["MSET" | KeyValuePairs]).
+ {ok, Values} = eredis:q(C, ["MGET" | ["key1", "key2", "key3"]]).
+
+EUnit tests:
+
+ ./rebar eunit
+
+## Commands
+
+Eredis has only one function for interacting with Redis, which is
+`eredis:q(Client::pid(), Command::iolist())`. The response is either
+`{ok, Value::binary()}` or `{error, Message::binary()}`. The value is
+always the binary value returned by Redis, without any type
+conversion.
+
+To start the client, use `eredis:start_link/0` or
+`eredis:start_link/4`. `start_link/4` takes the following arguments:
+
+* Host, DNS name or IP address as a string
+* Port, integer
+* Database, integer, or 0 for the default database
+* Password, string, or the empty string (`[]`) for no password
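+
+For example, to connect to a Redis server on another host, select
+database 2 and authenticate (the host and password here are placeholders):
+
+ {ok, C} = eredis:start_link("redis.example.com", 6379, 2, "secret").
+ {ok, <<"PONG">>} = eredis:q(C, ["PING"]).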
+
+## Reconnecting on timeout
+
+Redis will disconnect any client that is idle for more than the
+configured timeout. When this happens, Eredis will automatically
+reconnect. In other words, there will always be one open connection to
+Redis for every client. If re-establishing the connection fails, the
+client terminates.
+
+## AUTH and SELECT
+
+Eredis also implements the AUTH and SELECT calls for you. When the
+client is started with something other than the default values for
+password and database, it will issue the `AUTH` and `SELECT` commands
+appropriately, even when reconnecting after a timeout.
+
+
+## Benchmarking
+
+Using [basho_bench](https://github.com/basho/basho_bench/) you may
+benchmark Eredis on your own hardware using the provided config and
+driver. See `priv/basho_bench_eredis.config` and
+`src/basho_bench_driver_eredis.erl`.
+
+## Queueing
+
+Eredis uses the same queueing mechanism as Erldis. `eredis:q/2` uses
+`gen_server:call/2` to make a blocking call to the client
+gen_server. The client immediately sends the request to Redis, adds
+the caller to the queue and returns `noreply`. This frees the
+gen_server up to accept new requests and parse responses as they come
+in on the socket.
+
+When data is received on the socket, we call `eredis_parser:parse/2`
+until it returns a value. We then use `gen_server:reply/2` to reply to
+the first process waiting in the queue.
+
+This queueing mechanism works because Redis guarantees that the
+response will be in the same order as the requests.
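+
+Condensed to its core, the queueing pattern looks roughly like this (a
+sketch based on `src/eredis_client.erl` below; `parse_reply/1` is a
+stand-in for the real parser plumbing):
+
+ handle_call({request, Req}, From, #state{socket = Socket, queue = Queue} = State) ->
+     ok = gen_tcp:send(Socket, Req),
+     %% Reply later; the gen_server is free to take the next request
+     {noreply, State#state{queue = queue:in(From, Queue)}}.
+
+ handle_info({tcp, _Socket, Data}, #state{queue = Queue} = State) ->
+     %% Parse the response, then answer the oldest waiting caller
+     {{value, From}, NewQueue} = queue:out(Queue),
+     gen_server:reply(From, parse_reply(Data)),
+     {noreply, State#state{queue = NewQueue}}.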
+
+## Response parsing
+
+The response parser is the biggest difference between Eredis and other
+libraries like Erldis, redis-erl and redis_pool. The common approach
+is to either block directly or use `{active, once}` to get the first
+part of the response, then repeatedly use `gen_tcp:recv/2` to get more
+data when needed. Profiling identified this as a bottleneck, in
+particular for `MGET` and `HMGET`.
+
+To be as fast as possible, Eredis takes a different approach. The
+socket is always set to `{active, once}`, which lets us receive data
+quickly without blocking the gen_server. The tradeoff is that we must
+parse partial responses, which makes the parser more complex.
+
+To make multibulk responses as fast as possible, the parser parses all
+available data and continues where it left off when more data arrives.
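+
+As an illustration (taken from the parser test suite), a reply split
+across two packets is handled by calling the parser again with the
+continuation state it returned:
+
+ {continue, State1} = eredis_parser:parse(eredis_parser:init(), <<"$3\r\n">>).
+ {ok, <<"bar">>, _} = eredis_parser:parse(State1, <<"bar\r\n">>).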
+
+## Future improvements
+
+When the parser is accumulating data, a new binary is generated for
+every call to `parse/2`. This may create large binaries that end up
+being reference counted. This could be improved by accumulating an
+iolist instead.
+
+When parsing a big bulk, the parser knows the size of the bulk. If the
+bulk is big and would arrive in many chunks, this could be improved by
+having the client explicitly use `gen_tcp:recv/2` to fetch the entire
+bulk at once.
+
+## Credits
+
+Although this project is almost a complete rewrite, many patterns are
+the same as you find in Erldis, most notably the queueing of requests.
+
+`create_multibulk/1` and `to_binary/1` were taken verbatim from Erldis.
16 include/eredis.hrl
@@ -0,0 +1,16 @@
+-define(NL, "\r\n").
+
+
+%% Continuation data is whatever data is returned by any of the parse
+%% functions. It is used to continue where we left off the next time
+%% the user calls parse/2.
+-type continuation_data() :: any().
+-type parser_state() :: status_continue | bulk_continue | multibulk_continue.
+
+%% Internal parser state. It is returned from parse/2 and must be
+%% passed in on the next call to parse/2.
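+%%
+%% For example, after parsing the partial reply <<"$3\r\n">> the state is
+%% #pstate{state = bulk_continue, continuation_data = {3, <<"\r\n">>}}
+%% (see parse_split_bulk_test in eredis_parser_tests).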
+-record(pstate, {
+ state = undefined :: parser_state() | undefined,
+ continuation_data :: continuation_data() | undefined
+}).
+
17 priv/basho_bench_eredis.config
@@ -0,0 +1,17 @@
+{mode, max}.
+%{mode, {rate, 5}}.
+
+{duration, 15}.
+
+{concurrent, 30}.
+
+{driver, basho_bench_driver_eredis}.
+
+{code_paths, ["/home/knutin/git/eredis/ebin/"]}.
+
+{operations, [{get,1}, {put,4}]}.
+
+{key_generator, {uniform_int, 10000}}.
+
+{value_generator, {function, basho_bench_driver_eredis, value_gen, []}}.
+%{value_generator, {fixed_bin, 1}}.
18 priv/basho_bench_erldis.config
@@ -0,0 +1,18 @@
+{mode, max}.
+%{mode, {rate, 5}}.
+
+{duration, 15}.
+
+{concurrent, 10}.
+
+{driver, basho_bench_driver_erldis}.
+
+{code_paths, ["/home/knutin/git/eredis/ebin/",
+ "/home/knutin/git/erldis/ebin/"]}.
+
+{operations, [{get,1}, {put,4}]}.
+
+{key_generator, {uniform_int, 10000}}.
+
+{value_generator, {function, basho_bench_driver_erldis, value_gen, []}}.
+{value_generator, {fixed_bin, 64}}.
BIN  rebar
Binary file not shown
1  rebar.config
@@ -0,0 +1 @@
+{erl_opts, [debug_info]}.
38 src/basho_bench_driver_eredis.erl
@@ -0,0 +1,38 @@
+-module(basho_bench_driver_eredis).
+
+-export([new/1,
+ run/4]).
+
+-export([value_gen/1]).
+
+new(_Id) ->
+ case eredis:start_link() of
+ {ok, Client} ->
+ {ok, Client};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+run(get, KeyGen, _ValueGen, Client) ->
+ Start = KeyGen(),
+ %%case eredis:q(["MGET" | lists:seq(Start, Start + 500)]) of
+ case eredis:q(Client, ["GET", Start]) of
+ {ok, _Value} ->
+ {ok, Client};
+ {error, Reason} ->
+ {error, Reason, Client}
+ end;
+
+run(put, KeyGen, ValueGen, Client) ->
+ case eredis:q(Client, ["SET", KeyGen(), ValueGen()]) of
+ {ok, <<"OK">>} ->
+ {ok, Client};
+ {error, Reason} ->
+ {error, Reason, Client}
+ end.
+
+value_gen(_Id) ->
+ fun() ->
+ %% %% Example data from http://json.org/example.html, copied three times
+ <<"{\"web-app\":{\"servlet\":[{\"servlet-name\":\"cofaxCDS\",\"servlet-class\":\"org.cofax.cds.CDSServlet\",\"init-param\":{\"configGlossary:installationAt\":\"Philadelphia,PA\",\"configGlossary:adminEmail\":\"ksm@pobox.com\",\"configGlossary:poweredBy\":\"Cofax\",\"configGlossary:poweredByIcon\":\"/images/cofax.gif\",\"configGlossary:staticPath\":\"/content/static\",\"templateProcessorClass\":\"org.cofax.WysiwygTemplate\",\"templateLoaderClass\":\"org.cofax.FilesTemplateLoader\",\"templatePath\":\"templates\",\"templateOverridePath\":\"\",\"defaultListTemplate\":\"listTemplate.htm\",\"defaultFileTemplate\":\"articleTemplate.htm\",\"useJSP\":false,\"jspListTemplate\":\"listTemplate.jsp\",\"jspFileTemplate\":\"articleTemplate.jsp\",\"cachePackageTagsTrack\":200,\"cachePackageTagsStore\":200,\"cachePackageTagsRefresh\":60,\"cacheTemplatesTrack\":100,\"cacheTemplatesStore\":50,\"cacheTemplatesRefresh\":15,\"cachePagesTrack\":200,\"cachePagesStore\":100,\"cachePagesRefresh\":10,\"cachePagesDirtyRead\":10,\"searchEngineListTemplate\":\"forSearchEnginesList.htm\",\"searchEngineFileTemplate\":\"forSearchEngines.htm\",\"searchEngineRobotsDb\":\"WEB-INF/robots.db\",\"useDataStore\":true,\"dataStoreClass\":\"org.cofax.SqlDataStore\",\"redirectionClass\":\"org.cofax.SqlRedirection\",\"dataStoreName\":\"cofax\",\"dataStoreDriver\":\"com.microsoft.jdbc.sqlserver.SQLServerDriver\",\"dataStoreUrl\":\"jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon\",\"dataStoreUser\":\"sa\",\"dataStorePassword\":\"dataStoreTestQuery\",\"dataStoreTestQuery\":\"SETNOCOUNTON;selecttest='test';\",\"dataStoreLogFile\":\"/usr/local/tomcat/logs/datastore.log\",\"dataStoreInitConns\":10,\"dataStoreMaxConns\":100,\"dataStoreConnUsageLimit\":100,\"dataStoreLogLevel\":\"debug\",\"maxUrlLength\":500}},{\"servlet-name\":\"cofaxEmail\",\"servlet-class\":\"org.cofax.cds.EmailServlet\",\"init-param\":{\"mailHost\":\"mail1\",\"mailHostOverride\":\"mail2\"}},{\"servlet-name\":\"cofaxAdmin\",\"servlet-class\":\"org.cofax.cds.AdminServlet\"},{\"servlet-name\":\"fileServlet\",\"servlet-class\":\"org.cofax.cds.FileServlet\"},{\"servlet-name\":\"cofaxTools\",\"servlet-class\":\"org.cofax.cms.CofaxToolsServlet\",\"init-param\":{\"templatePath\":\"toolstemplates/\",\"log\":1,\"logLocation\":\"/usr/local/tomcat/logs/CofaxTools.log\",\"logMaxSize\":\"\",\"dataLog\":1,\"dataLogLocation\":\"/usr/local/tomcat/logs/dataLog.log\",\"dataLogMaxSize\":\"\",\"removePageCache\":\"/content/admin/remove?cache=pages&id=\",\"removeTemplateCache\":\"/content/admin/remove?cache=templates&id=\",\"fileTransferFolder\":\"/usr/local/tomcat/webapps/content/fileTransferFolder\",\"lookInContext\":1,\"adminGroupID\":4,\"betaServer\":true}}],\"servlet-mapping\":{\"cofaxCDS\":\"/\",\"cofaxEmail\":\"/cofaxutil/aemail/*\",\"cofaxAdmin\":\"/admin/*\",\"fileServlet\":\"/static/*\",\"cofaxTools\":\"/tools/*\"},\"taglib\":{\"taglib-uri\":\"cofax.tld\",\"taglib-location\":\"/WEB-INF/tlds/cofax.tld\"}}">>
+ end.
29 src/basho_bench_driver_erldis.erl
@@ -0,0 +1,29 @@
+-module(basho_bench_driver_erldis).
+
+-export([new/1,
+ run/4]).
+
+new(_Id) ->
+ case erldis_client:connect() of
+ {ok, Pid} ->
+ {ok, Pid};
+ {error, {already_started, Pid}} ->
+ {ok, Pid}
+ end.
+
+run(get, KeyGen, _ValueGen, Client) ->
+ Start = KeyGen(),
+ case erldis:mget(Client, lists:seq(Start, Start + 500)) of
+ {error, Reason} ->
+ {error, Reason, Client};
+ _Value ->
+ {ok, Client}
+ end;
+
+run(put, KeyGen, ValueGen, Client) ->
+ case erldis:set(Client, integer_to_list(KeyGen()), ValueGen()) of
+ {error, Reason} ->
+ {error, Reason, Client};
+ _Value ->
+ {ok, Client}
+ end.
7 src/eredis.app.src
@@ -0,0 +1,7 @@
+{application, eredis, [
+ {description, "Erlang Redis Client"},
+ {vsn, "0.1.0"},
+ {modules, []},
+ {registered, []},
+ {applications, [kernel, stdlib]}
+]}.
80 src/eredis.erl
@@ -0,0 +1,80 @@
+%%
+%% Erlang Redis client
+%%
+%% Usage:
+%% {ok, Client} = eredis:start_link().
+%% {ok, <<"OK">>} = eredis:q(["SET", "foo", "bar"]).
+%% {ok, <<"bar">>} = eredis:q(["GET", "foo"]).
+
+-module(eredis).
+-author('knut.nesheim@wooga.com').
+
+-include("eredis.hrl").
+
+-export([start_link/0, start_link/2, start_link/3, start_link/4,
+ q/2]).
+
+%% Exported for testing
+-export([create_multibulk/1]).
+
+%%
+%% PUBLIC API
+%%
+
+-spec start_link() -> {ok, Client::pid()} |
+ {error, {connection_error, Reason::any()}}.
+start_link() ->
+ start_link("127.0.0.1", 6379, 0, "").
+
+start_link(Host, Port) ->
+ start_link(Host, Port, 0, "").
+
+start_link(Host, Port, Database) ->
+ start_link(Host, Port, Database, "").
+
+start_link(Host, Port, Database, Password) when is_list(Host),
+                                                 is_integer(Port),
+                                                 is_integer(Database),
+                                                 is_list(Password) ->
+ eredis_client:start_link(Host, Port, Database, Password).
+
+
+-spec q(Client::pid(), Command::iolist()) ->
+ {ok, Value::binary()} | {error, Reason::binary()}.
+%% @doc: Executes the given command in the specified connection. The
+%% command must be a valid Redis command and may contain arbitrary
+%% data which will be converted to binaries. The returned values will
+%% always be binaries.
+q(Client, Command) ->
+ call(Client, Command).
+
+
+%%
+%% INTERNAL HELPERS
+%%
+
+call(Client, Command) ->
+ Request = {request, create_multibulk(Command)},
+ gen_server:call(Client, Request).
+
+-spec create_multibulk(Args::iolist()) -> Command::iolist().
+%% @doc: Creates a multibulk command with all the correct size headers
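+%% For example (matching the assertion in eredis_tests),
+%% create_multibulk(["SET", "foo", "bar"]) flattens to
+%% <<"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n">>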
+create_multibulk(Args) ->
+ ArgCount = [<<$*>>, integer_to_list(length(Args)), <<?NL>>],
+ ArgsBin = lists:map(fun to_bulk/1, lists:map(fun to_binary/1, Args)),
+
+ [ArgCount, ArgsBin].
+
+to_bulk(B) when is_binary(B) ->
+ [<<$$>>, integer_to_list(iolist_size(B)), <<?NL>>, B, <<?NL>>].
+
+%% @doc: Convert the given value to a binary. Falls back to
+%% term_to_binary/1. For floats, throws {cannot_store_floats, Float}
+%% as we do not want floats to be stored in Redis. Your future self
+%% will thank you for this.
+to_binary(X) when is_list(X) -> list_to_binary(X);
+to_binary(X) when is_atom(X) -> list_to_binary(atom_to_list(X));
+to_binary(X) when is_binary(X) -> X;
+to_binary(X) when is_integer(X) -> list_to_binary(integer_to_list(X));
+to_binary(X) when is_float(X) -> throw({cannot_store_floats, X});
+to_binary(X) -> term_to_binary(X).
+
215 src/eredis_client.erl
@@ -0,0 +1,215 @@
+%%
+%% eredis_client
+%%
+%% The client is implemented as a gen_server which keeps one socket
+%% open to a single Redis instance. Users call us using the API in
+%% eredis.erl.
+%%
+%% The client works like this:
+%% * When starting up, we connect to Redis with the given connection
+%% information, or fail.
+%% * Users call us using gen_server:call, we send the request to Redis,
+%% add the calling process at the end of the queue and reply with
+%% noreply. We are then free to handle new requests and may reply to
+%% the user later.
+%% * We receive data on the socket, we parse the response and reply to
+%% the client at the front of the queue. If the parser does not have
+%% enough data to parse the complete response, we will wait for more
+%% data to arrive.
+%%
+-module(eredis_client).
+-author('knut.nesheim@wooga.com').
+
+-behaviour(gen_server).
+
+-include("eredis.hrl").
+
+%% API
+-export([start_link/4, stop/1, select_database/2]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(state, {
+ host :: string() | undefined,
+ port :: integer() | undefined,
+ password :: binary() | undefined,
+ database :: binary() | undefined,
+
+ socket :: port() | undefined,
+ parser_state :: #pstate{} | undefined,
+ queue :: queue() | undefined
+}).
+
+-define(SOCKET_OPTS, [binary, {active, once}, {packet, raw}, {reuseaddr, true}]).
+
+%%
+%% API
+%%
+
+-spec start_link(Host::list(), Port::integer(), Database::integer(),
+ Password::string()) -> {ok, Pid::pid()} | {error, Reason::term()}.
+start_link(Host, Port, Database, Password) ->
+ gen_server:start_link(?MODULE, [Host, Port, Database, Password], []).
+
+stop(Pid) ->
+ gen_server:call(Pid, stop).
+
+%%====================================================================
+%% gen_server callbacks
+%%====================================================================
+
+init([Host, Port, Database, Password]) ->
+ State = #state{host = Host,
+ port = Port,
+ database = list_to_binary(integer_to_list(Database)),
+ password = list_to_binary(Password),
+ parser_state = eredis_parser:init(),
+ queue = queue:new()},
+
+ case connect(State) of
+ {ok, NewState} ->
+ {ok, NewState};
+ {error, Reason} ->
+ {stop, {connection_error, Reason}}
+ end.
+
+handle_call({request, Req}, From, State) ->
+ case gen_tcp:send(State#state.socket, Req) of
+ ok ->
+ NewQueue = queue:in(From, State#state.queue),
+ {noreply, State#state{queue = NewQueue}};
+ {error, Reason} ->
+ {reply, {error, Reason}, State}
+ end;
+
+handle_call(stop, _From, State) ->
+ {stop, normal, State};
+
+handle_call(_Request, _From, State) ->
+ {reply, unknown_request, State}.
+
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+%% Receive data from socket, see handle_response/2
+handle_info({tcp, _Socket, Bs}, State) ->
+ inet:setopts(State#state.socket, [{active, once}]),
+ {noreply, handle_response(Bs, State)};
+
+%% Socket got closed, for example by Redis terminating idle clients.
+%% Reconnect and if it fails, stop the gen_server.
+handle_info({tcp_closed, _Socket}, State) ->
+ case connect(State) of
+ {ok, NewState} ->
+ %% Throw away the queue, as we will never get a response
+ %% for the requests sent on the old socket, ever
+ {noreply, NewState#state{queue = queue:new()}};
+ {error, Reason} ->
+            {stop, {reconnect_error, Reason}, State}
+ end;
+
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+terminate(_Reason, State) ->
+ gen_tcp:close(State#state.socket),
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+-spec handle_response(Data::binary(), State::#state{}) -> NewState::#state{}.
+%% @doc: Handle the response coming from Redis. This includes parsing
+%% and replying to the correct client, handling partial responses,
+%% handling too much data and handling continuations.
+handle_response(Data, #state{parser_state = ParserState,
+ queue = Queue} = State) ->
+
+ case eredis_parser:parse(ParserState, Data) of
+ %% Got complete response, return value to client
+ {ReturnCode, Value, NewParserState} ->
+ NewQueue = reply({ReturnCode, Value}, Queue),
+ State#state{parser_state = NewParserState,
+ queue = NewQueue};
+
+ %% Got complete response, with extra data, reply to client and
+ %% recurse over the extra data
+ {ReturnCode, Value, Rest, NewParserState} ->
+ NewQueue = reply({ReturnCode, Value}, Queue),
+ handle_response(Rest, State#state{parser_state = NewParserState,
+ queue = NewQueue});
+
+ %% Parser needs more data, the parser state now contains the
+ %% continuation data and we will try calling parse again when
+ %% we have more data
+ {continue, NewParserState} ->
+ State#state{parser_state = NewParserState}
+ end.
+
+%% @doc: Sends a value to the first client in queue. Returns the new
+%% queue without this client.
+reply(Value, Queue) ->
+ case queue:out(Queue) of
+ {{value, From}, NewQueue} ->
+ gen_server:reply(From, Value),
+ NewQueue;
+ {empty, Queue} ->
+ %% Oops
+ error_logger:info_msg("Nothing in queue, but got value from parser~n"),
+ throw(empty_queue)
+ end.
+
+
+%% @doc: Helper for connecting to Redis, authenticating and selecting
+%% the correct database. These commands are synchronous and if Redis
+%% returns something we don't expect, we crash. Returns {ok, State} or
+%% {SomeError, Reason}.
+connect(State) ->
+ case gen_tcp:connect(State#state.host, State#state.port, ?SOCKET_OPTS) of
+ {ok, Socket} ->
+ case authenticate(Socket, State#state.password) of
+ ok ->
+ case select_database(Socket, State#state.database) of
+ ok ->
+ {ok, State#state{socket = Socket}};
+ {error, Reason} ->
+ {select_error, Reason}
+ end;
+ {error, Reason} ->
+ {authentication_error, Reason}
+ end;
+ {error, Reason} ->
+ {error, {connection_error, Reason}}
+ end.
+
+select_database(Socket, Database) ->
+ do_sync_command(Socket, ["SELECT", " ", Database, "\r\n"]).
+
+authenticate(_Socket, <<>>) ->
+ ok;
+authenticate(Socket, Password) ->
+ do_sync_command(Socket, ["AUTH", " ", Password, "\r\n"]).
+
+%% @doc: Executes the given command synchronously, expects Redis to
+%% return "+OK\r\n", otherwise it will fail.
+do_sync_command(Socket, Command) ->
+ inet:setopts(Socket, [{active, false}]),
+ case gen_tcp:send(Socket, Command) of
+ ok ->
+ %% Hope there's nothing else coming down on the socket..
+ case gen_tcp:recv(Socket, 0) of
+ {ok, <<"+OK\r\n">>} ->
+ inet:setopts(Socket, [{active, once}]),
+ ok;
+ Other ->
+ {error, {unexpected_data, Other}}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
273 src/eredis_parser.erl
@@ -0,0 +1,273 @@
+%%
+%% Parser of the Redis protocol, see http://redis.io/topics/protocol
+%%
+%% The idea behind this parser is that we accept any binary data
+%% available on the socket. If there is not enough data to parse a
+%% complete response, we ask the caller to call us later when there is
+%% more data. If there is too much data, we only parse the first
+%% response and let the caller call us again with the rest.
+%%
+%% This approach lets us write a "pure" parser that does not depend on
+%% manipulating the socket, which erldis and redis-erl do. Instead, we
+%% may ask the socket to send us data as fast as possible and parse it
+%% continuously. The overhead of manipulating the socket when parsing
+%% multibulk responses is killing the performance of erldis.
+%%
+%% Future improvements:
+%% * Instead of building a binary all the time in the continuation,
+%% build an iolist
+%% * When we return a bulk continuation, we also include the size of
+%% the bulk. The caller may use this to explicitly call
+%% gen_tcp:recv/2 with the desired size.
+
+-module(eredis_parser).
+-author('knut.nesheim@wooga.com').
+
+-export([init/0, parse/2]).
+
+%% Exported for testing
+-export([parse_bulk/1, parse_bulk/2,
+ parse_multibulk/1, parse_multibulk/2]).
+
+-include_lib("eredis.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+%%
+%% API
+%%
+
+%% @doc: Initialize the parser
+init() ->
+ #pstate{}.
+
+
+-spec parse(State::#pstate{}, Data::binary()) ->
+ {ok, Value::binary(), NewState::#pstate{}} |
+ {ok, Value::binary(), Rest::binary(), NewState::#pstate{}} |
+ {error, ErrString::binary(), NewState::#pstate{}} |
+ {error, ErrString::binary(), Rest::binary(), NewState::#pstate{}} |
+ {continue, NewState::#pstate{}}.
+
+%% @doc: Parses the (possibly partial) response from Redis. Returns
+%% either {ok, Value, NewState}, {ok, Value, Rest, NewState} or
+%% {continue, NewState}. External entry point for parsing.
+%%
+%% In case {ok, Value, NewState} is returned, Value contains the value
+%% returned by Redis. NewState will be an empty parser state.
+%%
+%% In case {ok, Value, Rest, NewState} is returned, Value contains the
+%% most recent value returned by Redis, while Rest contains any extra
+%% data that was given, but was not part of the same response. In this
+%% case you should immediately call parse again with Rest as the Data
+%% argument and NewState as the State argument.
+%%
+%% In case {continue, NewState} is returned, more data is needed
+%% before a complete value can be returned. As soon as you have more
+%% data, call parse again with NewState as the State argument and any
+%% new binary data as the Data argument.
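+%%
+%% For example (taken from the test suite below), a complete reply
+%% followed by extra data returns the extra data as Rest:
+%%   {ok, undefined, <<"$3\r\nfoo\r\n">>, _} = parse(init(), <<"$-1\r\n$3\r\nfoo\r\n">>)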
+
+%% Parser in initial state, the data we receive will be the beginning
+%% of a response
+parse(#pstate{state = undefined} = State, NewData) ->
+ %% Look at the first byte to get the type of reply
+ case NewData of
+ %% Status
+ <<$+, Data/binary>> ->
+ return_result(parse_simple(Data), State, status_continue);
+
+ %% Error
+ <<$-, Data/binary>> ->
+ return_error(parse_simple(Data), State, status_continue);
+
+ %% Integer reply
+ <<$:, Data/binary>> ->
+ return_result(parse_simple(Data), State, status_continue);
+
+ %% Multibulk
+ <<$*, _Rest/binary>> ->
+ return_result(parse_multibulk(NewData), State, multibulk_continue);
+
+ %% Bulk
+ <<$$, _Rest/binary>> ->
+ return_result(parse_bulk(NewData), State, bulk_continue);
+
+ _ ->
+ %% TODO: Handle the case where we start parsing a new
+ %% response, but cannot make any sense of it
+ {error, unknown_response}
+ end;
+
+%% The following clauses all match on different continuation states
+
+parse(#pstate{state = bulk_continue,
+ continuation_data = ContinuationData} = State, NewData) ->
+ return_result(parse_bulk(ContinuationData, NewData), State, bulk_continue);
+
+parse(#pstate{state = multibulk_continue,
+ continuation_data = ContinuationData} = State, NewData) ->
+ return_result(parse_multibulk(ContinuationData, NewData), State, multibulk_continue);
+
+parse(#pstate{state = status_continue,
+ continuation_data = ContinuationData} = State, NewData) ->
+ return_result(parse_simple(ContinuationData, NewData), State, status_continue).
+
+%%
+%% MULTIBULK
+%%
+
+parse_multibulk(<<$*, _/binary>> = Data) ->
+ case get_newline_pos(Data) of
+ undefined ->
+ {continue, {incomplete_size, Data}};
+ NewlinePos ->
+ OffsetNewlinePos = NewlinePos - 1,
+ <<$*, Size:OffsetNewlinePos/binary, ?NL, Bulk/binary>> = Data,
+ IntSize = list_to_integer(binary_to_list(Size)),
+
+ do_parse_multibulk(IntSize, Bulk)
+ end.
+
+%% Size of multibulk was incomplete, try again
+parse_multibulk({incomplete_size, PartialData}, NewData0) ->
+ NewData = <<PartialData/binary, NewData0/binary>>,
+ parse_multibulk(NewData);
+
+%% Ran out of data inside do_parse_multibulk in parse_bulk, must
+%% continue traversing the bulks
+parse_multibulk({in_parsing_bulks, Count, OldData, Acc},
+ NewData0) ->
+ NewData = <<OldData/binary, NewData0/binary>>,
+
+ %% Continue where we left off
+ do_parse_multibulk(Count, NewData, Acc).
+
+%% @doc: Parses the given number of bulks from Data. If Data does not
+%% contain enough bulks, {continue, ContinuationData} is returned with
+%% enough information to start parsing with the correct count and
+%% accumulated data.
+do_parse_multibulk(Count, Data) ->
+ do_parse_multibulk(Count, Data, []).
+
+do_parse_multibulk(0, Data, Acc) ->
+ {ok, lists:reverse(Acc), Data};
+do_parse_multibulk(Count, <<>>, Acc) ->
+ {continue, {in_parsing_bulks, Count, <<>>, Acc}};
+do_parse_multibulk(Count, Data, Acc) ->
+ %% Try parsing the first bulk in Data, if it works, we get the
+ %% extra data back that was not part of the bulk which we can
+ %% recurse on. If the bulk does not contain enough data, we
+ %% return with a continuation and enough data to pick up where we
+ %% left off. In the continuation we will get more data
+ %% automagically in Data, so parsing the bulk might work.
+ case parse_bulk(Data) of
+ {ok, Value, Rest} ->
+ do_parse_multibulk(Count - 1, Rest, [Value | Acc]);
+ {continue, _} ->
+ {continue, {in_parsing_bulks, Count, Data, Acc}}
+ end.
+
+%%
+%% BULK
+%%
+
+%% Bulk, at beginning of response
+parse_bulk(<<$$, _/binary>> = Data) ->
+ %% Find the position of the first terminator, everything up until
+ %% this point contains the size specifier. If we cannot find it,
+ %% we received a partial response and need more data
+ case get_newline_pos(Data) of
+ undefined ->
+ {continue, {incomplete_size, Data}};
+ NewlinePos ->
+ OffsetNewlinePos = NewlinePos - 1, % Take into account the first $
+ <<$$, Size:OffsetNewlinePos/binary, Bulk/binary>> = Data,
+ IntSize = list_to_integer(binary_to_list(Size)),
+
+ if
+ %% Nil response from redis
+ IntSize =:= -1 ->
+ <<?NL, Rest/binary>> = Bulk,
+ {ok, undefined, Rest};
+ %% We have enough data for the entire bulk
+ size(Bulk) - (size(<<?NL>>) * 2) >= IntSize ->
+ <<?NL, Value:IntSize/binary, ?NL, Rest/binary>> = Bulk,
+ {ok, Value, Rest};
+ true ->
+ %% Need more data, so we send the bulk without the
+ %% size specifier to our future self
+ {continue, {IntSize, Bulk}}
+ end
+ end.
+
+%% Bulk, continuation from partial bulk size
+parse_bulk({incomplete_size, PartialData}, NewData0) ->
+ NewData = <<PartialData/binary, NewData0/binary>>,
+ parse_bulk(NewData);
+
+%% Bulk, continuation from partial bulk value
+parse_bulk({IntSize, Acc0}, Data) ->
+ Acc = <<Acc0/binary, Data/binary>>,
+
+ if
+ size(Acc) - (size(<<?NL>>) * 2) >= IntSize ->
+ <<?NL, Value:IntSize/binary, ?NL, Rest/binary>> = Acc,
+ {ok, Value, Rest};
+ true ->
+ {continue, {IntSize, Acc}}
+ end.
+
+
+%%
+%% SIMPLE REPLIES
+%%
+%% Handles replies on the following format:
+%% TData\r\n
+%% Where T is a type byte, like '+', '-', ':'. Data is terminated by \r\n
+
+%% @doc: Parse simple replies. Data must not contain type
+%% identifier. Type must be handled by the caller.
+parse_simple(Data) ->
+ case get_newline_pos(Data) of
+ undefined ->
+ {continue, {incomplete_simple, Data}};
+ NewlinePos ->
+ <<Value:NewlinePos/binary, ?NL, Rest/binary>> = Data,
+ {ok, Value, Rest}
+ end.
+
+parse_simple({incomplete_simple, OldData}, NewData0) ->
+ NewData = <<OldData/binary, NewData0/binary>>,
+ parse_simple(NewData).
+
+%%
+%% INTERNAL HELPERS
+%%
+
+get_newline_pos(B) ->
+ case binary:match(B, <<?NL>>) of
+ {Pos, _} ->
+ Pos;
+ nomatch ->
+ undefined
+ end.
+
+%% @doc: Helper for handling the result of parsing. Will update the
+%% parser state with the continuation of given name if necessary.
+return_result({ok, Value, <<>>}, _State, _StateName) ->
+ {ok, Value, init()};
+return_result({ok, Value, Rest}, _State, _StateName) ->
+ {ok, Value, Rest, init()};
+return_result({continue, ContinuationData}, State, StateName) ->
+ {continue, State#pstate{state = StateName, continuation_data = ContinuationData}}.
+
+%% @doc: Helper for returning an error. Uses return_result/3 and just transforms the {ok, ...} tuple into an error tuple
+return_error(Result, State, StateName) ->
+ case return_result(Result, State, StateName) of
+ {ok, Value, ParserState} ->
+ {error, Value, ParserState};
+ {ok, Value, Rest, ParserState} ->
+ {error, Value, Rest, ParserState};
+ Res ->
+ Res
+ end.
247 test/eredis_parser_tests.erl
@@ -0,0 +1,247 @@
+%%
+%% Parser tests. In particular tests for partial responses. This would
+%% probably be a very good candidate for testing with quickcheck or
+%% PropEr.
+%%
+
+-module(eredis_parser_tests).
+
+-include_lib("eredis.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-import(eredis_parser, [parse/2, init/0, parse_bulk/1, parse_bulk/2,
+ parse_multibulk/1, parse_multibulk/2]).
+
+
+parse_bulk_test() ->
+ B = <<"$3\r\nbar\r\n">>,
+ ?assertEqual({ok, <<"bar">>, #pstate{}}, parse(#pstate{}, B)).
+
+parse_split_bulk_test() ->
+ State1 = init(),
+ B1 = <<"$3\r\n">>,
+ B2 = <<"bar\r\n">>,
+
+ {continue, State2} = parse(State1, B1),
+ ?assertEqual(#pstate{state = bulk_continue, continuation_data = {3, <<"\r\n">>}},
+ State2),
+
+ ?assertMatch({ok, <<"bar">>, _}, parse(State2, B2)).
+
+
+parse_very_split_bulk_test() ->
+ State1 = init(),
+ B1 = <<"$1">>,
+ B2 = <<"3\r\n">>,
+ B3 = <<"foobarbazquux\r\n">>, %% 13 bytes
+
+ ?assertEqual({continue,
+ #pstate{state = bulk_continue,
+ continuation_data = {incomplete_size, <<"$1">>}}},
+ parse(State1, B1)),
+ {continue, State2} = parse(State1, B1),
+
+ ?assertEqual({continue,
+ #pstate{state = bulk_continue,
+ continuation_data = {13, <<"\r\n">>}}},
+ parse(State2, B2)),
+ {continue, State3} = parse(State2, B2),
+
+ ?assertMatch({ok, <<"foobarbazquux">>, _}, parse(State3, B3)).
+
+
+too_much_data_test() ->
+ B = <<"$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n">>,
+ ?assertEqual({ok, <<"1">>, <<"$1\r\n2\r\n$1\r\n3\r\n">>}, parse_bulk(B)).
+
+too_much_data_in_continuation_test() ->
+ B1 = <<"$1\r\n">>,
+ B2 = <<"1\r\n$1\r\n2\r\n$1\r\n3\r\n">>,
+
+ ?assertEqual({continue, {1, <<"\r\n">>}}, parse_bulk(B1)),
+ {continue, ContinuationData1} = parse_bulk(B1),
+
+ ?assertEqual({ok, <<"1">>, <<"$1\r\n2\r\n$1\r\n3\r\n">>},
+ parse_bulk(ContinuationData1, B2)).
+
+bulk_test_() ->
+ B = <<"$3\r\nbar\r\n">>,
+ ?_assertEqual({ok, <<"bar">>, <<>>}, parse_bulk(B)).
+
+bulk_split_test() ->
+ B1 = <<"$3\r\n">>,
+ B2 = <<"bar\r\n">>,
+
+ ?assertEqual({continue, {3, <<"\r\n">>}}, parse_bulk(B1)),
+ {continue, Res} = parse_bulk(B1),
+ ?assertEqual({ok, <<"bar">>, <<>>}, parse_bulk(Res, B2)).
+
+bulk_very_split_test() ->
+ B1 = <<"$1">>,
+ B2 = <<"3\r\n">>,
+ B3 = <<"foobarbazquux\r\n">>, %% 13 bytes
+
+ ?assertEqual({continue, {incomplete_size, <<"$1">>}}, parse_bulk(B1)),
+ {continue, ContinuationData1} = parse_bulk(B1),
+
+ ?assertEqual({continue, {13, <<"\r\n">>}}, parse_bulk(ContinuationData1, B2)),
+ {continue, ContinuationData2} = parse_bulk(ContinuationData1, B2),
+
+ ?assertEqual({ok, <<"foobarbazquux">>, <<>>}, parse_bulk(ContinuationData2, B3)).
+
+bulk_split_on_newline_test() ->
+ B1 = <<"$13\r\nfoobarbazquux">>,
+ B2 = <<"\r\n">>, %% 13 bytes
+
+ ?assertEqual({continue, {13, <<"\r\nfoobarbazquux">>}}, parse_bulk(B1)),
+ {continue, ContinuationData1} = parse_bulk(B1),
+ ?assertEqual({ok, <<"foobarbazquux">>, <<>>}, parse_bulk(ContinuationData1, B2)).
+
+
+bulk_nil_test() ->
+ B = <<"$-1\r\n">>,
+ ?assertEqual({ok, undefined, init()}, parse(init(), B)).
+
+bulk_nil_chunked_test() ->
+ State1 = init(),
+ B1 = <<"$-1">>,
+ B2 = <<"\r\n">>,
+ ?assertEqual({continue, #pstate{state = bulk_continue,
+ continuation_data = {incomplete_size,<<"$-1">>}}},
+ parse(State1, B1)),
+
+ {continue, State2} = parse(State1, B1),
+
+ ?assertEqual({ok, undefined, init()}, parse(State2, B2)).
+
+bulk_nil_with_extra_test() ->
+ B = <<"$-1\r\n$3\r\nfoo\r\n">>,
+ ?assertEqual({ok, undefined, <<"$3\r\nfoo\r\n">>, init()}, parse(init(), B)).
+
+bulk_crap_test() ->
+ B = <<"\r\n">>,
+ ?assertEqual({error, unknown_response}, parse(init(), B)).
+
+
+
+
+multibulk_test() ->
+ %% [{1, 1}, {2, 2}, {3, 3}]
+ B = <<"*3\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n">>,
+ ?assertEqual({ok, [<<"1">>, <<"2">>, <<"3">>], <<>>}, parse_multibulk(B)).
+
+multibulk_parse_test() ->
+ %% [{1, 1}, {2, 2}, {3, 3}]
+ B = <<"*3\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n">>,
+ ?assertEqual({ok, [<<"1">>, <<"2">>, <<"3">>], #pstate{}}, parse(init(), B)).
+
+multibulk_split_parse_test() ->
+ %% [{1, 1}, {2, 2}, {3, 3}]
+ B1 = <<"*3\r\n$1\r\n1\r\n$1">>,
+ B2 = <<"\r\n2\r\n$1\r\n3\r\n">>,
+
+ State1 = init(),
+
+ ?assertEqual({continue,
+ #pstate{state = multibulk_continue,
+ continuation_data =
+ {in_parsing_bulks,2,<<"$1">>,[<<"1">>]}}},
+ parse(State1, B1)),
+
+ {continue, State2} = parse(State1, B1),
+
+ ?assertMatch({ok, [<<"1">>, <<"2">>, <<"3">>], _}, parse(State2, B2)).
+
+multibulk_split_test() ->
+ %% Split into 2 parts: <<"*3\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n">>
+ B1 = <<"*3\r\n$1\r\n1\r\n$1">>,
+ B2 = <<"\r\n2\r\n$1\r\n3\r\n">>,
+
+ {continue, ContinuationData1} = parse_multibulk(B1),
+ Result = parse_multibulk(ContinuationData1, B2),
+ ?assertEqual({ok, [<<"1">>, <<"2">>, <<"3">>], <<>>}, Result).
+
+multibulk_very_split_test() ->
+ %% Split into 4 parts: <<"*3\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n">>
+ B1 = <<"*">>,
+ B2 = <<"3\r\n$1\r">>,
+ B3 = <<"\n1\r\n$1\r\n2\r\n$1">>,
+ B4 = <<"\r\n3\r\n">>,
+
+ ?assertEqual({continue, {incomplete_size, <<"*">>}}, parse_multibulk(B1)),
+ {continue, ContinuationData1} = parse_multibulk(B1),
+ {continue, ContinuationData2} = parse_multibulk(ContinuationData1, B2),
+ {continue, ContinuationData3} = parse_multibulk(ContinuationData2, B3),
+
+ Result = parse_multibulk(ContinuationData3, B4),
+ ?assertEqual({ok, [<<"1">>, <<"2">>, <<"3">>], <<>>}, Result).
+
+multibulk_newline_split_test() ->
+ %% Split into 2 parts: <<"*2\r\n$1\r\n1\r\n$1\r\n2\r\n">>
+ B1 = <<"*2\r\n$1\r\n1">>,
+ B2 = <<"\r\n$1\r\n2\r\n">>,
+ ?assertEqual({continue, {in_parsing_bulks, 2, <<"$1\r\n1">>, []}},
+ parse_multibulk(B1)),
+
+ {continue, ContinuationData1} = parse_multibulk(B1),
+
+ ?assertEqual({ok, [<<"1">>, <<"2">>], <<>>}, parse_multibulk(ContinuationData1, B2)).
+
+big_chunks_test() ->
+ %% Real-world example, MGET 1..200
+ B1 = <<"*200\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n$1\r\n4\r\n$1\r\n5\r\n$1\r\n6\r\n$1\r\n7\r\n$1\r\n8\r\n$1\r\n9\r\n$2\r\n10\r\n$2\r\n11\r\n$2\r\n12\r\n$2\r\n13\r\n$2\r\n14\r\n$2\r\n15\r\n$2\r\n16\r\n$2\r\n17\r\n$2\r\n18\r\n$2\r\n19\r\n$2\r\n20\r\n$2\r\n21\r\n$2\r\n22\r\n$2\r\n23\r\n$2\r\n24\r\n$2\r\n25\r\n$2\r\n26\r\n$2\r\n27\r\n$2\r\n28\r\n$2\r\n29\r\n$2\r\n30\r\n$2\r\n31\r\n$2\r\n32\r\n$2\r\n33\r\n$2\r\n34\r\n$2\r\n35\r\n$2\r\n36\r\n$2\r\n37\r\n$2\r\n38\r\n$2\r\n39\r\n$2\r\n40\r\n$2\r\n41\r\n$2\r\n42\r\n$2\r\n43\r\n$2\r\n44\r\n$2\r\n45\r\n$2\r\n46\r\n$2\r\n47\r\n$2\r\n48\r\n$2\r\n49\r\n$2\r\n50\r\n$2\r\n51\r\n$2\r\n52\r\n$2\r\n53\r\n$2\r\n54\r\n$2\r\n55\r\n$2\r\n56\r\n$2\r\n57\r\n$2\r\n58\r\n$2\r\n59\r\n$2\r\n60\r\n$2\r\n61\r\n$2\r\n62\r\n$2\r\n63\r\n$2\r\n64\r\n$2\r\n65\r\n$2\r\n66\r\n$2\r\n67\r\n$2\r\n68\r\n$2\r\n69\r\n$2\r\n70\r\n$2\r\n71\r\n$2\r\n72\r\n$2\r\n73\r\n$2\r\n74\r\n$2\r\n75\r\n$2\r\n76\r\n$2\r\n77\r\n$2\r\n78\r\n$2\r\n79\r\n$2\r\n80\r\n$2\r\n81\r\n$2\r\n82\r\n$2\r\n83\r\n$2\r\n84\r\n$2\r\n85\r\n$2\r\n86\r\n$2\r\n87\r\n$2\r\n88\r\n$2\r\n89\r\n$2\r\n90\r\n$2\r\n91\r\n$2\r\n92\r\n$2\r\n93\r\n$2\r\n94\r\n$2\r\n95\r\n$2\r\n96\r\n$2\r\n97\r\n$2\r\n98\r\n$2\r\n99\r\n$3\r\n100\r\n$3\r\n101\r\n$3\r\n102\r\n$3\r\n103\r\n$3\r\n104\r\n$3\r\n105\r\n$3\r\n106\r\n$3\r\n107\r\n$3\r\n108\r\n$3\r\n109\r\n$3\r\n110\r\n$3\r\n111\r\n$3\r\n112\r\n$3\r\n113\r\n$3\r\n114\r\n$3\r\n115\r\n$3\r\n116\r\n$3\r\n117\r\n$3\r\n118\r\n$3\r\n119\r\n$3\r\n120\r\n$3\r\n121\r\n$3\r\n122\r\n$3\r\n123\r\n$3\r\n124\r\n$3\r\n125\r\n$3\r\n126\r\n$3\r\n127\r\n$3\r\n128\r\n$3\r\n129\r\n$3\r\n130\r\n$3\r\n131\r\n$3\r\n132\r\n$3\r\n133\r\n$3\r\n134\r\n$3\r\n135\r\n$3\r\n136\r\n$3\r\n137\r\n$3\r\n138\r\n$3\r\n139\r\n$3\r\n140\r\n$3\r\n141\r\n$3\r\n142\r\n$3\r\n143\r\n$3\r\n144\r\n$3\r\n145\r\n$3\r\n146\r\n$3\r\n147\r\n$3\r\n148\r\n$3\r\n149\r\n$3\r\n150\r\n$3\r\n151\r\n$3\r\n152\r\n$3\r\n153\r\n$3\r\n154\r\n$3\r\n155\r\n$3\r\n156\r\n$3\r\n157\r\n$3\r\n158\r\n$3\r\n159\r\n$3\r\n160\r\n$3\r\n161\r\n$3\r\n162\r\n$3\r\n163\r\n$3\r\n164\r\n$3\r\n165\r\n$3\r\n166\r\n$3\r\n167\r\n$3\r\n168\r\n$3\r\n169\r\n$3\r\n170\r\n$3\r\n171\r\n$3\r\n172\r\n$3\r\n173\r\n$3\r\n1">>,
+ B2 = <<"74\r\n$3\r\n175\r\n$3\r\n176\r\n$3\r\n177\r\n$3\r\n178\r\n$3\r\n179\r\n$3\r\n180\r\n$3\r\n181\r\n$3\r\n182\r\n$3\r\n183\r\n$3\r\n184\r\n$3\r\n185\r\n$3\r\n186\r\n$3\r\n187\r\n$3\r\n188\r\n$3\r\n189\r\n$3\r\n190\r\n$3\r\n191\r\n$3\r\n192\r\n$3\r\n193\r\n$3\r\n194\r\n$3\r\n195\r\n$3\r\n196\r\n$3\r\n197\r\n$3\r\n198\r\n$3\r\n199\r\n$3\r\n200\r\n">>,
+ ExpectedValues = [list_to_binary(integer_to_list(N)) || N <- lists:seq(1, 200)],
+ State1 = init(),
+
+ ?assertMatch({continue,
+ #pstate{state = multibulk_continue,
+ continuation_data = {in_parsing_bulks, 27, _, _}}},
+ parse(State1, B1)),
+ {continue, State2} = parse(State1, B1),
+
+ ?assertMatch({ok, ExpectedValues, #pstate{state = undefined,
+ continuation_data = undefined}},
+ parse(State2, B2)).
+
+
+chunk_test() ->
+ B1 = <<"*500\r\n$1\r\n1\r\n$1\r\n2\r\n$1\r\n3\r\n$1\r\n4\r\n$1\r\n5\r\n$1\r\n6\r\n$1\r\n7\r\n$1\r\n8\r\n$1\r\n9\r\n$2\r\n10\r\n$2\r\n11\r\n$2\r\n12\r\n$2\r\n13\r\n$2\r\n14\r\n$2\r\n15\r\n$2\r\n16\r\n$2\r\n17\r\n$2\r\n18\r\n$2\r\n19\r\n$2\r\n20\r\n$2\r\n21\r\n$2\r\n22\r\n$2\r\n23\r\n$2\r\n24\r\n$2\r\n25\r\n$2\r\n26\r\n$2\r\n27\r\n$2\r\n28\r\n$2\r\n29\r\n$2\r\n30\r\n$2\r\n31\r\n$2\r\n32\r\n$2\r\n33\r\n$2\r\n34\r\n$2\r\n35\r\n$2\r\n36\r\n$2\r\n37\r\n$2\r\n38\r\n$2\r\n39\r\n$2\r\n40\r\n$2\r\n41\r\n$2\r\n42\r\n$2\r\n43\r\n$2\r\n44\r\n$2\r\n45\r\n$2\r\n46\r\n$2\r\n47\r\n$2\r\n48\r\n$2\r\n49\r\n$2\r\n50\r\n$2\r\n51\r\n$2\r\n52\r\n$2\r\n53\r\n$2\r\n54\r\n$2\r\n55\r\n$2\r\n56\r\n$2\r\n57\r\n$2\r\n58\r\n$2\r\n59\r\n$2\r\n60\r\n$2\r\n61\r\n$2\r\n62\r\n$2\r\n63\r\n$2\r\n64\r\n$2\r\n65\r\n$2\r\n66\r\n$2\r\n67\r\n$2\r\n68\r\n$2\r\n69\r\n$2\r\n70\r\n$2\r\n71\r\n$2\r\n72\r\n$2\r\n73\r\n$2\r\n74\r\n$2\r\n75\r\n$2\r\n76\r\n$2\r\n77\r\n$2\r\n78\r\n$2\r\n79\r\n$2\r\n80\r\n$2\r\n81\r\n$2\r\n82\r\n$2\r\n83\r\n$2\r\n84\r\n$2\r\n85\r\n$2\r\n86\r\n$2\r\n87\r\n$2\r\n88\r\n$2\r\n89\r\n$2\r\n90\r\n$2\r\n91\r\n$2\r\n92\r\n$2\r\n93\r\n$2\r\n94\r\n$2\r\n95\r\n$2\r\n96\r\n$2\r\n97\r\n$2\r\n98\r\n$2\r\n99\r\n$3\r\n100\r\n$3\r\n101\r\n$3\r\n102\r\n$3\r\n103\r\n$3\r\n104\r\n$3\r\n105\r\n$3\r\n106\r\n$3\r\n107\r\n$3\r\n108\r\n$3\r\n109\r\n$3\r\n110\r\n$3\r\n111\r\n$3\r\n112\r\n$3\r\n113\r\n$3\r\n114\r\n$3\r\n115\r\n$3\r\n116\r\n$3\r\n117\r\n$3\r\n118\r\n$3\r\n119\r\n$3\r\n120\r\n$3\r\n121\r\n$3\r\n122\r\n$3\r\n123\r\n$3\r\n124\r\n$3\r\n125\r\n$3\r\n126\r\n$3\r\n127\r\n$3\r\n128\r\n$3\r\n129\r\n$3\r\n130\r\n$3\r\n131\r\n$3\r\n132\r\n$3\r\n133\r\n$3\r\n134\r\n$3\r\n135\r\n$3\r\n136\r\n$3\r\n137\r\n$3\r\n138\r\n$3\r\n139\r\n$3\r\n140\r\n$3\r\n141\r\n$3\r\n142\r\n$3\r\n143\r\n$3\r\n144\r\n$3\r\n145\r\n$3\r\n146\r\n$3\r\n147\r\n$3\r\n148\r\n$3\r\n149\r\n$3\r\n150\r\n$3\r\n151\r\n$3\r\n152\r\n$3\r\n153\r\n$3\r\n154\r\n$3\r\n155\r\n$3\r\n156\r\n$3\r\n157\r\n$3\r\n158\r\n$3\r\n159\r\n$3\r\n160\r\n$3\r\n161\r\n$3\r\n162\r\n$3\r\n163\r\n$3\r\n164\r\n$3\r\n165\r\n$3\r\n166\r\n$3\r\n167\r\n$3\r\n168\r\n$3\r\n169\r\n$3\r\n170\r\n$3\r\n171\r\n$3\r\n172\r\n$3\r\n173\r\n$3\r\n1">>,
+ B2 = <<"74\r\n$3\r\n175\r\n$3\r\n176\r\n$3\r\n177\r\n$3\r\n178\r\n$3\r\n179\r\n$3\r\n180\r\n$3\r\n181\r\n$3\r\n182\r\n$3\r\n183\r\n$3\r\n184\r\n$3\r\n185\r\n$3\r\n186\r\n$3\r\n187\r\n$3\r\n188\r\n$3\r\n189\r\n$3\r\n190\r\n$3\r\n191\r\n$3\r\n192\r\n$3\r\n193\r\n$3\r\n194\r\n$3\r\n195\r\n$3\r\n196\r\n$3\r\n197\r\n$3\r\n198\r\n$3\r\n199\r\n$3\r\n200\r\n$3\r\n201\r\n$3\r\n202\r\n$3\r\n203\r\n$3\r\n204\r\n$3\r\n205\r\n$3\r\n206\r\n$3\r\n207\r\n$3\r\n208\r\n$3\r\n209\r\n$3\r\n210\r\n$3\r\n211\r\n$3\r\n212\r\n$3\r\n213\r\n$3\r\n214\r\n$3\r\n215\r\n$3\r\n216\r\n$3\r\n217\r\n$3\r\n218\r\n$3\r\n219\r\n$3\r\n220\r\n$3\r\n221\r\n$3\r\n222\r\n$3\r\n223\r\n$3\r\n224\r\n$3\r\n225\r\n$3\r\n226\r\n$3\r\n227\r\n$3\r\n228\r\n$3\r\n229\r\n$3\r\n230\r\n$3\r\n231\r\n$3\r\n232\r\n$3\r\n233\r\n$3\r\n234\r\n$3\r\n235\r\n$3\r\n236\r\n$3\r\n237\r\n$3\r\n238\r\n$3\r\n239\r\n$3\r\n240\r\n$3\r\n241\r\n$3\r\n242\r\n$3\r\n243\r\n$3\r\n244\r\n$3\r\n245\r\n$3\r\n246\r\n$3\r\n247\r\n$3\r\n248\r\n$3\r\n249\r\n$3\r\n250\r\n$3\r\n251\r\n$3\r\n252\r\n$3\r\n253\r\n$3\r\n254\r\n$3\r\n255\r\n$3\r\n256\r\n$3\r\n257\r\n$3\r\n258\r\n$3\r\n259\r\n$3\r\n260\r\n$3\r\n261\r\n$3\r\n262\r\n$3\r\n263\r\n$3\r\n264\r\n$3\r\n265\r\n$3\r\n266\r\n$3\r\n267\r\n$3\r\n268\r\n$3\r\n269\r\n$3\r\n270\r\n$3\r\n271\r\n$3\r\n272\r\n$3\r\n273\r\n$3\r\n274\r\n$3\r\n275\r\n$3\r\n276\r\n$3\r\n277\r\n$3\r\n278\r\n$3\r\n279\r\n$3\r\n280\r\n$3\r\n281\r\n$3\r\n282\r\n$3\r\n283\r\n$3\r\n284\r\n$3\r\n285\r\n$3\r\n286\r\n$3\r\n287\r\n$3\r\n288\r\n$3\r\n289\r\n$3\r\n290\r\n$3\r\n291\r\n$3\r\n292\r\n$3\r\n293\r\n$3\r\n294\r\n$3\r\n295\r\n$3\r\n296\r\n$3\r\n297\r\n$3\r\n298\r\n$3\r\n299\r\n$3\r\n300\r\n$3\r\n301\r\n$3\r\n302\r\n$3\r\n303\r\n$3\r\n304\r\n$3\r\n305\r\n$3\r\n306\r\n$3\r\n307\r\n$3\r\n308\r\n$3\r\n309\r\n$3\r\n310\r\n$3\r\n311\r\n$3\r\n312\r\n$3\r\n313\r\n$3\r\n314\r\n$3\r\n315\r\n$3\r\n316\r\n$3\r\n317\r\n$3\r\n318\r\n$3\r\n319\r\n$3\r\n320\r\n$3\r\n321\r\n$3\r\n322\r\n$3\r\n323\r\n$3\r\n324\r\n$3\r\n325\r\n$3\r\n326\r\n$3\r\n327\r\n$3\r\n328\r\n$3\r\n329\r\n$3\r\n330\r\n$3\r\n331\r\n$3\r\n332\r\n$3\r\n333\r\n$3\r\n334\r\n$3\r\n335\r\n$3\r\n336">>,
+ B3 = <<"\r\n$3\r\n337\r\n$3\r\n338\r\n$3\r\n339\r\n$3\r\n340\r\n$3\r\n341\r\n$3\r\n342\r\n$3\r\n343\r\n$3\r\n344\r\n$3\r\n345\r\n$3\r\n346\r\n$3\r\n347\r\n$3\r\n348\r\n$3\r\n349\r\n$3\r\n350\r\n$3\r\n351\r\n$3\r\n352\r\n$3\r\n353\r\n$3\r\n354\r\n$3\r\n355\r\n$3\r\n356\r\n$3\r\n357\r\n$3\r\n358\r\n$3\r\n359\r\n$3\r\n360\r\n$3\r\n361\r\n$3\r\n362\r\n$3\r\n363\r\n$3\r\n364\r\n$3\r\n365\r\n$3\r\n366\r\n$3\r\n367\r\n$3\r\n368\r\n$3\r\n369\r\n$3\r\n370\r\n$3\r\n371\r\n$3\r\n372\r\n$3\r\n373\r\n$3\r\n374\r\n$3\r\n375\r\n$3\r\n376\r\n$3\r\n377\r\n$3\r\n378\r\n$3\r\n379\r\n$3\r\n380\r\n$3\r\n381\r\n$3\r\n382\r\n$3\r\n383\r\n$3\r\n384\r\n$3\r\n385\r\n$3\r\n386\r\n$3\r\n387\r\n$3\r\n388\r\n$3\r\n389\r\n$3\r\n390\r\n$3\r\n391\r\n$3\r\n392\r\n$3\r\n393\r\n$3\r\n394\r\n$3\r\n395\r\n$3\r\n396\r\n$3\r\n397\r\n$3\r\n398\r\n$3\r\n399\r\n$3\r\n400\r\n$3\r\n401\r\n$3\r\n402\r\n$3\r\n403\r\n$3\r\n404\r\n$3\r\n405\r\n$3\r\n406\r\n$3\r\n407\r\n$3\r\n408\r\n$3\r\n409\r\n$3\r\n410\r\n$3\r\n411\r\n$3\r\n412\r\n$3\r\n413\r\n$3\r\n414\r\n$3\r\n415\r\n$3\r\n416\r\n$3\r\n417\r\n$3\r\n418\r\n$3\r\n419\r\n$3\r\n420\r\n$3\r\n421\r\n$3\r\n422\r\n$3\r\n423\r\n$3\r\n424\r\n$3\r\n425\r\n$3\r\n426\r\n$3\r\n427\r\n$3\r\n428\r\n$3\r\n429\r\n$3\r\n430\r\n$3\r\n431\r\n$3\r\n432\r\n$3\r\n433\r\n$3\r\n434\r\n$3\r\n435\r\n$3\r\n436\r\n$3\r\n437\r\n$3\r\n438\r\n$3\r\n439\r\n$3\r\n440\r\n$3\r\n441\r\n$3\r\n442\r\n$3\r\n443\r\n$3\r\n444\r\n$3\r\n445\r\n$3\r\n446\r\n$3\r\n447\r\n$3\r\n448\r\n$3\r\n449\r\n$3\r\n450\r\n$3\r\n451\r\n$3\r\n452\r\n$3\r\n453\r\n$3\r\n454\r\n$3\r\n455\r\n$3\r\n456\r\n$3\r\n457\r\n$3\r\n458\r\n$3\r\n459\r\n$3\r\n460\r\n$3\r\n461\r\n$3\r\n462\r\n$3\r\n463\r\n$3\r\n464\r\n$3\r\n465\r\n$3\r\n466\r\n$3\r\n467\r\n$3\r\n468\r\n$3\r\n469\r\n$3\r\n470\r\n$3\r\n471\r\n$3\r\n472\r\n$3\r\n473\r\n$3\r\n474\r\n$3\r\n475\r\n$3\r\n476\r\n$3\r\n477\r\n$3\r\n478\r\n$3\r\n479\r\n$3\r\n480\r\n$3\r\n481\r\n$3\r\n482\r\n$3\r\n483\r\n$3\r\n484\r\n$3\r\n485\r\n$3\r\n486\r\n$3\r\n487\r\n$3\r\n488\r\n$3\r\n489\r\n$3\r\n490\r\n$3\r\n491\r\n$3\r\n492\r\n$3\r\n493\r\n$3\r\n494\r\n$3\r\n495\r\n$3\r\n496\r\n$3\r\n497\r\n$3\r\n498\r\n">>,
+
+ {continue, ContinuationData1} = parse_multibulk(B1),
+ {continue, ContinuationData2} = parse_multibulk(ContinuationData1, B2),
+
+ ?assertMatch({continue, {in_parsing_bulks, 2, <<>>, _}},
+ parse_multibulk(ContinuationData2, B3)).
+
+%% @doc: Test a binary string which contains \r\n inside its data
+binary_safe_test() ->
+ B = <<"$14\r\nfoobar\r\nbarbaz\r\n">>,
+ ?assertEqual({ok, <<"foobar\r\nbarbaz">>, init()}, parse(init(), B)).
+
+
+status_test() ->
+ B = <<"+OK\r\n">>,
+ ?assertEqual({ok, <<"OK">>, init()}, parse(init(), B)).
+
+status_chunked_test() ->
+ B1 = <<"+O">>,
+ B2 = <<"K\r\n">>,
+ State1 = init(),
+
+ ?assertEqual({continue, #pstate{state = status_continue,
+ continuation_data = {incomplete_simple, <<"O">>}}},
+ parse(State1, B1)),
+ {continue, State2} = parse(State1, B1),
+ ?assertEqual({ok, <<"OK">>, init()}, parse(State2, B2)).
+
+error_test() ->
+ B = <<"-ERR wrong number of arguments for 'get' command\r\n">>,
+ ?assertEqual({error, <<"ERR wrong number of arguments for 'get' command">>, init()},
+ parse(init(), B)).
+
+integer_test() ->
+ B = <<":2\r\n">>,
+ ?assertEqual({ok, <<"2">>, init()}, parse(init(), B)).
53 test/eredis_tests.erl
@@ -0,0 +1,53 @@
+-module(eredis_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-import(eredis, [create_multibulk/1]).
+
+get_set_test() ->
+ C = c(),
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["FLUSHALL"])),
+
+ ?assertEqual({ok, undefined}, eredis:q(C, ["GET", foo])),
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["SET", foo, bar])),
+ ?assertEqual({ok, <<"bar">>}, eredis:q(C, ["GET", foo])).
+
+
+delete_test() ->
+ C = c(),
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["FLUSHALL"])),
+
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["SET", foo, bar])),
+ ?assertEqual({ok, <<"1">>}, eredis:q(C, ["DEL", foo])),
+ ?assertEqual({ok, undefined}, eredis:q(C, ["GET", foo])).
+
+mset_mget_test() ->
+ C = c(),
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["FLUSHALL"])),
+ Keys = lists:seq(1, 1000),
+ KeyValuePairs = [[K, K*2] || K <- Keys],
+ ExpectedResult = [list_to_binary(integer_to_list(K * 2)) || K <- Keys],
+
+ ?assertEqual({ok, <<"OK">>}, eredis:q(C, ["MSET" | lists:flatten(KeyValuePairs)])),
+ ?assertEqual({ok, ExpectedResult}, eredis:q(C, ["MGET" | Keys])).
+
+c() ->
+ Res = eredis:start_link(),
+ ?assertMatch({ok, _}, Res),
+ {ok, C} = Res,
+ C.
+
+
+
+multibulk_test_() ->
+ [?_assertEqual(<<"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n">>,
+ list_to_binary(create_multibulk(["SET", "foo", "bar"]))),
+ ?_assertEqual(<<"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n">>,
+ list_to_binary(create_multibulk(['SET', foo, bar]))),
+
+ ?_assertEqual(<<"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\n123\r\n">>,
+ list_to_binary(create_multibulk(['SET', foo, 123]))),
+
+ ?_assertThrow({cannot_store_floats, 123.5},
+ list_to_binary(create_multibulk(['SET', foo, 123.5])))
+ ].