Initial commit

commit ed5f2c6e88aba5dba8448c057c6c659560b8400e (0 parents)
Scott Brooks authored
Showing 10,900 additions and 0 deletions across 79 files.
  1. +1 −0  .gitignore
  2. +20 −0 Makefile
  3. +9 −0 deps/mochiweb-src/LICENSE
  4. +20 −0 deps/mochiweb-src/Makefile
  5. +1 −0  deps/mochiweb-src/README
  6. +17 −0 deps/mochiweb-src/ebin/mochiweb.app
  7. +146 −0 deps/mochiweb-src/examples/https/https_store.erl
  8. +19 −0 deps/mochiweb-src/examples/https/server_cert.pem
  9. +27 −0 deps/mochiweb-src/examples/https/server_key.pem
  10. +81 −0 deps/mochiweb-src/examples/keepalive/keepalive.erl
  11. +20 −0 deps/mochiweb-src/priv/skel/Makefile
  12. +8 −0 deps/mochiweb-src/priv/skel/priv/www/index.html
  13. +33 −0 deps/mochiweb-src/priv/skel/src/Makefile
  14. +14 −0 deps/mochiweb-src/priv/skel/src/skel.app
  15. +30 −0 deps/mochiweb-src/priv/skel/src/skel.erl
  16. +1 −0  deps/mochiweb-src/priv/skel/src/skel.hrl
  17. +30 −0 deps/mochiweb-src/priv/skel/src/skel_app.erl
  18. +92 −0 deps/mochiweb-src/priv/skel/src/skel_deps.erl
  19. +62 −0 deps/mochiweb-src/priv/skel/src/skel_sup.erl
  20. +51 −0 deps/mochiweb-src/priv/skel/src/skel_web.erl
  21. +4 −0 deps/mochiweb-src/priv/skel/start-dev.sh
  22. +3 −0  deps/mochiweb-src/priv/skel/start.sh
  23. +40 −0 deps/mochiweb-src/priv/skel/support/include.mk
  24. +94 −0 deps/mochiweb-src/priv/skel/support/run_tests.escript
  25. +27 −0 deps/mochiweb-src/scripts/new_mochiweb.erl
  26. +33 −0 deps/mochiweb-src/src/Makefile
  27. +3 −0  deps/mochiweb-src/src/internal.hrl
  28. +425 −0 deps/mochiweb-src/src/mochifmt.erl
  29. +38 −0 deps/mochiweb-src/src/mochifmt_records.erl
  30. +30 −0 deps/mochiweb-src/src/mochifmt_std.erl
  31. +107 −0 deps/mochiweb-src/src/mochiglobal.erl
  32. +91 −0 deps/mochiweb-src/src/mochihex.erl
  33. +531 −0 deps/mochiweb-src/src/mochijson.erl
  34. +782 −0 deps/mochiweb-src/src/mochijson2.erl
  35. +104 −0 deps/mochiweb-src/src/mochilists.erl
  36. +140 −0 deps/mochiweb-src/src/mochilogfile2.erl
  37. +331 −0 deps/mochiweb-src/src/mochinum.erl
  38. +310 −0 deps/mochiweb-src/src/mochitemp.erl
  39. +316 −0 deps/mochiweb-src/src/mochiutf8.erl
  40. +9 −0 deps/mochiweb-src/src/mochiweb.app.src
  41. +289 −0 deps/mochiweb-src/src/mochiweb.erl
  42. +48 −0 deps/mochiweb-src/src/mochiweb_acceptor.erl
  43. +27 −0 deps/mochiweb-src/src/mochiweb_app.erl
  44. +308 −0 deps/mochiweb-src/src/mochiweb_charref.erl
  45. +309 −0 deps/mochiweb-src/src/mochiweb_cookies.erl
  46. +75 −0 deps/mochiweb-src/src/mochiweb_cover.erl
  47. +38 −0 deps/mochiweb-src/src/mochiweb_echo.erl
  48. +299 −0 deps/mochiweb-src/src/mochiweb_headers.erl
  49. +1,061 −0 deps/mochiweb-src/src/mochiweb_html.erl
  50. +275 −0 deps/mochiweb-src/src/mochiweb_http.erl
  51. +46 −0 deps/mochiweb-src/src/mochiweb_io.erl
  52. +94 −0 deps/mochiweb-src/src/mochiweb_mime.erl
  53. +824 −0 deps/mochiweb-src/src/mochiweb_multipart.erl
  54. +713 −0 deps/mochiweb-src/src/mochiweb_request.erl
  55. +64 −0 deps/mochiweb-src/src/mochiweb_response.erl
  56. +84 −0 deps/mochiweb-src/src/mochiweb_skel.erl
  57. +84 −0 deps/mochiweb-src/src/mochiweb_socket.erl
  58. +271 −0 deps/mochiweb-src/src/mochiweb_socket_server.erl
  59. +41 −0 deps/mochiweb-src/src/mochiweb_sup.erl
  60. +940 −0 deps/mochiweb-src/src/mochiweb_util.erl
  61. +161 −0 deps/mochiweb-src/src/reloader.erl
  62. +41 −0 deps/mochiweb-src/support/include.mk
  63. +86 −0 deps/mochiweb-src/support/make_app.escript
  64. +94 −0 deps/mochiweb-src/support/run_tests.escript
  65. +19 −0 deps/mochiweb-src/support/test-materials/test_ssl_cert.pem
  66. +27 −0 deps/mochiweb-src/support/test-materials/test_ssl_key.pem
  67. +14 −0 ebin/erlcraft.app
  68. +1 −0  erlcraft.config
  69. +33 −0 src/Makefile
  70. +14 −0 src/erlcraft.app
  71. +30 −0 src/erlcraft.erl
  72. +35 −0 src/erlcraft_app.erl
  73. +92 −0 src/erlcraft_deps.erl
  74. +60 −0 src/erlcraft_server.erl
  75. +62 −0 src/erlcraft_sup.erl
  76. +4 −0 start-dev.sh
  77. +3 −0  start.sh
  78. +40 −0 support/include.mk
  79. +94 −0 support/run_tests.escript
1  .gitignore
@@ -0,0 +1 @@
+*.beam
20 Makefile
@@ -0,0 +1,20 @@
+all: ebin/
+ (cd src;$(MAKE) all)
+
+edoc:
+ (cd src;$(MAKE) edoc)
+
+test:
+ (cd src;$(MAKE) test)
+
+clean:
+ (cd src;$(MAKE) clean)
+
+clean_plt:
+ (cd src;$(MAKE) clean_plt)
+
+dialyzer:
+ (cd src;$(MAKE) dialyzer)
+
+ebin/:
+ @mkdir -p ebin
9 deps/mochiweb-src/LICENSE
@@ -0,0 +1,9 @@
+This is the MIT license.
+
+Copyright (c) 2007 Mochi Media, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
20 deps/mochiweb-src/Makefile
@@ -0,0 +1,20 @@
+all: ebin/
+ (cd src;$(MAKE) all)
+
+edoc:
+ (cd src;$(MAKE) edoc)
+
+test:
+ (cd src;$(MAKE) test)
+
+clean:
+ (cd src;$(MAKE) clean)
+
+clean_plt:
+ (cd src;$(MAKE) clean_plt)
+
+dialyzer:
+ (cd src;$(MAKE) dialyzer)
+
+ebin/:
+ @mkdir -p ebin
1  deps/mochiweb-src/README
@@ -0,0 +1 @@
+MochiWeb is an Erlang library for building lightweight HTTP servers.
17 deps/mochiweb-src/ebin/mochiweb.app
@@ -0,0 +1,17 @@
+%% This is generated from src/mochiweb.app.src
+{application,mochiweb,
+ [{vsn,"1.3"},
+ {description,"MochiMedia Web Server"},
+ {modules,[mochifmt,mochifmt_records,mochifmt_std,mochiglobal,
+ mochihex,mochijson,mochijson2,mochilists,mochinum,
+ mochitemp,mochiutf8,mochiweb,mochiweb_app,
+ mochiweb_charref,mochiweb_cookies,mochiweb_cover,
+ mochiweb_echo,mochiweb_headers,mochiweb_html,
+ mochiweb_http,mochiweb_io,mochiweb_mime,
+ mochiweb_multipart,mochiweb_request,mochiweb_response,
+ mochiweb_skel,mochiweb_socket,mochiweb_socket_server,
+ mochiweb_sup,mochiweb_util,reloader]},
+ {registered,[]},
+ {mod,{mochiweb_app,[]}},
+ {env,[]},
+ {applications,[kernel,stdlib,crypto,inets]}]}.
146 deps/mochiweb-src/examples/https/https_store.erl
@@ -0,0 +1,146 @@
+
+%% Trivial web storage app. It's available over both HTTP (port 8442)
+%% and HTTPS (port 8443). You use a PUT to store items, a GET to
+%% retrieve them and DELETE to delete them. The HTTP POST method is
+%% invalid for this application. Example (using HTTPS transport):
+%%
+%% $ curl -k --verbose https://localhost:8443/flintstones
+%% ...
+%% 404 Not Found
+%% ...
+%% $ echo -e "Fred\nWilma\nBarney" |
+%% curl -k --verbose https://localhost:8443/flintstones \
+%% -X PUT -H "Content-Type: text/plain" --data-binary @-
+%% ...
+%% 201 Created
+%% ...
+%% $ curl -k --verbose https://localhost:8443/flintstones
+%% ...
+%% Fred
+%% Wilma
+%% Barney
+%% ...
+%% $ curl -k --verbose https://localhost:8443/flintstones -X DELETE
+%% ...
+%% 200 OK
+%% ...
+%% $ curl -k --verbose https://localhost:8443/flintstones
+%% ...
+%% 404 Not Found
+%% ...
+%%
+%% All submitted data is stored in memory (in an ets table). Could be
+%% useful for ad-hoc testing.
+
+-module(https_store).
+
+-export([start/0,
+ stop/0,
+ dispatch/1,
+ loop/1
+ ]).
+
+-define(HTTP_OPTS, [
+ {loop, {?MODULE, dispatch}},
+ {port, 8442},
+ {name, http_8442}
+ ]).
+
+-define(HTTPS_OPTS, [
+ {loop, {?MODULE, dispatch}},
+ {port, 8443},
+ {name, https_8443},
+ {ssl, true},
+ {ssl_opts, [
+ {certfile, "server_cert.pem"},
+ {keyfile, "server_key.pem"}]}
+ ]).
+
+-record(sd, {http, https}).
+-record(resource, {type, data}).
+
+start() ->
+ {ok, Http} = mochiweb_http:start(?HTTP_OPTS),
+ {ok, Https} = mochiweb_http:start(?HTTPS_OPTS),
+ SD = #sd{http=Http, https=Https},
+ Pid = spawn_link(fun() ->
+ ets:new(?MODULE, [named_table]),
+ loop(SD)
+ end),
+ register(http_store, Pid),
+ ok.
+
+stop() ->
+ http_store ! stop,
+ ok.
+
+dispatch(Req) ->
+ case Req:get(method) of
+ 'GET' ->
+ get_resource(Req);
+ 'PUT' ->
+ put_resource(Req);
+ 'DELETE' ->
+ delete_resource(Req);
+ _ ->
+ Headers = [{"Allow", "GET,PUT,DELETE"}],
+ Req:respond({405, Headers, "405 Method Not Allowed\r\n"})
+ end.
+
+get_resource(Req) ->
+ Path = Req:get(path),
+ case ets:lookup(?MODULE, Path) of
+ [{Path, #resource{type=Type, data=Data}}] ->
+ Req:ok({Type, Data});
+ [] ->
+ Req:respond({404, [], "404 Not Found\r\n"})
+ end.
+
+put_resource(Req) ->
+ ContentType = case Req:get_header_value("Content-Type") of
+ undefined ->
+ "application/octet-stream";
+ S ->
+ S
+ end,
+ Resource = #resource{type=ContentType, data=Req:recv_body()},
+ http_store ! {self(), {put, Req:get(path), Resource}},
+ Pid = whereis(http_store),
+ receive
+ {Pid, created} ->
+ Req:respond({201, [], "201 Created\r\n"});
+ {Pid, updated} ->
+ Req:respond({200, [], "200 OK\r\n"})
+ end.
+
+delete_resource(Req) ->
+ http_store ! {self(), {delete, Req:get(path)}},
+ Pid = whereis(http_store),
+ receive
+ {Pid, ok} ->
+ Req:respond({200, [], "200 OK\r\n"})
+ end.
+
+loop(#sd{http=Http, https=Https} = SD) ->
+ receive
+ stop ->
+ ok = mochiweb_http:stop(Http),
+ ok = mochiweb_http:stop(Https),
+ exit(normal);
+ {From, {put, Key, Val}} ->
+ Exists = ets:member(?MODULE, Key),
+ ets:insert(?MODULE, {Key, Val}),
+ case Exists of
+ true ->
+ From ! {self(), updated};
+ false ->
+ From ! {self(), created}
+ end;
+ {From, {delete, Key}} ->
+ ets:delete(?MODULE, Key),
+ From ! {self(), ok};
+ _ ->
+ ignore
+ end,
+ ?MODULE:loop(SD).
+
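For reference, here is a minimal Erlang-shell sketch that exercises this example. It assumes the crypto, public_key and ssl applications are available on this OTP release and that server_cert.pem and server_key.pem are readable from the working directory:

    1> [application:start(A) || A <- [crypto, public_key, ssl]].
    2> https_store:start().
    ok
    %% ...run the curl commands from the header comment against ports 8442/8443...
    3> https_store:stop().
    ok
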
19 deps/mochiweb-src/examples/https/server_cert.pem
@@ -0,0 +1,19 @@
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIJAJLkNZzERPIUMA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
+BAMTCWxvY2FsaG9zdDAeFw0xMDAzMTgxOTM5MThaFw0yMDAzMTUxOTM5MThaMBQx
+EjAQBgNVBAMTCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAJeUCOZxbmtngF4S5lXckjSDLc+8C+XjMBYBPyy5eKdJY20AQ1s9/hhp3ulI
+8pAvl+xVo4wQ+iBSvOzcy248Q+Xi6+zjceF7UNRgoYPgtJjKhdwcHV3mvFFrS/fp
+9ggoAChaJQWDO1OCfUgTWXImhkw+vcDR11OVMAJ/h73dqzJPI9mfq44PTTHfYtgr
+v4LAQAOlhXIAa2B+a6PlF6sqDqJaW5jLTcERjsBwnRhUGi7JevQzkejujX/vdA+N
+jRBjKH/KLU5h3Q7wUchvIez0PXWVTCnZjpA9aR4m7YV05nKQfxtGd71czYDYk+j8
+hd005jetT4ir7JkAWValBybJVksCAwEAAaN1MHMwHQYDVR0OBBYEFJl9s51SnjJt
+V/wgKWqV5Q6jnv1ZMEQGA1UdIwQ9MDuAFJl9s51SnjJtV/wgKWqV5Q6jnv1ZoRik
+FjAUMRIwEAYDVQQDEwlsb2NhbGhvc3SCCQCS5DWcxETyFDAMBgNVHRMEBTADAQH/
+MA0GCSqGSIb3DQEBBQUAA4IBAQB2ldLeLCc+lxK5i0EZquLamMBJwDIjGpT0JMP9
+b4XQOK2JABIu54BQIZhwcjk3FDJz/uOW5vm8k1kYni8FCjNZAaRZzCUfiUYTbTKL
+Rq9LuIAODyP2dnTqyKaQOOJHvrx9MRZ3XVecXPS0Tib4aO57vCaAbIkmhtYpTWmw
+e3t8CAIDVtgvjR6Se0a1JA4LktR7hBu22tDImvCSJn1nVAaHpani6iPBPPdMuMsP
+TBoeQfj8VpqBUjCStqJGa8ytjDFX73YaxV2mgrtGwPNme1x3YNRR11yTu7tksyMO
+GrmgxNriqYRchBhNEf72AKF0LR1ByKwfbDB9rIsV00HtCgOp
+-----END CERTIFICATE-----
27 deps/mochiweb-src/examples/https/server_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAl5QI5nFua2eAXhLmVdySNIMtz7wL5eMwFgE/LLl4p0ljbQBD
+Wz3+GGne6UjykC+X7FWjjBD6IFK87NzLbjxD5eLr7ONx4XtQ1GChg+C0mMqF3Bwd
+Xea8UWtL9+n2CCgAKFolBYM7U4J9SBNZciaGTD69wNHXU5UwAn+Hvd2rMk8j2Z+r
+jg9NMd9i2Cu/gsBAA6WFcgBrYH5ro+UXqyoOolpbmMtNwRGOwHCdGFQaLsl69DOR
+6O6Nf+90D42NEGMof8otTmHdDvBRyG8h7PQ9dZVMKdmOkD1pHibthXTmcpB/G0Z3
+vVzNgNiT6PyF3TTmN61PiKvsmQBZVqUHJslWSwIDAQABAoIBACI8Ky5xHDFh9RpK
+Rn/KC7OUlTpADKflgizWJ0Cgu2F9L9mkn5HyFHvLHa+u7CootbWJOiEejH/UcBtH
+WyMQtX0snYCpdkUpJv5wvMoebGu+AjHOn8tfm9T/2O6rhwgckLyMb6QpGbMo28b1
+p9QiY17BJPZx7qJQJcHKsAvwDwSThlb7MFmWf42LYWlzybpeYQvwpd+UY4I0WXLu
+/dqJIS9Npq+5Y5vbo2kAEAssb2hSCvhCfHmwFdKmBzlvgOn4qxgZ1iHQgfKI6Z3Y
+J0573ZgOVTuacn+lewtdg5AaHFcl/zIYEr9SNqRoPNGbPliuv6k6N2EYcufWL5lR
+sCmmmHECgYEAxm+7OpepGr++K3+O1e1MUhD7vSPkKJrCzNtUxbOi2NWj3FFUSPRU
+adWhuxvUnZgTcgM1+KuQ0fB2VmxXe9IDcrSFS7PKFGtd2kMs/5mBw4UgDZkOQh+q
+kDiBEV3HYYJWRq0w3NQ/9Iy1jxxdENHtGmG9aqamHxNtuO608wGW2S8CgYEAw4yG
+ZyAic0Q/U9V2OHI0MLxLCzuQz17C2wRT1+hBywNZuil5YeTuIt2I46jro6mJmWI2
+fH4S/geSZzg2RNOIZ28+aK79ab2jWBmMnvFCvaru+odAuser4N9pfAlHZvY0pT+S
+1zYX3f44ygiio+oosabLC5nWI0zB2gG8pwaJlaUCgYEAgr7poRB+ZlaCCY0RYtjo
+mYYBKD02vp5BzdKSB3V1zeLuBWM84pjB6b3Nw0fyDig+X7fH3uHEGN+USRs3hSj6
+BqD01s1OT6fyfbYXNw5A1r+nP+5h26Wbr0zblcKxdQj4qbbBZC8hOJNhqTqqA0Qe
+MmzF7jiBaiZV/Cyj4x1f9BcCgYEAhjL6SeuTuOctTqs/5pz5lDikh6DpUGcH8qaV
+o6aRAHHcMhYkZzpk8yh1uUdD7516APmVyvn6rrsjjhLVq4ZAJjwB6HWvE9JBN0TR
+bILF+sREHUqU8Zn2Ku0nxyfXCKIOnxlx/J/y4TaGYqBqfXNFWiXNUrjQbIlQv/xR
+K48g/MECgYBZdQlYbMSDmfPCC5cxkdjrkmAl0EgV051PWAi4wR+hLxIMRjHBvAk7
+IweobkFvT4TICulgroLkYcSa5eOZGxB/DHqcQCbWj3reFV0VpzmTDoFKG54sqBRl
+vVntGt0pfA40fF17VoS7riAdHF53ippTtsovHEsg5tq5NrBl5uKm2g==
+-----END RSA PRIVATE KEY-----
81 deps/mochiweb-src/examples/keepalive/keepalive.erl
@@ -0,0 +1,81 @@
+-module(keepalive).
+
+%% your web app can push data to clients using a technique called comet long
+%% polling. browsers make a request and your server waits to send a
+%% response until data is available. see wikipedia for a better explanation:
+%% http://en.wikipedia.org/wiki/Comet_(programming)#Ajax_with_long_polling
+%%
+%% since the majority of your http handlers will be idle at any given moment,
+%% you might consider making them hibernate while they wait for more data from
+%% another process. however, since the execution stack is discarded when a
+%% process hibernates, the handler would usually terminate after your response
+%% code runs. this means http keep alives wouldn't work; the handler process
+%% would terminate after each response and close its socket rather than
+%% returning to the big @mochiweb_http@ loop and processing another request.
+%%
+%% however, if mochiweb exposes a continuation that encapsulates the return to
+%% the top of the big loop in @mochiweb_http@, we can call that after the
+%% response. if you do that then control flow returns to the proper place,
+%% and keep alives work like they would if you hadn't hibernated.
+
+-export([ start/1, loop/1
+ ]).
+
+%% internal export (so hibernate can reach it)
+-export([ resume/3
+ ]).
+
+-define(LOOP, {?MODULE, loop}).
+
+start(Options = [{port, _Port}]) ->
+ mochiweb_http:start([{name, ?MODULE}, {loop, ?LOOP} | Options]).
+
+loop(Req) ->
+ Path = Req:get(path),
+ case string:tokens(Path, "/") of
+ ["longpoll" | RestOfPath] ->
+ %% the "reentry" is a continuation -- what @mochiweb_http@
+ %% needs to do to start its loop back at the top
+ Reentry = mochiweb_http:reentry(?LOOP),
+
+ %% here we could send a message to some other process and hope
+ %% to get an interesting message back after a while. for
+ %% simplicity let's just send ourselves a message after a few
+ %% seconds
+ erlang:send_after(2000, self(), "honk honk"),
+
+ %% since we expect to wait for a long time before getting a
+ %% reply, let's hibernate. memory usage will be minimized, so
+ %% we won't be wasting memory just sitting in a @receive@
+ proc_lib:hibernate(?MODULE, resume, [Req, RestOfPath, Reentry]),
+
+ %% we'll never reach this point, and this function @loop/1@
+ %% won't ever return control to @mochiweb_http@. luckily
+ %% @resume/3@ will take care of that.
+ io:format("not gonna happen~n", []);
+
+ _ ->
+ ok(Req, io_lib:format("some other page: ~p", [Path]))
+ end,
+
+ io:format("restarting loop normally in ~p~n", [Path]),
+ ok.
+
+%% this is the function that's called when a message arrives.
+resume(Req, RestOfPath, Reentry) ->
+ receive
+ Msg ->
+ Text = io_lib:format("wake up message: ~p~nrest of path: ~p", [Msg, RestOfPath]),
+ ok(Req, Text)
+ end,
+
+ %% if we didn't call @Reentry@ here then the function would finish and the
+ %% process would exit. calling @Reentry@ takes care of returning control
+ %% to @mochiweb_http@
+ io:format("reentering loop via continuation in ~p~n", [Req:get(path)]),
+ Reentry(Req).
+
+ok(Req, Response) ->
+ Req:ok({_ContentType = "text/plain",
+ _Headers = [],
+ Response}).
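A quick way to see the keep-alive/reentry behaviour described above (a sketch; the port number is arbitrary and mochiweb must already be on the code path):

    %% Start the example listener, then request a long-poll URL; the body
    %% arrives roughly two seconds later, once the delayed "honk honk"
    %% message wakes the hibernated handler.
    1> keepalive:start([{port, 8080}]).
    %% $ curl http://localhost:8080/longpoll/demo
    %% wake up message: "honk honk"
    %% rest of path: ["demo"]
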
20 deps/mochiweb-src/priv/skel/Makefile
@@ -0,0 +1,20 @@
+all: ebin/
+ (cd src;$(MAKE) all)
+
+edoc:
+ (cd src;$(MAKE) edoc)
+
+test:
+ (cd src;$(MAKE) test)
+
+clean:
+ (cd src;$(MAKE) clean)
+
+clean_plt:
+ (cd src;$(MAKE) clean_plt)
+
+dialyzer:
+ (cd src;$(MAKE) dialyzer)
+
+ebin/:
+ @mkdir -p ebin
8 deps/mochiweb-src/priv/skel/priv/www/index.html
@@ -0,0 +1,8 @@
+<html>
+<head>
+<title>It Worked</title>
+</head>
+<body>
+MochiWeb running.
+</body>
+</html>
33 deps/mochiweb-src/priv/skel/src/Makefile
@@ -0,0 +1,33 @@
+include ../support/include.mk
+
+APPLICATION=skel
+DOC_OPTS={dir,\"../doc\"}
+TEST_PLT=$(TEST_DIR)/dialyzer_plt
+
+all: $(EBIN_FILES)
+
+debug:
+ $(MAKE) DEBUG=-DDEBUG
+
+clean:
+ rm -rf $(EBIN_FILES)
+
+edoc:
+ $(ERL) -noshell -pa ../ebin \
+ -eval "edoc:application($(APPLICATION), \".\", [$(DOC_OPTS)])" \
+ -s init stop
+
+test: $(EBIN_FILES)
+ mkdir -p $(TEST_DIR);
+ @../support/run_tests.escript $(EBIN_DIR) | tee $(TEST_DIR)/test.log
+
+$(TEST_PLT):
+ mkdir -p $(TEST_DIR)
+ cp $(DIALYZER_PLT) $(TEST_PLT)
+ dialyzer --plt $(TEST_PLT) --add_to_plt -r ../deps/*/ebin
+
+clean_plt:
+ rm $(TEST_PLT)
+
+dialyzer: $(TEST_PLT)
+ dialyzer --src --plt $(TEST_PLT) -DNOTEST -DDIALYZER -c ../src | tee $(TEST_DIR)/dialyzer.log
14 deps/mochiweb-src/priv/skel/src/skel.app
@@ -0,0 +1,14 @@
+{application, skel,
+ [{description, "skel"},
+ {vsn, "0.01"},
+ {modules, [
+ skel,
+ skel_app,
+ skel_sup,
+ skel_web,
+ skel_deps
+ ]},
+ {registered, []},
+ {mod, {skel_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib, crypto]}]}.
30 deps/mochiweb-src/priv/skel/src/skel.erl
@@ -0,0 +1,30 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc TEMPLATE.
+
+-module(skel).
+-author('author <author@example.com>').
+-export([start/0, stop/0]).
+
+ensure_started(App) ->
+ case application:start(App) of
+ ok ->
+ ok;
+ {error, {already_started, App}} ->
+ ok
+ end.
+
+%% @spec start() -> ok
+%% @doc Start the skel server.
+start() ->
+ skel_deps:ensure(),
+ ensure_started(crypto),
+ application:start(skel).
+
+%% @spec stop() -> ok
+%% @doc Stop the skel server.
+stop() ->
+ Res = application:stop(skel),
+ application:stop(crypto),
+ Res.
1  deps/mochiweb-src/priv/skel/src/skel.hrl
@@ -0,0 +1 @@
+
30 deps/mochiweb-src/priv/skel/src/skel_app.erl
@@ -0,0 +1,30 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Callbacks for the skel application.
+
+-module(skel_app).
+-author('author <author@example.com>').
+
+-behaviour(application).
+-export([start/2, stop/1]).
+
+
+%% @spec start(_Type, _StartArgs) -> ServerRet
+%% @doc application start callback for skel.
+start(_Type, _StartArgs) ->
+ skel_deps:ensure(),
+ skel_sup:start_link().
+
+%% @spec stop(_State) -> ServerRet
+%% @doc application stop callback for skel.
+stop(_State) ->
+ ok.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
92 deps/mochiweb-src/priv/skel/src/skel_deps.erl
@@ -0,0 +1,92 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Ensure that the relatively-installed dependencies are on the code
+%% loading path, and locate resources relative
+%% to this application's path.
+
+-module(skel_deps).
+-author('author <author@example.com>').
+
+-export([ensure/0, ensure/1]).
+-export([get_base_dir/0, get_base_dir/1]).
+-export([local_path/1, local_path/2]).
+-export([deps_on_path/0, new_siblings/1]).
+
+%% @spec deps_on_path() -> [ProjNameAndVers]
+%% @doc List of project dependencies on the path.
+deps_on_path() ->
+ F = fun (X, Acc) ->
+ ProjDir = filename:dirname(X),
+ case {filename:basename(X),
+ filename:basename(filename:dirname(ProjDir))} of
+ {"ebin", "deps"} ->
+ [filename:basename(ProjDir) | Acc];
+ _ ->
+ Acc
+ end
+ end,
+ ordsets:from_list(lists:foldl(F, [], code:get_path())).
+
+%% @spec new_siblings(Module) -> [Dir]
+%% @doc Find new siblings paths relative to Module that aren't already on the
+%% code path.
+new_siblings(Module) ->
+ Existing = deps_on_path(),
+ SiblingEbin = filelib:wildcard(local_path(["deps", "*", "ebin"], Module)),
+ Siblings = [filename:dirname(X) || X <- SiblingEbin,
+ ordsets:is_element(
+ filename:basename(filename:dirname(X)),
+ Existing) =:= false],
+ lists:filter(fun filelib:is_dir/1,
+ lists:append([[filename:join([X, "ebin"]),
+ filename:join([X, "include"])] ||
+ X <- Siblings])).
+
+
+%% @spec ensure(Module) -> ok
+%% @doc Ensure that all ebin and include paths for dependencies
+%% of the application for Module are on the code path.
+ensure(Module) ->
+ code:add_paths(new_siblings(Module)),
+ code:clash(),
+ ok.
+
+%% @spec ensure() -> ok
+%% @doc Ensure that the ebin and include paths for dependencies of
+%% this application are on the code path. Equivalent to
+%% ensure(?Module).
+ensure() ->
+ ensure(?MODULE).
+
+%% @spec get_base_dir(Module) -> string()
+%% @doc Return the application directory for Module. It assumes Module is in
+%% a standard OTP layout application in the ebin or src directory.
+get_base_dir(Module) ->
+ {file, Here} = code:is_loaded(Module),
+ filename:dirname(filename:dirname(Here)).
+
+%% @spec get_base_dir() -> string()
+%% @doc Return the application directory for this application. Equivalent to
+%% get_base_dir(?MODULE).
+get_base_dir() ->
+ get_base_dir(?MODULE).
+
+%% @spec local_path([string()], Module) -> string()
+%% @doc Return an application-relative directory from Module's application.
+local_path(Components, Module) ->
+ filename:join([get_base_dir(Module) | Components]).
+
+%% @spec local_path(Components) -> string()
+%% @doc Return an application-relative directory for this application.
+%% Equivalent to local_path(Components, ?MODULE).
+local_path(Components) ->
+ local_path(Components, ?MODULE).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
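In a generated project these helpers are mostly called indirectly (skel_app and skel_sup use them), but they can be used directly as well; a sketch:

    %% Put every deps/*/ebin and deps/*/include sibling on the code path,
    %% then resolve a path relative to this application's directory, as
    %% skel_sup does for its docroot.
    ok = skel_deps:ensure(),
    WwwDir = skel_deps:local_path(["priv", "www"]).
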
62 deps/mochiweb-src/priv/skel/src/skel_sup.erl
@@ -0,0 +1,62 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Supervisor for the skel application.
+
+-module(skel_sup).
+-author('author <author@example.com>').
+
+-behaviour(supervisor).
+
+%% External exports
+-export([start_link/0, upgrade/0]).
+
+%% supervisor callbacks
+-export([init/1]).
+
+%% @spec start_link() -> ServerRet
+%% @doc API for starting the supervisor.
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%% @spec upgrade() -> ok
+%% @doc Add processes if necessary.
+upgrade() ->
+ {ok, {_, Specs}} = init([]),
+
+ Old = sets:from_list(
+ [Name || {Name, _, _, _} <- supervisor:which_children(?MODULE)]),
+ New = sets:from_list([Name || {Name, _, _, _, _, _} <- Specs]),
+ Kill = sets:subtract(Old, New),
+
+ sets:fold(fun (Id, ok) ->
+ supervisor:terminate_child(?MODULE, Id),
+ supervisor:delete_child(?MODULE, Id),
+ ok
+ end, ok, Kill),
+
+ [supervisor:start_child(?MODULE, Spec) || Spec <- Specs],
+ ok.
+
+%% @spec init([]) -> SupervisorTree
+%% @doc supervisor callback.
+init([]) ->
+ Ip = case os:getenv("MOCHIWEB_IP") of false -> "0.0.0.0"; Any -> Any end,
+ WebConfig = [
+ {ip, Ip},
+ {port, 8000},
+ {docroot, skel_deps:local_path(["priv", "www"])}],
+ Web = {skel_web,
+ {skel_web, start, [WebConfig]},
+ permanent, 5000, worker, dynamic},
+
+ Processes = [Web],
+ {ok, {{one_for_one, 10, 10}, Processes}}.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
51 deps/mochiweb-src/priv/skel/src/skel_web.erl
@@ -0,0 +1,51 @@
+%% @author author <author@example.com>
+%% @copyright YYYY author.
+
+%% @doc Web server for skel.
+
+-module(skel_web).
+-author('author <author@example.com>').
+
+-export([start/1, stop/0, loop/2]).
+
+%% External API
+
+start(Options) ->
+ {DocRoot, Options1} = get_option(docroot, Options),
+ Loop = fun (Req) ->
+ ?MODULE:loop(Req, DocRoot)
+ end,
+ mochiweb_http:start([{name, ?MODULE}, {loop, Loop} | Options1]).
+
+stop() ->
+ mochiweb_http:stop(?MODULE).
+
+loop(Req, DocRoot) ->
+ "/" ++ Path = Req:get(path),
+ case Req:get(method) of
+ Method when Method =:= 'GET'; Method =:= 'HEAD' ->
+ case Path of
+ _ ->
+ Req:serve_file(Path, DocRoot)
+ end;
+ 'POST' ->
+ case Path of
+ _ ->
+ Req:not_found()
+ end;
+ _ ->
+ Req:respond({501, [], []})
+ end.
+
+%% Internal API
+
+get_option(Option, Options) ->
+ {proplists:get_value(Option, Options), proplists:delete(Option, Options)}.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
4 deps/mochiweb-src/priv/skel/start-dev.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+cd `dirname $0`
+make
+exec erl -pa $PWD/ebin $PWD/deps/*/ebin -boot start_sasl -s reloader -s skel
3  deps/mochiweb-src/priv/skel/start.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+cd `dirname $0`
+exec erl -pa $PWD/ebin $PWD/deps/*/ebin -boot start_sasl -s skel
40 deps/mochiweb-src/priv/skel/support/include.mk
@@ -0,0 +1,40 @@
+## -*- makefile -*-
+
+######################################################################
+## Erlang
+
+ERL := erl
+ERLC := $(ERL)c
+
+INCLUDE_DIRS := ../include $(wildcard ../deps/*/include)
+EBIN_DIRS := $(wildcard ../deps/*/ebin)
+ERLC_FLAGS := -W $(INCLUDE_DIRS:../%=-I ../%) $(EBIN_DIRS:%=-pa %)
+
+ifndef no_debug_info
+ ERLC_FLAGS += +debug_info
+endif
+
+ifdef debug
+ ERLC_FLAGS += -Ddebug
+endif
+
+EBIN_DIR := ../ebin
+TEST_DIR := ../_test
+EMULATOR := beam
+
+ERL_SOURCES := $(wildcard *.erl)
+ERL_HEADERS := $(wildcard *.hrl) $(wildcard ../include/*.hrl)
+ERL_OBJECTS := $(ERL_SOURCES:%.erl=$(EBIN_DIR)/%.$(EMULATOR))
+ERL_OBJECTS_LOCAL := $(ERL_SOURCES:%.erl=./%.$(EMULATOR))
+APP_FILES := $(wildcard *.app)
+EBIN_FILES = $(ERL_OBJECTS) $(APP_FILES:%.app=../ebin/%.app)
+MODULES = $(ERL_SOURCES:%.erl=%)
+
+../ebin/%.app: %.app
+ cp $< $@
+
+$(EBIN_DIR)/%.$(EMULATOR): %.erl
+ $(ERLC) $(ERLC_FLAGS) -o $(EBIN_DIR) $<
+
+./%.$(EMULATOR): %.erl
+ $(ERLC) $(ERLC_FLAGS) -o . $<
94 deps/mochiweb-src/priv/skel/support/run_tests.escript
@@ -0,0 +1,94 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -name mochiweb__test@127.0.0.1
+main([Ebin]) ->
+ code:add_path(Ebin),
+ code:add_paths(filelib:wildcard("../deps/*/ebin", Ebin)),
+ code:add_paths(filelib:wildcard("../deps/*/deps/*/ebin", Ebin)),
+
+ ModuleNames = [hd(string:tokens(M, "."))
+ || "../src/" ++ M <- filelib:wildcard("../src/*.erl")],
+
+ {ok, NonTestRe} = re:compile("_tests$"),
+ Modules = [list_to_atom(M) ||
+ M <- lists:filter(
+ fun(M) ->
+ nomatch == re:run(M, NonTestRe)
+ end,
+ ModuleNames)],
+
+
+ crypto:start(),
+ start_cover(Modules),
+ eunit:test(Modules, [verbose,{report,{eunit_surefire,[{dir,"../_test"}]}}]),
+ analyze_cover(Modules);
+main(_) ->
+ io:format("usage: run_tests.escript EBIN_DIR~n"),
+ halt(1).
+
+start_cover(Modules) ->
+ {ok, _Cover} = cover:start(),
+ io:format("Cover compiling...~n"),
+ Compiled = [ M || {ok, M} <- [ cover:compile(
+ M,
+ [{i, "include"}
+ ])
+ || M <- Modules ] ],
+ case length(Modules) == length(Compiled) of
+ true -> ok;
+ false ->
+ io:format("Warning: the following modules were not"
+ " cover-compiled:~n ~p~n", [Compiled])
+ end.
+
+analyze_cover(Modules) ->
+ io:format("Analyzing cover...~n"),
+ CoverBase = filename:join(["..", "_test", "cover"]),
+ ok = filelib:ensure_dir(filename:join([CoverBase, "fake"])),
+ Coverages = lists:foldl(
+ fun(M, Acc) ->
+ [analyze_module(CoverBase, M)|Acc]
+ end,
+ [], Modules),
+ IndexFilename = filename:join([CoverBase, "index.html"]),
+ {ok, Index} = file:open(IndexFilename, [write]),
+ {LineTotal, CoverTotal} =
+ lists:foldl(fun({_,_,Lines,Covered}, {LineAcc, CovAcc}) ->
+ {LineAcc+Lines, CovAcc+Covered}
+ end, {0,0}, Coverages),
+ file:write(Index,
+ "<html><head><title>Coverage</title></head>\n"
+ "<body><h1>Coverage</h1><ul>\n"),
+ file:write(Index,
+ io_lib:format("<h2>Total: ~.2f%</h2>\n",
+ [percentage(CoverTotal, LineTotal)])),
+ [ file:write(Index,
+ io_lib:format(
+ "<li><a href=\"~s\">~p</a>: ~.2f%</li>~n",
+ [Filename, Module, percentage(Covered, Lines)]))
+ || {Filename, Module, Lines, Covered} <- Coverages ],
+ file:write(Index,"</ul></body></html>"),
+ file:close(Index),
+ io:format("Cover analysis in ~s~n", [IndexFilename]).
+
+analyze_module(CoverBase, Module) ->
+ {ok, Filename} =
+ cover:analyze_to_file(
+ Module,
+ filename:join(CoverBase, atom_to_list(Module)++".COVER.html"),
+ [html]),
+ Lines = count_lines(Filename, "[[:digit:]]\.\.|"),
+ Covered = count_lines(Filename, "[[:space:]]0\.\.|"),
+ {filename:basename(Filename), Module, Lines, Lines-Covered}.
+
+count_lines(Filename, Pattern) ->
+ {ok, [Lines],_} = io_lib:fread(
+ "~d",
+ os:cmd(io_lib:format("grep -e \"~s\" ~s | wc -l",
+ [Pattern, Filename]))),
+ Lines.
+
+percentage(_, 0) -> 1000.0;
+percentage(Part, Total) ->
+ (Part/Total)*100.
+
27 deps/mochiweb-src/scripts/new_mochiweb.erl
@@ -0,0 +1,27 @@
+#!/usr/bin/env escript
+%% -*- mode: erlang -*-
+-export([main/1]).
+
+%% External API
+
+main([Name]) ->
+ main([Name, "."]);
+main([Name, Dest]) ->
+ ensure(),
+ DestDir = filename:absname(Dest),
+ ok = mochiweb_skel:skelcopy(DestDir, Name);
+main(_) ->
+ usage().
+
+%% Internal API
+
+ensure() ->
+ code:add_patha(filename:join(filename:dirname(escript:script_name()),
+ "../ebin")).
+
+usage() ->
+ io:format("usage: ~s name [destdir]~n",
+ [filename:basename(escript:script_name())]),
+ halt(1).
+
+
33 deps/mochiweb-src/src/Makefile
@@ -0,0 +1,33 @@
+include ../support/include.mk
+
+APPLICATION=mochiweb
+DOC_OPTS={dir,\"../doc\"}
+TEST_PLT=$(TEST_DIR)/dialyzer_plt
+
+all: $(EBIN_FILES)
+
+debug:
+ $(MAKE) DEBUG=-DDEBUG
+
+clean:
+ rm -rf $(EBIN_FILES)
+
+edoc:
+ $(ERL) -noshell -pa ../ebin \
+ -eval "edoc:application($(APPLICATION), \".\", [$(DOC_OPTS)])" \
+ -s init stop
+
+test: $(EBIN_FILES)
+ mkdir -p $(TEST_DIR);
+ @../support/run_tests.escript $(EBIN_DIR) | tee $(TEST_DIR)/test.log
+
+$(TEST_PLT):
+ mkdir -p $(TEST_DIR)
+ cp $(DIALYZER_PLT) $(TEST_PLT)
+ dialyzer --plt $(TEST_PLT) --add_to_plt
+
+clean_plt:
+ rm $(TEST_PLT)
+
+dialyzer: $(TEST_PLT)
+ dialyzer --src --plt $(TEST_PLT) -DNOTEST -DDIALYZER -c ../src | tee $(TEST_DIR)/dialyzer.log
3  deps/mochiweb-src/src/internal.hrl
@@ -0,0 +1,3 @@
+
+-define(RECBUF_SIZE, 8192).
+
425 deps/mochiweb-src/src/mochifmt.erl
@@ -0,0 +1,425 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc String Formatting for Erlang, inspired by Python 2.6
+%% (<a href="http://www.python.org/dev/peps/pep-3101/">PEP 3101</a>).
+%%
+-module(mochifmt).
+-author('bob@mochimedia.com').
+-export([format/2, format_field/2, convert_field/2, get_value/2, get_field/2]).
+-export([tokenize/1, format/3, get_field/3, format_field/3]).
+-export([bformat/2, bformat/3]).
+-export([f/2, f/3]).
+
+-record(conversion, {length, precision, ctype, align, fill_char, sign}).
+
+%% @spec tokenize(S::string()) -> tokens()
+%% @doc Tokenize a format string into mochifmt's internal format.
+tokenize(S) ->
+ {?MODULE, tokenize(S, "", [])}.
+
+%% @spec convert_field(Arg, Conversion::conversion()) -> term()
+%% @doc Process Arg according to the given explicit conversion specifier.
+convert_field(Arg, "") ->
+ Arg;
+convert_field(Arg, "r") ->
+ repr(Arg);
+convert_field(Arg, "s") ->
+ str(Arg).
+
+%% @spec get_value(Key::string(), Args::args()) -> term()
+%% @doc Get the Key from Args. If Args is a tuple then convert Key to
+%% an integer and get element(1 + Key, Args). If Args is a list and Key
+%% can be parsed as an integer then use lists:nth(1 + Key, Args),
+%% otherwise try and look for Key in Args as a proplist, converting
+%% Key to an atom or binary if necessary.
+get_value(Key, Args) when is_tuple(Args) ->
+ element(1 + list_to_integer(Key), Args);
+get_value(Key, Args) when is_list(Args) ->
+ try lists:nth(1 + list_to_integer(Key), Args)
+ catch error:_ ->
+ {_K, V} = proplist_lookup(Key, Args),
+ V
+ end.
+
+%% @spec get_field(Key::string(), Args) -> term()
+%% @doc Consecutively call get_value/2 on parts of Key delimited by ".",
+%% replacing Args with the result of the previous get_value. This
+%% is used to implement formats such as {0.0}.
+get_field(Key, Args) ->
+ get_field(Key, Args, ?MODULE).
+
+%% @spec get_field(Key::string(), Args, Module) -> term()
+%% @doc Consecutively call Module:get_value/2 on parts of Key delimited by ".",
+%% replacing Args with the result of the previous get_value. This
+%% is used to implement formats such as {0.0}.
+get_field(Key, Args, Module) ->
+ {Name, Next} = lists:splitwith(fun (C) -> C =/= $. end, Key),
+ Res = try Module:get_value(Name, Args)
+ catch error:undef -> get_value(Name, Args) end,
+ case Next of
+ "" ->
+ Res;
+ "." ++ S1 ->
+ get_field(S1, Res, Module)
+ end.
+
+%% @spec format(Format::string(), Args) -> iolist()
+%% @doc Format Args with Format.
+format(Format, Args) ->
+ format(Format, Args, ?MODULE).
+
+%% @spec format(Format::string(), Args, Module) -> iolist()
+%% @doc Format Args with Format using Module.
+format({?MODULE, Parts}, Args, Module) ->
+ format2(Parts, Args, Module, []);
+format(S, Args, Module) ->
+ format(tokenize(S), Args, Module).
+
+%% @spec format_field(Arg, Format) -> iolist()
+%% @doc Format Arg with Format.
+format_field(Arg, Format) ->
+ format_field(Arg, Format, ?MODULE).
+
+%% @spec format_field(Arg, Format, _Module) -> iolist()
+%% @doc Format Arg with Format.
+format_field(Arg, Format, _Module) ->
+ F = default_ctype(Arg, parse_std_conversion(Format)),
+ fix_padding(fix_sign(convert2(Arg, F), F), F).
+
+%% @spec f(Format::string(), Args) -> string()
+%% @doc Format Args with Format and return a string().
+f(Format, Args) ->
+ f(Format, Args, ?MODULE).
+
+%% @spec f(Format::string(), Args, Module) -> string()
+%% @doc Format Args with Format using Module and return a string().
+f(Format, Args, Module) ->
+ case lists:member(${, Format) of
+ true ->
+ binary_to_list(bformat(Format, Args, Module));
+ false ->
+ Format
+ end.
+
+%% @spec bformat(Format::string(), Args) -> binary()
+%% @doc Format Args with Format and return a binary().
+bformat(Format, Args) ->
+ iolist_to_binary(format(Format, Args)).
+
+%% @spec bformat(Format::string(), Args, Module) -> binary()
+%% @doc Format Args with Format using Module and return a binary().
+bformat(Format, Args, Module) ->
+ iolist_to_binary(format(Format, Args, Module)).
+
+%% Internal API
+
+add_raw("", Acc) ->
+ Acc;
+add_raw(S, Acc) ->
+ [{raw, lists:reverse(S)} | Acc].
+
+tokenize([], S, Acc) ->
+ lists:reverse(add_raw(S, Acc));
+tokenize("{{" ++ Rest, S, Acc) ->
+ tokenize(Rest, "{" ++ S, Acc);
+tokenize("{" ++ Rest, S, Acc) ->
+ {Format, Rest1} = tokenize_format(Rest),
+ tokenize(Rest1, "", [{format, make_format(Format)} | add_raw(S, Acc)]);
+tokenize("}}" ++ Rest, S, Acc) ->
+ tokenize(Rest, "}" ++ S, Acc);
+tokenize([C | Rest], S, Acc) ->
+ tokenize(Rest, [C | S], Acc).
+
+tokenize_format(S) ->
+ tokenize_format(S, 1, []).
+
+tokenize_format("}" ++ Rest, 1, Acc) ->
+ {lists:reverse(Acc), Rest};
+tokenize_format("}" ++ Rest, N, Acc) ->
+ tokenize_format(Rest, N - 1, "}" ++ Acc);
+tokenize_format("{" ++ Rest, N, Acc) ->
+ tokenize_format(Rest, 1 + N, "{" ++ Acc);
+tokenize_format([C | Rest], N, Acc) ->
+ tokenize_format(Rest, N, [C | Acc]).
+
+make_format(S) ->
+ {Name0, Spec} = case lists:splitwith(fun (C) -> C =/= $: end, S) of
+ {_, ""} ->
+ {S, ""};
+ {SN, ":" ++ SS} ->
+ {SN, SS}
+ end,
+ {Name, Transform} = case lists:splitwith(fun (C) -> C =/= $! end, Name0) of
+ {_, ""} ->
+ {Name0, ""};
+ {TN, "!" ++ TT} ->
+ {TN, TT}
+ end,
+ {Name, Transform, Spec}.
+
+proplist_lookup(S, P) ->
+ A = try list_to_existing_atom(S)
+ catch error:_ -> make_ref() end,
+ B = try list_to_binary(S)
+ catch error:_ -> make_ref() end,
+ proplist_lookup2({S, A, B}, P).
+
+proplist_lookup2({KS, KA, KB}, [{K, V} | _])
+ when KS =:= K orelse KA =:= K orelse KB =:= K ->
+ {K, V};
+proplist_lookup2(Keys, [_ | Rest]) ->
+ proplist_lookup2(Keys, Rest).
+
+format2([], _Args, _Module, Acc) ->
+ lists:reverse(Acc);
+format2([{raw, S} | Rest], Args, Module, Acc) ->
+ format2(Rest, Args, Module, [S | Acc]);
+format2([{format, {Key, Convert, Format0}} | Rest], Args, Module, Acc) ->
+ Format = f(Format0, Args, Module),
+ V = case Module of
+ ?MODULE ->
+ V0 = get_field(Key, Args),
+ V1 = convert_field(V0, Convert),
+ format_field(V1, Format);
+ _ ->
+ V0 = try Module:get_field(Key, Args)
+ catch error:undef -> get_field(Key, Args, Module) end,
+ V1 = try Module:convert_field(V0, Convert)
+ catch error:undef -> convert_field(V0, Convert) end,
+ try Module:format_field(V1, Format)
+ catch error:undef -> format_field(V1, Format, Module) end
+ end,
+ format2(Rest, Args, Module, [V | Acc]).
+
+default_ctype(_Arg, C=#conversion{ctype=N}) when N =/= undefined ->
+ C;
+default_ctype(Arg, C) when is_integer(Arg) ->
+ C#conversion{ctype=decimal};
+default_ctype(Arg, C) when is_float(Arg) ->
+ C#conversion{ctype=general};
+default_ctype(_Arg, C) ->
+ C#conversion{ctype=string}.
+
+fix_padding(Arg, #conversion{length=undefined}) ->
+ Arg;
+fix_padding(Arg, F=#conversion{length=Length, fill_char=Fill0, align=Align0,
+ ctype=Type}) ->
+ Padding = Length - iolist_size(Arg),
+ Fill = case Fill0 of
+ undefined ->
+ $\s;
+ _ ->
+ Fill0
+ end,
+ Align = case Align0 of
+ undefined ->
+ case Type of
+ string ->
+ left;
+ _ ->
+ right
+ end;
+ _ ->
+ Align0
+ end,
+ case Padding > 0 of
+ true ->
+ do_padding(Arg, Padding, Fill, Align, F);
+ false ->
+ Arg
+ end.
+
+do_padding(Arg, Padding, Fill, right, _F) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding(Arg, Padding, Fill, center, _F) ->
+ LPadding = lists:duplicate(Padding div 2, Fill),
+ RPadding = case Padding band 1 of
+ 1 ->
+ [Fill | LPadding];
+ _ ->
+ LPadding
+ end,
+ [LPadding, Arg, RPadding];
+do_padding([$- | Arg], Padding, Fill, sign_right, _F) ->
+ [[$- | lists:duplicate(Padding, Fill)], Arg];
+do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=$-}) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding([S | Arg], Padding, Fill, sign_right, #conversion{sign=S}) ->
+ [[S | lists:duplicate(Padding, Fill)], Arg];
+do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=undefined}) ->
+ [lists:duplicate(Padding, Fill), Arg];
+do_padding(Arg, Padding, Fill, left, _F) ->
+ [Arg | lists:duplicate(Padding, Fill)].
+
+fix_sign(Arg, #conversion{sign=$+}) when Arg >= 0 ->
+ [$+, Arg];
+fix_sign(Arg, #conversion{sign=$\s}) when Arg >= 0 ->
+ [$\s, Arg];
+fix_sign(Arg, _F) ->
+ Arg.
+
+ctype($\%) -> percent;
+ctype($s) -> string;
+ctype($b) -> bin;
+ctype($o) -> oct;
+ctype($X) -> upper_hex;
+ctype($x) -> hex;
+ctype($c) -> char;
+ctype($d) -> decimal;
+ctype($g) -> general;
+ctype($f) -> fixed;
+ctype($e) -> exp.
+
+align($<) -> left;
+align($>) -> right;
+align($^) -> center;
+align($=) -> sign_right.
+
+convert2(Arg, F=#conversion{ctype=percent}) ->
+ [convert2(100.0 * Arg, F#conversion{ctype=fixed}), $\%];
+convert2(Arg, #conversion{ctype=string}) ->
+ str(Arg);
+convert2(Arg, #conversion{ctype=bin}) ->
+ erlang:integer_to_list(Arg, 2);
+convert2(Arg, #conversion{ctype=oct}) ->
+ erlang:integer_to_list(Arg, 8);
+convert2(Arg, #conversion{ctype=upper_hex}) ->
+ erlang:integer_to_list(Arg, 16);
+convert2(Arg, #conversion{ctype=hex}) ->
+ string:to_lower(erlang:integer_to_list(Arg, 16));
+convert2(Arg, #conversion{ctype=char}) when Arg < 16#80 ->
+ [Arg];
+convert2(Arg, #conversion{ctype=char}) ->
+ xmerl_ucs:to_utf8(Arg);
+convert2(Arg, #conversion{ctype=decimal}) ->
+ integer_to_list(Arg);
+convert2(Arg, #conversion{ctype=general, precision=undefined}) ->
+ try mochinum:digits(Arg)
+ catch error:undef -> io_lib:format("~g", [Arg]) end;
+convert2(Arg, #conversion{ctype=fixed, precision=undefined}) ->
+ io_lib:format("~f", [Arg]);
+convert2(Arg, #conversion{ctype=exp, precision=undefined}) ->
+ io_lib:format("~e", [Arg]);
+convert2(Arg, #conversion{ctype=general, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "g", [Arg]);
+convert2(Arg, #conversion{ctype=fixed, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "f", [Arg]);
+convert2(Arg, #conversion{ctype=exp, precision=P}) ->
+ io_lib:format("~." ++ integer_to_list(P) ++ "e", [Arg]).
+
+str(A) when is_atom(A) ->
+ atom_to_list(A);
+str(I) when is_integer(I) ->
+ integer_to_list(I);
+str(F) when is_float(F) ->
+ try mochinum:digits(F)
+ catch error:undef -> io_lib:format("~g", [F]) end;
+str(L) when is_list(L) ->
+ L;
+str(B) when is_binary(B) ->
+ B;
+str(P) ->
+ repr(P).
+
+repr(P) when is_float(P) ->
+ try mochinum:digits(P)
+ catch error:undef -> float_to_list(P) end;
+repr(P) ->
+ io_lib:format("~p", [P]).
+
+parse_std_conversion(S) ->
+ parse_std_conversion(S, #conversion{}).
+
+parse_std_conversion("", Acc) ->
+ Acc;
+parse_std_conversion([Fill, Align | Spec], Acc)
+ when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
+ parse_std_conversion(Spec, Acc#conversion{fill_char=Fill,
+ align=align(Align)});
+parse_std_conversion([Align | Spec], Acc)
+ when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
+ parse_std_conversion(Spec, Acc#conversion{align=align(Align)});
+parse_std_conversion([Sign | Spec], Acc)
+ when Sign =:= $+ orelse Sign =:= $- orelse Sign =:= $\s ->
+ parse_std_conversion(Spec, Acc#conversion{sign=Sign});
+parse_std_conversion("0" ++ Spec, Acc) ->
+ Align = case Acc#conversion.align of
+ undefined ->
+ sign_right;
+ A ->
+ A
+ end,
+ parse_std_conversion(Spec, Acc#conversion{fill_char=$0, align=Align});
+parse_std_conversion(Spec=[D|_], Acc) when D >= $0 andalso D =< $9 ->
+ {W, Spec1} = lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec),
+ parse_std_conversion(Spec1, Acc#conversion{length=list_to_integer(W)});
+parse_std_conversion([$. | Spec], Acc) ->
+ case lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec) of
+ {"", Spec1} ->
+ parse_std_conversion(Spec1, Acc);
+ {P, Spec1} ->
+ parse_std_conversion(Spec1,
+ Acc#conversion{precision=list_to_integer(P)})
+ end;
+parse_std_conversion([Type], Acc) ->
+ parse_std_conversion("", Acc#conversion{ctype=ctype(Type)}).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+tokenize_test() ->
+ {?MODULE, [{raw, "ABC"}]} = tokenize("ABC"),
+ {?MODULE, [{format, {"0", "", ""}}]} = tokenize("{0}"),
+ {?MODULE, [{raw, "ABC"}, {format, {"1", "", ""}}, {raw, "DEF"}]} =
+ tokenize("ABC{1}DEF"),
+ ok.
+
+format_test() ->
+ <<" -4">> = bformat("{0:4}", [-4]),
+ <<" 4">> = bformat("{0:4}", [4]),
+ <<" 4">> = bformat("{0:{0}}", [4]),
+ <<"4 ">> = bformat("{0:4}", ["4"]),
+ <<"4 ">> = bformat("{0:{0}}", ["4"]),
+ <<"1.2yoDEF">> = bformat("{2}{0}{1}{3}", {yo, "DE", 1.2, <<"F">>}),
+ <<"cafebabe">> = bformat("{0:x}", {16#cafebabe}),
+ <<"CAFEBABE">> = bformat("{0:X}", {16#cafebabe}),
+ <<"CAFEBABE">> = bformat("{0:X}", {16#cafebabe}),
+ <<"755">> = bformat("{0:o}", {8#755}),
+ <<"a">> = bformat("{0:c}", {97}),
+ %% Horizontal ellipsis
+ <<226, 128, 166>> = bformat("{0:c}", {16#2026}),
+ <<"11">> = bformat("{0:b}", {3}),
+ <<"11">> = bformat("{0:b}", [3]),
+ <<"11">> = bformat("{three:b}", [{three, 3}]),
+ <<"11">> = bformat("{three:b}", [{"three", 3}]),
+ <<"11">> = bformat("{three:b}", [{<<"three">>, 3}]),
+ <<"\"foo\"">> = bformat("{0!r}", {"foo"}),
+ <<"2008-5-4">> = bformat("{0.0}-{0.1}-{0.2}", {{2008,5,4}}),
+ <<"2008-05-04">> = bformat("{0.0:04}-{0.1:02}-{0.2:02}", {{2008,5,4}}),
+ <<"foo6bar-6">> = bformat("foo{1}{0}-{1}", {bar, 6}),
+ <<"-'atom test'-">> = bformat("-{arg!r}-", [{arg, 'atom test'}]),
+ <<"2008-05-04">> = bformat("{0.0:0{1.0}}-{0.1:0{1.1}}-{0.2:0{1.2}}",
+ {{2008,5,4}, {4, 2, 2}}),
+ ok.
+
+std_test() ->
+ M = mochifmt_std:new(),
+ <<"01">> = bformat("{0}{1}", [0, 1], M),
+ ok.
+
+records_test() ->
+ M = mochifmt_records:new([{conversion, record_info(fields, conversion)}]),
+ R = #conversion{length=long, precision=hard, sign=peace},
+ long = M:get_value("length", R),
+ hard = M:get_value("precision", R),
+ peace = M:get_value("sign", R),
+ <<"long hard">> = bformat("{length} {precision}", R, M),
+ <<"long hard">> = bformat("{0.length} {0.precision}", [R], M),
+ ok.
+
+-endif.
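Beyond the unit tests above, typical shell usage looks like this (a sketch; format/2 returns an iolist, bformat/2 a binary, and the proplist keys here are arbitrary):

    1> lists:flatten(mochifmt:format("{0} + {1} = {2}", {1, 2, 3})).
    "1 + 2 = 3"
    2> mochifmt:bformat("{name}: {value:04}", [{name, "x"}, {value, 7}]).
    <<"x: 0007">>
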
38 deps/mochiweb-src/src/mochifmt_records.erl
@@ -0,0 +1,38 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc Formatter that understands records.
+%%
+%% Usage:
+%%
+%% 1> M = mochifmt_records:new([{rec, record_info(fields, rec)}]),
+%% M:format("{0.bar}", [#rec{bar=foo}]).
+%% foo
+
+-module(mochifmt_records, [Recs]).
+-author('bob@mochimedia.com').
+-export([get_value/2]).
+
+get_value(Key, Rec) when is_tuple(Rec) and is_atom(element(1, Rec)) ->
+ try begin
+ Atom = list_to_existing_atom(Key),
+ {_, Fields} = proplists:lookup(element(1, Rec), Recs),
+ element(get_rec_index(Atom, Fields, 2), Rec)
+ end
+ catch error:_ -> mochifmt:get_value(Key, Rec)
+ end;
+get_value(Key, Args) ->
+ mochifmt:get_value(Key, Args).
+
+get_rec_index(Atom, [Atom | _], Index) ->
+ Index;
+get_rec_index(Atom, [_ | Rest], Index) ->
+ get_rec_index(Atom, Rest, 1 + Index).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
30 deps/mochiweb-src/src/mochifmt_std.erl
@@ -0,0 +1,30 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2008 Mochi Media, Inc.
+
+%% @doc Template module for a mochifmt formatter.
+
+-module(mochifmt_std, []).
+-author('bob@mochimedia.com').
+-export([format/2, get_value/2, format_field/2, get_field/2, convert_field/2]).
+
+format(Format, Args) ->
+ mochifmt:format(Format, Args, THIS).
+
+get_field(Key, Args) ->
+ mochifmt:get_field(Key, Args, THIS).
+
+convert_field(Key, Args) ->
+ mochifmt:convert_field(Key, Args).
+
+get_value(Key, Args) ->
+ mochifmt:get_value(Key, Args).
+
+format_field(Arg, Format) ->
+ mochifmt:format_field(Arg, Format, THIS).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
107 deps/mochiweb-src/src/mochiglobal.erl
@@ -0,0 +1,107 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
+%% <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
+-module(mochiglobal).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([get/1, get/2, put/2, delete/1]).
+
+-spec get(atom()) -> any() | undefined.
+%% @equiv get(K, undefined)
+get(K) ->
+ get(K, undefined).
+
+-spec get(atom(), T) -> any() | T.
+%% @doc Get the term for K or return Default.
+get(K, Default) ->
+ get(K, Default, key_to_module(K)).
+
+get(_K, Default, Mod) ->
+ try Mod:term()
+ catch error:undef ->
+ Default
+ end.
+
+-spec put(atom(), any()) -> ok.
+%% @doc Store term V at K, replaces an existing term if present.
+put(K, V) ->
+ put(K, V, key_to_module(K)).
+
+put(_K, V, Mod) ->
+ Bin = compile(Mod, V),
+ code:purge(Mod),
+ code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
+ ok.
+
+-spec delete(atom()) -> boolean().
+%% @doc Delete term stored at K, no-op if non-existent.
+delete(K) ->
+ delete(K, key_to_module(K)).
+
+delete(_K, Mod) ->
+ code:purge(Mod),
+ code:delete(Mod).
+
+-spec key_to_module(atom()) -> atom().
+key_to_module(K) ->
+ list_to_atom("mochiglobal:" ++ atom_to_list(K)).
+
+-spec compile(atom(), any()) -> binary().
+compile(Module, T) ->
+ {ok, Module, Bin} = compile:forms(forms(Module, T),
+ [verbose, report_errors]),
+ Bin.
+
+-spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
+forms(Module, T) ->
+ [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
+
+-spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
+term_to_abstract(Module, Getter, T) ->
+ [%% -module(Module).
+ erl_syntax:attribute(
+ erl_syntax:atom(module),
+ [erl_syntax:atom(Module)]),
+ %% -export([Getter/0]).
+ erl_syntax:attribute(
+ erl_syntax:atom(export),
+ [erl_syntax:list(
+ [erl_syntax:arity_qualifier(
+ erl_syntax:atom(Getter),
+ erl_syntax:integer(0))])]),
+ %% Getter() -> T.
+ erl_syntax:function(
+ erl_syntax:atom(Getter),
+ [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+get_put_delete_test() ->
+ K = '$$test$$mochiglobal',
+ delete(K),
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ try
+ ?MODULE:put(K, baz),
+ ?assertEqual(
+ baz,
+ get(K, bar)),
+ ?MODULE:put(K, wibble),
+ ?assertEqual(
+ wibble,
+ ?MODULE:get(K))
+ after
+ delete(K)
+ end,
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ ?assertEqual(
+ undefined,
+ ?MODULE:get(K)),
+ ok.
+-endif.
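A typical use of mochiglobal is caching a rarely-changing term (parsed configuration, say) so readers pay neither message-passing nor ETS-lookup cost; a sketch with an arbitrary key:

    %% put/2 compiles a throwaway module whose constant pool holds the term;
    %% get/1,2 read it back; delete/1 purges that module again.
    ok = mochiglobal:put(my_config, [{port, 8000}]),
    [{port, 8000}] = mochiglobal:get(my_config),
    undefined = mochiglobal:get(missing_key),
    true = mochiglobal:delete(my_config).
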
91 deps/mochiweb-src/src/mochihex.erl
@@ -0,0 +1,91 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2006 Mochi Media, Inc.
+
+%% @doc Utilities for working with hexadecimal strings.
+
+-module(mochihex).
+-author('bob@mochimedia.com').
+
+-export([to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
+
+%% @type iolist() = [char() | binary() | iolist()]
+%% @type iodata() = iolist() | binary()
+
+%% @spec to_hex(integer | iolist()) -> string()
+%% @doc Convert an iolist to a hexadecimal string.
+to_hex(0) ->
+ "0";
+to_hex(I) when is_integer(I), I > 0 ->
+ to_hex_int(I, []);
+to_hex(B) ->
+ to_hex(iolist_to_binary(B), []).
+
+%% @spec to_bin(string()) -> binary()
+%% @doc Convert a hexadecimal string to a binary.
+to_bin(L) ->
+ to_bin(L, []).
+
+%% @spec to_int(string()) -> integer()
+%% @doc Convert a hexadecimal string to an integer.
+to_int(L) ->
+ erlang:list_to_integer(L, 16).
+
+%% @spec dehex(char()) -> integer()
+%% @doc Convert a hex digit to its integer value.
+dehex(C) when C >= $0, C =< $9 ->
+ C - $0;
+dehex(C) when C >= $a, C =< $f ->
+ C - $a + 10;
+dehex(C) when C >= $A, C =< $F ->
+ C - $A + 10.
+
+%% @spec hexdigit(integer()) -> char()
+%% @doc Convert an integer less than 16 to a hex digit.
+hexdigit(C) when C >= 0, C =< 9 ->
+ C + $0;
+hexdigit(C) when C =< 15 ->
+ C + $a - 10.
+
+%% Internal API
+
+to_hex(<<>>, Acc) ->
+ lists:reverse(Acc);
+to_hex(<<C1:4, C2:4, Rest/binary>>, Acc) ->
+ to_hex(Rest, [hexdigit(C2), hexdigit(C1) | Acc]).
+
+to_hex_int(0, Acc) ->
+ Acc;
+to_hex_int(I, Acc) ->
+ to_hex_int(I bsr 4, [hexdigit(I band 15) | Acc]).
+
+to_bin([], Acc) ->
+ iolist_to_binary(lists:reverse(Acc));
+to_bin([C1, C2 | Rest], Acc) ->
+ to_bin(Rest, [(dehex(C1) bsl 4) bor dehex(C2) | Acc]).
+
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+to_hex_test() ->
+ "ff000ff1" = to_hex([255, 0, 15, 241]),
+ "ff000ff1" = to_hex(16#ff000ff1),
+ "0" = to_hex(16#0),
+ ok.
+
+to_bin_test() ->
+ <<255, 0, 15, 241>> = to_bin("ff000ff1"),
+ <<255, 0, 10, 161>> = to_bin("Ff000aA1"),
+ ok.
+
+to_int_test() ->
+ 16#ff000ff1 = to_int("ff000ff1"),
+ 16#ff000aa1 = to_int("FF000Aa1"),
+ 16#0 = to_int("0"),
+ ok.
+
+-endif.
531 deps/mochiweb-src/src/mochijson.erl
@@ -0,0 +1,531 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2006 Mochi Media, Inc.
+
+%% @doc Yet another JSON (RFC 4627) library for Erlang.
+-module(mochijson).
+-author('bob@mochimedia.com').
+-export([encoder/1, encode/1]).
+-export([decoder/1, decode/1]).
+-export([binary_encoder/1, binary_encode/1]).
+-export([binary_decoder/1, binary_decode/1]).
+
+% This is a macro to placate syntax highlighters..
+-define(Q, $\").
+-define(ADV_COL(S, N), S#decoder{column=N+S#decoder.column}).
+-define(INC_COL(S), S#decoder{column=1+S#decoder.column}).
+-define(INC_LINE(S), S#decoder{column=1, line=1+S#decoder.line}).
+
+%% @type iolist() = [char() | binary() | iolist()]
+%% @type iodata() = iolist() | binary()
+%% @type json_string() = atom | string() | binary()
+%% @type json_number() = integer() | float()
+%% @type json_array() = {array, [json_term()]}
+%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_term() = json_string() | json_number() | json_array() |
+%% json_object()
+%% @type encoding() = utf8 | unicode
+%% @type encoder_option() = {input_encoding, encoding()} |
+%% {handler, function()}
+%% @type decoder_option() = {input_encoding, encoding()} |
+%% {object_hook, function()}
+%% @type bjson_string() = binary()
+%% @type bjson_number() = integer() | float()
+%% @type bjson_array() = [bjson_term()]
+%% @type bjson_object() = {struct, [{bjson_string(), bjson_term()}]}
+%% @type bjson_term() = bjson_string() | bjson_number() | bjson_array() |
+%% bjson_object()
+%% @type binary_encoder_option() = {handler, function()}
+%% @type binary_decoder_option() = {object_hook, function()}
+
+-record(encoder, {input_encoding=unicode,
+ handler=null}).
+
+-record(decoder, {input_encoding=utf8,
+ object_hook=null,
+ line=1,
+ column=1,
+ state=null}).
+
+%% @spec encoder([encoder_option()]) -> function()
+%% @doc Create an encoder/1 with the given options.
+encoder(Options) ->
+ State = parse_encoder_options(Options, #encoder{}),
+ fun (O) -> json_encode(O, State) end.
+
+%% @spec encode(json_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist.
+encode(Any) ->
+ json_encode(Any, #encoder{}).
+
+%% @spec decoder([decoder_option()]) -> function()
+%% @doc Create a decoder/1 with the given options.
+decoder(Options) ->
+ State = parse_decoder_options(Options, #decoder{}),
+ fun (O) -> json_decode(O, State) end.
+
+%% @spec decode(iolist()) -> json_term()
+%% @doc Decode the given iolist to Erlang terms.
+decode(S) ->
+ json_decode(S, #decoder{}).
+
+%% @spec binary_decoder([binary_decoder_option()]) -> function()
+%% @doc Create a binary_decoder/1 with the given options.
+binary_decoder(Options) ->
+ mochijson2:decoder(Options).
+
+%% @spec binary_encoder([binary_encoder_option()]) -> function()
+%% @doc Create a binary_encoder/1 with the given options.
+binary_encoder(Options) ->
+ mochijson2:encoder(Options).
+
+%% @spec binary_encode(bjson_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist, using lists for arrays and
+%% binaries for strings.
+binary_encode(Any) ->
+ mochijson2:encode(Any).
+
+%% @spec binary_decode(iolist()) -> bjson_term()
+%% @doc Decode the given iolist to Erlang terms, using lists for arrays and
+%% binaries for strings.
+binary_decode(S) ->
+ mochijson2:decode(S).
+
+%% Internal API
+
+parse_encoder_options([], State) ->
+ State;
+parse_encoder_options([{input_encoding, Encoding} | Rest], State) ->
+ parse_encoder_options(Rest, State#encoder{input_encoding=Encoding});
+parse_encoder_options([{handler, Handler} | Rest], State) ->
+ parse_encoder_options(Rest, State#encoder{handler=Handler}).
+
+parse_decoder_options([], State) ->
+ State;
+parse_decoder_options([{input_encoding, Encoding} | Rest], State) ->
+ parse_decoder_options(Rest, State#decoder{input_encoding=Encoding});
+parse_decoder_options([{object_hook, Hook} | Rest], State) ->
+ parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
+
+json_encode(true, _State) ->
+ "true";
+json_encode(false, _State) ->
+ "false";
+json_encode(null, _State) ->
+ "null";
+json_encode(I, _State) when is_integer(I) ->
+ integer_to_list(I);
+json_encode(F, _State) when is_float(F) ->
+ mochinum:digits(F);
+json_encode(L, State) when is_list(L); is_binary(L); is_atom(L) ->
+ json_encode_string(L, State);
+json_encode({array, Props}, State) when is_list(Props) ->
+ json_encode_array(Props, State);
+json_encode({struct, Props}, State) when is_list(Props) ->
+ json_encode_proplist(Props, State);
+json_encode(Bad, #encoder{handler=null}) ->
+ exit({json_encode, {bad_term, Bad}});
+json_encode(Bad, State=#encoder{handler=Handler}) ->
+ json_encode(Handler(Bad), State).
+
+json_encode_array([], _State) ->
+ "[]";
+json_encode_array(L, State) ->
+ F = fun (O, Acc) ->
+ [$,, json_encode(O, State) | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "[", L),
+ lists:reverse([$\] | Acc1]).
+
+json_encode_proplist([], _State) ->
+ "{}";
+json_encode_proplist(Props, State) ->
+ F = fun ({K, V}, Acc) ->
+ KS = case K of
+ K when is_atom(K) ->
+ json_encode_string_utf8(atom_to_list(K));
+ K when is_integer(K) ->
+ json_encode_string(integer_to_list(K), State);
+ K when is_list(K); is_binary(K) ->
+ json_encode_string(K, State)
+ end,
+ VS = json_encode(V, State),
+ [$,, VS, $:, KS | Acc]
+ end,
+ [$, | Acc1] = lists:foldl(F, "{", Props),
+ lists:reverse([$\} | Acc1]).
+
+json_encode_string(A, _State) when is_atom(A) ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(atom_to_list(A)));
+json_encode_string(B, _State) when is_binary(B) ->
+ json_encode_string_unicode(xmerl_ucs:from_utf8(B));
+json_encode_string(S, #encoder{input_encoding=utf8}) ->
+ json_encode_string_utf8(S);
+json_encode_string(S, #encoder{input_encoding=unicode}) ->
+ json_encode_string_unicode(S).
+
+json_encode_string_utf8(S) ->
+ [?Q | json_encode_string_utf8_1(S)].
+
+json_encode_string_utf8_1([C | Cs]) when C >= 0, C =< 16#7f ->
+ NewC = case C of
+ $\\ -> "\\\\";
+ ?Q -> "\\\"";
+ _ when C >= $\s, C < 16#7f -> C;
+ $\t -> "\\t";
+ $\n -> "\\n";
+ $\r -> "\\r";
+ $\f -> "\\f";
+ $\b -> "\\b";
+ _ when C >= 0, C =< 16#7f -> unihex(C);
+ _ -> exit({json_encode, {bad_char, C}})
+ end,
+ [NewC | json_encode_string_utf8_1(Cs)];
+json_encode_string_utf8_1(All=[C | _]) when C >= 16#80, C =< 16#10FFFF ->
+ [?Q | Rest] = json_encode_string_unicode(xmerl_ucs:from_utf8(All)),
+ Rest;
+json_encode_string_utf8_1([]) ->
+ "\"".
+
+json_encode_string_unicode(S) ->
+ [?Q | json_encode_string_unicode_1(S)].
+
+json_encode_string_unicode_1([C | Cs]) ->
+ NewC = case C of
+ $\\ -> "\\\\";
+ ?Q -> "\\\"";
+ _ when C >= $\s, C < 16#7f -> C;
+ $\t -> "\\t";
+ $\n -> "\\n";
+ $\r -> "\\r";
+ $\f -> "\\f";
+ $\b -> "\\b";
+ _ when C >= 0, C =< 16#10FFFF -> unihex(C);
+ _ -> exit({json_encode, {bad_char, C}})
+ end,
+ [NewC | json_encode_string_unicode_1(Cs)];
+json_encode_string_unicode_1([]) ->
+ "\"".
+
+dehex(C) when C >= $0, C =< $9 ->
+ C - $0;
+dehex(C) when C >= $a, C =< $f ->
+ C - $a + 10;
+dehex(C) when C >= $A, C =< $F ->
+ C - $A + 10.
+
+hexdigit(C) when C >= 0, C =< 9 ->
+ C + $0;
+hexdigit(C) when C =< 15 ->
+ C + $a - 10.
+
+unihex(C) when C < 16#10000 ->
+ <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
+ Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
+ [$\\, $u | Digits];
+unihex(C) when C =< 16#10FFFF ->
+ N = C - 16#10000,
+ S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
+ S2 = 16#dc00 bor (N band 16#3ff),
+ [unihex(S1), unihex(S2)].
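+%% Editor's illustration (not part of the original source): code points below
+%% 16#10000 become a single \uXXXX escape; larger code points are split into
+%% a UTF-16 surrogate pair, e.g.
+%%   lists:flatten(unihex($A))       = "\\u0041"
+%%   lists:flatten(unihex(16#1D11E)) = "\\ud834\\udd1e"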
+
+json_decode(B, S) when is_binary(B) ->
+ json_decode(binary_to_list(B), S);
+json_decode(L, S) ->
+ {Res, L1, S1} = decode1(L, S),
+ {eof, [], _} = tokenize(L1, S1#decoder{state=trim}),
+ Res.
+
+decode1(L, S=#decoder{state=null}) ->
+ case tokenize(L, S#decoder{state=any}) of
+ {{const, C}, L1, S1} ->
+ {C, L1, S1};
+ {start_array, L1, S1} ->
+ decode_array(L1, S1#decoder{state=any}, []);
+ {start_object, L1, S1} ->
+ decode_object(L1, S1#decoder{state=key}, [])
+ end.
+
+make_object(V, #decoder{object_hook=null}) ->
+ V;
+make_object(V, #decoder{object_hook=Hook}) ->
+ Hook(V).
+
+decode_object(L, S=#decoder{state=key}, Acc) ->
+ case tokenize(L, S) of
+ {end_object, Rest, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, Rest, S1#decoder{state=null}};
+ {{const, K}, Rest, S1} when is_list(K) ->
+ {colon, L2, S2} = tokenize(Rest, S1),
+ {V, L3, S3} = decode1(L2, S2#decoder{state=null}),
+ decode_object(L3, S3#decoder{state=comma}, [{K, V} | Acc])
+ end;
+decode_object(L, S=#decoder{state=comma}, Acc) ->
+ case tokenize(L, S) of
+ {end_object, Rest, S1} ->
+ V = make_object({struct, lists:reverse(Acc)}, S1),
+ {V, Rest, S1#decoder{state=null}};
+ {comma, Rest, S1} ->
+ decode_object(Rest, S1#decoder{state=key}, Acc)
+ end.
+
+decode_array(L, S=#decoder{state=any}, Acc) ->
+ case tokenize(L, S) of
+ {end_array, Rest, S1} ->
+ {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
+ {start_array, Rest, S1} ->
+ {Array, Rest1, S2} = decode_array(Rest, S1#decoder{state=any}, []),
+ decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
+ {start_object, Rest, S1} ->
+ {Array, Rest1, S2} = decode_object(Rest, S1#decoder{state=key}, []),
+ decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
+ {{const, Const}, Rest, S1} ->
+ decode_array(Rest, S1#decoder{state=comma}, [Const | Acc])
+ end;
+decode_array(L, S=#decoder{state=comma}, Acc) ->
+ case tokenize(L, S) of
+ {end_array, Rest, S1} ->
+ {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
+ {comma, Rest, S1} ->
+ decode_array(Rest, S1#decoder{state=any}, Acc)
+ end.
+
+tokenize_string(IoList=[C | _], S=#decoder{input_encoding=utf8}, Acc)
+ when is_list(C); is_binary(C); C >= 16#7f ->
+ List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
+ tokenize_string(List, S#decoder{input_encoding=unicode}, Acc);
+tokenize_string("\"" ++ Rest, S, Acc) ->
+ {lists:reverse(Acc), Rest, ?INC_COL(S)};
+tokenize_string("\\\"" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\" | Acc]);
+tokenize_string("\\\\" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\\ | Acc]);
+tokenize_string("\\/" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$/ | Acc]);
+tokenize_string("\\b" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\b | Acc]);
+tokenize_string("\\f" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\f | Acc]);
+tokenize_string("\\n" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\n | Acc]);
+tokenize_string("\\r" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\r | Acc]);
+tokenize_string("\\t" ++ Rest, S, Acc) ->
+ tokenize_string(Rest, ?ADV_COL(S, 2), [$\t | Acc]);
+tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
+    % TODO: coalesce UTF-16 surrogate pairs? As written, each \uXXXX escape
+    % is decoded to a single 16-bit code unit and accumulated as-is.
+ C = dehex(C0) bor
+ (dehex(C1) bsl 4) bor
+ (dehex(C2) bsl 8) bor
+ (dehex(C3) bsl 12),
+ tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
+tokenize_string([C | Rest], S, Acc) when C >= $\s; C < 16#10FFFF ->
+ tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
+
+tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
+ when is_list(C); is_binary(C); C >= 16#7f ->
+ List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
+ tokenize_number(List, Mode, S#decoder{input_encoding=unicode}, Acc);
+tokenize_number([$- | Rest], sign, S, []) ->
+ tokenize_number(Rest, int, ?INC_COL(S), [$-]);
+tokenize_number(Rest, sign, S, []) ->
+ tokenize_number(Rest, int, S, []);
+tokenize_number([$0 | Rest], int, S, Acc) ->
+ tokenize_number(Rest, frac, ?INC_COL(S), [$0 | Acc]);
+tokenize_number([C | Rest], int, S, Acc) when C >= $1, C =< $9 ->
+ tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
+tokenize_number([C | Rest], int1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, int1, S, Acc) ->
+ tokenize_number(Rest, frac, S, Acc);
+tokenize_number([$., C | Rest], frac, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
+tokenize_number([E | Rest], frac, S, Acc) when E == $e; E == $E ->
+ tokenize_number(Rest, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
+tokenize_number(Rest, frac, S, Acc) ->
+ {{int, lists:reverse(Acc)}, Rest, S};
+tokenize_number([C | Rest], frac1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, frac1, ?INC_COL(S), [C | Acc]);
+tokenize_number([E | Rest], frac1, S, Acc) when E == $e; E == $E ->
+ tokenize_number(Rest, esign, ?INC_COL(S), [$e | Acc]);
+tokenize_number(Rest, frac1, S, Acc) ->
+ {{float, lists:reverse(Acc)}, Rest, S};
+tokenize_number([C | Rest], esign, S, Acc) when C == $-; C == $+ ->
+ tokenize_number(Rest, eint, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, esign, S, Acc) ->
+ tokenize_number(Rest, eint, S, Acc);
+tokenize_number([C | Rest], eint, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
+tokenize_number([C | Rest], eint1, S, Acc) when C >= $0, C =< $9 ->
+ tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
+tokenize_number(Rest, eint1, S, Acc) ->
+ {{float, lists:reverse(Acc)}, Rest, S}.
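+%% Editor's note: tokenize_number/4 is a small state machine
+%% (sign -> int -> int1 -> frac -> frac1 -> esign -> eint -> eint1).
+%% Exponents without a fractional part get ".0" spliced into the accumulator
+%% so that list_to_float/1 accepts the result, e.g. "1e1" tokenizes as
+%% {float, "1.0e1"}.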
+
+tokenize([], S=#decoder{state=trim}) ->
+ {eof, [], S};
+tokenize([L | Rest], S) when is_list(L) ->
+ tokenize(L ++ Rest, S);
+tokenize([B | Rest], S) when is_binary(B) ->
+ tokenize(xmerl_ucs:from_utf8(B) ++ Rest, S);
+tokenize("\r\n" ++ Rest, S) ->
+ tokenize(Rest, ?INC_LINE(S));
+tokenize("\n" ++ Rest, S) ->
+ tokenize(Rest, ?INC_LINE(S));
+tokenize([C | Rest], S) when C == $\s; C == $\t ->
+ tokenize(Rest, ?INC_COL(S));
+tokenize("{" ++ Rest, S) ->
+ {start_object, Rest, ?INC_COL(S)};
+tokenize("}" ++ Rest, S) ->
+ {end_object, Rest, ?INC_COL(S)};
+tokenize("[" ++ Rest, S) ->
+ {start_array, Rest, ?INC_COL(S)};
+tokenize("]" ++ Rest, S) ->
+ {end_array, Rest, ?INC_COL(S)};
+tokenize("," ++ Rest, S) ->
+ {comma, Rest, ?INC_COL(S)};
+tokenize(":" ++ Rest, S) ->
+ {colon, Rest, ?INC_COL(S)};
+tokenize("null" ++ Rest, S) ->
+ {{const, null}, Rest, ?ADV_COL(S, 4)};
+tokenize("true" ++ Rest, S) ->
+ {{const, true}, Rest, ?ADV_COL(S, 4)};
+tokenize("false" ++ Rest, S) ->
+ {{const, false}, Rest, ?ADV_COL(S, 5)};
+tokenize("\"" ++ Rest, S) ->
+ {String, Rest1, S1} = tokenize_string(Rest, ?INC_COL(S), []),
+ {{const, String}, Rest1, S1};
+tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
+ case tokenize_number(L, sign, S, []) of
+ {{int, Int}, Rest, S1} ->
+ {{const, list_to_integer(Int)}, Rest, S1};
+ {{float, Float}, Rest, S1} ->
+ {{const, list_to_float(Float)}, Rest, S1}
+ end.
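+%% Editor's note: tokenize/2 returns one of start_object, end_object,
+%% start_array, end_array, comma, colon, {const, Term} or (in the trim
+%% state) eof, together with the remaining input and the updated decoder
+%% state.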
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+%% Testing constructs borrowed from the Yaws JSON implementation.
+
+%% Create an object from a list of Key/Value pairs.
+
+obj_new() ->
+ {struct, []}.
+
+is_obj({struct, Props}) ->
+ F = fun ({K, _}) when is_list(K) ->
+ true;
+ (_) ->
+ false
+ end,
+ lists:all(F, Props).
+
+obj_from_list(Props) ->
+ Obj = {struct, Props},
+ case is_obj(Obj) of
+ true -> Obj;
+ false -> exit(json_bad_object)
+ end.
+
+%% Test for equivalence of Erlang terms.
+%% Due to arbitrary order of construction, equivalent objects might
+%% compare unequal as erlang terms, so we need to carefully recurse
+%% through aggregates (tuples and objects).
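+%% For example, {struct, [{"a", 1}, {"b", 2}]} and
+%% {struct, [{"b", 2}, {"a", 1}]} differ as Erlang terms but are equivalent
+%% JSON objects (editor's illustration).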
+
+equiv({struct, Props1}, {struct, Props2}) ->
+ equiv_object(Props1, Props2);
+equiv({array, L1}, {array, L2}) ->
+ equiv_list(L1, L2);
+equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
+equiv(S1, S2) when is_list(S1), is_list(S2) -> S1 == S2;
+equiv(true, true) -> true;
+equiv(false, false) -> true;
+equiv(null, null) -> true.
+
+%% Object representation and traversal order is unknown.
+%% Use the sledgehammer and sort property lists.
+
+equiv_object(Props1, Props2) ->
+ L1 = lists:keysort(1, Props1),
+ L2 = lists:keysort(1, Props2),
+ Pairs = lists:zip(L1, L2),
+ true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
+ equiv(K1, K2) and equiv(V1, V2)
+ end, Pairs).
+
+%% Recursively compare tuple elements for equivalence.
+
+equiv_list([], []) ->
+ true;
+equiv_list([V1 | L1], [V2 | L2]) ->
+ equiv(V1, V2) andalso equiv_list(L1, L2).
+
+e2j_vec_test() ->
+ test_one(e2j_test_vec(utf8), 1).
+
+issue33_test() ->
+ %% http://code.google.com/p/mochiweb/issues/detail?id=33
+ Js = {struct, [{"key", [194, 163]}]},
+ Encoder = encoder([{input_encoding, utf8}]),
+ "{\"key\":\"\\u00a3\"}" = lists:flatten(Encoder(Js)).
+
+test_one([], _N) ->
+ %% io:format("~p tests passed~n", [N-1]),
+ ok;
+test_one([{E, J} | Rest], N) ->
+ %% io:format("[~p] ~p ~p~n", [N, E, J]),
+ true = equiv(E, decode(J)),
+ true = equiv(E, decode(encode(E))),
+ test_one(Rest, 1+N).
+
+e2j_test_vec(utf8) ->
+ [
+ {1, "1"},
+    {3.1416, "3.14160"}, % text representation may truncate or add trailing zeroes
+ {-1, "-1"},
+ {-3.1416, "-3.14160"},
+ {12.0e10, "1.20000e+11"},
+ {1.234E+10, "1.23400e+10"},
+ {-1.234E-10, "-1.23400e-10"},
+ {10.0, "1.0e+01"},
+ {123.456, "1.23456E+2"},
+ {10.0, "1e1"},
+ {"foo", "\"foo\""},
+ {"foo" ++ [5] ++ "bar", "\"foo\\u0005bar\""},
+ {"", "\"\""},
+ {"\"", "\"\\\"\""},
+ {"\n\n\n", "\"\\n\\n\\n\""},
+ {"\\", "\"\\\\\""},
+ {"\" \b\f\r\n\t\"", "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
+ {obj_new(), "{}"},
+ {obj_from_list([{"foo", "bar"}]), "{\"foo\":\"bar\"}"},
+ {obj_from_list([{"foo", "bar"}, {"baz", 123}]),
+ "{\"foo\":\"bar\",\"baz\":123}"},
+ {{array, []}, "[]"},
+ {{array, [{array, []}]}, "[[]]"},
+ {{array, [1, "foo"]}, "[1,\"foo\"]"},
+
+ % json array in a json object
+ {obj_from_list([{"foo", {array, [123]}}]),
+ "{\"foo\":[123]}"},
+
+ % json object in a json object
+ {obj_from_list([{"foo", obj_from_list([{"bar", true}])}]),
+ "{\"foo\":{\"bar\":true}}"},
+
+ % fold evaluation order
+ {obj_from_list([{"foo", {array, []}},
+ {"bar", obj_from_list([{"baz", true}])},
+ {"alice", "bob"}]),
+ "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
+
+ % json object in a json array
+ {{array, [-123, "foo", obj_from_list([{"bar", {array, []}}]), null]},
+ "[-123,\"foo\",{\"bar\":[]},null]"}
+ ].
+
+-endif.
782 deps/mochiweb-src/src/mochijson2.erl
@@ -0,0 +1,782 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
+%% with binaries as strings, arrays as lists (without an {array, _}
+%% wrapper), and it only knows how to decode UTF-8 (and ASCII).
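+%%
+%% Editor's illustration of the term mapping (not part of the original doc):
+%%   mochijson2:decode("{\"foo\": [1, \"bar\"]}")
+%%       = {struct, [{<<"foo">>, [1, <<"bar">>]}]}
+%%   iolist_to_binary(mochijson2:encode({struct, [{<<"foo">>, [1, <<"bar">>]}]}))
+%%       = <<"{\"foo\":[1,\"bar\"]}">>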
+
+-module(mochijson2).
+-author('bob@mochimedia.com').
+-export([encoder/1, encode/1]).
+-export([decoder/1, decode/1]).
+
+% This is a macro to placate syntax highlighters.
+-define(Q, $\").
+-define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
+ column=N+S#decoder.column}).
+-define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
+ column=1+S#decoder.column}).
+-define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
+ column=1,
+ line=1+S#decoder.line}).