forked from elixir-lang/elixir
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request elixir-lang#133 from elixir-lang/eex
EEx - Embedded Elixir
- Loading branch information
Showing
6 changed files
with
532 additions
and
3 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,98 @@ | ||
# EEx — Embedded Elixir templates.
# NOTE(review): archaic Elixir 0.x dialect — `::` namespaces and `//` default
# arguments predate the modern `.` / `\\` syntax.
defmodule EEx do
  # Compiles the template `source` into a quoted expression, delegating to
  # EEx::Compiler with the given `engine` (defaults to EEx::Engine).
  def compile(source, engine // EEx::Engine) do
    EEx::Compiler.compile(source, engine)
  end
end
|
||
# Raised on malformed templates (unterminated `<% %>`, unmatched `end`);
# `message` carries the human-readable description.
defexception EEx::SyntaxError, message: nil
|
||
# Compiler state: the engine in use, `dict` mapping placeholder keys to
# buffered quoted expressions, the (currently unused — see TODO below)
# filename, and the current template line.
defrecord EEx::State, engine: nil, dict: [], filename: nil, line: 0
|
||
defmodule EEx::Compiler do
  @moduledoc """
  Receives a source string and generates the corresponding quoted
  expressions to be evaluated by Elixir.
  """

  # Tokenizes the source and folds the token stream into a single quoted
  # buffer using the given engine.
  def compile(source, engine) do
    tokens = EEx::Tokenizer.tokenize(source, 1)
    state = EEx::State.new(engine: engine)
    generate_buffer(tokens, "", [], state)
  end

  # Generates the buffers

  # Literal text: handed to the engine to append to the buffer.
  defp generate_buffer([{ :text, _line, chars }|t], buffer, scope, state) do
    buffer = state.engine.handle_text(buffer, chars)
    generate_buffer(t, buffer, scope, state)
  end

  # TODO: use filename
  # Self-contained expression (`<% .. %>` / `<%= .. %>`): parsed into Elixir
  # forms and handed to the engine with its marker.
  defp generate_buffer([{ :expr, line, mark, chars }|t], buffer, scope, state) do
    expr = { :__BLOCK__, 0, Erlang.elixir_translator.forms(chars, line, 'nofile') }
    buffer = state.engine.handle_expr(buffer, mark, expr)
    generate_buffer(t, buffer, scope, state)
  end

  # Block opener (ends in `do` or `->`): recurse with a fresh buffer until
  # the matching end_expr, then embed the nested contents via the '=' marker.
  defp generate_buffer([{ :start_expr, line, _, chars }|t], buffer, scope, state) do
    { contents, t } = generate_buffer(t, "", [chars|scope], state.dict([]).line(line))
    buffer = state.engine.handle_expr(buffer, '=', contents)
    generate_buffer(t, buffer, scope, state.dict([]))
  end

  # Middle token (e.g. `elsif: ..`): wrap the buffer accumulated so far into
  # a placeholder and keep accumulating within the current scope.
  defp generate_buffer([{ :middle_expr, line, _, chars }|t], buffer, [current|scope], state) do
    { wrapped, state } = wrap_expr(current, line, buffer, chars, state)
    generate_buffer(t, "", [wrapped|scope], state)
  end

  # `end` token: close the current scope, parse the accumulated source and
  # substitute the placeholders back with the real quoted buffers.
  defp generate_buffer([{ :end_expr, line, _, chars }|t], buffer, [current|_], state) do
    { wrapped, state } = wrap_expr(current, line, buffer, chars, state)
    tuples = { :__BLOCK__, 0, Erlang.elixir_translator.forms(wrapped, state.line, 'nofile') }
    buffer = insert_quotes(tuples, state.dict)
    { buffer, t }
  end

  # An `end` with no matching opener is a syntax error.
  # Fix: qualify as EEx::SyntaxError — a bare SyntaxError does not resolve
  # to the exception defined alongside this module (the tokenizer already
  # raises the fully-qualified name).
  defp generate_buffer([{ :end_expr, _, _, chars }|_], _buffer, [], _state) do
    raise EEx::SyntaxError, message: "unexpected token: #{inspect chars}"
  end

  # All tokens consumed with no open scope: done.
  defp generate_buffer([], buffer, [], _state) do
    buffer
  end

  # Input ended while a block was still open.
  # Fix: qualified EEx::SyntaxError, same reason as above.
  defp generate_buffer([], _buffer, _scope, _state) do
    raise EEx::SyntaxError, message: "undetermined end of string"
  end

  # Creates a placeholder and wrap it inside the expression block

  defp wrap_expr(current, line, buffer, chars, state) do
    key = length(state.dict)
    # TODO: Implement list duplicate
    # Pad with newlines so the re-parsed forms keep original line numbers.
    new_lines = :lists.duplicate(line - state.line, ?\n)
    placeholder = '__EEX__(' ++ integer_to_list(key) ++ ');'

    { current ++ new_lines ++ placeholder ++ chars, state.merge_dict([{key, buffer}]) }
  end

  # Changes placeholder to real expression

  # A `__EEX__(key)` call node becomes the buffer stored under `key`.
  defp insert_quotes({ :__EEX__, _, [key] }, dict) do
    Orddict.get(dict, key)
  end

  # Recurse through 3-tuples (AST nodes), 2-tuples and lists; anything else
  # is a leaf and passes through unchanged.
  defp insert_quotes({ left, line, right }, dict) do
    { insert_quotes(left, dict), line, insert_quotes(right, dict) }
  end

  defp insert_quotes({ left, right }, dict) do
    { insert_quotes(left, dict), insert_quotes(right, dict) }
  end

  defp insert_quotes(list, dict) when is_list(list) do
    Enum.map list, insert_quotes(&1, dict)
  end

  defp insert_quotes(other, _dict) do
    other
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
# Default EEx engine: compiles a template into quoted code that builds
# one binary by concatenation.
defmodule EEx::Engine do
  # Literal text: quoted concatenation of the buffer-so-far and the text.
  def handle_text(buffer, text) do
    quote do: unquote(buffer) <> unquote(text)
  end

  # `<%= .. %>`: evaluate the expression, convert to binary and append.
  def handle_expr(buffer, '=', expr) do
    quote do
      tmp_1 = unquote(buffer)
      tmp_2 = to_binary(unquote(expr))
      tmp_1 <> tmp_2
    end
  end

  # `<% .. %>`: evaluate for side effects only; the buffer is unchanged.
  def handle_expr(buffer, '', expr) do
    quote do
      tmp = unquote(buffer)
      unquote(expr)
      tmp
    end
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,146 @@ | ||
defmodule EEx::Tokenizer do
  # TODO: Add errors scenarios

  @doc """
  Tokenizes the given char list. It returns four kinds of tokens:

    * { :text, line, contents }
    * { :expr, line, marker, contents }
    * { :start_expr, line, marker, contents }
    * { :end_expr, line, marker, contents }

  """
  # Binaries are converted to char lists before tokenizing.
  def tokenize(bin, line) when is_binary(bin) do
    tokenize(binary_to_list(bin), line)
  end

  def tokenize(list, line) do
    List.reverse(tokenize(list, line, line, [], []))
  end

  # `<%` starts an expression: read the optional marker (`=`), scan until
  # `%>`, classify the token, flush any buffered text, and continue.
  # `current_line` is where the buffered text started; `line` tracks the
  # position being scanned.
  defp tokenize('<%' ++ t, current_line, line, buffer, acc) do
    { marker, t } = retrieve_marker(t)
    { expr, new_line, rest } = tokenize_expr t, line, []

    token = tip_expr_token_name(expr)
    expr = List.reverse(expr)

    # If it isn't a start or end token, it may be a middle token.
    if token == :expr, do:
      token = middle_expr_token_name(expr)

    acc = tokenize_text(current_line, buffer, acc)
    tokenize rest, new_line, new_line, [], [ { token, line, marker, expr } | acc]
  end

  defp tokenize('\n' ++ t, current_line, line, buffer, acc) do
    tokenize t, current_line, line + 1, [?\n|buffer], acc
  end

  defp tokenize([h|t], current_line, line, buffer, acc) do
    tokenize t, current_line, line, [h|buffer], acc
  end

  defp tokenize([], current_line, _line, buffer, acc) do
    tokenize_text(current_line, buffer, acc)
  end

  # Retrieve marker for <%

  defp retrieve_marker('=' ++ t) do
    { '=', t }
  end

  defp retrieve_marker(t) do
    { '', t }
  end

  # Tokenize an expression until we find %>
  # The buffer is built in reverse (prepend); callers reverse it back.

  defp tokenize_expr('%>' ++ t, line, buffer) do
    { buffer, line, t }
  end

  defp tokenize_expr('\n' ++ t, line, buffer) do
    tokenize_expr t, line + 1, [?\n|buffer]
  end

  defp tokenize_expr([h|t], line, buffer) do
    tokenize_expr t, line, [h|buffer]
  end

  # Raise an error if the %> is not found

  defp tokenize_expr([], _line, buffer) do
    raise EEx::SyntaxError, message: "invalid token: #{inspect List.reverse(buffer)}"
  end

  # Receive an expression content and check
  # if it is a start or an end token.
  # Start tokens finish with `do` or `->`
  # while end tokens contain only the end word.
  #
  # NOTE: the buffer arrives REVERSED, so the literals below are the
  # reversed spellings: 'od' == "do", '>-' == "->", 'dne' == "end".

  # Skip trailing whitespace (leading in the reversed buffer).
  defp tip_expr_token_name([h|t]) when h == ?\s or h == ?\t do
    tip_expr_token_name(t)
  end

  # Ends in `do` preceded by space/tab/`)`.
  defp tip_expr_token_name('od' ++ [h|_]) when h == ?\s or h == ?\t or h == ?) do
    :start_expr
  end

  # Ends in `->` preceded by space/tab/`)`.
  defp tip_expr_token_name('>-' ++ [h|_]) when h == ?\s or h == ?\t or h == ?) do
    :start_expr
  end

  # Ends in `end`; only an end token if nothing but whitespace precedes it.
  defp tip_expr_token_name('dne' ++ t) do
    if only_spaces?(t), do: :end_expr, else: :expr
  end

  defp tip_expr_token_name(_) do
    :expr
  end

  # Receive an expression contents and see if it matches
  # a key-value arg syntax, like elsif: foo.
  # (This one receives the buffer in normal, un-reversed order.)

  defp middle_expr_token_name([h|t]) when h == ?\s or h == ?\t do
    middle_expr_token_name(t)
  end

  defp middle_expr_token_name([h|t]) when h >= ?a and h <= ?z do
    if valid_key_identifier?(t), do: :middle_expr, else: :expr
  end

  defp middle_expr_token_name(_) do
    :expr
  end

  # Consumes [a-zA-Z0-9]* and succeeds only when a `:` follows,
  # i.e. the expression starts with `keyword:`.
  defp valid_key_identifier?([h|t]) \
      when h >= ?a and h <= ?z \
      when h >= ?A and h <= ?Z \
      when h >= ?0 and h <= ?9 do
    valid_key_identifier?(t)
  end

  defp valid_key_identifier?([?:|_]) do
    true
  end

  defp valid_key_identifier?(_) do
    false
  end

  defp only_spaces?([h|t]) when h == ?\s or h == ?\t, do: only_spaces?(t)
  defp only_spaces?(other), do: other == []

  # Tokenize the buffered text by appending
  # it to the given accumulator.

  # Empty buffer: nothing to emit.
  defp tokenize_text(_line, [], acc) do
    acc
  end

  # The buffer was built in reverse; restore order and emit a binary.
  defp tokenize_text(line, buffer, acc) do
    [{ :text, line, list_to_binary(List.reverse(buffer)) } | acc]
  end
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
Code.require_file "../../test_helper", __FILE__

# Tests for EEx::Tokenizer: token kinds, markers, line tracking and errors.
# NOTE(review): archaic ExUnit dialect (`::`, `assert_equal`, inline
# `rescue:` clauses in test bodies).
defmodule EEx::TokenizerTest do
  use ExUnit::Case
  require EEx::Tokenizer, as: T

  test "simple chars lists" do
    assert_equal [ { :text, 1, "foo" } ], T.tokenize('foo', 1)
  end

  test "simple strings" do
    assert_equal [ { :text, 1, "foo" } ], T.tokenize("foo", 1)
  end

  test "strings with embedded code" do
    assert_equal [ { :text, 1, "foo " }, { :expr, 1, [], ' bar ' } ], T.tokenize('foo <% bar %>', 1)
  end

  test "strings with embedded equals code" do
    assert_equal [ { :text, 1, "foo " }, { :expr, 1, '=', ' bar ' } ], T.tokenize('foo <%= bar %>', 1)
  end

  test "strings with more than one line" do
    assert_equal [ { :text, 1, "foo\n" },{ :expr, 2, '=', ' bar ' } ], T.tokenize('foo\n<%= bar %>', 1)
  end

  test "strings with more than one line and expression with more than one line" do
    string = '''
foo <%= bar

baz %>
<% foo %>
'''

    assert_equal [
      {:text, 1, "foo "},
      {:expr, 1, '=', ' bar\n\nbaz '},
      {:text, 3, "\n"},
      {:expr, 4, [], ' foo '},
      {:text, 4, "\n"}
    ], T.tokenize(string, 1)
  end

  test "strings with embedded do end" do
    assert_equal [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' if true do ' },
      { :text, 1, "bar" },
      { :end_expr, 1, '', ' end ' }
    ], T.tokenize('foo <% if true do %>bar<% end %>', 1)
  end

  test "strings with embedded -> end" do
    assert_equal [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' if(true)-> ' },
      { :text, 1, "bar" },
      { :end_expr, 1, '', ' end ' }
    ], T.tokenize('foo <% if(true)-> %>bar<% end %>', 1)
  end

  test "strings with embedded key-value blocks" do
    assert_equal [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' if true do ' },
      { :text, 1, "bar" },
      { :middle_expr, 1, '', ' elsif: false ' },
      { :text, 1, "baz" },
      { :end_expr, 1, '', ' end ' }
    ], T.tokenize('foo <% if true do %>bar<% elsif: false %>baz<% end %>', 1)
  end

  # Unterminated `<%` must raise; the inline rescue asserts the message.
  test "raise syntax error when there is start mark and no end mark" do
    T.tokenize('foo <% :bar', 1)
  rescue: error in [EEx::SyntaxError]
    assert_equal "invalid token: ' :bar'", error.message
  end
end
Oops, something went wrong.