
Commit

Merge pull request #394 from ydah/inline-index
Add support for adjusting Index to Inline
yui-knk committed May 17, 2024
2 parents 142cce4 + c638e08 commit 48c4434
Showing 11 changed files with 958 additions and 378 deletions.
13 changes: 13 additions & 0 deletions lib/lrama/grammar.rb
@@ -144,6 +144,7 @@ def epilogue=(epilogue)
end

def prepare
resolve_inline_rules
normalize_rules
collect_symbols
set_lhs_and_rhs
@@ -292,6 +293,18 @@ def append_special_symbols
@accept_symbol = term
end

def resolve_inline_rules
while @rule_builders.any? {|r| r.has_inline_rules? } do
@rule_builders.map! do |builder|
if builder.has_inline_rules?
builder.resolve_inline_rules
else
builder
end
end.flatten!
end
end

def normalize_rules
# Add $accept rule to the top of rules
lineno = @rule_builders.first ? @rule_builders.first.line : 0
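For context on the loop added above: Grammar#prepare now expands inline rules to a fixed point. Any rule builder whose right-hand side still references an %inline rule is replaced by the builders it expands into, and the pass repeats so that inline rules which themselves reference other inline rules are also resolved. A minimal toy model of that expand-until-stable pattern, with an illustrative Builder struct rather than Lrama's actual classes:

    Builder = Struct.new(:name, :pending) do
      def has_inline_rules?
        !pending.empty?
      end

      # Expanding the first pending inline rule yields one new builder per alternative.
      def resolve_inline_rules
        alternatives, *rest = pending
        alternatives.map { |alt| Builder.new("#{name} #{alt}".strip, rest) }
      end
    end

    # "stmt" references an inline rule with an empty and a ';' alternative; "expr" references none.
    builders = [Builder.new("stmt", [["", "';'"]]), Builder.new("expr", [])]

    while builders.any?(&:has_inline_rules?)
      builders = builders.flat_map { |b| b.has_inline_rules? ? b.resolve_inline_rules : [b] }
    end

    p builders.map(&:name) # => ["stmt", "stmt ';'", "expr"]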
98 changes: 49 additions & 49 deletions lib/lrama/grammar/rule_builder.rb
@@ -20,9 +20,7 @@ def initialize(rule_counter, midrule_action_counter, parameterizing_rule_resolve
@rules = []
@rule_builders_for_parameterizing_rules = []
@rule_builders_for_derived_rules = []
@rule_builders_for_inline_rules = []
@parameterizing_rules = []
@inline_rules = []
@midrule_action_rules = []
end

@@ -58,16 +56,39 @@ def complete_input

def setup_rules
preprocess_references unless @skip_preprocess_references
if rhs.any? { |token| @parameterizing_rule_resolver.find_inline(token) }
resolve_inline
else
process_rhs
end
process_rhs
build_rules
end

def rules
@parameterizing_rules + @inline_rules + @midrule_action_rules + @rules
@parameterizing_rules + @midrule_action_rules + @rules
end

def has_inline_rules?
rhs.any? { |token| @parameterizing_rule_resolver.find_inline(token) }
end

def resolve_inline_rules
resolved_builders = []
rhs.each_with_index do |token, i|
if inline_rule = @parameterizing_rule_resolver.find_inline(token)
inline_rule.rhs_list.each do |inline_rhs|
rule_builder = RuleBuilder.new(@rule_counter, @midrule_action_counter, @parameterizing_rule_resolver, lhs_tag: lhs_tag)
if token.is_a?(Lexer::Token::InstantiateRule)
resolve_inline_rhs(rule_builder, inline_rhs, i, Binding.new(inline_rule, token.args))
else
resolve_inline_rhs(rule_builder, inline_rhs, i)
end
rule_builder.lhs = lhs
rule_builder.line = line
rule_builder.precedence_sym = precedence_sym
rule_builder.user_code = replace_inline_user_code(inline_rhs, i)
resolved_builders << rule_builder
end
break
end
end
resolved_builders
end
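When the inlined token is an instantiation of a parameterized rule (Lexer::Token::InstantiateRule above), the expansion also substitutes the use-site arguments for the inline rule's formal parameters via the Binding passed to resolve_inline_rhs. A rough standalone sketch of that substitution, assuming a toy class rather than Lrama's Binding:

    class ToyBinding
      def initialize(params, args)
        @map = params.zip(args).to_h
      end

      # Formal parameters are rewritten to the use-site arguments;
      # any other symbol passes through unchanged.
      def resolve_symbol(sym)
        @map.fetch(sym, sym)
      end
    end

    b = ToyBinding.new(["X"], ["number"])
    p ["X", ",", "X"].map { |sym| b.resolve_symbol(sym) }  # => ["number", ",", "number"]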

private
@@ -83,25 +104,19 @@ def preprocess_references
def build_rules
tokens = @replaced_rhs

if tokens
rule = Rule.new(
id: @rule_counter.increment, _lhs: lhs, _rhs: tokens, lhs_tag: lhs_tag, token_code: user_code,
position_in_original_rule_rhs: @position_in_original_rule_rhs, precedence_sym: precedence_sym, lineno: line
)
@rules = [rule]
@parameterizing_rules = @rule_builders_for_parameterizing_rules.map do |rule_builder|
rule_builder.rules
end.flatten
@midrule_action_rules = @rule_builders_for_derived_rules.map do |rule_builder|
rule_builder.rules
end.flatten
@midrule_action_rules.each do |r|
r.original_rule = rule
end
else
@inline_rules = @rule_builders_for_inline_rules.map do |rule_builder|
rule_builder.rules
end.flatten
rule = Rule.new(
id: @rule_counter.increment, _lhs: lhs, _rhs: tokens, lhs_tag: lhs_tag, token_code: user_code,
position_in_original_rule_rhs: @position_in_original_rule_rhs, precedence_sym: precedence_sym, lineno: line
)
@rules = [rule]
@parameterizing_rules = @rule_builders_for_parameterizing_rules.map do |rule_builder|
rule_builder.rules
end.flatten
@midrule_action_rules = @rule_builders_for_derived_rules.map do |rule_builder|
rule_builder.rules
end.flatten
@midrule_action_rules.each do |r|
r.original_rule = rule
end
end

@@ -173,27 +188,10 @@ def lhs_s_value(token, bindings)
"#{token.rule_name}_#{s_values.join('_')}"
end

def resolve_inline
rhs.each_with_index do |token, i|
if (inline_rule = @parameterizing_rule_resolver.find_inline(token))
inline_rule.rhs_list.each_with_index do |inline_rhs|
rule_builder = RuleBuilder.new(@rule_counter, @midrule_action_counter, @parameterizing_rule_resolver, lhs_tag: lhs_tag, skip_preprocess_references: true)
resolve_inline_rhs(rule_builder, inline_rhs, i)
rule_builder.lhs = lhs
rule_builder.line = line
rule_builder.user_code = replace_inline_user_code(inline_rhs, i)
rule_builder.complete_input
rule_builder.setup_rules
@rule_builders_for_inline_rules << rule_builder
end
end
end
end

def resolve_inline_rhs(rule_builder, inline_rhs, index)
def resolve_inline_rhs(rule_builder, inline_rhs, index, bindings = nil)
rhs.each_with_index do |token, i|
if index == i
inline_rhs.symbols.each { |sym| rule_builder.add_rhs(sym) }
inline_rhs.symbols.each { |sym| rule_builder.add_rhs(bindings.nil? ? sym : bindings.resolve_symbol(sym)) }
else
rule_builder.add_rhs(token)
end
@@ -205,6 +203,11 @@ def replace_inline_user_code(inline_rhs, index)
return user_code if user_code.nil?

code = user_code.s_value.gsub(/\$#{index + 1}/, inline_rhs.user_code.s_value)
user_code.references.each do |ref|
next if ref.index.nil? || ref.index <= index # nil is a case for `$$`
code = code.gsub(/\$#{ref.index}/, "$#{ref.index + (inline_rhs.symbols.count-1)}")
code = code.gsub(/@#{ref.index}/, "@#{ref.index + (inline_rhs.symbols.count-1)}")
end
Lrama::Lexer::Token::UserCode.new(s_value: code, location: user_code.location)
end
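The loop added above is the index adjustment the commit title refers to: the $n reference for the inlined position is replaced by the inline alternative's own action code, and every positional reference after that position ($k and @k) is shifted by the number of symbols the alternative contributes minus one. A standalone sketch of the idea, with illustrative names rather than Lrama's API:

    # `index` is the 0-based RHS position of the inlined token, `n` is how many
    # symbols the chosen inline alternative expands to, and `inline_code` is that
    # alternative's action code.
    def shift_references(code, index, n, inline_code)
      shifted = code.gsub(/\$#{index + 1}(?!\d)/, inline_code)
      # Shift higher references first so a freshly shifted $k is not shifted again.
      code.scan(/[$@](\d+)/).map { |(d)| d.to_i }.uniq.sort.reverse_each do |k|
        next if k <= index + 1
        shifted = shifted.gsub(/\$#{k}(?!\d)/, "$#{k + n - 1}")
        shifted = shifted.gsub(/@#{k}(?!\d)/, "@#{k + n - 1}")
      end
      shifted
    end

    # The inline token sits at RHS position 2 and expands to two symbols, so $3 becomes $4.
    puts shift_references("$$ = f($1, $2, $3);", 1, 2, "$2")
    # => "$$ = f($1, $2, $4);"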

@@ -239,9 +242,6 @@ def numberize_references
end

if ref.number
# TODO: When Inlining is implemented, for example, if `$1` is expanded to multiple RHS tokens,
# `$2` needs to access `$2 + n` to actually access it. So, after the Inlining implementation,
# it needs resolves from number to index.
ref.index = ref.number
end

