Execute rule user code blocks when reducing the rule

Josh Holtrop 2022-10-13 04:56:50 -04:00
parent 74beaf7ed8
commit 727c8cd1ea
4 changed files with 90 additions and 13 deletions


@@ -386,7 +386,7 @@ class <%= @classname %>
         {
             Lexer.LexedToken lexed_token;
             uint token = _TOKEN_COUNT;
-            StateResult[] states = new StateResult[](1);
+            StateResult[] stateresults = new StateResult[](1);
             uint reduced_rule_set = 0xFFFFFFFFu;
             <%= @grammar.result_type %> reduced_parse_result;
             for (;;)
@@ -399,28 +399,31 @@ class <%= @classname %>
                 uint shift_state = 0xFFFFFFFFu;
                 if (reduced_rule_set != 0xFFFFFFFFu)
                 {
-                    shift_state = check_shift(states[$-1].state, reduced_rule_set);
+                    shift_state = check_shift(stateresults[$-1].state, reduced_rule_set);
                 }
                 if (shift_state == 0xFFFFFFFFu)
                 {
-                    shift_state = check_shift(states[$-1].state, token);
+                    shift_state = check_shift(stateresults[$-1].state, token);
+                    if ((shift_state != 0xFFFFFFFFu) && (token == TOKEN_0EOF))
+                    {
+                        /* Successful parse. */
+                        parse_result = stateresults[$-1].result;
+                        return true;
+                    }
                 }
                 if (shift_state != 0xFFFFFFFFu)
                 {
-                    if (token == TOKEN_0EOF)
-                    {
-                        /* Successful parse. */
-                        parse_result = states[$-1].result;
-                        return true;
-                    }
-                    states ~= StateResult(shift_state);
+                    /* We have something to shift. */
+                    stateresults ~= StateResult(shift_state);
                     if (reduced_rule_set == 0xFFFFFFFFu)
                     {
+                        /* We shifted a token, mark it consumed. */
                         token = _TOKEN_COUNT;
                     }
                     else
                     {
-                        states[$-1].result = reduced_parse_result;
+                        /* We shifted a RuleSet. */
+                        stateresults[$-1].result = reduced_parse_result;
                         <%= @grammar.result_type %> new_parse_result;
                         reduced_parse_result = new_parse_result;
                         reduced_rule_set = 0xFFFFFFFFu;
@@ -428,11 +431,13 @@ class <%= @classname %>
                     continue;
                 }
-                uint reduce_index = check_reduce(states[$-1].state, token);
+                uint reduce_index = check_reduce(stateresults[$-1].state, token);
                 if (reduce_index != 0xFFFFFFFFu)
                 {
+                    /* We have something to reduce. */
+                    reduced_parse_result = user_code(reduces[reduce_index].rule, stateresults, reduces[reduce_index].n_states);
                     reduced_rule_set = reduces[reduce_index].rule_set;
-                    states.length -= reduces[reduce_index].n_states;
+                    stateresults.length -= reduces[reduce_index].n_states;
                     continue;
                 }
@@ -500,5 +505,31 @@ class <%= @classname %>
             }
             return 0xFFFFFFFFu;
         }
+
+        /**
+         * Execute user code associated with a parser rule.
+         *
+         * @param rule The ID of the rule.
+         *
+         * @return Parse result.
+         */
+        private <%= @grammar.result_type %> user_code(uint rule, StateResult[] stateresults, uint n_states)
+        {
+            <%= @grammar.result_type %> _result;
+            switch (rule)
+            {
+                <% @grammar.rules.each do |rule| %>
+                <% if rule.code %>
+                case <%= rule.id %>u: {
+                    <%= expand_code(rule.code) %>
+                } break;
+                <% end %>
+                <% end %>
+                default: break;
+            }
+            return _result;
+        }
     }
 }
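
For orientation, here is a rough, self-contained sketch of what the rendered user_code() switch could look like for the Start/A/B grammar exercised by the new spec further down. It is an illustrative approximation, not actual Propane output: the rule IDs, the int alias standing in for <%= @grammar.result_type %>, and the trimmed-down StateResult struct are all assumptions.

import std.stdio;

/* Hypothetical stand-ins for pieces of the generated parser. */
alias ParseResult = int;   // assumed @grammar.result_type

struct StateResult
{
    uint state;
    ParseResult result;
}

/* Approximate shape of the rendered user_code() for:
 *   Start -> A B << writeln("Start!"); >>   (assumed rule ID 1)
 *   A -> a       << writeln("A!"); >>       (assumed rule ID 2)
 *   B -> b       << writeln("B!"); >>       (assumed rule ID 3)
 */
ParseResult user_code(uint rule, StateResult[] stateresults, uint n_states)
{
    ParseResult _result;
    switch (rule)
    {
        case 1u: {
            writeln("Start!");
        } break;
        case 2u: {
            writeln("A!");
        } break;
        case 3u: {
            writeln("B!");
        } break;
        default: break;
    }
    return _result;
}

void main()
{
    /* Reductions happen bottom-up, so for input "ab" the rules fire
     * in the order A, B, Start -- the output order the new spec expects. */
    StateResult[] stack = [StateResult(0)];
    user_code(2u, stack, 1);   // reduce A -> a
    user_code(3u, stack, 1);   // reduce B -> b
    user_code(1u, stack, 2);   // reduce Start -> A B
}

Because user_code() is called before the reduce pops n_states entries, the case bodies can still read the results of the rule's right-hand-side symbols, which the expand_code change below takes advantage of.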


@@ -170,6 +170,11 @@ class Propane
        raise Error.new("Lexer mode '#{mode_name}' not found")
      end
      "m_mode = #{mode_id}u"
+    end.gsub(/\$\$/) do |match|
+      "_result"
+    end.gsub(/\$(\d+)/) do |match|
+      index = $1.to_i
+      "stateresults[$-1-n_states+#{index}].result"
+    end
    end
  end
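
The two added gsub calls define how rule code blocks exchange values: $$ becomes _result (the value user_code() returns and the reduce loop stores for the shifted RuleSet), and $N becomes stateresults[$-1-n_states+N].result, i.e. the parse result of the N-th right-hand-side symbol, counting from 1. The indexing works because user_code() runs before the reduce pops n_states entries off the stack. Below is a hedged, runnable illustration of that expansion for a hypothetical rule Sum -> a b << $$ = $1 + $2; >> with an int result type; none of these names come from Propane itself.

import std.stdio;

alias ParseResult = int;   // assumed result type

struct StateResult
{
    uint state;
    ParseResult result;
}

/* Hypothetical rule code "$$ = $1 + $2;" after expand_code's rewrites. */
ParseResult user_code_sum(StateResult[] stateresults, uint n_states)
{
    ParseResult _result;
    _result = stateresults[$-1-n_states+1].result
            + stateresults[$-1-n_states+2].result;
    return _result;
}

void main()
{
    /* The top two entries are the rule's right-hand-side symbols;
     * they are still on the stack when the user code runs. */
    StateResult[] stack = [
        StateResult(0, 0),    // older stack entry, untouched
        StateResult(4, 40),   // $1
        StateResult(7, 2),    // $2
    ];
    writeln(user_code_sum(stack, 2));   // prints 42
}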


@@ -19,6 +19,8 @@ describe Propane do
   Results = Struct.new(:stdout, :stderr, :status)

   def run
     stdout, stderr, status = Open3.capture3("spec/run/testparser")
+    File.binwrite("spec/run/.stderr", stderr)
+    File.binwrite("spec/run/.stdout", stdout)
     Results.new(stdout, stderr, status)
   end
@@ -225,4 +227,29 @@ EOF
       "pass2",
     ])
   end
+
+  it "executes user code associated with a parser rule" do
+    write_grammar <<EOF
+token a;
+token b;
+Start -> A B <<
+  writeln("Start!");
+>>
+A -> a <<
+  writeln("A!");
+>>
+B -> b <<
+  writeln("B!");
+>>
+EOF
+    build_parser
+    compile("spec/test_parser_rule_user_code.d")
+    results = run
+    expect(results.status).to eq 0
+    verify_lines(results.stdout, [
+      "A!",
+      "B!",
+      "Start!",
+    ])
+  end
 end


@@ -0,0 +1,14 @@
+import testparser;
+import std.stdio;
+
+int main()
+{
+    return 0;
+}
+
+unittest
+{
+    string input = "ab";
+    auto parser = new Testparser.Parser(cast(const(ubyte) *)input.ptr, input.length);
+    assert(parser.parse() == true);
+}