Store parse result; add result_type grammar keyword
Parent: 02be6de48e
Commit: 31970522de
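This change adds a result_type statement to the grammar language and stores a
per-state parse result in the generated parser. The Grammar class now accepts
"result_type <type> ;" (defaulting to "void *" when the statement is absent),
and the D parser template replaces its plain uint state stack with a
StateResult struct that pairs each state with a result slot of the declared
type.

As an illustrative grammar fragment (XYZ is simply the placeholder type used
by the spec further down in this diff), the declaration looks like:

    module a.b;
    class Foobar;
    result_type XYZ * ;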
@@ -343,6 +343,17 @@ class <%= @classname %>
         uint n_reduce_entries;
     }
 
+    private struct StateResult
+    {
+        uint state;
+        <%= @grammar.result_type %> result;
+
+        this(uint state)
+        {
+            this.state = state;
+        }
+    }
+
 <% state_table, shift_table, reduce_table = @parser.build_tables %>
     private static immutable Shift shifts[] = [
 <% shift_table.each do |shift| %>
@@ -373,7 +384,7 @@ class <%= @classname %>
     {
         Lexer.LexedToken lexed_token;
         uint token = _TOKEN_COUNT;
-        uint[] states = new uint[](1);
+        StateResult[] states = new StateResult[](1);
         uint reduced_rule_set = 0xFFFFFFFFu;
         for (;;)
         {
@@ -385,11 +396,11 @@ class <%= @classname %>
             uint shift_state = 0xFFFFFFFFu;
             if (reduced_rule_set != 0xFFFFFFFFu)
             {
-                shift_state = check_shift(states[$-1], reduced_rule_set);
+                shift_state = check_shift(states[$-1].state, reduced_rule_set);
             }
             if (shift_state == 0xFFFFFFFFu)
             {
-                shift_state = check_shift(states[$-1], token);
+                shift_state = check_shift(states[$-1].state, token);
             }
             if (shift_state != 0xFFFFFFFFu)
             {
@@ -398,7 +409,7 @@ class <%= @classname %>
                     /* Successful parse. */
                     return true;
                 }
-                states ~= shift_state;
+                states ~= StateResult(shift_state);
                 if (reduced_rule_set == 0xFFFFFFFFu)
                 {
                     token = _TOKEN_COUNT;
@@ -410,7 +421,7 @@ class <%= @classname %>
                 continue;
             }
 
-            uint reduce_index = check_reduce(states[$-1], token);
+            uint reduce_index = check_reduce(states[$-1].state, token);
             if (reduce_index != 0xFFFFFFFFu)
             {
                 reduced_rule_set = reduces[reduce_index].rule_set;
@@ -8,6 +8,7 @@ class Propane
     attr_reader :rules
     attr_reader :tokens
     attr_reader :code_blocks
+    attr_reader :result_type
 
     def initialize(input)
       @patterns = []
@@ -18,6 +19,7 @@ class Propane
       @next_line_number = @line_number
       @mode = nil
       @input = input.gsub("\r\n", "\n")
+      @result_type = "void *"
       parse_grammar!
     end
 
@@ -35,6 +37,7 @@ class Propane
         elsif @mode.nil? && parse_mode_label!
         elsif parse_module_statement!
         elsif parse_class_statement!
+        elsif parse_result_type_statement!
         elsif parse_pattern_statement!
         elsif parse_token_statement!
         elsif parse_tokenid_statement!
@@ -83,6 +86,13 @@ class Propane
       end
     end
 
+    def parse_result_type_statement!
+      if consume!(/result_type\s+/)
+        md = consume!(/([^;]+);/, "expected result type expression")
+        @result_type = md[1].strip
+      end
+    end
+
     def parse_token_statement!
       if consume!(/token\s+/)
         md = consume!(/([a-zA-Z_][a-zA-Z_0-9]*)/, "expected token name")
@@ -6,6 +6,7 @@ class Propane
 
 module a.b;
 class Foobar;
+result_type XYZ * ;
 
 token while;
 
@@ -31,47 +32,48 @@ EOF
     grammar = Grammar.new(input)
     expect(grammar.classname).to eq "Foobar"
     expect(grammar.modulename).to eq "a.b"
+    expect(grammar.result_type).to eq "XYZ *"
 
     o = grammar.tokens.find {|token| token.name == "while"}
     expect(o).to_not be_nil
-    expect(o.line_number).to eq 6
+    expect(o.line_number).to eq 7
 
     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to_not be_nil
     expect(o.pattern).to eq "while"
-    expect(o.line_number).to eq 6
+    expect(o.line_number).to eq 7
     expect(o.code).to be_nil
 
     o = grammar.tokens.find {|token| token.name == "id"}
     expect(o).to_not be_nil
-    expect(o.line_number).to eq 9
+    expect(o.line_number).to eq 10
 
     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to_not be_nil
     expect(o.pattern).to eq "[a-zA-Z_][a-zA-Z_0-9]*"
-    expect(o.line_number).to eq 9
+    expect(o.line_number).to eq 10
     expect(o.code).to be_nil
 
     o = grammar.tokens.find {|token| token.name == "token_with_code"}
     expect(o).to_not be_nil
-    expect(o.line_number).to eq 11
+    expect(o.line_number).to eq 12
 
     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to_not be_nil
     expect(o.pattern).to eq "token_with_code"
-    expect(o.line_number).to eq 11
+    expect(o.line_number).to eq 12
     expect(o.code).to eq "Code for the token\n"
 
     o = grammar.tokens.find {|token| token.name == "token_with_no_pattern"}
     expect(o).to_not be_nil
-    expect(o.line_number).to eq 15
+    expect(o.line_number).to eq 16
 
     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to be_nil
 
     o = grammar.patterns.find {|pattern| pattern.pattern == "\\s+"}
     expect(o).to_not be_nil
-    expect(o.line_number).to eq 17
+    expect(o.line_number).to eq 18
     expect(o.token).to be_nil
     expect(o.code).to be_nil
 
@@ -80,19 +82,19 @@ EOF
     o = grammar.rules[0]
     expect(o.name).to eq "A"
     expect(o.components).to eq %w[B]
-    expect(o.line_number).to eq 19
+    expect(o.line_number).to eq 20
     expect(o.code).to eq " a = 42;\n"
 
     o = grammar.rules[1]
     expect(o.name).to eq "B"
     expect(o.components).to eq %w[C while id]
-    expect(o.line_number).to eq 22
+    expect(o.line_number).to eq 23
     expect(o.code).to be_nil
 
     o = grammar.rules[2]
     expect(o.name).to eq "B"
     expect(o.components).to eq []
-    expect(o.line_number).to eq 23
+    expect(o.line_number).to eq 24
     expect(o.code).to eq " b = 0;\n"
   end
 
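For reference, assuming the grammar declares "result_type XYZ * ;" as in the
spec above, the StateResult struct emitted by the template hunk at the top of
this diff would expand roughly to:

    private struct StateResult
    {
        uint state;
        XYZ * result;

        this(uint state)
        {
            this.state = state;
        }
    }

Each shift then pushes StateResult(shift_state), leaving the result field
available to hold the semantic value associated with that state.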