Assign pattern code IDs in Generator instead of Grammar

commit 150be33826
parent e7e30c4f28
@@ -28,6 +28,7 @@ class Propane
     def process_grammar!
       tokens_by_name = {}
       @grammar.tokens.each do |token|
+        # Check for token name conflicts.
         if tokens_by_name.include?(token.name)
           raise Error.new("Duplicate token name #{token.name.inspect}")
         end
@@ -35,9 +36,11 @@ class Propane
       end
       rule_sets = {}
       @grammar.rules.each do |rule|
+        # Check for token/rule name conflict.
         if tokens_by_name.include?(rule.name)
           raise Error.new("Rule name collides with token name #{rule.name.inspect}")
         end
+        # Build rule sets of all rules with the same name.
         @_rule_set_id ||= @grammar.tokens.size
         unless rule_sets[rule.name]
           rule_sets[rule.name] = RuleSet.new(rule.name, @_rule_set_id)
@@ -46,9 +49,17 @@ class Propane
         rule.rule_set = rule_sets[rule.name]
         rule_sets[rule.name] << rule
       end
+      # Check for start rule.
       unless rule_sets["Start"]
         raise Error.new("Start rule not found")
       end
+      # Generate lexer user code IDs for lexer patterns with user code blocks.
+      @grammar.patterns.select do |pattern|
+        pattern.code
+      end.each_with_index do |pattern, code_id|
+        pattern.code_id = code_id
+      end
+      # Map rule components from names to Token/RuleSet objects.
       @grammar.rules.each do |rule|
         rule.components.map! do |component|
           if tokens_by_name[component]
@@ -61,7 +72,9 @@ class Propane
         end
       end
       determine_possibly_empty_rulesets!(rule_sets)
+      # Generate the lexer.
       @lexer = Lexer.new(@grammar.patterns)
+      # Generate the parser.
       @parser = Parser.new(@grammar, rule_sets, rule_sets["Start"], @log)
     end

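The hunks above move code-ID assignment into the Generator's process_grammar!: once the rule sets are built and the start rule is checked, a single pass numbers only the lexer patterns that carry user code blocks, replacing the @code_id counter the Grammar kept while parsing (removed in the hunks further down). The sketch below is not part of the commit; PatternStub is a made-up stand-in for Propane::Pattern, used only to illustrate the select/each_with_index idiom and the resulting numbering.

    # Stand-in for Propane::Pattern, for illustration only.
    PatternStub = Struct.new(:name, :code, :code_id)

    patterns = [
      PatternStub.new("while", nil),             # no code block
      PatternStub.new("token_with_code", "..."), # has a code block
      PatternStub.new("code2", "..."),           # has a code block
    ]

    # Same idiom as the new Generator pass: only patterns with code get IDs,
    # numbered 0, 1, ... in the order they were defined.
    patterns.select(&:code).each_with_index do |pattern, code_id|
      pattern.code_id = code_id
    end

    patterns.map(&:code_id)  # => [nil, 0, 1]

Patterns without user code keep a nil code_id, so the IDs stay a dense index over just the code blocks, presumably so the generated lexer can refer to them by index.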
@@ -12,7 +12,6 @@ class Propane
       @patterns = []
       @tokens = []
       @rules = []
-      @code_id = 0
       @line_number = 1
       @next_line_number = @line_number
       @input = input.gsub("\r\n", "\n")
@@ -78,15 +77,12 @@ class Propane
         end
         pattern ||= name
         consume!(/\s+/)
-        if code = parse_code_block!
-          code_id = @code_id
-          @code_id += 1
-        else
+        unless code = parse_code_block!
           consume!(/;/, "expected pattern or `;' or code block")
         end
         token = Token.new(name: name, id: @tokens.size, line_number: @line_number)
         @tokens << token
-        pattern = Pattern.new(pattern: pattern, token: token, line_number: @line_number, code: code, code_id: code_id)
+        pattern = Pattern.new(pattern: pattern, token: token, line_number: @line_number, code: code)
         @patterns << pattern
       end
     end
@@ -132,9 +128,7 @@ class Propane
         unless code = parse_code_block!
           raise Error.new("Line #{@line_number}: expected code block to follow pattern")
         end
-        code_id = @code_id
-        @code_id += 1
-        @patterns << Pattern.new(pattern: pattern, line_number: @line_number, code: code, code_id: code_id)
+        @patterns << Pattern.new(pattern: pattern, line_number: @line_number, code: code)
       end
     end

@@ -8,7 +8,7 @@ class Propane

     # @option options [Integer, nil] :code_id
     #   Code block ID.
-    attr_reader :code_id
+    attr_accessor :code_id

     # @return [String, nil]
     #   Pattern.
@@ -32,8 +32,6 @@ class Propane
     # Optional parameters.
     # @option options [String, nil] :code
     #   Code block to execute when the pattern is matched.
-    # @option options [Integer, nil] :code_id
-    #   Code block ID.
     # @option options [Boolean] :drop
     #   Whether this is a drop pattern.
     # @option options [String, nil] :pattern
@@ -44,7 +42,6 @@ class Propane
     #   Line number where the token was defined in the input grammar.
     def initialize(options)
       @code = options[:code]
-      @code_id = options[:code_id]
       @drop = options[:drop]
       @pattern = options[:pattern]
       @token = options[:token]
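Making code_id an attr_accessor (and dropping the :code_id constructor option) is what enables the late assignment: the Grammar now builds a Pattern with only its code block attached, and the Generator writes the ID afterwards. Below is a trimmed-down mirror of the class as it stands after this commit, for illustration only (MiniPattern is not the real class; the values are made up).

    # Minimal mirror of Pattern after this commit: code_id is writable and is
    # no longer accepted by the constructor.
    class MiniPattern
      attr_accessor :code_id
      attr_reader :pattern, :code, :line_number

      def initialize(options)
        @pattern = options[:pattern]
        @code = options[:code]
        @line_number = options[:line_number]
      end
    end

    p = MiniPattern.new(pattern: "\\s+", line_number: 17, code: "  return;\n")
    p.code_id      # => nil right after parsing
    p.code_id = 0  # set later by the Generator's numbering pass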
@@ -41,7 +41,6 @@ EOF
     expect(o).to_not be_nil
     expect(o.pattern).to eq "while"
     expect(o.line_number).to eq 6
-    expect(o.code_id).to be_nil
     expect(o.code).to be_nil

     o = grammar.tokens.find {|token| token.name == "id"}
@@ -53,7 +52,6 @@ EOF
     expect(o).to_not be_nil
     expect(o.pattern).to eq "[a-zA-Z_][a-zA-Z_0-9]*"
     expect(o.line_number).to eq 9
-    expect(o.code_id).to be_nil
     expect(o.code).to be_nil

     o = grammar.tokens.find {|token| token.name == "token_with_code"}
@@ -65,7 +63,6 @@ EOF
     expect(o).to_not be_nil
     expect(o.pattern).to eq "token_with_code"
     expect(o.line_number).to eq 11
-    expect(o.code_id).to eq 0
     expect(o.code).to eq "Code for the token\n"

     o = grammar.tokens.find {|token| token.name == "token_with_no_pattern"}
@@ -79,7 +76,6 @@ EOF
     expect(o).to_not be_nil
     expect(o.line_number).to eq 17
     expect(o.token).to be_nil
-    expect(o.code_id).to be_nil
     expect(o.code).to be_nil

     expect(grammar.rules.size).to eq 3
@@ -128,7 +124,6 @@ EOF

     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to_not be_nil
-    expect(o.code_id).to eq 0
     expect(o.code).to eq " a = b;\n return c;\n"

     o = grammar.tokens.find {|token| token.name == "code2"}
@@ -138,7 +133,6 @@ EOF

     o = grammar.patterns.find {|pattern| pattern.token == o}
     expect(o).to_not be_nil
-    expect(o.code_id).to eq 1
     expect(o.code).to eq %[ writeln("Hello there");\n]
   end
 end
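With the move, the Grammar specs above drop their code_id assertions: straight out of the parser every pattern's code_id is nil, and the values 0, 1, ... exist only after the Generator has run. A hypothetical parse-time expectation under the new split (spec setup and the grammar variable assumed) might read:

    expect(grammar.patterns.map(&:code_id)).to all(be_nil)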