Remove RuleSet creation from Grammar

parent ca6a93a4c5
commit c42c3576a5
@@ -8,8 +8,6 @@ class Propane
       @log_file = log_file
       @classname = @grammar.classname || File.basename(output_file).sub(%r{[^a-zA-Z0-9].*}, "").capitalize
       process_grammar!
-      @lexer = Lexer.new(@grammar.tokens, @grammar.drop_tokens)
-      @parser = Parser.new(@grammar.rule_sets)
     end
 
     def generate
@@ -30,27 +28,30 @@ class Propane
         end
         tokens_by_name[token.name] = token
       end
-      @grammar.rule_sets.each do |rule_name, rule_set|
-        if tokens_by_name.include?(rule_name)
-          raise Error.new("Rule name collides with token name #{rule_name.inspect}")
+      rule_sets = {}
+      @grammar.rules.each do |rule|
+        if tokens_by_name.include?(rule.name)
+          raise Error.new("Rule name collides with token name #{rule.name.inspect}")
         end
+        rule_sets[rule.name] ||= RuleSet.new(rule.name)
+        rule_sets[rule.name] << rule
       end
-      unless @grammar.rule_sets["Start"]
+      unless rule_sets["Start"]
         raise Error.new("Start rule not found")
       end
-      @grammar.rule_sets.each do |rule_name, rule_set|
-        rule_set.rules.each do |rule|
+      @grammar.rules.each do |rule|
         rule.components.map! do |component|
           if tokens_by_name[component]
             tokens_by_name[component]
-          elsif @grammar.rule_sets[component]
-            @grammar.rule_sets[component]
+          elsif rule_sets[component]
+            rule_sets[component]
           else
             raise Error.new("Symbol #{component} not found")
           end
         end
       end
-      end
+      @lexer = Lexer.new(@grammar.tokens, @grammar.drop_tokens)
+      @parser = Parser.new(rule_sets["Start"])
     end
 
   end
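Taken together, the Generator's side of this commit is: build a name-to-RuleSet map from the Grammar's flat rule list, validate it, and hand only the "Start" set to the Parser. Below is a minimal runnable sketch of that grouping step; Rule and RuleSet here are simplified stand-ins, not the real propane classes.

# Simplified stand-ins for illustration only.
Rule = Struct.new(:name, :components)

class RuleSet
  attr_reader :name, :rules

  def initialize(name)
    @name = name
    @rules = []
  end

  def <<(rule)
    @rules << rule
  end
end

# Fold the flat rule list into one RuleSet per rule name, as the Generator now does.
rules = [Rule.new("Start", ["E"]), Rule.new("E", ["a"]), Rule.new("E", ["b"])]
rule_sets = {}
rules.each do |rule|
  rule_sets[rule.name] ||= RuleSet.new(rule.name)
  rule_sets[rule.name] << rule
end
raise "Start rule not found" unless rule_sets["Start"]
puts rule_sets["E"].rules.size  # => 2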
@@ -5,13 +5,13 @@ class Propane
     attr_reader :classname
     attr_reader :drop_tokens
     attr_reader :modulename
-    attr_reader :rule_sets
+    attr_reader :rules
     attr_reader :tokens
 
     def initialize(input)
       @tokens = []
       @drop_tokens = []
-      @rule_sets = {}
+      @rules = []
       input = input.gsub("\r\n", "\n")
       parse_grammar(input)
     end
@@ -44,9 +44,7 @@ class Propane
       elsif sliced = input.slice!(/\A(\S+)\s*:\s*\[(.*?)\] <<\n(.*?)^>>\n/m)
         rule_name, components, code = $1, $2, $3
         components = components.strip.split(/\s+/)
-        @rule_sets[rule_name] ||= RuleSet.new(rule_name, @rule_sets.size)
-        rule = Rule.new(rule_name, components, code, line_number)
-        @rule_sets[rule_name].add_rule(rule)
+        @rules << Rule.new(rule_name, components, code, line_number)
       else
         if input.size > 25
           input = input.slice(0..20) + "..."
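With this change Grammar only accumulates Rule objects; the slice! branch above is what feeds the list. A small sketch of what that branch extracts — the input string is a made-up rule definition shaped to match the regex, not one taken from a real grammar file:

# Hypothetical input, constructed to match the rule-definition regex.
input = "E: [E plus T] <<\n  $$ = $1 + $3;\n>>\n".dup
if input.slice!(/\A(\S+)\s*:\s*\[(.*?)\] <<\n(.*?)^>>\n/m)
  rule_name, components, code = $1, $2, $3
  components = components.strip.split(/\s+/)
  p rule_name   # => "E"
  p components  # => ["E", "plus", "T"]
  p code        # => "  $$ = $1 + $3;\n"
end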
@@ -2,11 +2,11 @@ class Propane
 
   class Parser
 
-    def initialize(rule_sets)
+    def initialize(start_rule_set)
       @token_eof = Token.new(name: "$", id: TOKEN_EOF)
       @item_sets = []
       @item_sets_set = {}
-      start_items = rule_sets["Start"].rules.map do |rule|
+      start_items = start_rule_set.rules.map do |rule|
         rule.components << @token_eof
         Item.new(rule, 0)
       end
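After this signature change the Parser no longer digs through a rule-set hash; it seeds its items from whichever start set it is handed. A self-contained sketch of that seeding step, with stand-in Token, Rule, Item, and RuleSet definitions in place of the real classes:

# Simplified stand-ins for illustration only.
Token = Struct.new(:name)
Rule  = Struct.new(:name, :components)
Item  = Struct.new(:rule, :position)

class RuleSet
  attr_reader :rules

  def initialize(rules)
    @rules = rules
  end
end

token_eof = Token.new("$")
start_rule_set = RuleSet.new([Rule.new("Start", ["E"])])
start_items = start_rule_set.rules.map do |rule|
  rule.components << token_eof  # append the end-of-input marker, as in the diff
  Item.new(rule, 0)             # position 0: nothing consumed yet
end
p start_items.first.rule.components  # => ["E", #<struct Token name="$">]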
@@ -2,19 +2,16 @@ class Propane
 
   class RuleSet
 
-    attr_reader :id
-
     attr_reader :name
 
     attr_reader :rules
 
-    def initialize(name, id)
+    def initialize(name)
       @name = name
-      @id = id
       @rules = []
     end
 
-    def add_rule(rule)
+    def <<(rule)
       @rules << rule
     end
 
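Net effect on RuleSet: the id attribute is gone (the Generator no longer passes one) and the appender is renamed to the idiomatic <<, so a RuleSet fills up like an Array or IO. A stand-in copy of the slimmed-down class, showing the resulting usage:

# Stand-in copy of the post-commit RuleSet, for illustration only.
class RuleSet
  attr_reader :name, :rules

  def initialize(name)  # no id parameter anymore
    @name = name
    @rules = []
  end

  def <<(rule)          # renamed from add_rule to the conventional append operator
    @rules << rule
  end
end

rs = RuleSet.new("E")
rs << :rule_a
rs << :rule_b
p rs.rules  # => [:rule_a, :rule_b]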