Move generation logic to new Generator class
commit ca6a93a4c5
parent b1d5cddab4
assets/parser.d.erb
@@ -1,8 +1,8 @@
-<% if @modulename %>
-module <%= @modulename %>;
+<% if @grammar.modulename %>
+module <%= @grammar.modulename %>;
 
 <% end %>
-class <%= classname %>
+class <%= @classname %>
 {
     enum
     {
@@ -117,7 +117,7 @@ class <%= classname %>
         uint accepts;
     }
 
-<% transition_table, state_table = lexer.build_tables %>
+<% transition_table, state_table = @lexer.build_tables %>
     private static const Transition transitions[] = [
 <% transition_table.each do |transition_table_entry| %>
         Transition(<%= transition_table_entry[:first] %>u, <%= transition_table_entry[:last] %>u, <%= transition_table_entry[:destination] %>u),
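Why the template now reads @grammar.modulename, @classname, and @lexer: the template is rendered via erb.result(binding) from inside a Generator instance (see lib/propane/generator.rb below), so the binding exposes the Generator's instance variables rather than the locals and ivars of the old Propane#generate. A minimal standalone sketch of that ERB behavior, using a hypothetical Greeter class that is not part of propane:

    require "erb"

    # Hypothetical class, for illustration only.
    class Greeter
      def initialize(name)
        @name = name
      end

      def render
        # result(binding) evaluates the template in this method's scope, so
        # the template sees this object's instance variables, e.g. @name.
        ERB.new("Hello, <%= @name %>!").result(binding)
      end
    end

    puts Greeter.new("world").render   # prints "Hello, world!"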
lib/propane.rb
@@ -5,6 +5,7 @@ require_relative "propane/code_point_range"
 require_relative "propane/fa"
 require_relative "propane/fa/state"
 require_relative "propane/fa/state/transition"
+require_relative "propane/generator"
 require_relative "propane/grammar"
 require_relative "propane/lexer"
 require_relative "propane/lexer/dfa"
@@ -36,64 +37,13 @@ class Propane
   class Error < RuntimeError
   end
 
-  def initialize(input)
-    @grammar = Grammar.new(input)
-    @classname = @grammar.classname
-    @modulename = @grammar.modulename
-    @rule_sets = @grammar.rule_sets
-  end
-
-  def generate(output_file, log_file)
-    expand_rules
-    lexer = Lexer.new(@grammar.tokens, @grammar.drop_tokens)
-    parser = Parser.new(@rule_sets)
-    classname = @classname || File.basename(output_file).sub(%r{[^a-zA-Z0-9].*}, "").capitalize
-    erb = ERB.new(File.read(File.join(File.dirname(File.expand_path(__FILE__)), "../assets/parser.d.erb")), trim_mode: "<>")
-    result = erb.result(binding.clone)
-    File.open(output_file, "wb") do |fh|
-      fh.write(result)
-    end
-  end
-
-  private
-
-  def expand_rules
-    tokens_by_name = {}
-    @grammar.tokens.each do |token|
-      if tokens_by_name.include?(token.name)
-        raise Error.new("Duplicate token name #{token.name.inspect}")
-      end
-      tokens_by_name[token.name] = token
-    end
-    @rule_sets.each do |rule_name, rule_set|
-      if tokens_by_name.include?(rule_name)
-        raise Error.new("Rule name collides with token name #{rule_name.inspect}")
-      end
-    end
-    unless @rule_sets["Start"]
-      raise Error.new("Start rule not found")
-    end
-    @rule_sets.each do |rule_name, rule_set|
-      rule_set.rules.each do |rule|
-        rule.components.map! do |component|
-          if tokens_by_name[component]
-            tokens_by_name[component]
-          elsif @rule_sets[component]
-            @rule_sets[component]
-          else
-            raise Error.new("Symbol #{component} not found")
-          end
-        end
-      end
-    end
-  end
-
   class << self
 
     def run(input_file, output_file, log_file)
       begin
-        propane = Propane.new(File.read(input_file))
-        propane.generate(output_file, log_file)
+        grammar = Grammar.new(File.read(input_file))
+        generator = Generator.new(grammar, output_file, log_file)
+        generator.generate
       rescue Error => e
         $stderr.puts e.message
         return 2
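With the removal above, Propane keeps only the Error class and the class-level entry point; construction and generation move to the new Generator. Spelled out, the flow inside run is now the following three steps (file names here are hypothetical, for illustration):

    source    = File.read("grammar.propane")   # hypothetical input path
    grammar   = Grammar.new(source)            # parse the grammar definition
    generator = Generator.new(grammar, "Parser.d", "parser.log")
    generator.generate                         # render the template and write Parser.d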
lib/propane/generator.rb (new file, 58 lines)
@@ -0,0 +1,58 @@
+class Propane
+
+  class Generator
+
+    def initialize(grammar, output_file, log_file)
+      @grammar = grammar
+      @output_file = output_file
+      @log_file = log_file
+      @classname = @grammar.classname || File.basename(output_file).sub(%r{[^a-zA-Z0-9].*}, "").capitalize
+      process_grammar!
+      @lexer = Lexer.new(@grammar.tokens, @grammar.drop_tokens)
+      @parser = Parser.new(@grammar.rule_sets)
+    end
+
+    def generate
+      erb = ERB.new(File.read(File.join(File.dirname(File.expand_path(__FILE__)), "../../assets/parser.d.erb")), trim_mode: "<>")
+      result = erb.result(binding.clone)
+      File.open(@output_file, "wb") do |fh|
+        fh.write(result)
+      end
+    end
+
+    private
+
+    def process_grammar!
+      tokens_by_name = {}
+      @grammar.tokens.each do |token|
+        if tokens_by_name.include?(token.name)
+          raise Error.new("Duplicate token name #{token.name.inspect}")
+        end
+        tokens_by_name[token.name] = token
+      end
+      @grammar.rule_sets.each do |rule_name, rule_set|
+        if tokens_by_name.include?(rule_name)
+          raise Error.new("Rule name collides with token name #{rule_name.inspect}")
+        end
+      end
+      unless @grammar.rule_sets["Start"]
+        raise Error.new("Start rule not found")
+      end
+      @grammar.rule_sets.each do |rule_name, rule_set|
+        rule_set.rules.each do |rule|
+          rule.components.map! do |component|
+            if tokens_by_name[component]
+              tokens_by_name[component]
+            elsif @grammar.rule_sets[component]
+              @grammar.rule_sets[component]
+            else
+              raise Error.new("Symbol #{component} not found")
+            end
+          end
+        end
+      end
+    end
+
+  end
+
+end
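One detail worth noting in Generator#initialize: when the grammar names no class, the default class name is derived from the output file name by cutting the basename at the first non-alphanumeric character and capitalizing the rest. A quick check of that expression, with hypothetical file names:

    def default_classname(output_file)
      # Same expression as in Generator#initialize above.
      File.basename(output_file).sub(%r{[^a-zA-Z0-9].*}, "").capitalize
    end

    default_classname("parser.d")      # => "Parser"
    default_classname("/tmp/calc.d")   # => "Calc"
    default_classname("my_parser.d")   # => "My"    (cut at the underscore)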