Change Token constructor to take a Hash of all parameters

This commit is contained in:
Josh Holtrop 2022-06-05 14:40:41 -04:00
parent f37801ec9e
commit b1d5cddab4
3 changed files with 20 additions and 18 deletions

View File

@@ -37,10 +37,10 @@ class Propane
unless name =~ /^[a-zA-Z_][a-zA-Z_0-9]*$/ unless name =~ /^[a-zA-Z_][a-zA-Z_0-9]*$/
raise Error.new("Invalid token name #{name}") raise Error.new("Invalid token name #{name}")
end end
@tokens << Token.new(name, pattern, @tokens.size, line_number) @tokens << Token.new(name: name, pattern: pattern, id: @tokens.size, line_number: line_number)
elsif sliced = input.slice!(/\Adrop\s+(\S+)\n/) elsif sliced = input.slice!(/\Adrop\s+(\S+)\n/)
pattern = $1 pattern = $1
@drop_tokens << Token.new(nil, pattern, nil, line_number) @drop_tokens << Token.new(pattern: pattern, line_number: line_number)
elsif sliced = input.slice!(/\A(\S+)\s*:\s*\[(.*?)\] <<\n(.*?)^>>\n/m) elsif sliced = input.slice!(/\A(\S+)\s*:\s*\[(.*?)\] <<\n(.*?)^>>\n/m)
rule_name, components, code = $1, $2, $3 rule_name, components, code = $1, $2, $3
components = components.strip.split(/\s+/) components = components.strip.split(/\s+/)

View File

@@ -3,7 +3,7 @@ class Propane
class Parser class Parser
def initialize(rule_sets) def initialize(rule_sets)
@token_eof = Token.new("$", nil, TOKEN_EOF, nil) @token_eof = Token.new(name: "$", id: TOKEN_EOF)
@item_sets = [] @item_sets = []
@item_sets_set = {} @item_sets_set = {}
start_items = rule_sets["Start"].rules.map do |rule| start_items = rule_sets["Start"].rules.map do |rule|

View File

@@ -2,15 +2,15 @@ class Propane
class Token class Token
# @return [String] # @return [String, nil]
# Token name. # Token name.
attr_reader :name attr_reader :name
# @return [String] # @return [String, nil]
# Token pattern. # Token pattern.
attr_reader :pattern attr_reader :pattern
# @return [Integer] # @return [Integer, nil]
# Token ID. # Token ID.
attr_reader :id attr_reader :id
@@ -18,27 +18,29 @@ class Propane
# Line number where the token was defined in the input grammar. # Line number where the token was defined in the input grammar.
attr_reader :line_number attr_reader :line_number
# @return [Regex::NFA] # @return [Regex::NFA, nil]
# Regex NFA for matching the token. # Regex NFA for matching the token.
attr_reader :nfa attr_reader :nfa
# Construct a Token. # Construct a Token.
# #
# @param name [String] # @param options [Hash]
# Optional parameters.
# @option options [String, nil] :name
# Token name. # Token name.
# @param pattern [String] # @option options [String, nil] :pattern
# Token pattern. # Token pattern.
# @param id [Integer] # @option options [Integer, nil] :id
# Token ID. # Token ID.
# @param line_number [Integer, nil] # @option options [Integer, nil] :line_number
# Line number where the token was defined in the input grammar. # Line number where the token was defined in the input grammar.
def initialize(name, pattern, id, line_number) def initialize(options)
@name = name @name = options[:name]
@pattern = pattern @pattern = options[:pattern]
@id = id @id = options[:id]
@line_number = line_number @line_number = options[:line_number]
unless pattern.nil? unless @pattern.nil?
regex = Regex.new(pattern) regex = Regex.new(@pattern)
regex.nfa.end_state.accepts = self regex.nfa.end_state.accepts = self
@nfa = regex.nfa @nfa = regex.nfa
end end