author     yui-knk <spiketeika@gmail.com>          2023-11-04 13:12:30 +0900
committer  Yuichiro Kaneko <spiketeika@gmail.com>  2023-11-05 12:04:52 +0900
commit     a15aa259db16ad2cbd3805d253bae557b4dde0b8 (patch)
tree       a342c46fd63588b946b3654d75c8fe1cdab515f3 /tool
parent     368a1cb3c40ea7fd17809e6d2e78f6a77f770a29 (diff)
download   ruby-a15aa259db16ad2cbd3805d253bae557b4dde0b8.tar.gz
Lrama v0.5.9
Diffstat (limited to 'tool')
-rw-r--r--   tool/lrama/lib/lrama/command.rb                        7
-rw-r--r--   tool/lrama/lib/lrama/grammar.rb                      155
-rw-r--r--   tool/lrama/lib/lrama/grammar/percent_code.rb          12
-rw-r--r--   tool/lrama/lib/lrama/grammar/symbol.rb                 4
-rw-r--r--   tool/lrama/lib/lrama/lexer.rb                         29
-rw-r--r--   tool/lrama/lib/lrama/lexer/token.rb                   76
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/char.rb               8
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/ident.rb              8
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/parameterizing.rb    19
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/tag.rb                8
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/type.rb               8
-rw-r--r--   tool/lrama/lib/lrama/lexer/token/user_code.rb         14
-rw-r--r--   tool/lrama/lib/lrama/option_parser.rb                  3
-rw-r--r--   tool/lrama/lib/lrama/options.rb                        3
-rw-r--r--   tool/lrama/lib/lrama/output.rb                         9
-rw-r--r--   tool/lrama/lib/lrama/parser.rb                       958
-rw-r--r--   tool/lrama/lib/lrama/version.rb                        2
-rw-r--r--   tool/lrama/template/bison/_yacc.h                      4
-rw-r--r--   tool/lrama/template/bison/yacc.c                       2
19 files changed, 716 insertions, 613 deletions
diff --git a/tool/lrama/lib/lrama/command.rb b/tool/lrama/lib/lrama/command.rb
index afaecda506..b6b1fd2766 100644
--- a/tool/lrama/lib/lrama/command.rb
+++ b/tool/lrama/lib/lrama/command.rb
@@ -8,7 +8,7 @@ module Lrama
warning = Lrama::Warning.new
text = options.y.read
options.y.close if options.y != STDIN
- grammar = Lrama::Parser.new(text, options.grammar_file).parse
+ grammar = Lrama::Parser.new(text, options.grammar_file, options.debug).parse
states = Lrama::States.new(grammar, warning, trace_state: (options.trace_opts[:automaton] || options.trace_opts[:closure]))
states.compute
context = Lrama::Context.new(states)
@@ -20,6 +20,11 @@ module Lrama
end
end
+ if options.trace_opts && options.trace_opts[:rules]
+ puts "Grammar rules:"
+ puts grammar.rules
+ end
+
File.open(options.outfile, "w+") do |f|
Lrama::Output.new(
out: f,
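
For orientation, a minimal sketch of how the new third parameter and the rules trace fit together; the grammar file name and text are hypothetical, and the API follows the diff above:

    text = File.read("parser.y")                               # hypothetical input grammar
    grammar = Lrama::Parser.new(text, "parser.y", true).parse  # new debug argument turns on Racc's yydebug
    puts "Grammar rules:"
    puts grammar.rules                                         # what Command prints when rules tracing is enabled
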
diff --git a/tool/lrama/lib/lrama/grammar.rb b/tool/lrama/lib/lrama/grammar.rb
index 012202adbc..f8f9994e08 100644
--- a/tool/lrama/lib/lrama/grammar.rb
+++ b/tool/lrama/lib/lrama/grammar.rb
@@ -3,6 +3,7 @@ require "strscan"
require "lrama/grammar/auxiliary"
require "lrama/grammar/code"
require "lrama/grammar/error_token"
+require "lrama/grammar/percent_code"
require "lrama/grammar/precedence"
require "lrama/grammar/printer"
require "lrama/grammar/reference"
@@ -13,11 +14,9 @@ require "lrama/lexer"
require "lrama/type"
module Lrama
- Token = Lrama::Lexer::Token
-
# Grammar is the result of parsing an input grammar file
class Grammar
- attr_reader :eof_symbol, :error_symbol, :undef_symbol, :accept_symbol, :aux
+ attr_reader :percent_codes, :eof_symbol, :error_symbol, :undef_symbol, :accept_symbol, :aux
attr_accessor :union, :expect,
:printers, :error_tokens,
:lex_param, :parse_param, :initial_action,
@@ -26,6 +25,8 @@ module Lrama
:sym_to_rules
def initialize
+ # Code defined by "%code"
+ @percent_codes = []
@printers = []
@error_tokens = []
@symbols = []
@@ -43,6 +44,10 @@ module Lrama
append_special_symbols
end
+ def add_percent_code(id:, code:)
+ @percent_codes << PercentCode.new(id, code)
+ end
+
def add_printer(ident_or_tags:, code:, lineno:)
@printers << Printer.new(ident_or_tags: ident_or_tags, code: code, lineno: lineno)
end
@@ -122,16 +127,7 @@ module Lrama
@_rules << [lhs, rhs, lineno]
end
- def build_references(token_code)
- token_code.references.map! do |type, value, tag, first_column, last_column|
- Reference.new(type: type, value: value, ex_tag: tag, first_column: first_column, last_column: last_column)
- end
-
- token_code
- end
-
def build_code(type, token_code)
- build_references(token_code)
Code.new(type: type, token_code: token_code)
end
@@ -152,6 +148,7 @@ module Lrama
end
def prepare
+ extract_references
normalize_rules
collect_symbols
replace_token_with_symbol
@@ -314,31 +311,33 @@ module Lrama
# $ references
# It need to wrap an identifier with brackets to use ".-" for identifiers
when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
- tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
- return [:dollar, "$", tag, start, scanner.pos - 1]
+ tag = scanner[1] ? Lrama::Lexer::Token::Tag.new(s_value: scanner[1]) : nil
+ return Reference.new(type: :dollar, value: "$", ex_tag: tag, first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
- tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
- return [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
+ tag = scanner[1] ? Lrama::Lexer::Token::Tag.new(s_value: scanner[1]) : nil
+ return Reference.new(type: :dollar, value: Integer(scanner[2]), ex_tag: tag, first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
- tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
- return [:dollar, scanner[2], tag, start, scanner.pos - 1]
+ tag = scanner[1] ? Lrama::Lexer::Token::Tag.new(s_value: scanner[1]) : nil
+ return Reference.new(type: :dollar, value: scanner[2], ex_tag: tag, first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
- tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
- return [:dollar, scanner[2], tag, start, scanner.pos - 1]
+ tag = scanner[1] ? Lrama::Lexer::Token::Tag.new(s_value: scanner[1]) : nil
+ return Reference.new(type: :dollar, value: scanner[2], ex_tag: tag, first_column: start, last_column: scanner.pos - 1)
# @ references
# It need to wrap an identifier with brackets to use ".-" for identifiers
when scanner.scan(/@\$/) # @$
- return [:at, "$", nil, start, scanner.pos - 1]
+ return Reference.new(type: :at, value: "$", first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/@(\d+)/) # @1
- return [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
+ return Reference.new(type: :at, value: Integer(scanner[1]), first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
- return [:at, scanner[1], nil, start, scanner.pos - 1]
+ return Reference.new(type: :at, value: scanner[1], first_column: start, last_column: scanner.pos - 1)
when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
- return [:at, scanner[1], nil, start, scanner.pos - 1]
+ return Reference.new(type: :at, value: scanner[1], first_column: start, last_column: scanner.pos - 1)
end
end
+ private
+
def extract_references
unless initial_action.nil?
scanner = StringScanner.new(initial_action.s_value)
@@ -353,7 +352,6 @@ module Lrama
end
initial_action.token_code.references = references
- build_references(initial_action.token_code)
end
@printers.each do |printer|
@@ -369,7 +367,6 @@ module Lrama
end
printer.code.token_code.references = references
- build_references(printer.code.token_code)
end
@error_tokens.each do |error_token|
@@ -385,12 +382,11 @@ module Lrama
end
error_token.code.token_code.references = references
- build_references(error_token.code.token_code)
end
@_rules.each do |lhs, rhs, _|
rhs.each_with_index do |token, index|
- next if token.class == Lrama::Grammar::Symbol || token.type != Lrama::Lexer::Token::User_code
+ next unless token.class == Lrama::Lexer::Token::UserCode
scanner = StringScanner.new(token.s_value)
references = []
@@ -407,14 +403,11 @@ module Lrama
end
token.references = references
- token.numberize_references(lhs, rhs)
- build_references(token)
+ numberize_references(lhs, rhs, token.references)
end
end
end
- private
-
def find_nterm_by_id!(id)
nterms.find do |nterm|
nterm.id == id
@@ -428,29 +421,54 @@ module Lrama
# @empty_symbol = term
# YYEOF
- term = add_term(id: Token.new(type: Token::Ident, s_value: "YYEOF"), alias_name: "\"end of file\"", token_id: 0)
+ term = add_term(id: Lrama::Lexer::Token::Ident.new(s_value: "YYEOF"), alias_name: "\"end of file\"", token_id: 0)
term.number = 0
term.eof_symbol = true
@eof_symbol = term
# YYerror
- term = add_term(id: Token.new(type: Token::Ident, s_value: "YYerror"), alias_name: "error")
+ term = add_term(id: Lrama::Lexer::Token::Ident.new(s_value: "YYerror"), alias_name: "error")
term.number = 1
term.error_symbol = true
@error_symbol = term
# YYUNDEF
- term = add_term(id: Token.new(type: Token::Ident, s_value: "YYUNDEF"), alias_name: "\"invalid token\"")
+ term = add_term(id: Lrama::Lexer::Token::Ident.new(s_value: "YYUNDEF"), alias_name: "\"invalid token\"")
term.number = 2
term.undef_symbol = true
@undef_symbol = term
# $accept
- term = add_nterm(id: Token.new(type: Token::Ident, s_value: "$accept"))
+ term = add_nterm(id: Lrama::Lexer::Token::Ident.new(s_value: "$accept"))
term.accept_symbol = true
@accept_symbol = term
end
+ def numberize_references(lhs, rhs, references)
+ references.map! {|ref|
+ ref_name = ref.value
+ if ref_name.is_a?(::String) && ref_name != '$'
+ value =
+ if lhs.referred_by?(ref_name)
+ '$'
+ else
+ index = rhs.find_index {|token| token.referred_by?(ref_name) }
+
+ if index
+ index + 1
+ else
+ raise "'#{ref_name}' is invalid name."
+ end
+ end
+
+ ref.value = value
+ ref
+ else
+ ref
+ end
+ }
+ end
+
# 1. Add $accept rule to the top of rules
# 2. Extract precedence and last action
# 3. Extract action in the middle of RHS into new Empty rule
@@ -493,7 +511,7 @@ module Lrama
case
when r.is_a?(Symbol) # precedence_sym
precedence_sym = r
- when (r.type == Token::User_code) && precedence_sym.nil? && code.nil? && rhs1.empty?
+ when r.is_a?(Lrama::Lexer::Token::UserCode) && precedence_sym.nil? && code.nil? && rhs1.empty?
code = r
else
rhs1 << r
@@ -503,7 +521,7 @@ module Lrama
# Bison n'th component is 1-origin
(rhs1 + [code]).compact.each.with_index(1) do |token, i|
- if token.type == Token::User_code
+ if token.is_a?(Lrama::Lexer::Token::UserCode)
token.references.each do |ref|
# Need to keep position_in_rhs for actions in the middle of RHS
ref.position_in_rhs = i - 1
@@ -532,9 +550,9 @@ module Lrama
end
rhs2 = rhs1.map do |token|
- if token.type == Token::User_code
+ if token.is_a?(Lrama::Lexer::Token::UserCode)
prefix = token.referred ? "@" : "$@"
- new_token = Token.new(type: Token::Ident, s_value: prefix + extracted_action_number.to_s)
+ new_token = Lrama::Lexer::Token::Ident.new(s_value: prefix + extracted_action_number.to_s)
extracted_action_number += 1
a << [new_token, token]
new_token
@@ -550,8 +568,12 @@ module Lrama
end
c = code ? Code.new(type: :user_code, token_code: code) : nil
- @rules << Rule.new(id: @rules.count, lhs: lhs, rhs: rhs2, code: c, precedence_sym: precedence_sym, lineno: lineno)
-
+ # Expand Parameterizing rules
+ if rhs2.any? {|r| r.is_a?(Lrama::Lexer::Token::Parameterizing) }
+ expand_parameterizing_rules(lhs, rhs2, c, precedence_sym, lineno)
+ else
+ @rules << Rule.new(id: @rules.count, lhs: lhs, rhs: rhs2, code: c, precedence_sym: precedence_sym, lineno: lineno)
+ end
add_nterm(id: lhs)
a.each do |new_token, _|
add_nterm(id: new_token)
@@ -559,14 +581,37 @@ module Lrama
end
end
+ def expand_parameterizing_rules(lhs, rhs, code, precedence_sym, lineno)
+ token = Lrama::Lexer::Token::Ident.new(s_value: rhs[0].s_value)
+ if rhs.any? {|r| r.is_a?(Lrama::Lexer::Token::Parameterizing) && r.option? }
+ option_token = Lrama::Lexer::Token::Ident.new(s_value: "option_#{rhs[0].s_value}")
+ add_term(id: option_token)
+ @rules << Rule.new(id: @rules.count, lhs: lhs, rhs: [option_token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: option_token, rhs: [], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: option_token, rhs: [token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ elsif rhs.any? {|r| r.is_a?(Lrama::Lexer::Token::Parameterizing) && r.nonempty_list? }
+ nonempty_list_token = Lrama::Lexer::Token::Ident.new(s_value: "nonempty_list_#{rhs[0].s_value}")
+ add_term(id: nonempty_list_token)
+ @rules << Rule.new(id: @rules.count, lhs: lhs, rhs: [nonempty_list_token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: nonempty_list_token, rhs: [token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: nonempty_list_token, rhs: [nonempty_list_token, token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ elsif rhs.any? {|r| r.is_a?(Lrama::Lexer::Token::Parameterizing) && r.list? }
+ list_token = Lrama::Lexer::Token::Ident.new(s_value: "list_#{rhs[0].s_value}")
+ add_term(id: list_token)
+ @rules << Rule.new(id: @rules.count, lhs: lhs, rhs: [list_token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: list_token, rhs: [], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ @rules << Rule.new(id: @rules.count, lhs: list_token, rhs: [list_token, token], code: code, precedence_sym: precedence_sym, lineno: lineno)
+ end
+ end
+
# Collect symbols from rules
def collect_symbols
@rules.flat_map(&:rhs).each do |s|
case s
- when Token
- if s.type == Token::Char
- add_term(id: s)
- end
+ when Lrama::Lexer::Token::Char
+ add_term(id: s)
+ when Lrama::Lexer::Token
+ # skip
when Symbol
# skip
else
@@ -607,7 +652,7 @@ module Lrama
# If id is Token::Char, it uses ASCII code
if sym.term? && sym.token_id.nil?
- if sym.id.type == Token::Char
+ if sym.id.is_a?(Lrama::Lexer::Token::Char)
# Ignore ' on the both sides
case sym.id.s_value[1..-2]
when "\\b"
@@ -660,7 +705,7 @@ module Lrama
rule.code.references.each do |ref|
next if ref.type == :at
- if ref.referring_symbol.type != Token::User_code
+ if !ref.referring_symbol.is_a?(Lrama::Lexer::Token::UserCode)
ref.referring_symbol = token_to_symbol(ref.referring_symbol)
end
end
@@ -670,7 +715,7 @@ module Lrama
def token_to_symbol(token)
case token
- when Token
+ when Lrama::Lexer::Token
find_symbol_by_id!(token)
when Symbol
token
@@ -716,10 +761,10 @@ module Lrama
@symbols.each do |sym|
@printers.each do |printer|
printer.ident_or_tags.each do |ident_or_tag|
- case ident_or_tag.type
- when Token::Ident
+ case ident_or_tag
+ when Lrama::Lexer::Token::Ident
sym.printer = printer if sym.id == ident_or_tag
- when Token::Tag
+ when Lrama::Lexer::Token::Tag
sym.printer = printer if sym.tag == ident_or_tag
else
raise "Unknown token type. #{printer}"
@@ -733,10 +778,10 @@ module Lrama
@symbols.each do |sym|
@error_tokens.each do |error_token|
error_token.ident_or_tags.each do |ident_or_tag|
- case ident_or_tag.type
- when Token::Ident
+ case ident_or_tag
+ when Lrama::Lexer::Token::Ident
sym.error_token = error_token if sym.id == ident_or_tag
- when Token::Tag
+ when Lrama::Lexer::Token::Tag
sym.error_token = error_token if sym.tag == ident_or_tag
else
raise "Unknown token type. #{error_token}"
diff --git a/tool/lrama/lib/lrama/grammar/percent_code.rb b/tool/lrama/lib/lrama/grammar/percent_code.rb
new file mode 100644
index 0000000000..5faa3a582b
--- /dev/null
+++ b/tool/lrama/lib/lrama/grammar/percent_code.rb
@@ -0,0 +1,12 @@
+module Lrama
+ class Grammar
+ class PercentCode
+ attr_reader :id, :code
+
+ def initialize(id, code)
+ @id = id
+ @code = code
+ end
+ end
+ end
+end
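
A hedged sketch of how a %code block ends up on the grammar, assuming grammar is a Lrama::Grammar.new instance and using illustrative values; add_percent_code and the token classes come from the diffs above:

    id   = Lrama::Lexer::Token::Ident.new(s_value: "requires")            # the name after %code
    code = Lrama::Lexer::Token::UserCode.new(s_value: '#include "node.h"')
    grammar.add_percent_code(id: id, code: code)
    grammar.percent_codes.last   # => PercentCode exposing #id and #code readers
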
diff --git a/tool/lrama/lib/lrama/grammar/symbol.rb b/tool/lrama/lib/lrama/grammar/symbol.rb
index 9df1c2f636..39e5218d53 100644
--- a/tool/lrama/lib/lrama/grammar/symbol.rb
+++ b/tool/lrama/lib/lrama/grammar/symbol.rb
@@ -47,9 +47,9 @@ module Lrama
name = "YYACCEPT"
when eof_symbol?
name = "YYEOF"
- when term? && id.type == Token::Char
+ when term? && id.is_a?(Lrama::Lexer::Token::Char)
name = number.to_s + display_name
- when term? && id.type == Token::Ident
+ when term? && id.is_a?(Lrama::Lexer::Token::Ident)
name = id.s_value
when nterm? && (id.s_value.include?("$") || id.s_value.include?("@"))
name = number.to_s + id.s_value
diff --git a/tool/lrama/lib/lrama/lexer.rb b/tool/lrama/lib/lrama/lexer.rb
index 926606f3b9..870d087b38 100644
--- a/tool/lrama/lib/lrama/lexer.rb
+++ b/tool/lrama/lib/lrama/lexer.rb
@@ -3,6 +3,7 @@ require "lrama/lexer/token"
module Lrama
class Lexer
+ attr_reader :head_line, :head_column
attr_accessor :status
attr_accessor :end_symbol
@@ -24,6 +25,8 @@ module Lrama
%precedence
%prec
%error-token
+ %empty
+ %code
)
def initialize(text)
@@ -63,8 +66,6 @@ module Lrama
when @scanner.scan(/\/\//)
@scanner.scan_until(/\n/)
newline
- when @scanner.scan(/%empty/)
- # noop
else
break
end
@@ -80,18 +81,20 @@ module Lrama
return [@scanner.matched, @scanner.matched]
when @scanner.scan(/#{PERCENT_TOKENS.join('|')}/)
return [@scanner.matched, @scanner.matched]
+ when @scanner.scan(/[\?\+\*]/)
+ return [@scanner.matched, @scanner.matched]
when @scanner.scan(/<\w+>/)
- return [:TAG, build_token(type: Token::Tag, s_value: @scanner.matched)]
+ return [:TAG, setup_token(Lrama::Lexer::Token::Tag.new(s_value: @scanner.matched))]
when @scanner.scan(/'.'/)
- return [:CHARACTER, build_token(type: Token::Char, s_value: @scanner.matched)]
+ return [:CHARACTER, setup_token(Lrama::Lexer::Token::Char.new(s_value: @scanner.matched))]
when @scanner.scan(/'\\\\'|'\\b'|'\\t'|'\\f'|'\\r'|'\\n'|'\\v'|'\\13'/)
- return [:CHARACTER, build_token(type: Token::Char, s_value: @scanner.matched)]
+ return [:CHARACTER, setup_token(Lrama::Lexer::Token::Char.new(s_value: @scanner.matched))]
when @scanner.scan(/"/)
return [:STRING, %Q("#{@scanner.scan_until(/"/)})]
when @scanner.scan(/\d+/)
return [:INTEGER, Integer(@scanner.matched)]
when @scanner.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)/)
- token = build_token(type: Token::Ident, s_value: @scanner.matched)
+ token = setup_token(Lrama::Lexer::Token::Ident.new(s_value: @scanner.matched))
type =
if @scanner.check(/\s*(\[\s*[a-zA-Z_.][-a-zA-Z0-9_.]*\s*\])?\s*:/)
:IDENT_COLON
@@ -100,7 +103,7 @@ module Lrama
end
return [type, token]
else
- raise
+ raise ParseError, "Unexpected token: #{@scanner.peek(10).chomp}."
end
end
@@ -115,13 +118,13 @@ module Lrama
when @scanner.scan(/}/)
if nested == 0 && @end_symbol == '}'
@scanner.unscan
- return [:C_DECLARATION, build_token(type: Token::User_code, s_value: code, references: [])]
+ return [:C_DECLARATION, setup_token(Lrama::Lexer::Token::UserCode.new(s_value: code))]
else
code += @scanner.matched
nested -= 1
end
when @scanner.check(/#{@end_symbol}/)
- return [:C_DECLARATION, build_token(type: Token::User_code, s_value: code, references: [])]
+ return [:C_DECLARATION, setup_token(Lrama::Lexer::Token::UserCode.new(s_value: code))]
when @scanner.scan(/\n/)
code += @scanner.matched
newline
@@ -136,7 +139,7 @@ module Lrama
code += @scanner.getch
end
end
- raise
+ raise ParseError, "Unexpected code: #{code}."
end
private
@@ -155,13 +158,9 @@ module Lrama
end
end
- def build_token(type:, s_value:, **options)
- token = Token.new(type: type, s_value: s_value)
+ def setup_token(token)
token.line = @head_line
token.column = @head_column
- options.each do |attr, value|
- token.public_send("#{attr}=", value)
- end
token
end
diff --git a/tool/lrama/lib/lrama/lexer/token.rb b/tool/lrama/lib/lrama/lexer/token.rb
index a6180746cc..0951a92547 100644
--- a/tool/lrama/lib/lrama/lexer/token.rb
+++ b/tool/lrama/lib/lrama/lexer/token.rb
@@ -1,84 +1,26 @@
-require 'lrama/lexer/token/type'
-
module Lrama
class Lexer
- class Token
+ class Token < Struct.new(:s_value, :alias_name, keyword_init: true)
attr_accessor :line, :column, :referred
- # For User_code
- attr_accessor :references
def to_s
"#{super} line: #{line}, column: #{column}"
end
def referred_by?(string)
- [self.s_value, self.alias].include?(string)
+ [self.s_value, self.alias_name].include?(string)
end
def ==(other)
- self.class == other.class && self.type == other.type && self.s_value == other.s_value
+ self.class == other.class && self.s_value == other.s_value
end
-
- def numberize_references(lhs, rhs)
- self.references.map! {|ref|
- ref_name = ref[1]
- if ref_name.is_a?(::String) && ref_name != '$'
- value =
- if lhs.referred_by?(ref_name)
- '$'
- else
- index = rhs.find_index {|token| token.referred_by?(ref_name) }
-
- if index
- index + 1
- else
- raise "'#{ref_name}' is invalid name."
- end
- end
- [ref[0], value, ref[2], ref[3], ref[4]]
- else
- ref
- end
- }
- end
-
- @i = 0
- @types = []
-
- def self.define_type(name)
- type = Type.new(id: @i, name: name.to_s)
- const_set(name, type)
- @types << type
- @i += 1
- end
-
- # Token types
- define_type(:P_expect) # %expect
- define_type(:P_define) # %define
- define_type(:P_printer) # %printer
- define_type(:P_error_token) # %error-token
- define_type(:P_lex_param) # %lex-param
- define_type(:P_parse_param) # %parse-param
- define_type(:P_initial_action) # %initial-action
- define_type(:P_union) # %union
- define_type(:P_token) # %token
- define_type(:P_type) # %type
- define_type(:P_nonassoc) # %nonassoc
- define_type(:P_left) # %left
- define_type(:P_right) # %right
- define_type(:P_precedence) # %precedence
- define_type(:P_prec) # %prec
- define_type(:User_code) # { ... }
- define_type(:Tag) # <int>
- define_type(:Number) # 0
- define_type(:Ident_Colon) # k_if:, k_if : (spaces can be there)
- define_type(:Ident) # api.pure, tNUMBER
- define_type(:Named_Ref) # [foo]
- define_type(:Semicolon) # ;
- define_type(:Bar) # |
- define_type(:String) # "str"
- define_type(:Char) # '+'
end
end
end
+
+require 'lrama/lexer/token/char'
+require 'lrama/lexer/token/ident'
+require 'lrama/lexer/token/parameterizing'
+require 'lrama/lexer/token/tag'
+require 'lrama/lexer/token/user_code'
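
With the type constants gone, token kinds are plain subclasses of a Struct-backed Token. A minimal sketch of the resulting API, with illustrative values:

    ident = Lrama::Lexer::Token::Ident.new(s_value: "tNUMBER")
    tag   = Lrama::Lexer::Token::Tag.new(s_value: "<i>")

    ident.is_a?(Lrama::Lexer::Token)                             # => true; is_a? checks replace token.type comparisons
    ident == Lrama::Lexer::Token::Ident.new(s_value: "tNUMBER")  # => true; same class and same s_value
    tag.referred_by?("<i>")                                      # => true; alias_name is also matched
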
diff --git a/tool/lrama/lib/lrama/lexer/token/char.rb b/tool/lrama/lib/lrama/lexer/token/char.rb
new file mode 100644
index 0000000000..ec3560ca09
--- /dev/null
+++ b/tool/lrama/lib/lrama/lexer/token/char.rb
@@ -0,0 +1,8 @@
+module Lrama
+ class Lexer
+ class Token
+ class Char < Token
+ end
+ end
+ end
+end
diff --git a/tool/lrama/lib/lrama/lexer/token/ident.rb b/tool/lrama/lib/lrama/lexer/token/ident.rb
new file mode 100644
index 0000000000..e576eaeccd
--- /dev/null
+++ b/tool/lrama/lib/lrama/lexer/token/ident.rb
@@ -0,0 +1,8 @@
+module Lrama
+ class Lexer
+ class Token
+ class Ident < Token
+ end
+ end
+ end
+end
diff --git a/tool/lrama/lib/lrama/lexer/token/parameterizing.rb b/tool/lrama/lib/lrama/lexer/token/parameterizing.rb
new file mode 100644
index 0000000000..b5ce6fbde3
--- /dev/null
+++ b/tool/lrama/lib/lrama/lexer/token/parameterizing.rb
@@ -0,0 +1,19 @@
+module Lrama
+ class Lexer
+ class Token
+ class Parameterizing < Token
+ def option?
+ self.s_value == "?"
+ end
+
+ def nonempty_list?
+ self.s_value == "+"
+ end
+
+ def list?
+ self.s_value == "*"
+ end
+ end
+ end
+ end
+end
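
A quick illustration of the new predicates; the suffix character itself is stored as the token's s_value:

    Lrama::Lexer::Token::Parameterizing.new(s_value: "?").option?         # => true
    Lrama::Lexer::Token::Parameterizing.new(s_value: "+").nonempty_list?  # => true
    Lrama::Lexer::Token::Parameterizing.new(s_value: "*").list?           # => true
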
diff --git a/tool/lrama/lib/lrama/lexer/token/tag.rb b/tool/lrama/lib/lrama/lexer/token/tag.rb
new file mode 100644
index 0000000000..de1e7b3518
--- /dev/null
+++ b/tool/lrama/lib/lrama/lexer/token/tag.rb
@@ -0,0 +1,8 @@
+module Lrama
+ class Lexer
+ class Token
+ class Tag < Token
+ end
+ end
+ end
+end
diff --git a/tool/lrama/lib/lrama/lexer/token/type.rb b/tool/lrama/lib/lrama/lexer/token/type.rb
deleted file mode 100644
index d58a56f40b..0000000000
--- a/tool/lrama/lib/lrama/lexer/token/type.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-module Lrama
- class Lexer
- class Token < Struct.new(:type, :s_value, :alias, keyword_init: true)
- class Type < Struct.new(:id, :name, keyword_init: true)
- end
- end
- end
-end
diff --git a/tool/lrama/lib/lrama/lexer/token/user_code.rb b/tool/lrama/lib/lrama/lexer/token/user_code.rb
new file mode 100644
index 0000000000..abd51c752f
--- /dev/null
+++ b/tool/lrama/lib/lrama/lexer/token/user_code.rb
@@ -0,0 +1,14 @@
+module Lrama
+ class Lexer
+ class Token
+ class UserCode < Token
+ attr_accessor :references
+
+ def initialize(s_value: nil, alias_name: nil)
+ super
+ self.references = []
+ end
+ end
+ end
+ end
+end
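
A small sketch of the new UserCode token; the action body here is hypothetical, and references starts empty until Grammar#extract_references fills it:

    action = Lrama::Lexer::Token::UserCode.new(s_value: " $$ = $1; ")
    action.references   # => [] immediately after construction
    # Grammar#extract_references later scans s_value and appends a
    # Lrama::Grammar::Reference for each $n / @n it finds.
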
diff --git a/tool/lrama/lib/lrama/option_parser.rb b/tool/lrama/lib/lrama/option_parser.rb
index b0415cdf97..560b269b06 100644
--- a/tool/lrama/lib/lrama/option_parser.rb
+++ b/tool/lrama/lib/lrama/option_parser.rb
@@ -58,6 +58,7 @@ module Lrama
o.separator 'Tuning the Parser:'
o.on('-S', '--skeleton=FILE', 'specify the skeleton to use') {|v| @options.skeleton = v }
o.on('-t', 'reserved, do nothing') { }
+ o.on('--debug', 'display debugging outputs of internal parser') {|v| @options.debug = true }
o.separator ''
o.separator 'Output:'
o.on('-H', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v }
@@ -108,7 +109,7 @@ module Lrama
def validate_trace(trace)
list = %w[
none locations scan parse automaton bitsets
- closure grammar resource sets muscles tools
+ closure grammar rules resource sets muscles tools
m4-early m4 skeleton time ielr cex all
]
h = {}
diff --git a/tool/lrama/lib/lrama/options.rb b/tool/lrama/lib/lrama/options.rb
index 01b3e701d9..007661f632 100644
--- a/tool/lrama/lib/lrama/options.rb
+++ b/tool/lrama/lib/lrama/options.rb
@@ -4,7 +4,8 @@ module Lrama
attr_accessor :skeleton, :header, :header_file,
:report_file, :outfile,
:error_recovery, :grammar_file,
- :report_file, :trace_opts, :report_opts, :y
+ :report_file, :trace_opts, :report_opts, :y,
+ :debug
def initialize
@skeleton = "bison/yacc.c"
diff --git a/tool/lrama/lib/lrama/output.rb b/tool/lrama/lib/lrama/output.rb
index 95747ce4ea..3c97ff4b16 100644
--- a/tool/lrama/lib/lrama/output.rb
+++ b/tool/lrama/lib/lrama/output.rb
@@ -349,6 +349,15 @@ module Lrama
end
end
+ # b4_percent_code_get
+ def percent_code(name)
+ @grammar.percent_codes.select do |percent_code|
+ percent_code.id.s_value == name
+ end.map do |percent_code|
+ percent_code.code.s_value
+ end.join
+ end
+
private
def eval_template(file, path)
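
A hedged sketch of the new template helper: it gathers every %code block registered under a given name and concatenates their bodies for b4_percent_code_get. output stands for an assumed Lrama::Output instance, and the grammar content is hypothetical:

    # Given a grammar containing, for example:
    #   %code requires { #include "node.h" }
    #   %code requires { #include "id.h" }
    output.percent_code("requires")
    # => the two code bodies joined in declaration order
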
diff --git a/tool/lrama/lib/lrama/parser.rb b/tool/lrama/lib/lrama/parser.rb
index 96d0bd4626..3afe372516 100644
--- a/tool/lrama/lib/lrama/parser.rb
+++ b/tool/lrama/lib/lrama/parser.rb
@@ -1,14 +1,13 @@
#
# DO NOT MODIFY!!!!
-# This file is automatically generated by Racc 1.7.1
-# from Racc grammar file "".
+# This file is automatically generated by Racc 1.7.3
+# from Racc grammar file "parser.y".
#
###### racc/parser.rb begin
unless $".find {|p| p.end_with?('/racc/parser.rb')}
$".push "#{__dir__}/racc/parser.rb"
self.class.module_eval(<<'...end racc/parser.rb/module_eval...', 'racc/parser.rb', 1)
-# frozen_string_literal: false
#--
# Copyright (c) 1999-2006 Minero Aoki
#
@@ -22,20 +21,9 @@ self.class.module_eval(<<'...end racc/parser.rb/module_eval...', 'racc/parser.rb
unless $".find {|p| p.end_with?('/racc/info.rb')}
$".push "#{__dir__}/racc/info.rb"
-#--
-#
-#
-#
-# Copyright (c) 1999-2006 Minero Aoki
-#
-# This program is free software.
-# You can distribute/modify this program under the same terms of ruby.
-# see the file "COPYING".
-#
-#++
module Racc
- VERSION = '1.7.1'
+ VERSION = '1.7.3'
Version = VERSION
Copyright = 'Copyright (c) 1999-2006 Minero Aoki'
end
@@ -84,10 +72,12 @@ end
# [-v, --verbose]
# verbose mode. create +filename+.output file, like yacc's y.output file.
# [-g, --debug]
-# add debug code to parser class. To display debuggin information,
+# add debug code to parser class. To display debugging information,
# use this '-g' option and set @yydebug true in parser class.
# [-E, --embedded]
# Output parser which doesn't need runtime files (racc/parser.rb).
+# [-F, --frozen]
+# Output parser which declares frozen_string_literals: true
# [-C, --check-only]
# Check syntax of racc grammar file and quit.
# [-S, --output-status]
@@ -566,7 +556,7 @@ module Racc
#
# If this method returns, parsers enter "error recovering mode".
def on_error(t, val, vstack)
- raise ParseError, sprintf("\nparse error on value %s (%s)",
+ raise ParseError, sprintf("parse error on value %s (%s)",
val.inspect, token_to_str(t) || '?')
end
@@ -668,13 +658,14 @@ end
module Lrama
class Parser < Racc::Parser
-module_eval(<<'...end parser.y/module_eval...', 'parser.y', 388)
+module_eval(<<'...end parser.y/module_eval...', 'parser.y', 383)
include Lrama::Report::Duration
-def initialize(text, path)
+def initialize(text, path, debug = false)
@text = text
@path = path
+ @yydebug = debug
end
def parse
@@ -682,8 +673,8 @@ def parse
@lexer = Lrama::Lexer.new(@text)
@grammar = Lrama::Grammar.new
@precedence_number = 0
+ reset_precs
do_parse
- @grammar.extract_references
@grammar.prepare
@grammar.compute_nullable
@grammar.compute_first_set
@@ -697,253 +688,287 @@ def next_token
end
def on_error(error_token_id, error_value, value_stack)
- source = @text.split("\n")[error_value.line - 1]
+ if error_value.respond_to?(:line) && error_value.respond_to?(:column)
+ line = error_value.line
+ first_column = error_value.column
+ else
+ line = @lexer.line
+ first_column = @lexer.head_column
+ end
+
raise ParseError, <<~ERROR
- #{@path}:#{@lexer.line}:#{@lexer.column}: parse error on value #{error_value.inspect} (#{token_to_str(error_token_id) || '?'})
- #{source}
- #{' ' * @lexer.column}^
+ #{@path}:#{line}:#{first_column}: parse error on value #{error_value.inspect} (#{token_to_str(error_token_id) || '?'})
+ #{@text.split("\n")[line - 1]}
+ #{carrets(first_column)}
ERROR
end
+
+private
+
+def reset_precs
+ @prec_seen = false
+ @code_after_prec = false
+end
+
+def begin_c_declaration(end_symbol)
+ @lexer.status = :c_declaration
+ @lexer.end_symbol = end_symbol
+end
+
+def end_c_declaration
+ @lexer.status = :initial
+ @lexer.end_symbol = nil
+end
+
+def carrets(first_column)
+ ' ' * (first_column + 1) + '^' * (@lexer.column - first_column)
+end
...end parser.y/module_eval...
##### State transition tables begin ###
racc_action_table = [
- 82, 132, 83, 42, 42, 41, 41, 65, 65, 42,
- 42, 41, 41, 131, 56, 68, 3, 8, 38, 134,
- 134, 42, 42, 41, 41, 65, 61, 68, 38, 6,
- 32, 7, 84, 77, 135, 135, 20, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 20, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 9, 39, 44, 14,
- 12, 13, 15, 16, 17, 18, 46, 46, 19, 20,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 42,
- 42, 41, 41, 46, 68, 68, 42, 42, 41, 41,
- 65, 153, 42, 42, 41, 41, 65, 153, 42, 42,
- 41, 41, 65, 153, 42, 42, 41, 41, 65, 153,
- 42, 42, 41, 41, 65, 153, 42, 42, 41, 41,
- 65, 153, 42, 42, 41, 41, 65, 65, 42, 42,
- 41, 41, 65, 65, 42, 42, 41, 41, 65, 65,
- 42, 42, 41, 41, 42, 42, 41, 41, 42, 42,
- 41, 41, 42, 42, 41, 41, 49, 50, 51, 52,
- 53, 74, 78, 80, 85, 85, 85, 92, 96, 97,
- 105, 106, 108, 109, 110, 111, 112, 113, 116, 118,
- 119, 122, 123, 124, 138, 139, 140, 141, 142, 143,
- 122, 80, 148, 149, 156, 160, 161, 80, 80 ]
+ 84, 137, 85, 3, 6, 43, 7, 42, 39, 67,
+ 43, 8, 42, 136, 67, 43, 43, 42, 42, 33,
+ 58, 142, 43, 43, 42, 42, 142, 21, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 86, 139, 140,
+ 141, 143, 39, 139, 140, 141, 143, 79, 43, 43,
+ 42, 42, 67, 63, 70, 43, 43, 42, 42, 40,
+ 70, 21, 23, 24, 25, 26, 27, 28, 29, 30,
+ 31, 9, 45, 47, 14, 12, 13, 15, 16, 17,
+ 18, 47, 47, 19, 20, 21, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 43, 43, 42, 42, 50,
+ 70, 70, 43, 43, 42, 42, 67, 161, 43, 43,
+ 42, 42, 67, 161, 43, 43, 42, 42, 67, 161,
+ 43, 43, 42, 42, 67, 161, 43, 43, 42, 42,
+ 67, 161, 43, 43, 42, 42, 67, 161, 43, 43,
+ 42, 42, 67, 67, 43, 43, 42, 42, 67, 67,
+ 43, 43, 42, 42, 67, 67, 43, 43, 42, 42,
+ 43, 43, 42, 42, 51, 52, 53, 54, 55, 76,
+ 80, 82, 87, 87, 87, 89, 95, 99, 100, 108,
+ 109, 111, 113, 114, 115, 116, 117, 120, 122, 123,
+ 126, 127, 128, 130, 145, 147, 148, 149, 150, 151,
+ 126, 82, 156, 157, 164, 167, 82 ]
racc_action_check = [
- 40, 120, 40, 121, 144, 121, 144, 121, 144, 25,
- 27, 25, 27, 120, 25, 27, 1, 3, 9, 121,
- 144, 26, 28, 26, 28, 26, 26, 28, 33, 2,
- 7, 2, 40, 33, 121, 144, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9, 33, 33, 33, 33,
- 33, 33, 33, 33, 33, 33, 4, 12, 14, 4,
- 4, 4, 4, 4, 4, 4, 15, 16, 4, 4,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 29,
- 30, 29, 30, 17, 29, 30, 141, 13, 141, 13,
- 141, 141, 142, 55, 142, 55, 142, 142, 143, 56,
- 143, 56, 143, 143, 150, 66, 150, 66, 150, 150,
- 154, 67, 154, 67, 154, 154, 155, 68, 155, 68,
- 155, 155, 60, 61, 60, 61, 60, 61, 97, 99,
- 97, 99, 97, 99, 117, 135, 117, 135, 117, 135,
- 71, 72, 71, 72, 73, 92, 73, 92, 94, 100,
- 94, 100, 102, 114, 102, 114, 18, 20, 22, 23,
- 24, 31, 36, 37, 45, 47, 48, 54, 58, 59,
- 79, 80, 86, 87, 88, 89, 90, 91, 95, 103,
- 104, 105, 106, 107, 125, 126, 127, 128, 129, 130,
- 131, 133, 136, 137, 146, 157, 159, 160, 161 ]
+ 41, 124, 41, 1, 2, 125, 2, 125, 9, 125,
+ 152, 3, 152, 124, 152, 26, 13, 26, 13, 7,
+ 26, 125, 57, 58, 57, 58, 152, 9, 9, 9,
+ 9, 9, 9, 9, 9, 9, 9, 41, 125, 125,
+ 125, 125, 34, 152, 152, 152, 152, 34, 27, 28,
+ 27, 28, 27, 27, 28, 29, 68, 29, 68, 12,
+ 29, 34, 34, 34, 34, 34, 34, 34, 34, 34,
+ 34, 4, 14, 15, 4, 4, 4, 4, 4, 4,
+ 4, 16, 17, 4, 4, 4, 4, 4, 4, 4,
+ 4, 4, 4, 4, 4, 30, 31, 30, 31, 18,
+ 30, 31, 149, 69, 149, 69, 149, 149, 150, 70,
+ 150, 70, 150, 150, 151, 73, 151, 73, 151, 151,
+ 158, 74, 158, 74, 158, 158, 162, 75, 162, 75,
+ 162, 162, 163, 95, 163, 95, 163, 163, 62, 63,
+ 62, 63, 62, 63, 100, 102, 100, 102, 100, 102,
+ 121, 143, 121, 143, 121, 143, 97, 103, 97, 103,
+ 105, 118, 105, 118, 19, 21, 23, 24, 25, 32,
+ 37, 38, 46, 48, 49, 50, 56, 60, 61, 81,
+ 82, 88, 90, 91, 92, 93, 94, 98, 106, 107,
+ 108, 109, 110, 112, 129, 131, 132, 133, 134, 135,
+ 136, 138, 144, 146, 154, 166, 167 ]
racc_action_pointer = [
- nil, 16, 19, 17, 47, nil, nil, 23, nil, 14,
- nil, nil, 51, 84, 51, 47, 48, 64, 137, nil,
- 138, nil, 139, 140, 141, 6, 18, 7, 19, 76,
- 77, 159, nil, 24, nil, nil, 141, 128, nil, nil,
- -5, nil, nil, nil, nil, 145, nil, 146, 147, nil,
- nil, nil, nil, nil, 159, 90, 96, nil, 162, 161,
- 119, 120, nil, nil, nil, nil, 102, 108, 114, nil,
- nil, 137, 138, 141, nil, nil, nil, nil, nil, 138,
- 166, nil, nil, nil, nil, nil, 170, 171, 172, 173,
- 174, 175, 142, nil, 145, 171, nil, 125, nil, 126,
- 146, nil, 149, 168, 178, 162, 146, 181, nil, nil,
- nil, nil, nil, nil, 150, nil, nil, 131, nil, nil,
- -20, 0, nil, nil, nil, 164, 165, 166, 167, 168,
- 169, 171, nil, 156, nil, 132, 190, 173, nil, nil,
- nil, 83, 89, 95, 1, nil, 192, nil, nil, nil,
- 101, nil, nil, nil, 107, 113, nil, 175, nil, 176,
- 162, 163, nil, nil ]
+ nil, 3, -6, 11, 62, nil, nil, 12, nil, 4,
+ nil, nil, 53, 13, 65, 54, 62, 63, 94, 145,
+ nil, 146, nil, 147, 148, 149, 12, 45, 46, 52,
+ 92, 93, 167, nil, 38, nil, nil, 148, 131, nil,
+ nil, -5, nil, nil, nil, nil, 153, nil, 154, 155,
+ 156, nil, nil, nil, nil, nil, 168, 19, 20, nil,
+ 171, 170, 135, 136, nil, nil, nil, nil, 53, 100,
+ 106, nil, nil, 112, 118, 124, nil, nil, nil, nil,
+ nil, 146, 175, nil, nil, nil, nil, nil, 179, nil,
+ 180, 181, 182, 183, 184, 130, nil, 153, 180, nil,
+ 141, nil, 142, 154, nil, 157, 177, 187, 155, 150,
+ 190, nil, 191, nil, nil, nil, nil, nil, 158, nil,
+ nil, 147, nil, nil, -21, 2, nil, nil, nil, 174,
+ nil, 175, 176, 177, 178, 179, 165, nil, 161, nil,
+ nil, nil, nil, 148, 182, nil, 183, nil, nil, 99,
+ 105, 111, 7, nil, 202, nil, nil, nil, 117, nil,
+ nil, nil, 123, 129, nil, nil, 185, 166, nil ]
racc_action_default = [
- -2, -104, -8, -104, -104, -3, -4, -104, 164, -104,
- -9, -10, -104, -104, -104, -104, -104, -104, -104, -20,
- -104, -24, -104, -104, -104, -104, -104, -104, -104, -104,
- -104, -104, -7, -91, -71, -73, -104, -88, -90, -11,
- -95, -69, -70, -94, -13, -14, -60, -15, -16, -17,
- -21, -25, -28, -31, -34, -40, -104, -43, -46, -35,
- -50, -104, -53, -55, -56, -103, -36, -63, -104, -66,
- -68, -37, -38, -39, -5, -1, -72, -92, -74, -104,
- -104, -12, -96, -97, -98, -57, -104, -104, -104, -104,
- -104, -104, -104, -44, -41, -48, -47, -104, -54, -51,
- -65, -67, -64, -104, -104, -79, -104, -104, -61, -18,
- -22, -26, -29, -32, -42, -45, -49, -52, -6, -93,
- -75, -76, -84, -89, -58, -104, -104, -104, -104, -104,
- -104, -79, -78, -88, -81, -104, -104, -104, -62, -19,
- -23, -104, -104, -104, -77, -80, -104, -87, -85, -59,
- -27, -99, -101, -102, -30, -33, -82, -104, -100, -104,
- -88, -88, -86, -83 ]
+ -2, -108, -8, -108, -108, -3, -4, -108, 169, -108,
+ -9, -10, -108, -108, -108, -108, -108, -108, -108, -108,
+ -23, -108, -27, -108, -108, -108, -108, -108, -108, -108,
+ -108, -108, -108, -7, -95, -74, -76, -108, -92, -94,
+ -11, -99, -72, -73, -98, -13, -14, -63, -15, -16,
+ -108, -20, -24, -28, -31, -34, -37, -43, -108, -46,
+ -49, -38, -53, -108, -56, -58, -59, -107, -39, -66,
+ -108, -69, -71, -40, -41, -42, -5, -1, -75, -96,
+ -77, -108, -108, -12, -100, -101, -102, -60, -108, -17,
+ -108, -108, -108, -108, -108, -108, -47, -44, -51, -50,
+ -108, -57, -54, -68, -70, -67, -108, -108, -82, -108,
+ -108, -64, -108, -21, -25, -29, -32, -35, -45, -48,
+ -52, -55, -6, -97, -78, -79, -83, -93, -61, -108,
+ -18, -108, -108, -108, -108, -108, -82, -81, -92, -85,
+ -86, -87, -88, -108, -108, -65, -108, -22, -26, -108,
+ -108, -108, -80, -84, -108, -91, -62, -19, -30, -103,
+ -105, -106, -33, -36, -89, -104, -108, -92, -90 ]
racc_goto_table = [
- 79, 43, 62, 60, 55, 121, 93, 101, 34, 66,
- 71, 72, 73, 58, 1, 70, 70, 70, 70, 100,
- 2, 102, 4, 158, 100, 100, 100, 158, 158, 33,
- 75, 144, 76, 5, 31, 94, 98, 62, 99, 103,
- 101, 10, 101, 58, 58, 93, 45, 47, 48, 150,
- 154, 155, 11, 40, 70, 70, 70, 81, 87, 70,
- 70, 70, 126, 88, 127, 93, 89, 128, 90, 129,
- 91, 114, 130, 62, 117, 98, 54, 59, 95, 115,
- 58, 107, 58, 137, 86, 125, 120, 146, 70, 159,
- 70, 136, 157, 98, 104, nil, 145, 133, nil, nil,
- nil, nil, 58, nil, nil, nil, nil, nil, nil, nil,
- nil, 147, nil, nil, nil, nil, nil, nil, nil, nil,
- 133, nil, nil, 162, 163 ]
+ 81, 64, 44, 57, 62, 96, 125, 104, 35, 165,
+ 46, 48, 49, 165, 165, 60, 1, 72, 72, 72,
+ 72, 103, 2, 105, 4, 34, 103, 103, 103, 68,
+ 73, 74, 75, 78, 152, 97, 101, 64, 77, 5,
+ 102, 104, 32, 104, 106, 96, 60, 60, 158, 162,
+ 163, 10, 11, 41, 83, 112, 146, 72, 72, 72,
+ 90, 131, 72, 72, 72, 91, 96, 132, 92, 133,
+ 93, 134, 118, 94, 64, 135, 101, 121, 56, 61,
+ 98, 119, 110, 144, 60, 88, 60, 129, 124, 154,
+ 166, 107, 72, nil, 72, 101, nil, nil, nil, 138,
+ 153, nil, nil, nil, nil, nil, nil, 60, nil, nil,
+ nil, nil, nil, nil, nil, nil, nil, 155, nil, nil,
+ nil, nil, nil, nil, nil, nil, 138, nil, nil, 168 ]
racc_goto_check = [
- 46, 31, 35, 34, 29, 48, 30, 42, 43, 28,
- 28, 28, 28, 31, 1, 31, 31, 31, 31, 41,
- 2, 41, 3, 54, 41, 41, 41, 54, 54, 4,
- 5, 48, 43, 6, 7, 29, 35, 35, 34, 8,
- 42, 9, 42, 31, 31, 30, 13, 13, 13, 17,
- 17, 17, 10, 11, 31, 31, 31, 12, 14, 31,
- 31, 31, 15, 18, 19, 30, 20, 21, 22, 23,
- 24, 29, 25, 35, 34, 35, 26, 27, 32, 33,
- 31, 37, 31, 38, 39, 40, 47, 49, 31, 50,
- 31, 51, 52, 35, 53, nil, 46, 35, nil, nil,
- nil, nil, 31, nil, nil, nil, nil, nil, nil, nil,
- nil, 35, nil, nil, nil, nil, nil, nil, nil, nil,
- 35, nil, nil, 46, 46 ]
+ 48, 37, 33, 31, 36, 32, 50, 44, 45, 54,
+ 13, 13, 13, 54, 54, 33, 1, 33, 33, 33,
+ 33, 43, 2, 43, 3, 4, 43, 43, 43, 30,
+ 30, 30, 30, 45, 50, 31, 37, 37, 5, 6,
+ 36, 44, 7, 44, 8, 32, 33, 33, 19, 19,
+ 19, 9, 10, 11, 12, 14, 15, 33, 33, 33,
+ 16, 17, 33, 33, 33, 20, 32, 21, 22, 23,
+ 24, 25, 31, 26, 37, 27, 37, 36, 28, 29,
+ 34, 35, 39, 40, 33, 41, 33, 42, 49, 51,
+ 52, 53, 33, nil, 33, 37, nil, nil, nil, 37,
+ 48, nil, nil, nil, nil, nil, nil, 33, nil, nil,
+ nil, nil, nil, nil, nil, nil, nil, 37, nil, nil,
+ nil, nil, nil, nil, nil, nil, 37, nil, nil, 48 ]
racc_goto_pointer = [
- nil, 14, 20, 20, 20, -3, 31, 28, -35, 37,
- 48, 40, 17, 31, 9, -47, nil, -92, 13, -46,
- 15, -44, 16, -43, 17, -41, 51, 51, -18, -21,
- -49, -12, 20, -16, -23, -24, nil, -4, -41, 38,
- -23, -47, -60, -1, nil, nil, -37, -19, -100, -47,
- -67, -31, -56, 17, -127 ]
+ nil, 16, 22, 22, 16, 4, 37, 36, -32, 47,
+ 48, 40, 13, -5, -34, -74, 9, -52, nil, -101,
+ 13, -47, 15, -46, 16, -45, 18, -42, 52, 52,
+ 1, -23, -52, -11, 20, -17, -23, -26, nil, -5,
+ -45, 38, -24, -47, -62, -1, nil, nil, -38, -20,
+ -102, -53, -74, 12, -149 ]
racc_goto_default = [
nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
- 36, nil, nil, nil, nil, nil, 21, nil, nil, nil,
+ 37, nil, nil, nil, nil, nil, nil, nil, 22, nil,
nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
- 57, 63, nil, nil, nil, 152, 64, nil, nil, nil,
- nil, 67, 69, nil, 35, 37, nil, nil, nil, nil,
- nil, nil, nil, nil, 151 ]
+ nil, nil, 59, 65, nil, nil, nil, 160, 66, nil,
+ nil, nil, nil, 69, 71, nil, 36, 38, nil, nil,
+ nil, nil, nil, nil, 159 ]
racc_reduce_table = [
0, 0, :racc_error,
- 5, 39, :_reduce_none,
- 0, 40, :_reduce_none,
- 2, 40, :_reduce_none,
- 0, 45, :_reduce_4,
- 0, 46, :_reduce_5,
- 5, 44, :_reduce_6,
- 2, 44, :_reduce_none,
- 0, 41, :_reduce_8,
- 2, 41, :_reduce_none,
- 1, 47, :_reduce_none,
- 2, 47, :_reduce_11,
- 3, 47, :_reduce_none,
- 2, 47, :_reduce_none,
- 2, 47, :_reduce_none,
- 2, 47, :_reduce_15,
- 2, 47, :_reduce_16,
- 0, 52, :_reduce_17,
- 0, 53, :_reduce_18,
- 6, 47, :_reduce_19,
- 1, 47, :_reduce_none,
- 0, 56, :_reduce_21,
- 0, 57, :_reduce_22,
- 6, 48, :_reduce_23,
- 1, 48, :_reduce_none,
- 0, 58, :_reduce_25,
- 0, 59, :_reduce_26,
- 7, 48, :_reduce_none,
- 0, 60, :_reduce_28,
- 0, 61, :_reduce_29,
- 7, 48, :_reduce_30,
- 0, 62, :_reduce_31,
- 0, 63, :_reduce_32,
- 7, 48, :_reduce_33,
- 2, 54, :_reduce_none,
- 2, 54, :_reduce_35,
- 2, 54, :_reduce_36,
- 2, 54, :_reduce_37,
- 2, 54, :_reduce_38,
- 2, 54, :_reduce_39,
- 1, 64, :_reduce_40,
- 2, 64, :_reduce_41,
- 3, 64, :_reduce_42,
- 1, 67, :_reduce_43,
- 2, 67, :_reduce_44,
- 3, 68, :_reduce_45,
- 0, 70, :_reduce_none,
- 1, 70, :_reduce_none,
- 0, 71, :_reduce_none,
- 1, 71, :_reduce_none,
- 1, 65, :_reduce_50,
- 2, 65, :_reduce_51,
- 3, 65, :_reduce_52,
+ 5, 44, :_reduce_none,
+ 0, 45, :_reduce_none,
+ 2, 45, :_reduce_none,
+ 0, 50, :_reduce_4,
+ 0, 51, :_reduce_5,
+ 5, 49, :_reduce_6,
+ 2, 49, :_reduce_none,
+ 0, 46, :_reduce_8,
+ 2, 46, :_reduce_none,
+ 1, 52, :_reduce_none,
+ 2, 52, :_reduce_11,
+ 3, 52, :_reduce_none,
+ 2, 52, :_reduce_none,
+ 2, 52, :_reduce_none,
+ 2, 52, :_reduce_15,
+ 2, 52, :_reduce_16,
+ 0, 57, :_reduce_17,
+ 0, 58, :_reduce_18,
+ 7, 52, :_reduce_19,
+ 0, 59, :_reduce_20,
+ 0, 60, :_reduce_21,
+ 6, 52, :_reduce_22,
+ 1, 52, :_reduce_none,
+ 0, 63, :_reduce_24,
+ 0, 64, :_reduce_25,
+ 6, 53, :_reduce_26,
+ 1, 53, :_reduce_none,
+ 0, 65, :_reduce_28,
+ 0, 66, :_reduce_29,
+ 7, 53, :_reduce_none,
+ 0, 67, :_reduce_31,
+ 0, 68, :_reduce_32,
+ 7, 53, :_reduce_33,
+ 0, 69, :_reduce_34,
+ 0, 70, :_reduce_35,
+ 7, 53, :_reduce_36,
+ 2, 61, :_reduce_none,
+ 2, 61, :_reduce_38,
+ 2, 61, :_reduce_39,
+ 2, 61, :_reduce_40,
+ 2, 61, :_reduce_41,
+ 2, 61, :_reduce_42,
+ 1, 71, :_reduce_43,
+ 2, 71, :_reduce_44,
+ 3, 71, :_reduce_45,
+ 1, 74, :_reduce_46,
+ 2, 74, :_reduce_47,
+ 3, 75, :_reduce_48,
+ 0, 77, :_reduce_none,
+ 1, 77, :_reduce_none,
+ 0, 78, :_reduce_none,
+ 1, 78, :_reduce_none,
1, 72, :_reduce_53,
2, 72, :_reduce_54,
- 1, 73, :_reduce_none,
- 1, 73, :_reduce_none,
- 0, 75, :_reduce_57,
- 0, 76, :_reduce_58,
- 6, 51, :_reduce_59,
- 0, 77, :_reduce_60,
- 0, 78, :_reduce_61,
- 5, 51, :_reduce_62,
- 1, 66, :_reduce_63,
- 2, 66, :_reduce_64,
- 2, 66, :_reduce_65,
- 1, 79, :_reduce_66,
- 2, 79, :_reduce_67,
+ 3, 72, :_reduce_55,
+ 1, 79, :_reduce_56,
+ 2, 79, :_reduce_57,
1, 80, :_reduce_none,
- 1, 69, :_reduce_69,
- 1, 69, :_reduce_70,
- 1, 42, :_reduce_none,
- 2, 42, :_reduce_none,
- 1, 81, :_reduce_none,
- 2, 81, :_reduce_none,
- 4, 82, :_reduce_75,
- 1, 85, :_reduce_76,
- 3, 85, :_reduce_77,
- 2, 85, :_reduce_none,
- 0, 86, :_reduce_79,
- 3, 86, :_reduce_80,
- 0, 87, :_reduce_81,
- 0, 88, :_reduce_82,
- 7, 86, :_reduce_83,
- 0, 89, :_reduce_84,
- 0, 90, :_reduce_85,
- 6, 86, :_reduce_86,
- 3, 86, :_reduce_87,
- 0, 84, :_reduce_none,
- 3, 84, :_reduce_89,
- 1, 83, :_reduce_none,
- 0, 43, :_reduce_none,
- 0, 91, :_reduce_92,
- 3, 43, :_reduce_93,
- 1, 49, :_reduce_none,
- 0, 50, :_reduce_none,
- 1, 50, :_reduce_none,
- 1, 50, :_reduce_none,
- 1, 50, :_reduce_none,
- 1, 55, :_reduce_99,
- 2, 55, :_reduce_100,
- 1, 92, :_reduce_none,
- 1, 92, :_reduce_none,
- 1, 74, :_reduce_103 ]
-
-racc_reduce_n = 104
-
-racc_shift_n = 164
+ 1, 80, :_reduce_none,
+ 0, 82, :_reduce_60,
+ 0, 83, :_reduce_61,
+ 6, 56, :_reduce_62,
+ 0, 84, :_reduce_63,
+ 0, 85, :_reduce_64,
+ 5, 56, :_reduce_65,
+ 1, 73, :_reduce_66,
+ 2, 73, :_reduce_67,
+ 2, 73, :_reduce_68,
+ 1, 86, :_reduce_69,
+ 2, 86, :_reduce_70,
+ 1, 87, :_reduce_none,
+ 1, 76, :_reduce_72,
+ 1, 76, :_reduce_73,
+ 1, 47, :_reduce_none,
+ 2, 47, :_reduce_none,
+ 1, 88, :_reduce_none,
+ 2, 88, :_reduce_none,
+ 4, 89, :_reduce_78,
+ 1, 92, :_reduce_79,
+ 3, 92, :_reduce_80,
+ 2, 92, :_reduce_none,
+ 0, 93, :_reduce_82,
+ 1, 93, :_reduce_83,
+ 3, 93, :_reduce_84,
+ 2, 93, :_reduce_85,
+ 2, 93, :_reduce_86,
+ 2, 93, :_reduce_87,
+ 0, 94, :_reduce_88,
+ 0, 95, :_reduce_89,
+ 7, 93, :_reduce_90,
+ 3, 93, :_reduce_91,
+ 0, 91, :_reduce_none,
+ 3, 91, :_reduce_93,
+ 1, 90, :_reduce_none,
+ 0, 48, :_reduce_none,
+ 0, 96, :_reduce_96,
+ 3, 48, :_reduce_97,
+ 1, 54, :_reduce_none,
+ 0, 55, :_reduce_none,
+ 1, 55, :_reduce_none,
+ 1, 55, :_reduce_none,
+ 1, 55, :_reduce_none,
+ 1, 62, :_reduce_103,
+ 2, 62, :_reduce_104,
+ 1, 97, :_reduce_none,
+ 1, 97, :_reduce_none,
+ 1, 81, :_reduce_107 ]
+
+racc_reduce_n = 108
+
+racc_shift_n = 169
racc_token_table = {
false => 0,
@@ -964,28 +989,33 @@ racc_token_table = {
"%param" => 15,
"%lex-param" => 16,
"%parse-param" => 17,
- "%initial-action" => 18,
+ "%code" => 18,
"{" => 19,
"}" => 20,
- ";" => 21,
- "%union" => 22,
- "%destructor" => 23,
- "%printer" => 24,
- "%error-token" => 25,
- "%token" => 26,
- "%type" => 27,
- "%left" => 28,
- "%right" => 29,
- "%precedence" => 30,
- "%nonassoc" => 31,
- ":" => 32,
- "|" => 33,
- "%prec" => 34,
- "[" => 35,
- "]" => 36,
- "{...}" => 37 }
-
-racc_nt_base = 38
+ "%initial-action" => 21,
+ ";" => 22,
+ "%union" => 23,
+ "%destructor" => 24,
+ "%printer" => 25,
+ "%error-token" => 26,
+ "%token" => 27,
+ "%type" => 28,
+ "%left" => 29,
+ "%right" => 30,
+ "%precedence" => 31,
+ "%nonassoc" => 32,
+ ":" => 33,
+ "|" => 34,
+ "%empty" => 35,
+ "?" => 36,
+ "+" => 37,
+ "*" => 38,
+ "%prec" => 39,
+ "[" => 40,
+ "]" => 41,
+ "{...}" => 42 }
+
+racc_nt_base = 43
racc_use_result_var = true
@@ -1025,9 +1055,10 @@ Racc_token_to_s_table = [
"\"%param\"",
"\"%lex-param\"",
"\"%parse-param\"",
- "\"%initial-action\"",
+ "\"%code\"",
"\"{\"",
"\"}\"",
+ "\"%initial-action\"",
"\";\"",
"\"%union\"",
"\"%destructor\"",
@@ -1041,6 +1072,10 @@ Racc_token_to_s_table = [
"\"%nonassoc\"",
"\":\"",
"\"|\"",
+ "\"%empty\"",
+ "\"?\"",
+ "\"+\"",
+ "\"*\"",
"\"%prec\"",
"\"[\"",
"\"]\"",
@@ -1061,16 +1096,18 @@ Racc_token_to_s_table = [
"params",
"@3",
"@4",
- "symbol_declaration",
- "generic_symlist",
"@5",
"@6",
+ "symbol_declaration",
+ "generic_symlist",
"@7",
"@8",
"@9",
"@10",
"@11",
"@12",
+ "@13",
+ "@14",
"token_declarations",
"symbol_declarations",
"token_declarations_for_precedence",
@@ -1082,10 +1119,10 @@ Racc_token_to_s_table = [
"symbol_declaration_list",
"symbol",
"string_as_id",
- "@13",
- "@14",
"@15",
"@16",
+ "@17",
+ "@18",
"token_declaration_list_for_precedence",
"token_declaration_for_precedence",
"rules_or_grammar_declaration",
@@ -1094,15 +1131,13 @@ Racc_token_to_s_table = [
"named_ref_opt",
"rhs_list",
"rhs",
- "@17",
- "@18",
"@19",
"@20",
"@21",
"generic_symlist_item" ]
Ractor.make_shareable(Racc_token_to_s_table) if defined?(Ractor)
-Racc_debug_parser = false
+Racc_debug_parser = true
##### State transition tables end #####
@@ -1114,26 +1149,24 @@ Racc_debug_parser = false
# reduce 3 omitted
-module_eval(<<'.,.,', 'parser.y', 10)
+module_eval(<<'.,.,', 'parser.y', 14)
def _reduce_4(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '%}'
+ begin_c_declaration("%}")
@grammar.prologue_first_lineno = @lexer.line
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 16)
+module_eval(<<'.,.,', 'parser.y', 19)
def _reduce_5(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 21)
+module_eval(<<'.,.,', 'parser.y', 23)
def _reduce_6(val, _values, result)
@grammar.prologue = val[2].s_value
@@ -1143,7 +1176,7 @@ module_eval(<<'.,.,', 'parser.y', 21)
# reduce 7 omitted
-module_eval(<<'.,.,', 'parser.y', 25)
+module_eval(<<'.,.,', 'parser.y', 27)
def _reduce_8(val, _values, result)
result = ""
result
@@ -1154,7 +1187,7 @@ module_eval(<<'.,.,', 'parser.y', 25)
# reduce 10 omitted
-module_eval(<<'.,.,', 'parser.y', 29)
+module_eval(<<'.,.,', 'parser.y', 31)
def _reduce_11(val, _values, result)
@grammar.expect = val[1]
result
@@ -1167,7 +1200,7 @@ module_eval(<<'.,.,', 'parser.y', 29)
# reduce 14 omitted
-module_eval(<<'.,.,', 'parser.y', 35)
+module_eval(<<'.,.,', 'parser.y', 37)
def _reduce_15(val, _values, result)
val[1].each {|token|
token.references = []
@@ -1178,7 +1211,7 @@ module_eval(<<'.,.,', 'parser.y', 35)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 42)
+module_eval(<<'.,.,', 'parser.y', 44)
def _reduce_16(val, _values, result)
val[1].each {|token|
token.references = []
@@ -1189,19 +1222,17 @@ module_eval(<<'.,.,', 'parser.y', 42)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 49)
+module_eval(<<'.,.,', 'parser.y', 51)
def _reduce_17(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 54)
+module_eval(<<'.,.,', 'parser.y', 55)
def _reduce_18(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ end_c_declaration
result
end
@@ -1209,46 +1240,49 @@ module_eval(<<'.,.,', 'parser.y', 54)
module_eval(<<'.,.,', 'parser.y', 59)
def _reduce_19(val, _values, result)
- @grammar.initial_action = @grammar.build_code(:initial_action, val[3])
+ @grammar.add_percent_code(id: val[1], code: val[4])
result
end
.,.,
-# reduce 20 omitted
+module_eval(<<'.,.,', 'parser.y', 63)
+ def _reduce_20(val, _values, result)
+ begin_c_declaration("}")
+
+ result
+ end
+.,.,
-module_eval(<<'.,.,', 'parser.y', 65)
+module_eval(<<'.,.,', 'parser.y', 67)
def _reduce_21(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 70)
+module_eval(<<'.,.,', 'parser.y', 71)
def _reduce_22(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ @grammar.initial_action = @grammar.build_code(:initial_action, val[3])
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 75)
- def _reduce_23(val, _values, result)
- @grammar.set_union(@grammar.build_code(:union, val[3]), val[3].line)
+# reduce 23 omitted
+
+module_eval(<<'.,.,', 'parser.y', 77)
+ def _reduce_24(val, _values, result)
+ begin_c_declaration("}")
result
end
.,.,
-# reduce 24 omitted
-
-module_eval(<<'.,.,', 'parser.y', 80)
+module_eval(<<'.,.,', 'parser.y', 81)
def _reduce_25(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+ end_c_declaration
result
end
@@ -1256,8 +1290,7 @@ module_eval(<<'.,.,', 'parser.y', 80)
module_eval(<<'.,.,', 'parser.y', 85)
def _reduce_26(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ @grammar.set_union(@grammar.build_code(:union, val[3]), val[3].line)
result
end
@@ -1265,62 +1298,76 @@ module_eval(<<'.,.,', 'parser.y', 85)
# reduce 27 omitted
-module_eval(<<'.,.,', 'parser.y', 91)
+module_eval(<<'.,.,', 'parser.y', 90)
def _reduce_28(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 96)
+module_eval(<<'.,.,', 'parser.y', 94)
def _reduce_29(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 101)
- def _reduce_30(val, _values, result)
- @grammar.add_printer(ident_or_tags: val[6], code: @grammar.build_code(:printer, val[3]), lineno: val[3].line)
+# reduce 30 omitted
+
+module_eval(<<'.,.,', 'parser.y', 99)
+ def _reduce_31(val, _values, result)
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 105)
- def _reduce_31(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+module_eval(<<'.,.,', 'parser.y', 103)
+ def _reduce_32(val, _values, result)
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 110)
- def _reduce_32(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+module_eval(<<'.,.,', 'parser.y', 107)
+ def _reduce_33(val, _values, result)
+ @grammar.add_printer(ident_or_tags: val[6], code: @grammar.build_code(:printer, val[3]), lineno: val[3].line)
+
+ result
+ end
+.,.,
+
+module_eval(<<'.,.,', 'parser.y', 111)
+ def _reduce_34(val, _values, result)
+ begin_c_declaration("}")
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 115)
- def _reduce_33(val, _values, result)
+ def _reduce_35(val, _values, result)
+ end_c_declaration
+
+ result
+ end
+.,.,
+
+module_eval(<<'.,.,', 'parser.y', 119)
+ def _reduce_36(val, _values, result)
@grammar.add_error_token(ident_or_tags: val[6], code: @grammar.build_code(:error_token, val[3]), lineno: val[3].line)
result
end
.,.,
-# reduce 34 omitted
+# reduce 37 omitted
-module_eval(<<'.,.,', 'parser.y', 121)
- def _reduce_35(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 125)
+ def _reduce_38(val, _values, result)
val[1].each {|hash|
hash[:tokens].each {|id|
@grammar.add_type(id: id, tag: hash[:tag])
@@ -1331,8 +1378,8 @@ module_eval(<<'.,.,', 'parser.y', 121)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 129)
- def _reduce_36(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 133)
+ def _reduce_39(val, _values, result)
val[1].each {|hash|
hash[:tokens].each {|id|
sym = @grammar.add_term(id: id)
@@ -1345,8 +1392,8 @@ module_eval(<<'.,.,', 'parser.y', 129)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 139)
- def _reduce_37(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 143)
+ def _reduce_40(val, _values, result)
val[1].each {|hash|
hash[:tokens].each {|id|
sym = @grammar.add_term(id: id)
@@ -1359,8 +1406,8 @@ module_eval(<<'.,.,', 'parser.y', 139)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 149)
- def _reduce_38(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 153)
+ def _reduce_41(val, _values, result)
val[1].each {|hash|
hash[:tokens].each {|id|
sym = @grammar.add_term(id: id)
@@ -1373,8 +1420,8 @@ module_eval(<<'.,.,', 'parser.y', 149)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 159)
- def _reduce_39(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 163)
+ def _reduce_42(val, _values, result)
val[1].each {|hash|
hash[:tokens].each {|id|
sym = @grammar.add_term(id: id)
@@ -1387,8 +1434,8 @@ module_eval(<<'.,.,', 'parser.y', 159)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 170)
- def _reduce_40(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 174)
+ def _reduce_43(val, _values, result)
val[0].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true)
}
@@ -1397,8 +1444,8 @@ module_eval(<<'.,.,', 'parser.y', 170)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 176)
- def _reduce_41(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 180)
+ def _reduce_44(val, _values, result)
val[1].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true)
}
@@ -1407,8 +1454,8 @@ module_eval(<<'.,.,', 'parser.y', 176)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 182)
- def _reduce_42(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 186)
+ def _reduce_45(val, _values, result)
val[2].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true)
}
@@ -1417,123 +1464,119 @@ module_eval(<<'.,.,', 'parser.y', 182)
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 187)
- def _reduce_43(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 191)
+ def _reduce_46(val, _values, result)
result = [val[0]]
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 188)
- def _reduce_44(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 192)
+ def _reduce_47(val, _values, result)
result = val[0].append(val[1])
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 190)
- def _reduce_45(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 194)
+ def _reduce_48(val, _values, result)
result = val
result
end
.,.,
-# reduce 46 omitted
+# reduce 49 omitted
-# reduce 47 omitted
+# reduce 50 omitted
-# reduce 48 omitted
+# reduce 51 omitted
-# reduce 49 omitted
+# reduce 52 omitted
-module_eval(<<'.,.,', 'parser.y', 200)
- def _reduce_50(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 204)
+ def _reduce_53(val, _values, result)
result = [{tag: nil, tokens: val[0]}]
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 204)
- def _reduce_51(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 208)
+ def _reduce_54(val, _values, result)
result = [{tag: val[0], tokens: val[1]}]
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 208)
- def _reduce_52(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 212)
+ def _reduce_55(val, _values, result)
result = val[0].append({tag: val[1], tokens: val[2]})
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 211)
- def _reduce_53(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 215)
+ def _reduce_56(val, _values, result)
result = [val[0]]
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 212)
- def _reduce_54(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 216)
+ def _reduce_57(val, _values, result)
result = val[0].append(val[1])
result
end
.,.,
-# reduce 55 omitted
+# reduce 58 omitted
-# reduce 56 omitted
+# reduce 59 omitted
-module_eval(<<'.,.,', 'parser.y', 219)
- def _reduce_57(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+module_eval(<<'.,.,', 'parser.y', 223)
+ def _reduce_60(val, _values, result)
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 224)
- def _reduce_58(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+module_eval(<<'.,.,', 'parser.y', 227)
+ def _reduce_61(val, _values, result)
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 229)
- def _reduce_59(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 231)
+ def _reduce_62(val, _values, result)
result = val[0].append(val[3])
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 233)
- def _reduce_60(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+module_eval(<<'.,.,', 'parser.y', 235)
+ def _reduce_63(val, _values, result)
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 238)
- def _reduce_61(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+module_eval(<<'.,.,', 'parser.y', 239)
+ def _reduce_64(val, _values, result)
+ end_c_declaration
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 243)
- def _reduce_62(val, _values, result)
+ def _reduce_65(val, _values, result)
result = [val[2]]
result
@@ -1541,7 +1584,7 @@ module_eval(<<'.,.,', 'parser.y', 243)
.,.,
module_eval(<<'.,.,', 'parser.y', 248)
- def _reduce_63(val, _values, result)
+ def _reduce_66(val, _values, result)
result = [{tag: nil, tokens: val[0]}]
result
@@ -1549,7 +1592,7 @@ module_eval(<<'.,.,', 'parser.y', 248)
.,.,
module_eval(<<'.,.,', 'parser.y', 252)
- def _reduce_64(val, _values, result)
+ def _reduce_67(val, _values, result)
result = [{tag: val[0], tokens: val[1]}]
result
@@ -1557,7 +1600,7 @@ module_eval(<<'.,.,', 'parser.y', 252)
.,.,
module_eval(<<'.,.,', 'parser.y', 256)
- def _reduce_65(val, _values, result)
+ def _reduce_68(val, _values, result)
result = val[0].append({tag: nil, tokens: val[1]})
result
@@ -1565,47 +1608,47 @@ module_eval(<<'.,.,', 'parser.y', 256)
.,.,
module_eval(<<'.,.,', 'parser.y', 259)
- def _reduce_66(val, _values, result)
+ def _reduce_69(val, _values, result)
result = [val[0]]
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 260)
- def _reduce_67(val, _values, result)
+ def _reduce_70(val, _values, result)
result = val[0].append(val[1])
result
end
.,.,
-# reduce 68 omitted
+# reduce 71 omitted
module_eval(<<'.,.,', 'parser.y', 264)
- def _reduce_69(val, _values, result)
+ def _reduce_72(val, _values, result)
raise "Ident after %prec" if @prec_seen
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 265)
- def _reduce_70(val, _values, result)
+ def _reduce_73(val, _values, result)
raise "Char after %prec" if @prec_seen
result
end
.,.,
-# reduce 71 omitted
+# reduce 74 omitted
-# reduce 72 omitted
+# reduce 75 omitted
-# reduce 73 omitted
+# reduce 76 omitted
-# reduce 74 omitted
+# reduce 77 omitted
module_eval(<<'.,.,', 'parser.y', 275)
- def _reduce_75(val, _values, result)
+ def _reduce_78(val, _values, result)
lhs = val[0]
- lhs.alias = val[1]
+ lhs.alias_name = val[1]
val[3].each {|hash|
@grammar.add_rule(lhs: lhs, rhs: hash[:rhs], lineno: hash[:lineno])
}
@@ -1615,7 +1658,7 @@ module_eval(<<'.,.,', 'parser.y', 275)
.,.,
module_eval(<<'.,.,', 'parser.y', 284)
- def _reduce_76(val, _values, result)
+ def _reduce_79(val, _values, result)
result = [{rhs: val[0], lineno: val[0].first&.line || @lexer.line - 1}]
result
@@ -1623,101 +1666,102 @@ module_eval(<<'.,.,', 'parser.y', 284)
.,.,
module_eval(<<'.,.,', 'parser.y', 288)
- def _reduce_77(val, _values, result)
+ def _reduce_80(val, _values, result)
result = val[0].append({rhs: val[2], lineno: val[2].first&.line || @lexer.line - 1})
result
end
.,.,
-# reduce 78 omitted
+# reduce 81 omitted
module_eval(<<'.,.,', 'parser.y', 294)
- def _reduce_79(val, _values, result)
- result = []
- @prec_seen = false
- @code_after_prec = false
+ def _reduce_82(val, _values, result)
+ reset_precs
+ result = []
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 300)
- def _reduce_80(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 299)
+ def _reduce_83(val, _values, result)
+ reset_precs
+ result = []
+
+ result
+ end
+.,.,
+
+module_eval(<<'.,.,', 'parser.y', 304)
+ def _reduce_84(val, _values, result)
token = val[1]
- token.alias = val[2]
+ token.alias_name = val[2]
result = val[0].append(token)
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 306)
- def _reduce_81(val, _values, result)
- if @prec_seen
- raise "Multiple User_code after %prec" if @code_after_prec
- @code_after_prec = true
- end
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+module_eval(<<'.,.,', 'parser.y', 310)
+ def _reduce_85(val, _values, result)
+ token = Lrama::Lexer::Token::Parameterizing.new(s_value: val[1])
+ result = val[0].append(token)
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 315)
- def _reduce_82(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+ def _reduce_86(val, _values, result)
+ token = Lrama::Lexer::Token::Parameterizing.new(s_value: val[1])
+ result = val[0].append(token)
result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 320)
- def _reduce_83(val, _values, result)
- token = val[3]
- token.alias = val[6]
- result = val[0].append(token)
+ def _reduce_87(val, _values, result)
+ token = Lrama::Lexer::Token::Parameterizing.new(s_value: val[1])
+ result = val[0].append(token)
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 326)
- def _reduce_84(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 325)
+ def _reduce_88(val, _values, result)
if @prec_seen
raise "Multiple User_code after %prec" if @code_after_prec
@code_after_prec = true
end
- @lexer.status = :c_declaration
- @lexer.end_symbol = '}'
+ begin_c_declaration("}")
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 335)
- def _reduce_85(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+module_eval(<<'.,.,', 'parser.y', 333)
+ def _reduce_89(val, _values, result)
+ end_c_declaration
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 340)
- def _reduce_86(val, _values, result)
- token = val[2]
- token.alias = val[5]
- result = [token]
+module_eval(<<'.,.,', 'parser.y', 337)
+ def _reduce_90(val, _values, result)
+ token = val[3]
+ token.alias_name = val[6]
+ result = val[0].append(token)
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 346)
- def _reduce_87(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 343)
+ def _reduce_91(val, _values, result)
sym = @grammar.find_symbol_by_id!(val[2])
result = val[0].append(sym)
@prec_seen = true
@@ -1726,70 +1770,68 @@ module_eval(<<'.,.,', 'parser.y', 346)
end
.,.,
-# reduce 88 omitted
+# reduce 92 omitted
-module_eval(<<'.,.,', 'parser.y', 352)
- def _reduce_89(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 349)
+ def _reduce_93(val, _values, result)
result = val[1].s_value
result
end
.,.,
-# reduce 90 omitted
+# reduce 94 omitted
-# reduce 91 omitted
+# reduce 95 omitted
-module_eval(<<'.,.,', 'parser.y', 359)
- def _reduce_92(val, _values, result)
- @lexer.status = :c_declaration
- @lexer.end_symbol = '\Z'
+module_eval(<<'.,.,', 'parser.y', 356)
+ def _reduce_96(val, _values, result)
+ begin_c_declaration('\Z')
@grammar.epilogue_first_lineno = @lexer.line + 1
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 365)
- def _reduce_93(val, _values, result)
- @lexer.status = :initial
- @lexer.end_symbol = nil
+module_eval(<<'.,.,', 'parser.y', 361)
+ def _reduce_97(val, _values, result)
+ end_c_declaration
@grammar.epilogue = val[2].s_value
result
end
.,.,
-# reduce 94 omitted
+# reduce 98 omitted
-# reduce 95 omitted
+# reduce 99 omitted
-# reduce 96 omitted
+# reduce 100 omitted
-# reduce 97 omitted
+# reduce 101 omitted
-# reduce 98 omitted
+# reduce 102 omitted
-module_eval(<<'.,.,', 'parser.y', 377)
- def _reduce_99(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 372)
+ def _reduce_103(val, _values, result)
result = [val[0]]
result
end
.,.,
-module_eval(<<'.,.,', 'parser.y', 378)
- def _reduce_100(val, _values, result)
+module_eval(<<'.,.,', 'parser.y', 373)
+ def _reduce_104(val, _values, result)
result = val[0].append(val[1])
result
end
.,.,
-# reduce 101 omitted
+# reduce 105 omitted
-# reduce 102 omitted
+# reduce 106 omitted
-module_eval(<<'.,.,', 'parser.y', 383)
- def _reduce_103(val, _values, result)
- result = Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Ident, s_value: val[0])
+module_eval(<<'.,.,', 'parser.y', 378)
+ def _reduce_107(val, _values, result)
+ result = Lrama::Lexer::Token::Ident.new(s_value: val[0])
result
end
.,.,
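
The regenerated actions above replace two patterns that previously appeared inline: toggling the lexer in and out of C-code scanning, and clearing the %prec bookkeeping flags (alongside the switch from type-tagged Token instances to dedicated subclasses such as Token::Ident and Token::Parameterizing). Below is a minimal sketch of what the new begin_c_declaration / end_c_declaration / reset_precs helpers plausibly encapsulate, reconstructed only from the removed lines in this hunk; the exact definitions live in Lrama's hand-written parser sources and may differ in detail.

    module Lrama
      class Parser
        private

        # Switch the lexer into raw C-code scanning until `end_symbol` is seen
        # ("}" for embedded actions, '\Z' for the epilogue).
        def begin_c_declaration(end_symbol)
          @lexer.status = :c_declaration
          @lexer.end_symbol = end_symbol
        end

        # Return the lexer to normal grammar-token scanning.
        def end_c_declaration
          @lexer.status = :initial
          @lexer.end_symbol = nil
        end

        # Clear the flags used to reject misplaced user code after %prec.
        def reset_precs
          @prec_seen = false
          @code_after_prec = false
        end
      end
    end
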
diff --git a/tool/lrama/lib/lrama/version.rb b/tool/lrama/lib/lrama/version.rb
index 79d6ce74fe..49ff1ec97c 100644
--- a/tool/lrama/lib/lrama/version.rb
+++ b/tool/lrama/lib/lrama/version.rb
@@ -1,3 +1,3 @@
module Lrama
- VERSION = "0.5.8".freeze
+ VERSION = "0.5.9".freeze
end
diff --git a/tool/lrama/template/bison/_yacc.h b/tool/lrama/template/bison/_yacc.h
index 2af4d1e1b8..34ed6d81f5 100644
--- a/tool/lrama/template/bison/_yacc.h
+++ b/tool/lrama/template/bison/_yacc.h
@@ -12,7 +12,7 @@
#if YYDEBUG && !defined(yydebug)
extern int yydebug;
#endif
- <%-# b4_percent_code_get([[requires]]). %code is not supported -%>
+<%= output.percent_code("requires") %>
<%-# b4_token_enums_defines -%>
/* Token kinds. */
@@ -64,7 +64,7 @@ struct YYLTYPE
int yyparse (<%= output.parse_param %>);
- <%-# b4_percent_code_get([[provides]]). %code is not supported -%>
+<%= output.percent_code("provides") %>
<%-# b4_cpp_guard_close([b4_spec_mapped_header_file]) -%>
<%- if output.spec_mapped_header_file -%>
#endif /* !<%= output.b4_cpp_guard__b4_spec_mapped_header_file %> */
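
The header template now emits user-supplied %code requires / %code provides blocks via output.percent_code instead of noting that %code is unsupported. A hypothetical sketch of the lookup such a helper could perform is shown below, assuming each %code block is stored with its id and code; the attribute names are assumptions, not the verbatim implementation.

    module Lrama
      class Output
        # Concatenate the source of every %code block registered under `name`
        # (e.g. "requires" or "provides"); empty string when none exist.
        def percent_code(name)
          @grammar.percent_codes
            .select {|percent_code| percent_code.id.s_value == name }
            .map {|percent_code| percent_code.code.s_value }
            .join
        end
      end
    end
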
diff --git a/tool/lrama/template/bison/yacc.c b/tool/lrama/template/bison/yacc.c
index 840afc22a5..90ea228e22 100644
--- a/tool/lrama/template/bison/yacc.c
+++ b/tool/lrama/template/bison/yacc.c
@@ -68,8 +68,6 @@
#define YYPULL 1
-
-
<%# b4_user_pre_prologue -%>
/* First part of user prologue. */
#line <%= output.aux.prologue_first_lineno %> "<%= output.grammar_file_path %>"