Class: RedParse

Inherits:
Object
  • Object
show all
Includes:
Reducer
Defined in:
lib/redparse.rb,
lib/redparse/node.rb,
lib/redparse/cache.rb,
lib/redparse/compile.rb,
lib/redparse/version.rb,
lib/redparse/generate.rb,
lib/redparse/pthelper.rb

Overview

require "redparse/compile"

Defined Under Namespace

Modules: BareSymbolUtils, BracketsCall, ErrorNode, FlattenedIvars, HasRescue, KeywordOpNode, ListInNode, LowerOp_inspect, NamedConstant, Nodes, Reducer, Stackable Classes: AccessorAssignNode, AliasNode, AndNode, ArrayLiteralNode, ArrowOpNode, AssignNode, AssigneeList, AssignmentRhsNode, BeginNode, BlockFormalsNode, BlockNode, BlockParams, BracketsGetNode, BracketsModifyNode, BracketsSetNode, Cache, CallNode, CallSiteNode, CallWithBlockNode, CaseNode, ClassNode, CommaOpNode, Conditional, ConstantNode, DanglingCommaNode, DanglingStarNode, DeleteMonkey, DotCallNode, DottedRule, DoubleColonNode, ElseNode, ElsifNode, EnsureNode, EoiToken, ForNode, GoalPostNode, GoalPostToken, HashLiteralNode, HereDocNode, HerePlaceholderToken, IfNode, IfOpNode, KWCallNode, KeywordToken, ListOpNode, LiteralNode, LogicalNode, LoopNode, MatchNode, MetaClassNode, MethNameToken, MethodNode, MisparsedNode, ModuleNode, MultiAssign, MultiAssignNode, MultiReduce, MultiShift, NamespaceNode, NestedAssign, Node, NopNode, NotEqualNode, NotMatchNode, NumberToken, OpNode, OperatorToken, OrNode, ParenedNode, ParseError, ParserState, RangeNode, RawOpNode, RescueHeaderNode, RescueNode, RescueOpNode, Rule, RuleSet, SequenceNode, SpecializedKeywordToken, StackMonkey, StartToken, StringCatNode, StringNode, StringToken, TernaryNode, Token, UnOpNode, UnaryStarNode, UndefNode, UnlessOpNode, UntilOpNode, ValueNode, VarLikeNode, VarNameToken, VarNode, WhenNode, WhileOpNode

Constant Summary collapse

UCLETTER =
RubyLexer::UCLETTER
LCLETTER =
RubyLexer::LCLETTER
LETTER =
RubyLexer::LETTER
LETTER_DIGIT =
RubyLexer::LETTER_DIGIT
Value =

NumberToken|SymbolToken|

ValueNode&-{:lvalue =>nil}
Expr =

HerePlaceholderToken|

Value
KW2class =
{}
Punc2name =
{
  "("=>"lparen",    ")"=>"rparen",
  "["=>"lbracket",    "]"=>"rbracket",
  "{"=>"lbrace",    "}"=>"rbrace",
  ","=>"comma",
  ";"=>"semicolon",
  "::"=>"double_colon",
  "."=>"dot",
  "?"=>"question_mark", ":"=>"colon",
  "="=>"equals",
  "|"=>"pipe",
  "<<"=>"leftleft", ">>"=>"rightright",
  "=>"=>"arrow",
}
UNOP =
(OperatorToken|KeywordToken)&-{  #sppflt! KeywordToken here is a hack too
  :ident=>/^(?:[+-]@|unary[&*]|(?:lhs|rhs)[*])$/,
#    :ident=>/^(?:[+-]@|unary[&])$/,
  #:unary =>true,
}|
(OperatorToken|KeywordToken)&-{  #sppflt! KeywordToken here is a hack too
  :ident=>/^([~!]|not|defined\?)$/, #defined? should be removed from here, its handled separately
} #|
DEFOP =

|

(OperatorToken|KeywordToken)&-{  #sppflt! KeywordToken here is a hack too
  :ident=>"defined?",
}
BINOP_KEYWORDS =
%w[if unless while until and or && \|\|]
DotOp =

KeywordOp=

  KeywordToken & -{
    :ident=>/^(#{BINOP_KEYWORDS.join('|')})$/
  }
KeywordOp2= 
  KeywordToken & -{ 
    :ident=>/^([\[({!+*?:,]|\.{1,3}|::|=>)$/ 
  }
KW('.')
DoubleColonOp =

KeywordToken & -{ :ident=>"::" }

KW('::')
Op =
Op()
MODIFYASSIGNOP =
Op( /^(([^=])\2|[^<>=!])=$/, true )
NONASSIGNOP =
Op( /([^=]|[<>=!]=)$/)
KW_Op =

some of these ought to be regular operators, fer gosh sake

Op(/^(![=~]|\.\.\.?|=>)$/,true)|Op(/^(#{BINOP_KEYWORDS.join('|')})$/)
EPSILON =

this should be <<1 and >0

Float::EPSILON*10_000_000
WANTS_SEMI =
%w[while until if unless 
def case when in rescue 
elsif class module << => . ::
]
DotCall =

rule format: -[syntax pattern_matchers.+, lookahead.-]>>node type

stack_monkey("DotCall",4,CallNode){|stack|
  left,dot=*stack.slice!(-4..-3)
  right=stack[-2]
 
  right.startline=left.startline
  right.set_receiver! left
}
Lvalue =
(VarNode|CallSiteNode|BracketsGetNode|CommaOpNode|
ParenedNode|ConstantNode|UnaryStarNode)&-{:lvalue =>true}
BareMethod =
MethNameToken|(LiteralNode&-{:bare_method=>true})
ENDWORDLIST =

BEGINWORDLIST=RubyLexer::BEGINWORDLIST + %w"( [ {"

%w"end ) ] }"
ENDWORDS =
ENDWORDLIST.map{|x| Regexp.quote x}.join('|')
BEGINWORDS =
RubyLexer::BEGINWORDS
INNERBOUNDINGWORDS =
RubyLexer::INNERBOUNDINGWORDS
BEGIN2END =
{"{"=>"}", "("=>")", "["=>"]", BEGINWORDS=>"end"}
MULTIASSIGN =
UnaryStarNode|CommaOpNode|ParenedNode
WITHCOMMAS =
UnaryStarNode|CommaOpNode|(CallSiteNode&-{:with_commas=>true})
BEGINAFTEREQUALS =

(CallSiteNode&-:args=>-{:size=>~0.reg})

BeginNode&
-{:after_equals =>nil}&-{:non_empty=>true}
BEGINAFTEREQUALS_MARKED =
BeginNode&
-{:after_equals =>true}&-{:non_empty=>true}
LHS_COMMA =

&-=> :lhs

Op('lhs,',true)
RHS_COMMA =

&-=> :rhs

Op('rhs,',true)
FUNCLIKE_KEYWORD =

PARAM_COMMA=Op('param,',true)#&-=> :param

KeywordToken&-{:ident=>RubyLexer::FUNCLIKE_KEYWORDS}
IGN_SEMI_BEFORE =
KW(/^(#{RubyLexer::INNERBOUNDINGWORDS.gsub(/(rescue|then)\|/,'')[1...-1]}|end|[)}\]])$/)|EoiToken
IGN_SEMI_AFTER =
KW(/^(begin|[;:({|]|then|do|else|ensure)$/)|BlockFormalsNode
OPERATORLIKE_LB =

for use in lookback patterns

OperatorToken|
KW(/^(not | defined\? | rescue3 | .*[@,] | [ ~ ! ; \( \[ \{ ? : ] | \.{1,3} | :: | => | ![=~])$/x)|
KW(%r{^( \*\*? | << | >> | &&? | \|\|? | \^ | % | / | - | \+ )?=$}x)|
KW(BEGINWORDS)|KW(/^#{INNERBOUNDINGWORDS}$/)|RescueHeaderNode|StartToken|
GoalPostToken|BlockFormalsNode|AssignmentRhsListStartToken
VALUELIKE_LA =

for use in lookahead patterns

KW(RubyLexer::VARLIKE_KEYWORDS)|NumberToken|SymbolToken|StringToken|UNOP|DEFOP|
KW(/^[({]$/x)|VarNameToken|MethNameToken|HerePlaceholderToken|
KW(BEGINWORDS)|FUNCLIKE_KEYWORD|AssignmentRhsListStartToken
LOWEST_OP =
KW(/^(#{ENDWORDS})$/)|KW(/^#{INNERBOUNDINGWORDS.sub('rescue|','')}$/)|
EoiToken|GoalPostToken|AssignmentRhsListEndToken
RESCUE_BODY =
-,]
RESCUE_OP =

|(KW('rescue')&-:infix=>true)

Op('rescue')
RESCUE_KW =
KW('rescue')&-{:infix=>nil}
OP2CLASS =
{
  "!="=>NotEqualNode,
  "!~"=>NotMatchNode,
  "=~"=>MatchNode,
  "if"=>IfOpNode,
  "unless"=>UnlessOpNode,
  "while"=>WhileOpNode,
  "until"=>UntilOpNode,
  ".."=>RangeNode,
  "..."=>RangeNode,
  "=>"=>ArrowOpNode,
  "&&"=>AndNode,
  "||"=>OrNode,
  "and"=>AndNode,
  "or"=>OrNode,
  "rescue"=>RescueOpNode,
  "rescue3"=>RescueOpNode,
}
LookupNode =
ConstantNode
ACTION_PATTERN =
ParserState|Rule|MultiShift|MultiReduce|:accept|:error
VERSION =
'0.8.4'
CHARMAPPINGS =
{
?`=>'bquote',  ?~=>'tilde',  ?!=>'bang',    ?@=>'at',
?#=>'num',     ?$=>'dollar', ?%=>'percent', ?^=>'caret',
?&=>'and',     ?*=>'star',   ?(=>'lparen',  ?)=>'rparen',
?-=>'minus',   ?+=>'plus',   ?==>'equals',
?{=>'lbrace',  ?}=>'rbrace', ?[=>'lbrack',  ?]=>'rbrack',
?|=>'or',      ?\\=>'bslash',?:=>'colon',   ?;=>'semicolon',
?"=>'dquote',  ?'=>'squote', ?,=>'comma',   ?.=>'dot',
?<=>'less',    ?>=>'more',   ??=>'q',       ?/=>'y',
?\s=>'space',
?X=>'x',
}
STRMAPPINGS =
{
  '::'=>"XX",
  '++'=>"Xeval",
  '--'=>"Xsingleton",
  '[]'=>"Xbrackets",
  '->'=>"Xcalling",
}
STRMAP_REX =
/#{STRMAPPINGS.keys.map{|x| Regexp.quote x}.join "|"}/

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods included from Reducer

#new_reduce

Constructor Details

#initialize(input, name = "(eval)", line = 1, lvars = [], options = {}) ⇒ RedParse

Returns a new instance of RedParse.

Raises:

  • (ArgumentError)


1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
# File 'lib/redparse.rb', line 1017

# Sets up a parser over +input+ (an IO, a String, or a pre-lexed token
# Array). +name+/+line+ give the source position for error reporting;
# +lvars+ seeds the lexer's local-variable table; +options+ may carry
# :rubyversion (default 1.8) and :cache_mode (:read_write, :read_only,
# :write_only, or :none). Raises ArgumentError on a bad :cache_mode.
def initialize(input,name="(eval)",line=1,lvars=[],options={})
  @rubyversion=options[:rubyversion]||1.8

  # Cache key includes name, line, sorted lvars, ruby version, and class.
  cache=Cache.new(name,line,lvars.sort.join(" "),@rubyversion,self.class.name)
  cache_mode=options[:cache_mode]||:read_write
  raise ArgumentError unless /^(?:read_(?:write|only)|write_only|none)$/===cache_mode.to_s    
  read_cache= /read/===cache_mode.to_s
  input.binmode if input.respond_to? :binmode
  # Cache hit: stash the parsed result and skip all lexer/parser setup.
  if read_cache and cache and result=cache.get(input)
    @cached_result=result
    @write_cache=nil
    return
  end
  if /write/===cache_mode.to_s
    @write_cache,@input= cache,input 
  else
    @write_cache=nil
  end

  if Array===input
    # Pre-lexed token list: adapt it to the lexer's get1token interface.
    def input.get1token; shift end
    @lexer=input
  else
    @lexer=RubyLexer.new(name,input,line,0,:rubyversion=>@rubyversion)
    lvars.each{|lvar| @lexer.localvars[lvar]=true }
  end
  @filename=name
  @min_sizes={}
  @compiled_rules={}
  @moretokens=[]
  @unary_or_binary_op=/^[-+]$/
#    @rules=self.expaneded_RULES
  @precedence=self.PRECEDENCE
  @RIGHT_ASSOCIATIVE=self.RIGHT_ASSOCIATIVE
# Compile parse tables eagerly only when the experimental table-driven
# engine is enabled.
if defined? END_ATTACK
  compile
end
  @saw_item_that=nil
end

Instance Attribute Details

#inputsObject

Returns the value of attribute inputs.



1348
1349
1350
# File 'lib/redparse/compile.rb', line 1348

# Reader for @inputs: the exemplar tokens/nodes used while building the
# parse tables (populated by enumerate_exemplars during compile).
def inputs() @inputs end

#lexerObject

Returns the value of attribute lexer.



1057
1058
1059
# File 'lib/redparse.rb', line 1057

# Reader for @lexer: the RubyLexer (or pre-lexed token Array) installed
# by #initialize.
def lexer() @lexer end

#oc_cacheObject

Returns the value of attribute oc_cache.



741
742
743
# File 'lib/redparse/compile.rb', line 741

# Reader for @oc_cache, a scratch cache used during state generation
# (cleared at the end of enumerate_states).
def oc_cache() @oc_cache end

#rmd_cacheObject

Returns the value of attribute rmd_cache.



740
741
742
# File 'lib/redparse/compile.rb', line 740

# Reader for @rmd_cache, a scratch cache used during state generation
# (cleared at the end of enumerate_states).
def rmd_cache() @rmd_cache end

#rubyversionObject (readonly)

Returns the value of attribute rubyversion.



1058
1059
1060
# File 'lib/redparse.rb', line 1058

# Reader for @rubyversion, the target ruby version (e.g. 1.8) chosen in
# #initialize via options[:rubyversion].
def rubyversion() @rubyversion end

#sl2ms_cacheObject

Returns the value of attribute sl2ms_cache.



742
743
744
# File 'lib/redparse/compile.rb', line 742

# Reader for @sl2ms_cache, a scratch cache used during state generation
# (cleared at the end of enumerate_states).
def sl2ms_cache() @sl2ms_cache end

#statesObject (readonly)

Returns the value of attribute states.



1517
1518
1519
# File 'lib/redparse/compile.rb', line 1517

# Reader for @states, the computed parser states.
def states() @states end

Class Method Details

.has_return_hash_fix?Boolean

is this needed? it’s not used in this file.…

Returns:

  • (Boolean)


387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
# File 'lib/redparse.rb', line 387

# Probes the installed RubyLexer with "return {}.size" and reports whether
# it emits the expected token sequence (i.e. whether the lexer carries the
# return-hash fix). Short-circuits at the first mismatching token.
def self.has_return_hash_fix? #is this needed? it's not used in this file....
  lexer = RubyLexer.new("", "return {}.size")
  expected = [
    FileAndLineToken, MethNameToken, ImplicitParamListStartToken,
    WsToken, KeywordToken, KeywordToken, KeywordToken,
    MethNameToken, ImplicitParamListStartToken,
    ImplicitParamListEndToken, ImplicitParamListEndToken, EoiToken
  ]
  expected.all?{|klass| klass === lexer.get1token }
end

.inspect_constant_namesObject



1534
1535
1536
1537
1538
1539
1540
1541
# File 'lib/redparse/compile.rb', line 1534

# Tags each of this class's constants (except classes, modules, numerics,
# symbols, booleans and nil) with its own name via the NamedConstant
# mixin, so the big pattern constants print readably when inspected.
# NOTE(review): `Class|Module|...===k` relies on the Reg library's `|`
# operator being defined on Class/true/false/nil, not core Ruby.
def self.inspect_constant_names
  constants.each{|kn| 
    k=const_get(kn)
    next if Class|Module|Numeric|Symbol|true|false|nil===k
    k.extend NamedConstant
    k.constant_name=kn
  }
end

.KW(ident) ⇒ Object



534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
# File 'lib/redparse.rb', line 534

# Returns a matcher for the keyword +ident+ (String, Regexp, or Integer
# char code). When SPECIALIZED_KEYWORDS is defined, generates (and caches
# in KW2class) one SpecializedKeywordToken subclass per keyword name; a
# Regexp is decomposed into its alternatives (including any character
# class) and the per-word matchers are or'd together. Otherwise simply
# matches a KeywordToken whose :ident equals +ident+.
def self.KW(ident)
if defined? SPECIALIZED_KEYWORDS
  fail if /\\/===ident
  orig_ident=ident
  if Regexp===ident
    # Extract the alternation body from the regexp's source: /^(...)$/.
    list=ident.to_s[/\(?-mix:\^\((.*)\)\$\)/,1]
    
    #pick apart any char class in ident
    if open_bracket_idx=list.index(/([^\\]|^)\[/)
      open_bracket_idx+=1 unless list[open_bracket_idx]=="["
      close_bracket_idx=list.index(/[^\\]\]/,open_bracket_idx+1)
      close_bracket_idx+=1 unless list[close_bracket_idx]=="]"
      cclass=list.slice!(open_bracket_idx..close_bracket_idx)
      cclass=cclass[1...-1]
      cclass=cclass.scan( /[^\\]|\\./ )
      cclass.map!{|ch| ch.size==1 ? ch : ch[1..1] }
    end

    #rest of it should be a list of words separated by |
    list=list.split(/\|/).reject{|x| x==''}
    list.concat cclass if cclass
    # Recurse per word and or the resulting matchers together.
    list.map{|w| 
      w.gsub!(/\\/,'')
      KW(w) 
    }.inject{|sum,kw| sum|kw}
  else
    fail unless String===ident
    # Punctuation gets a symbolic name from Punc2name for the class name.
    ident=Punc2name[ident] unless /^(?:(?!#{LETTER_DIGIT}).)+$/o===ident
    fail "no name for #{orig_ident}" unless ident
    eval %{
      class Keyword_#{ident} < SpecializedKeywordToken
        def ident; '#{orig_ident}' end
#         def self.instance; @instance ||= allocate end
#         def self.new; instance end
        def initialize(offset)
          @offset=offset
        end
      end
    }
    KW2class[ident]||=const_get("Keyword_#{ident}")
  end
else
  # Non-specialized path: normalize ident to something KeywordToken
  # comparison understands.
  ident=case ident
        when Integer;        ident.chr
        when String,Regexp;  ident
        else                 ident.to_s
        end

  return KeywordToken&-{:ident=>ident}
end
end

.Op(ident = nil, allow_keyword = false) ⇒ Object

MethNameToken&-{ #hack, shouldn’t be necessary

   #rubylexer should know to generally treat "defined?" as a keyword
   #or operator. (like most keywords, it can also be used as a method 
   #               name....)
  :ident=>"defined?"
}


621
622
623
624
625
626
627
# File 'lib/redparse.rb', line 621

# Builds a matcher for operator tokens; with +allow_keyword+ it also
# matches KeywordToken, and with +ident+ it constrains the token's ident.
def self.Op(ident=nil, allow_keyword=false)
  matcher = allow_keyword ? OperatorToken | KeywordToken : OperatorToken
  matcher = matcher & -{:ident => ident} if ident
  matcher
end

.remove_silly_begins(pt) ⇒ Object



2
3
4
5
6
7
8
9
10
11
# File 'lib/redparse/pthelper.rb', line 2

# Recursively collapses redundant [:begin, x] wrappers in a nested
# parse-tree array, replacing each two-element [:begin, x] with x in
# place. Returns the (mutated) argument.
def self.remove_silly_begins(pt)
  pt.each_with_index do |child, idx|
    next unless Array === child
    remove_silly_begins(child)
    pt[idx] = child.last if child.size == 2 && child.first == :begin
  end
end

.stack_monkey(*args, &block) ⇒ Object



102
# File 'lib/redparse.rb', line 102

def self.stack_monkey(*args,&block) StackMonkey.new(*args,&block) end

.str2cname(str) ⇒ Object



358
359
360
361
362
363
# File 'lib/redparse/generate.rb', line 358

# Converts an operator/keyword string into a C-identifier-safe name:
# multi-char sequences are mapped via STRMAPPINGS first, then each
# remaining non-word character (and literal "X") becomes "X" plus either
# its CHARMAPPINGS name or its hex char code.
# NOTE(review): in `esc=CHARMAPPINGS[ch[0]] ? esc : ch[0].to_s(16)` the
# ternary appears to bind before the assignment, which would leave `esc`
# nil in the true branch — confirm against the original source.
def self.str2cname str
  str.gsub(STRMAP_REX){|str2| STRMAPPINGS[str2] } \
     .gsub(/(?!#{LETTER_DIGIT}).|[X]/o){|ch| 
       "X"+  esc=CHARMAPPINGS[ch[0]] ? esc : ch[0].to_s(16)
     } 
end

Instance Method Details

#[](*args) ⇒ Object



228
229
230
# File 'lib/redparse.rb', line 228

# Delegates element access to the parse stack (index, (start,len), or
# range — whatever Array#[] accepts).
def [](*args)
  @stack[*args]
end

#[]=(*args) ⇒ Object



232
233
234
# File 'lib/redparse.rb', line 232

# Delegates element assignment to the parse stack (same argument forms
# as Array#[]=; last argument is the value).
def []=(*args)
  value = args.pop
  @stack[*args] = value
end

#action2c(action) ⇒ Object



137
138
139
140
141
142
143
144
145
146
147
148
# File 'lib/redparse/generate.rb', line 137

# Translates one parse-table action into the C statement that performs
# it (a goto, YYACCEPT, or the action's own generated code). Unknown
# action types are a hard failure.
def action2c(action)
  case action
  when Rule        then "goto reduce_#{str2cname action.name};"
  when nil, :error then "goto error_handler;"
  when ParserState then "goto shift_state_#{str2cname action.name};"
  when :accept     then "YYACCEPT;"
  when MultiReduce then action.action2c
  when MultiShift  then action.action2c
  else fail "unexpected action type: #{action.class} = #{action}"
  end
end

#all_dotted_rulesObject



343
344
345
346
347
348
349
# File 'lib/redparse/compile.rb', line 343

# One DottedRule for every position within every rule's pattern list.
def all_dotted_rules
  all_rules.flat_map do |rule|
    rule.patterns.each_index.map do |pos|
      DottedRule.create(rule, pos, self)
    end
  end
end

#all_initial_dotted_rulesObject

$OLD_PAA=1



353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
# File 'lib/redparse/compile.rb', line 353

# Memoized list of DottedRules at position 0 for every rule. The
# also_allow fixpoint computation below only runs when $OLD_PAA is
# undefined (the surrounding source sets $OLD_PAA=1, so it is normally
# skipped).
# NOTE(review): `while true ... end until ...` — the inner loop has no
# break, so the trailing until condition looks unreachable; confirm
# against the original source (possibly meant `begin ... end until`).
def all_initial_dotted_rules
  return @all_initial_dotted_rules if defined? @all_initial_dotted_rules
  @all_initial_dotted_rules=result=
    all_rules.map{|rule| DottedRule.create(rule,0,nil) }

  p :all_init

unless defined? $OLD_PAA
  scanning=result
  provisionals=nil
  while true
    old_provisionals=provisionals
    provisionals={}
    scanning.each{|dr| 
      dr.also_allow=dr.compute_also_allow(provisional=[false]) #fill out dr.also_allow
      provisionals[dr]=provisional[0]
    }
    scanning=provisionals.map{|dr,val| dr if val }.compact
  end until provisionals==old_provisionals
end
  p :all_init_done

  return result
end

#all_rulesObject



301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
# File 'lib/redparse/compile.rb', line 301

# Memoized list of Rule objects built from the expanded grammar. Also:
# wires each StackMonkey action to the exemplar inputs it hints at,
# drops error-recovery (MisparsedNode) rules, and assigns each rule a
# globally unique name, suffixing duplicates with a counter.
def all_rules
  return @all_rules if defined? @all_rules

  @inputs||=enumerate_exemplars
  @rules=expanded_RULES  #force it to be recalculated
  @all_rules = map_with_index(@rules){|r,i| Rule.new r,i}

  @all_rules.each{|r|
    if StackMonkey===r.action
      r.action.exemplars=@inputs.grep r.action.hint
    end
  }
 
  warn "error recovery rules disabled for now; creates too many states and masks errors"
  @all_rules.reject!{|r| r.action==MisparsedNode }

  #names have to be allocated globally to make sure they don't collide
  names=@all_rules.map{|r| 
    if r.action.respond_to? :name 
      r.action.name
    else
      r.action.to_s
    end
  }.sort
  # Any name adjacent to an equal name in the sorted list is a duplicate.
  dups={}
  names.each_with_index{|name,i|
    dups[name]=0 if name==names[i+1]
  }
  @all_rules.each{|r|
    r.name=
    if r.action.respond_to? :name 
      r.action.name.dup
    else
      r.action.to_s
    end
    if dups[r.name]
      count=dups[r.name]+=1
      r.name<<"_#{count}"
    end
  }
end

#all_statesObject



1350
1351
1352
1353
# File 'lib/redparse/compile.rb', line 1350

# Memoized list of every parser state, built on first use by
# enumerate_states.
def all_states
  @all_states = enumerate_states unless defined? @all_states
  @all_states
end

#beginsendsmatcherObject



725
726
727
728
# File 'lib/redparse.rb', line 725

# Memoized regexp matching any block-opening or block-closing word.
def beginsendsmatcher
  return @bem if @bem
  @bem = /^(#{BEGINWORDS}|#{ENDWORDS})$/
end

#check_for_parsealike_inputsObject



1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
# File 'lib/redparse/compile.rb', line 1245

# Deduplicates @inputs by matching behavior: two exemplar inputs that
# match exactly the same set of rule patterns are parse-alike, so only
# one is kept. Records merged identities in @identity_name_aliases,
# prints each overlap, and returns the reduced input list.
def check_for_parsealike_inputs
  # Flatten every rule pattern (unwrapping Reg::Repeat) into one list.
  all_patterns=all_rules.map{|r| r.patterns.map{|rp| Reg::Repeat===rp and rp=rp.subregs[0]; rp }}.flatten.uniq
  seen={}
  @identity_name_aliases={}
  warn "why are non_empty and after_equals params to BeginNode appearently ignored?"
  warn "some token identities overlap themselves?!?"
  warn "some overlaps are duplicated"
  warn ". and :: overlap => ..... surely that's not right"
  @inputs.map{|input|
    # An input's profile is its boolean match result against every pattern
    # (Procs are kept as-is since they can't be statically matched).
    profile=all_patterns.map{|pat| Proc===pat ? pat : !!(pat===input)}
    if seen[profile]
      puts "#{input} overlaps #{seen[profile]}"
      @identity_name_aliases[seen[profile]]=input
      nil
    else
      seen[profile]=input
    end
  }.compact
end

#child_relations_among(*classes) ⇒ Object



1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
# File 'lib/redparse/compile.rb', line 1138

# Maps each of the given classes (plus Object) to the list of given
# classes that are its nearest descendants — i.e. each class is filed
# under its closest ancestor that also appears in the argument list.
def child_relations_among(*classes)
  classes.unshift Object
  result = {}
  classes.each{|klass| result[klass] = [] }

  classes.each{|klass|
    ancestry = klass.ancestors
    fail unless ancestry.shift == klass
    # Walk outward; the first ancestor present in result claims klass.
    ancestry.each{|ancestor|
      children = result[ancestor]
      if children
        children << klass
        break
      end
    }
  }

  result
end

#compileObject



1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
# File 'lib/redparse/compile.rb', line 1372

# Builds (or loads) the LALR parse tables and emits a C parser to
# $stdout; returns self. If cached_parse_tables.drb exists, the whole
# precomputed parser is Marshal-loaded and all calls are delegated to
# it; otherwise states are enumerated, best-effort dumped to that file,
# and diagnostics are printed (unreachable dotted rules/actions, dotted
# rules past their rule's end, duplicate states).
# NOTE(review): the local `states` is only assigned in the else branch;
# the cached-load path appears to reach `states.each` with states=nil —
# confirm whether the delegation above is expected to prevent that.
def compile
  oldparser=Thread.current[:$RedParse_parser]
  Thread.current[:$RedParse_parser]||=self

  if File.exist?("cached_parse_tables.drb")
    # Load a previously computed parser wholesale and forward every call
    # to it.
    dup=Marshal.load(f=open("cached_parse_tables.drb","rb"))
    instance_variables.each{|var| remove_instance_variable var }
    extend SingleForwardable
    def_singleton_delegators(dup,public_methods+private_methods+protected_methods)

    self.inputs=enumerate_exemplars
  else
    @generating_parse_tables=true
    @inputs||=enumerate_exemplars

    states=all_states
#      @rules=expanded_RULES 
    @inputs=nil #Marshal no like it

    # Best-effort cache write; a failed dump removes the partial file.
    begin
      p :dumping
      Marshal.dump(self,f=open("cached_parse_tables.drb","wb"))
      p :dump_done!
    rescue Exception
      p :dump_failed
      File.unlink "cached_parse_tables.drb"
    ensure
      @inputs=enumerate_exemplars
    end
  end
  f.close
 
  #look for unused dotted rules and actions 
  #also states with drs past the end
  past_end=0
  drs=all_dotted_rules
  dr_count=Hash.new(0)
  acts=all_rules#.map{|r| r.action }.uniq
  act_count=Hash.new(0)
  states.each{|state|
    state.dotteds.each{|dr| 
      dr_count[dr]+=1 
      past_end+=1 if dr.pos>=dr.rule.patterns.size
    }
    sav=state.actions.values
    sav.grep(Class|StackMonkey).each{|act| act_count[act.__id__]+=1 }
    sav.grep(MultiReduce|MultiShift).each{|multi| multi.actions.each{|act| act_count[act.__id__]+=1} }
    #p state.name if state.dotteds.select{|dr| dr.rule.action==BeginNode}
  }
  puts "#{past_end} dotted rules found past the end of their rule" if past_end>0
  nevers=0
  drs.each{|dr| 
    next unless dr_count[dr].zero? 
    puts "never reached #{dr.name}" 
    nevers+=1
  }
  puts "#{nevers} dotted rules were never reached (out of #{drs.size})"
  nevers=0
  acts.each{|act|
    next unless act_count[act.__id__].zero?
    puts "never reached #{act.name rescue act}" 
    nevers+=1
  }
  puts  "#{nevers} actions were never reached (out of #{acts.size})"
  p :most_popular_nontrivial_drs
  pp dr_count.reject{|(dr,n)| dr.pos.zero? or dr.pos==1 && dr.rule.lookback?} \
             .sort_by{|(dr,n)| n}[-15..-1].map{|(dr,n)| [dr.name,n] }

  #look for duplicate states
  actions2state={}
  dup_states=0
  states.each{|st| 
    cache=actions2state[st.actions]
    if cache
      st.equivalent_to=cache
      dup_states+=1
    else
      actions2state[st.actions]=st 
    end
  }
  puts "#{dup_states} duplicate states" if dup_states.nonzero?

  name2count={}
  states.each{|state| state.rename(name2count) }

  #divide each state's actions into sr and goto tables
  #also scan states for the most common sr and goto actions and make them default
  states.each{|state| state.make_sr_goto_tables @inputs}


#    pp states
#    pp states.size
  
  generate_c $stdout
  return self
ensure 
  # Always drop the generation flag and restore the thread-local parser.
  remove_instance_variable :@generating_parse_tables rescue nil
  Thread.current[:$RedParse_parser]=oldparser
end

#delete_monkey(index, name) ⇒ Object



103
# File 'lib/redparse.rb', line 103

def delete_monkey(index,name) DeleteMonkey.new(index,name) end

#dont_postpone_semiObject



694
695
696
# File 'lib/redparse.rb', line 694

# Memoized negation (unary ~) of wants_semi_context: matches contexts in
# which a semicolon should NOT be postponed.
def dont_postpone_semi
  @dps = ~wants_semi_context unless @dps
  @dps
end

#enumerate_exemplarsObject



1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
# File 'lib/redparse/compile.rb', line 1226

# Builds (once, cached in the class variable @@exemplars) one exemplar
# object per stackable-class variant. Each spec is a flat list of
# [class, attr_name, value, attr_name, value, ...]; the class is
# allocated (bypassing initialize) and singleton reader methods are
# eval-defined for each attribute/value pair.
def enumerate_exemplars
  return @@exemplars if defined? @@exemplars #dunno why this is necessary

  result= STACKABLE_CLASSES() \
    .map{|sc| sc.enumerate_exemplars } \
    .inject{|sum,sc| sum+sc}

  result.map!{|sc|
      res=sc.shift.allocate
      until sc.empty?
        # Define a singleton accessor returning the canned value.
        eval "def res.#{sc.shift}; #{sc.shift.inspect} end"
      end
      def res.to_s; identity_name end
      res
  }

  return @@exemplars=result
end

#enumerate_statesObject



1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
# File 'lib/redparse/compile.rb', line 1265

# Worklist enumeration of all parser states: starting from start_state,
# evolves each state by every (deduplicated, non-StartToken) exemplar
# input, recording the resulting action in the state's table and queuing
# any newly discovered substates. Prints progress per state processed;
# clears the generation caches before returning the state list.
def enumerate_states
  inputs=check_for_parsealike_inputs
  inputs.reject!{|x| StartToken===x}

  result=[]
  todo=[start_state]

  seenlist = {}
  seenlist.default=:dunno_yet

  j=0
  start=was=Time.now
  in_result={}  #this should go away; obsoleted by @states
  state_num=-1
  todo.each{|st| in_result[st]=(state_num+=1) }
  ps=todo.first
  pp [-in_result[ps], *ps.dotteds.map{|dr| dr.name }]
  old_todo_size=todo.size
  while state=todo.shift
    result<<state

    i=0
    inputs.each {|input|
      newstate=state.evolve input,self,seenlist
      assert ACTION_PATTERN===newstate
      #newstate is ParserState|MultiShift|MultiReduce|Rule|:accept|:error
      state[input.identity_name]=newstate
      next unless newstate.respond_to? :substates 
      #newstate.substates is just [newstate] for plain ParserStates
      morestates=newstate.substates.reject{|x| in_result[x]}
      morestates.each{|st| in_result[st]=(state_num+=1) }
#        p [in_result[state],:+,input.identity_name,:>>,pretty(newstate,in_result)]
      todo.concat morestates

#        pp morestates.map{|ps| 
#          [-in_result[ps], *ps.dotteds.map{|dr| dr.name }]
#        }
#        pp pretty(newstate,in_result) unless ParserState===newstate
    }

    # Progress line: state count, queue size/growth, timing, % complete.
    now=Time.now
    p [:*,j+=1,todo.size,todo.size-old_todo_size,now-was,j/(now-start),(100.0*j/(j+todo.size)).to_i]
    old_todo_size=todo.size
    was=now

#      if state.actions.values.uniq==[:error]
       #this can happen when the only dotted rule is for an :error
       #maybe this case can be optimized?
#      end
  end
  self.rmd_cache=nil
  self.oc_cache=nil
  self.sl2ms_cache=nil
  return result
end

#error_handlerObject

4.5 Error Recovery yacc’s error recovery mechanism is rather idiosyncratic. In fact, examining two books, [LMB92] and [ASU86], and the output generated by yacc yields three different descriptions of the recovery mechanism. We have tried to be faithful to the output of yacc. Fortunately, the mechanism has few consequences to the generation of the rest of the hard-coded parser. The only change to the parser is the maintenance of the variable, yyerrorstatus. Although relatively short, the code below is very subtle, like the explanation of yacc’s error recovery mechanism. The code is given only for completeness.



305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
# File 'lib/redparse/generate.rb', line 305

# Emits the C error-recovery skeleton as a string. Currently unused:
# generate_c's call to this is commented out in favor of grammar-level
# error-recovery rules. The template is incomplete — it still contains
# placeholders ("huh", "case N: goto state_M") — and interpolates one
# jump-table entry per parser state from @states.
def error_handler
%[
error_handler:
  if (yyerrorstatus > 2){
    yyerror("syntax error");
  }
user_error_handler:
  if (yyerrorstatus == 0){
    huh if (la_identity == 0) YYABORT;// End of input.
    la_identity = yylex(&la_token);
    switch (OLDSTACK){
    #{@states.map{|state| 
        i=state.small_int
        "case #{i}: goto state_action_#{str2cname state.name};\n"
      }
    }
  }else{
    yyerrorstatus = 0;
    while (stack != stack_start){
      switch (OLDSTACK){
      case N: goto state_M;// iff M = goto[N,error].
      .
      .
      .
      }
      stack--;
    }
    YYABORT;// Empty stack.
  }
]
end

#evaluate(rule) ⇒ Object



105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
# File 'lib/redparse.rb', line 105

# Tries to match +rule+ (a Reg::Transform: pattern >> node_type) against
# the top of @stack, whose last element is always the lookahead. On a
# successful match the matched elements are replaced by the rule's node
# (or the rule's StackMonkey/Proc is run). Returns true on a reduce,
# false on no match, 0 for an explicit :shift, and throws :ParserDone on
# :accept/:error.
def evaluate rule
  #dissect the rule
if false
  rule=rule.dup
  lookahead_processor=(rule.pop if Proc===rule.last)
  node_type=rule.pop
else
  Reg::Transform===rule or fail
  node_type= rule.right
  rule=rule.left.subregs.dup
  lookahead_processor=(rule.pop if Proc|::Reg::LookAhead===rule.last)
  lookback=rule[0]=rule[0].subregs[0] if ::Reg::LookBack===rule[0]
end
  
  #index of data at which to start matching
  i=@stack.size-1   #-1 because last element of @stack is always lookahead

  #I could call this a JIT compiler, but that's a bit grandiose....
  #more of a JIT pre-processor
  compiled_rule=@compiled_rules[rule]||=
    rule.map{|pattern| 
      String|Regexp===pattern ? KW(pattern) : pattern 
    }

  #what's the minimum @stack size this rule could match?
  rule_min_size=@min_sizes[compiled_rule]||=
    compiled_rule.inject(0){|sum,pattern| 
      sum + pattern.itemrange.begin 
    }
  i>=rule_min_size or return false

  matching=[]

  #actually try to match rule elements against each @stack element in turn
  compiled_rule.reverse_each{|matcher|
    i.zero? and fail
    target=matching
    #is this matcher optional? looping?
    loop= matcher.itemrange.last.to_f.infinite?
    minimum=matcher.itemrange.first
    optional=minimum.zero?
    matching.unshift target=[]  if loop
    if loop or optional
      matcher=matcher.subregs[0]
    end

    begin
      if matcher===@stack[i-=1]  #try match
        target.unshift @stack[i]
      else
        #if match failed, the whole rule fails
        #unless this match was optional, in which case, ignore it
        #or was looping and met its minimum
        #but bump the data position back up, since the latest datum
        #didn't actually match anything.
        return false unless optional or loop&&target.size>=minimum
        i+=1
        matching.unshift nil unless loop
        break
      end
    end while loop
  } 

  matchrange= i...-1  #what elems in @stack were matched?

  #give lookahead matcher (if any) a chance to fail the match
  case lookahead_processor
  when ::Reg::LookAhead
    return false unless lookahead_processor.subregs[0]===@stack.last
  when Proc
    return false unless lookahead_processor[self,@stack.last] 
  end

  #if there was a lookback item, don't include it in the new node
  if lookback
    matchrange= i+1...-1  #what elems in @stack were matched?
    matching.shift
  end


  #replace matching elements in @stack with node type found
  case node_type
  when Class
      node=node_type.create(*matching)
      node.startline||=@stack[matchrange.first].startline
      node.endline=@endline
      @stack[matchrange]=[node]
  when Proc,StackMonkey;   node_type[@stack]
  when :shift; return 0
  when :accept,:error; throw :ParserDone
  else fail
  end
  
  return true #let caller know we found a match

  
rescue Exception=>e
  #puts "error (#{e}) while executing rule: #{rule.inspect}"
  #puts e.backtrace.join("\n")
  raise
end

#exemplars_that_match(p) ⇒ Object



1355
1356
1357
# File 'lib/redparse/compile.rb', line 1355

# All exemplar inputs matching +pattern+ (via Enumerable#grep, i.e. ===).
def exemplars_that_match(pattern)
  @inputs.grep(pattern)
end

#expanded_RULESObject

inline any subsequences in RULES right into the patterns reg should do this already, but current release does not



357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
# File 'lib/redparse.rb', line 357

# Returns RULES() with any Reg::Subseq subsequences spliced inline into
# their parent pattern (the installed Reg version doesn't do this
# itself). The early return fires when Reg already inlines subsequences,
# detected by probing a small test pattern.
def expanded_RULES
  result=RULES()
  return result if (-[:foo, -[:bar]]).subregs.grep(Reg::Subseq).empty?
  result.map!{|rule|
    unless rule.left.subregs.grep(Reg::Subseq)
    then rule
    else
      right=rule.right
      rule=rule.left.subregs.dup
      # Splice each subsequence's elements in place of the subsequence,
      # walking backwards so indices stay valid.
      (rule.size-1).downto(0){|i|
        if Reg::Subseq===rule[i]
          rule[i,1]=rule[i].subregs
        end
      }
      -rule>>right
    end
  }
end

#generate_c(output) ⇒ Object

The case arms of the switch statement are taken directly from the goto table that was computed by the LALR(1) grammar analysis. Because this switch cannot fail, no default entry is needed. However, making the most common case arm the default is a trivial time and space optimization.



281
282
283
284
285
286
287
288
289
# File 'lib/redparse/generate.rb', line 281

# Writes the complete C parser to +output+: init code, state utilities,
# one reduce routine per rule, one routine per nonterminal node type,
# and one routine per state, then the closing brace. Error-recovery
# emission is disabled in favor of grammar-level error rules.
# NOTE(review): `(0...RULES().size).each_with_index{|i,m| ...}` makes i
# and m always equal — probably meant a plain each_with_index over RULES.
def generate_c output
  output<< init_code
  output<< state_utils
  (0...RULES().size).each_with_index{|i,m| output<< (reduce i,m) }
  node_types.each{|nt| output<< (nonterminal nt) }
  map_with_index(all_states){|st,i| output<< (state st,i) }
  #output<< error_handler  #disabled, i have rules for error recovery
  output<< "}"
end

#get_token(recursing = false) ⇒ Object



1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
# File 'lib/redparse.rb', line 1060

# Returns the next token for the parser, normalized for the grammar:
# pushed-back tokens in @moretokens take priority; file/line
# bookkeeping and ignorable tokens are consumed silently (via redo);
# certain keyword tokens are rewritten into more specific token
# classes.  EoiToken and the here-doc / assignment-rhs delimiter
# tokens are returned unmodified.  The returned token is also stored
# in @last_token.  +recursing+ only suppresses PRINT_TOKENS debug
# output on re-entrant calls.
def get_token(recursing=false)
  unless @moretokens.empty? 
    @last_token=@moretokens.shift
    p @last_token if ENV['PRINT_TOKENS'] unless recursing
    return @last_token
  end

  rpt=ENV['RAW_PRINT_TOKENS']
  begin
    result=@lexer.get1token or break
    p result if rpt

    #set token's line
    result.startline= @endline||=1
    result.endline||=@endline if result.respond_to? :endline=

    if result.respond_to?(:as) and as=result.as
      #result=make_kw(as,result.offset)
      #result.originally=result.ident
      if OperatorToken===result #or KeywordToken===result
        result=result.dup
        result.ident=as
      else
        result=make_kw(as,result.offset)
      end
      result.not_real! if result.respond_to? :not_real!
    else

    case result
    when FileAndLineToken #so __FILE__ and __LINE__ can know what their values are
      @file=result.file
      @endline=result.line
      redo
    
    when OperatorToken
      if @unary_or_binary_op===result.ident and result.unary || result.tag==:unary
        result=result.dup
        result.ident+="@" #mark unary variant with trailing @, e.g. -@
      end

    #more symbol table maintenance....
    when KeywordToken
        case name=result.ident

        when /^(#{BINOP_KEYWORDS.join '|'})$/o #should be like this in rubylexer
          result=OperatorToken.new(name,result.offset) unless result.has_end?
        when "|"; result=GoalPostToken.new(result.offset) #is this needed still?
        when "__FILE__"; #I wish rubylexer would handle this
          class<<result; attr_accessor :value; end
          result.value=@file.dup
        when "__LINE__"; #I wish rubylexer would handle this
          class<<result; attr_accessor :value; end
          result.value=@endline
        else 
          result=make_kw name,result.offset if defined? SPECIALIZED_KEYWORDS
          #warning, this may discard information stored in instance vars of result
        end

    when EoiToken; break
    when HereBodyToken; break
    when AssignmentRhsListStartToken; break
    when AssignmentRhsListEndToken; break
    when IgnoreToken; redo
    end
    end
  end while false #once-through block; the redo's above re-run it to skip tokens
  p result if ENV['PRINT_TOKENS'] unless recursing
  return @last_token=result
end

#identity_name_alias?(name) ⇒ Boolean

Returns:

  • (Boolean)


1367
1368
1369
1370
# File 'lib/redparse/compile.rb', line 1367

# Resolves +name+ through the identity-alias table, returning +name+
# itself when no alias is registered.  (Despite the ? suffix, this
# returns a name, not a boolean.)
def identity_name_alias? name
  canonical=@identity_name_aliases[name]
  canonical || name
end

#init_codeObject

3 LR-Parsing Mechanics We briefly explain the fundamentals of shift-reduce parsing (which represents the LR(1) family) without going into any more detail than necessary for subsequent exposition. LALR(1) parsers like yacc simulate, either directly or indirectly, a very simple automaton with a stack of automaton states [FL88]. (Parsers generated by yacc also maintain a semantic stack, but since that stack grows in parallel with the state stack, we only describe the use of the state stack here.) Simulating the automaton requires two mechanisms: one for determining the action, which is determined by the current input symbol and the state on the top of the stack, and one for determining state transitions based on the current top of stack and a grammar symbol. At parser-generation time LALR(1) grammar analysis builds these tables, called action and goto, respectively. (The analysis is necessary regardless of whether a table-driven or hard-coded parser is desired.) Functionally, these tables have the following signatures.

goto: state x symbol -> state action: state x token -> shift,reduce_y,accept,error

There are only four possible actions: reduce, shift, accept, and error. Reduce actions are parameterized by the grammar production being reduced. Actions are described below. let TOS be the state on the top of the stack, and let la_identity be the current lookahead token.

shift A shift pushes goto[TOS, la_identity] onto the stack, and updates la_identity by advancing the lexical analyzer.

reduce_y A reduction processes production Y : X -> x_1…x_n, which requires popping n states off the stack, followed by pushing goto[TOS, X]. (The semantic action of the parser relating to this production would be executed prior to popping states off the stack.)

accept An accept signals a successful parse.

error An error requires error reporting and/or recovery.

4 Simple Implementation mule creates a single parsing routine, yyparse(), that simulates the LALR(1) parser directly in ANSI C, without interpreting any tables. The routine has five simple parts: initialization, automata states, reduction actions, nonterminal transitions, and error recovery. Although very similar to the inverted table structure in [Pfa90], this structure avoids the duplication of semantic action routines. Another difference is the yacc-compatible error recovery. The structure is simple, with all code being generated from a tiny set of small, well-defined templates that directly mirror the grammar or LALR(1) automaton. Since both the state stack and the semantic stack grow in unison, we wrap the stack entries into a single structure, StackType.

4.1 Initialization The initialization phase simply sets up bookkeeping and data structures for subsequent automata simulation. It is grammar-independent.



57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
# File 'lib/redparse/generate.rb', line 57

# Returns the grammar-independent C prologue of the generated parser:
# control macros (YYABORT/YYACCEPT/yyerrok/YYERROR/YYRECOVERING),
# stack typedefs, and the opening of yyparse() up to the jump to the
# start state.
#
# Fixes to the emitted C:
#  * `#end` is not a preprocessor directive -- the `#if 0` block must
#    be closed with `#endif`.
#  * YYABORT/YYACCEPT freed `start_stack`, but the declared variable
#    is `stack_start`; the mismatch would not compile.
def init_code
"
#define YYABORT do { \\
  free(stack_start);return -1; \\
  } while(0)
#define YYACCEPT do { \\
  YYSTYPE result=SEMANTIC_STACK; \\
  free(stack_start); \\
  return result; \\
  } while(0)
/*#define yyclearin_token = yylex(&la_token)*/
#define yyerrok yyerrorstatus = 3
#define YYERROR goto user_error_handler
#define YYRECOVERING() (yyerrorstatus <= 2)


typedef VALUE YYSTYPE;

#if 0
typedef struct stackType{
  int state;// State stack element.
} StackType;
typedef struct {
  VALUE semantic;
} SemanticStackType;
#else
typedef int StackType;
typedef VALUE SemanticStackType;
#endif
int yyparse(void){
  YYSTYPE la_token;// Semantic value computed by yylex().
  int la_identity;
  unsigned yyerrorstatus = 3;// Initialize error-recovery counter.
  YYSTYPE yyredval;// Variable holds semantic value of$$.
  VALUE semantic_stack; /*Array of Node|Token*/
//  SemanticStackType *semantic_stack_start;
  StackType *stack_start;// Stack.
  unsigned i=0;
  unsigned stack_size=64;
  
  stack_start=realloc(NULL,sizeof(StackType)*stack_size); 
  if (stack_start==NULL) MALLOC_ERROR(); 
  semantic_stack=rb_ary_new();
//  semantic_stack_start=realloc(NULL,sizeof(SemanticStackType)*stack_size); 
//  if (semantic_stack_start==NULL) MALLOC_ERROR(); 

  la_identity = yylex(&la_token); /* Get 1st token.*/

  goto shift_state_#{str2cname all_states.first.name};/* Start state.*/
"
end

#initial_stateObject



1508
1509
1510
1511
1512
1513
1514
1515
# File 'lib/redparse/compile.rb', line 1508

# Builds the parser's initial state: every rule dotted at position 0,
# canonicalized through new_state.  Also resets the @states cache.
def initial_state
  @states={}
  all_initial_dotted_rules #is this still needed?
  dotted=all_rules.map{|rule| DottedRule.create(rule,0,self) }
  state=new_state(dotted)
  state.name="initial"
  #state.perhaps_also_allow all_rules,self #silly here
  state
end

#item_that(*a, &b) ⇒ Object

this is a hack, should use graphcopy to search for Deferreds and replace with double-Deferred as below



676
677
678
679
680
681
682
683
684
685
# File 'lib/redparse.rb', line 676

# Wraps the inherited item_that matcher-builder.  When parse tables
# are being generated (@generating_parse_tables is defined), each
# dynamic matcher appears to be recorded in @saw_item_that (keyed by
# the matcher and the object it matched) so the table generator can
# account for these run-time predicates; otherwise it simply delegates.
# NOTE(review): the block passed to super itself calls super, so the
# recording happens lazily at match time, after this method returns.
def item_that(*a,&b)
  if defined? @generating_parse_tables
    huh unless b
    #double supers, one of them in a block executed after this method returns....
    #man that's weird
    super(*a){|ob| @saw_item_that[[super(*a,&b),ob]]=true}
  else
    super(*a,&b) #and then here's another
  end
end

#KW(ident) ⇒ Object



585
# File 'lib/redparse.rb', line 585

def KW(ident); self.class.KW(ident) end

#LEFTObject

just the left side (the stack/lookahead matchers)



1118
1119
1120
1121
1122
1123
# File 'lib/redparse/compile.rb', line 1118

# Collects every matcher from the left (pattern) side of each expanded
# rule, as one flat list.  Also (re)caches the rules in @rules.
def LEFT
  @rules=expanded_RULES()
  lefts=@rules.map{|rule| rule.left.subregs }
  lefts.flatten
end

#LEFT_NO_LOOKINGObject

remove lookahead and lookback decoration (not used?)



1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
# File 'lib/redparse/compile.rb', line 1126

# Like LEFT, but with lookahead/lookback decoration assumed gone:
# any surviving LookAhead/LookBack is a hard failure, and Procs are
# blanked out to empty lists.
def LEFT_NO_LOOKING
  LEFT().map{|matcher|
    case matcher
    when Reg::LookAhead,Reg::LookBack
      fail #should be gone already now
    when Proc
      []
    else
      matcher
    end
  }
end

#left_op_higher(op, op2) ⇒ Object



655
656
657
658
659
660
# File 'lib/redparse.rb', line 655

# True when operator +op+ (on the stack) binds at least as tightly as
# the lookahead operator +op2+; right-associative operators get a tiny
# EPSILON bump so equal precedence defers to the right.  Non-operator
# lookaheads (or unknown precedence) always answer true.
def left_op_higher(op,op2)
  return true unless KeywordToken===op2 or OperatorToken===op2
  rightprec=@precedence[op2.to_s]
  return true unless rightprec
  rightprec+=EPSILON if @RIGHT_ASSOCIATIVE[op2.to_s]
  @precedence[op.to_s]>=rightprec
end

#lower_opObject



667
668
669
670
671
672
673
# File 'lib/redparse.rb', line 667

# Memoized lookahead matcher that fires when the upcoming operator has
# lower precedence than the operator at @stack[-3] (i.e. it is safe to
# reduce now).
def lower_op
  return @lower_op if defined? @lower_op
  matcher=item_that{|op| left_op_higher(@stack[-3],op) }
  matcher=(LOWEST_OP|(~VALUELIKE_LA & matcher)).la
  matcher.extend LowerOp_inspect
  @lower_op=matcher
end

#make_specialized_kw(name, offset) ⇒ Object Also known as: make_kw



588
589
590
591
# File 'lib/redparse.rb', line 588

# Builds the specialized keyword token for +name+ at +offset+.
# Pure-punctuation names are first translated to their symbolic names
# through Punc2name before the class lookup in KW2class.
def make_specialized_kw(name,offset)
  unless /^((?!#{LETTER_DIGIT}).)+$/o===name
    name=Punc2name[name]
  end
  KW2class[name].new(offset)
end

#map_with_index(list) ⇒ Object



295
296
297
298
299
# File 'lib/redparse/compile.rb', line 295

# Like Enumerable#map, but also yields each element's index.
# Returns the array of block results.
def map_with_index(list)
  list.each_with_index.map{|elem,idx| yield(elem,idx) }
end

#new_disabled_reduceObject

HIER=Class::FlattenedHierarchy.new *STACKABLE_CLASSES



347
348
349
350
351
352
# File 'lib/redparse.rb', line 347

# (Disabled variant) Delegates reduction to a lazily-created Reducer
# built from the current rules.
def new_disabled_reduce
  @reducer||=Reducer.new(@rules)
  @reducer.reduce(@stack)
end

#new_state(drs, unruly_also = false) ⇒ Object

def start_state

  goal=ultimate_goal_nodes
  result=all_rules.select{|rule|
    rt=rule.reduces_to and
      !goal.select{|node| node>=rt}.empty?
  }
  result.map!{|rule| DottedRule.create(rule,0,parser)}

  result=ParserState.new result
  result.name="start_state"
  result
end


1499
1500
1501
1502
1503
1504
1505
1506
# File 'lib/redparse/compile.rb', line 1499

# Builds a ParserState for the given dotted rules and canonicalizes it
# through the @states cache, so equivalent states are shared.
# NOTE(review): on a cache hit this returns the cached value, which is
# @states.size at insertion time -- an Integer, not a ParserState.
# Callers such as initial_state set .name on the result, which would
# fail on a hit; confirm whether the cache should store +result+.
# NOTE(review): the unruly_also parameter is accepted but never used.
# NOTE(review): perhaps_also_allow runs before the cache lookup, so it
# is performed even for states that turn out to be cached.
def new_state(drs,unruly_also=false)
  result=ParserState.new drs,@states.size
  result.perhaps_also_allow all_rules,self
  cache=@states[result]
  return cache if cache
  @states[result]=@states.size
  return result
end

#nonterminal(j) ⇒ Object

User actions are associated with reductions, and the code corresponding to a given production is expanded in-place. After the user code, the symbols associated with right-hand side of the production are popped, followed by copying $$ onto the semantic stack. Finally, there is a jump to the code that will compute the appropriate state given the left-hand side symbol of this production.

4.4 Nonterminal Transitions For each nonterminal, code is produced to compute (and jump to) the appropriate state given the current state. This simple switch statement is given below.



259
260
261
262
263
264
265
266
267
268
269
270
271
272
# File 'lib/redparse/generate.rb', line 259

# Emits the C code for one nonterminal transition: a switch over the
# state number on top of the stack that jumps to goto[state, j], per
# the LALR(1) goto table.  On error, returns a diagnostic string
# (which generate_c embeds in the output) built from the exception.
#
# Fix: the rescue clause called bare `backtrace`, an undefined method,
# which raised NameError and masked the original exception; the
# receiver must be the captured exception `e`.
def nonterminal(j)
"
nonterminal_#{str2cname j.name}:  /*nonterminal_#{j.small_int}:*/
  switch (OLDSTACK){   // Top of stack.
    #{
      all_states.map_with_index do|state,k|
        %[  case #{k}: goto state_#{str2cname state.goto[j].name};\n]
      end
    }
  }
"
rescue Exception=>e
  e.backtrace.unshift("exception in node(nonterminal) #{j.name} #{e.class}:#{e}").join("\n")
end

#Op(*args) ⇒ Object



628
# File 'lib/redparse.rb', line 628

def Op(*args); self.class.Op(*args); end

#parseObject



245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
# File 'lib/redparse.rb', line 245

# Parses the token stream from @lexer into a tree of Nodes and returns
# the root node.  Returns a NopNode for empty input.  Raises
# ParseError when the stack cannot be reduced to a single expression,
# or when error nodes remain in the result.  A clean result is stored
# in @write_cache (when configured).  The parser is a straightforward
# shift/reduce loop over @stack, where the last stack entry is always
# implicitly the lookahead token.
def parse

  #hack, so StringToken can know what parser its called from
  #so it can use it to parse inclusions
  oldparser=Thread.current[:$RedParse_parser]
  Thread.current[:$RedParse_parser]||=self

  return @cached_result if defined? @cached_result

  @rules||=expanded_RULES()
#    @inputs||=enumerate_exemplars

  @stack=[StartToken.new, get_token] 
         #last token on @stack is always implicitly the lookahead
  catch(:ParserDone){ loop {
    #try all possible reductions
    next if reduce==true 
    
    #no rule can match current @stack, get another token 
    tok=get_token  or break

    #are we done yet?
    #tok.nil? or EoiToken===tok && [email protected] and break

    #shift our token onto the @stack
    @stack.push tok
  }}

  @stack.size==2 and return result=NopNode.new #handle empty parse string

  #unless the @stack is 3 tokens, 
  #with the last an Eoi, and first a StartToken
  #there was a parse error
  unless @stack.size==3
    pp @stack[-[15,@stack.size].min..-1] if ENV['PRINT_STACK']
    top=MisparsedNode.new("(toplevel)", @stack[1...-1],'')
    raise ParseError.new(top.msg,@stack)
  end
  EoiToken===@stack.last or fail
  StartToken===@stack.first or fail

  result= @stack[1]


  #multiple assignment must be resolved 
  #afterwards by walking the parse tree.
  #(because the relative precedences of = and , 
  #are reversed in multiple assignment.)
#    result.respond_to? :fixup_multiple_assignments! and
#      result=result.fixup_multiple_assignments!

  #relative precedence of = and rescue are also inverted sometimes
#    result.respond_to? :fixup_rescue_assignments! and 
#      result=result.fixup_rescue_assignments!

  #do something with error nodes
  msgs=[]
  result.walk{|parent,i,subi,node|
    if node.respond_to? :error? and node.error?(@rubyversion)
      msgs<< @filename+":"+node.blame.msg
      false
    else
      true
    end
  } if result.respond_to? :walk #hack hack
  result.errors=msgs unless msgs.empty?
  #other types of errors (lexer errors, exceptions in lexer or parser actions)
  #should be handled in the same way, but currently are not
#    puts msgs.join("\n")

=begin
rescue Exception=>e
    input=@lexer
    if Array===input
      puts "error while parsing:"
      pp input
      input=nil
    else
      input=input.original_file
      [email protected]
      input.to_s.size>1000 and input=inputname
      puts "error while parsing: <<<  #{input}  >>>"
    end
  raise
else
=end

  unless msgs.empty?
    pp @stack[-[15,@stack.size].min..-1] if ENV['PRINT_STACK']
    raise RedParse::ParseError.new(msgs.join("\n"),@stack)
  end

#    result=NopNode.new if EoiToken===result
  return result
ensure
  @write_cache.put(@input,result) if @write_cache and result and !result.errors
  @stack=nil
  Thread.current[:$RedParse_parser]=oldparser
end

#pattern_matches_nodes?(p) ⇒ Boolean

Returns:

  • (Boolean)


1359
1360
1361
# File 'lib/redparse/compile.rb', line 1359

# True when at least one exemplar input is a Node matching pattern +p+.
def pattern_matches_nodes? p
  matcher=Node&p
  @inputs.any?{|input| matcher===input }
end

#pattern_matches_tokens?(p) ⇒ Boolean

Returns:

  • (Boolean)


1363
1364
1365
# File 'lib/redparse/compile.rb', line 1363

# True when at least one exemplar input is a Token matching pattern +p+.
def pattern_matches_tokens? p
  matcher=Token&p
  @inputs.any?{|input| matcher===input }
end

#PRECEDENCEObject



424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
# File 'lib/redparse.rb', line 424

# The operator precedence table: maps operator spellings to numeric
# levels, where higher numbers bind more tightly.  Entries such as
# "lhs,", "rhs*", "unary*" and "rescue3" are context-tagged variants
# produced elsewhere in the parser/lexer.
def PRECEDENCE
  levels=[
    #  "(" (method param list) and "{"/"do" (blocks) would sit at 122
    [121, ["::", "."]],
    #   "defined?" was once considered for 120.5
    [120, ["["]],           #[] []= methods
    [119, ["!", "~", "+@"]],
    [118, ["**"]],
    [117, ["-@"]],
    [116, ["*", "/", "%"]],
    [115, ["+", "-"]],
    [114, ["<<", ">>"]],
    [113, ["&"]],
    [112, ["^", "|"]],
    [111, ["<=", ">=", "<", ">"]],
    [110, ["<=>", "==", "===", "!=", "=~", "!~"]],
    [109, ["&&"]],
    [108, ["||"]],
    [107, ["..", "..."]],
    [106, ["?"]],           #ternary; ":" handled separately below
    [105, ["unary&",        #unary * and & operators
           "lhs*",          #this should remain above =
           "lhs,",
           "rescue3"]],
    [104, ["=", "%=", "/=", "-=", "+=",
           "|=", "&=", ">>=", "<<=", "*=",
           "&&=", "||=", "**=", "^="]],
    [103, ["defined?", "not"]],
    [102, [":"]],           #but not when used as a substitute for 'then'
    [101, ["=>"]],
    [100, ["rhs,", ",", "rhs*", "unary*"]],
    #the 'precedence' of comma is somewhat controversial. it actually has
    #several different precedences depending on which kind of comma it is.
    #the precedence of , is higher than :, => and the assignment operators
    #in certain (lhs) contexts. therefore, the precedence of lhs, should
    #really be above =.
    #"unary" prefix function names seen as operators would go here, but
    #rubylexer handles their precedence and outputs fake parens
    #to tell us how its parsed
    [99,  ["or", "and"]],
    [98,  ["if", "unless", "while", "until", "rescue"]],
    [96,  [";"]],
  ]
  table={}
  levels.each{|prec,ops| ops.each{|op| table[op]=prec } }
  table
end

#pretty(x, in_result) ⇒ Object



1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
# File 'lib/redparse/compile.rb', line 1321

# Renders a parse-table action +x+ in a human-readable form for
# dumping/inspection.  +in_result+ maps ParserStates to their display
# representation.  Raises via fail when x is not a recognized action.
#
# Fix: the MultiShift branch called pretty(xx) with a single argument,
# but this method requires two -- an ArgumentError whenever a
# MultiShift action was rendered; in_result is now passed through.
def pretty(x,in_result)
  case x
  when ParserState; in_result[x]
  when MultiReduce
    pairs=x.list.dup
    result=[]
    until pairs.empty?
      cond,act,*pairs=*pairs
      cond = cond.inspect
      result<<[cond,pretty(act.action,in_result)]
    end
    result<<pretty(x.default,in_result)
    result.unshift :MultiReduce
  when MultiShift
    h={}
    mods=x.modifiers
    its=[]
    (0...mods.size).step(2){|i| its<<mods[i] } #every other entry is a matcher
    x.map.each_with_index{|xx,i| h[i]=pretty(xx,in_result) }
    [:MultiShift, its,h]
  when Class; x.name
  when StackMonkey; x.name
  when :accept,:error; x
  else fail "not a valid action: #{x}"
  end
end

#reduce(rule, m) ⇒ Object

try all possible reductions



237
238
239
240
241
242
243
# File 'lib/redparse.rb', line 237

# Tries every grammar rule (highest priority first, hence
# reverse_each) against the current stack; stops at the first rule
# whose evaluation yields a truthy outcome and returns it, otherwise
# returns the last (falsy) evaluation result.
def reduce
  outcome=nil
  @rules.reverse_each do |rule|
    outcome=evaluate(rule)
    break if outcome
  end
  outcome
end

#repl(rule, m) ⇒ Object

The state number is stored in the stack, followed by possibly invoking the lexical analyzer. The three optional lines store the semantic value of the current token, advance the lexical analyzer, and do error-recovery bookkeeping. Incrementing the stack pointer completes the push. The case arms of the switch are determined by the action table computed by the LALR(1) analysis; for each condition met in the comments, a case arm must be generated. Default actions were developed for compressing table-driven parsers, and can be similarly employed here for generating the switch's default [FL88].

4.3 Reduction Actions One piece of code is generated for each production. Its template is given below.



201
202
203
204
205
206
207
208
209
210
211
212
# File 'lib/redparse/generate.rb', line 201

# Emits the C declaration for +rule+'s replacement (the production's
# right side).  Class replacements produce a static VALUE caching the
# node class looked up under the RedParse namespace; :shift/:accept
# need no code (returns nil); StackMonkey and any other replacement
# kinds are not yet implemented (huh).  +m+ is currently unused.
#
# Fix: the emitted C was missing the closing quote after the class
# name (`"#{repl.name});`), producing an unterminated string literal
# in the generated source.
def repl(rule,m)
  repl=rule.replacement
  case repl
  when :shift,:accept #do nothing?
  when Class
    %[static VALUE repl_#{rule.name}=rb_const_lookup(rb_const_lookup(kNIL,"RedParse"),"#{repl.name}");\n]
  when StackMonkey
    huh
  else
    huh
  end
end

#RIGHT_ASSOCIATIVEObject

see pickaxe, 1st ed, page 221



406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
# File 'lib/redparse.rb', line 406

# Operators that group right-to-left, mapped to their precedence
# levels: exponentiation plus all the (op)assignment operators.
# (see pickaxe, 1st ed, page 221)
def RIGHT_ASSOCIATIVE
  table={ "**"=>118 }
  %w[= %= /= -= += |= &= >>= <<= *= &&= ||= **= ^=].each{|op| table[op]=105 }
  table
end

#RULESObject



770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
# File 'lib/redparse.rb', line 770

# The grammar: an ordered list of reduction rules (pattern >> action).
# Each pattern matches the top of the parse stack (with optional
# lookahead .la / lookback .lb decorations); the action is a Node
# class to construct, :accept/:error/:shift, or a stack_monkey /
# delete_monkey that edits the stack in place.  Rule order is
# significant: rules earlier in the list have lower priority (reduce
# tries them in reverse).
def RULES
  lower_op= lower_op()

  [-[StartToken.lb, Expr.-, EoiToken.la]>>:accept,
   -[EoiToken]>>:error,
  ]+

  #these must be the lowest possible priority, and hence first in the rules list
  BEGIN2END.map{|_beg,_end| 
    -[KW(_beg), (KW(_beg)|KW(_end)).~.*, KW(_end), KW(/^(do|\{)$/).~.la]>>MisparsedNode
  }+

  [
  -[UNOP, Expr, lower_op]>>UnOpNode,
  -[DEFOP, ParenedNode]>>UnOpNode,
  -[Op(/^(?:unary|lhs|rhs)\*$/), ValueNode, lower_op]>>UnaryStarNode,

#    -[Op('=',true)|KW(/^(rescue|when|\[)$/)|Op(/,$/,true),
#      Op(/^(?:unary|rhs)\*$/), ValueNode, (MODIFYASSIGNOP|Op('=',true)).la]>>:shift,
#    -[MethNameToken|FUNCLIKE_KEYWORD, KW('('), 
#      Op(/^(?:unary|rhs)\*$/), ValueNode, (MODIFYASSIGNOP|Op('=',true)).la]>>:shift,
  #star should not be used in an lhs if an rhs or param list context is available to eat it.
  #(including param lists for keywords such as return,break,next,rescue,yield,when)

  #hmmm.... | in char classes below looks useless (predates GoalPostToken)
  -[Op(/^(?:unary|lhs)\*$/), (GoalPostToken|Op(/,$/,true)|KW(/^(in|[=)|;])$/)).la]>>DanglingStarNode, #dangling *
  -[Op(/,$/,true), (GoalPostToken|KW(/^(in|[=)|;])$/)).la]>> #dangling ,
    stack_monkey("DanglingComma",1,DanglingCommaNode){|stack| 
      dcomma=DanglingCommaNode.new
      dcomma.offset=stack.last.offset
      stack.push dcomma, stack.pop
    },

  -[Expr, Op|KW_Op, Expr, lower_op]>>RawOpNode,  #most operators
  
  #assignment
  -[Lvalue, MODIFYASSIGNOP, Expr, lower_op]>>AssignNode,
  -[Lvalue, Op('=',true), AssignmentRhsNode, lower_op]>>AssignNode,
  -[AssignmentRhsListStartToken, Expr, AssignmentRhsListEndToken]>>AssignmentRhsNode,

  # a = b rescue c acts like a ternary,,,
  #provided that both a and b are not multiple and b
  #(if it is a parenless callsite) has just 1 param
#    -[Lvalue&~MULTIASSIGN, Op('=',true), AssignmentRhsNode&-{:is_list=>true}, 
#           Op('rescue3'), Expr, lower_op]>>AssignNode,
  -[Lvalue, Op('=',true), AssignmentRhsNode, Op('rescue3'), Expr, lower_op]>>AssignNode,

#    -[Lvalue&~MULTIASSIGN, Op('=',true), AssignmentRhsNode&-{:is_list=>true}, 
#        Op('rescue3',true).la]>>:shift,

#    -[Lvalue&~MULTIASSIGN, Op('=',true), AssignmentRhsNode&-{:is_list=>true}, 
#        RESCUE_OP.la] >>
#        stack_monkey("rescue3",1,Op('rescue3',true)){|stack| 
#          resc=stack.last.dup
#          resc.ident += '3'
#          stack[-1]=resc
#        },
  #relative precedence of = and rescue are to be inverted if rescue
  #is to the right and assignment is not multiple.

  #if assignment rhs contains commas, don't reduce til they've been read
  #(unless we're already on an rhs)
  -[(Op('=',true)|Expr).~.lb, Lvalue, Op('=',true), Expr, RHS_COMMA.la]>>:shift,
  -[RHS_COMMA.lb, Lvalue, Op('=',true), Expr, RHS_COMMA.la ]>>AssignNode,
  -[ValueNode, LHS_COMMA, ValueNode, Op('=',true).la]>>CommaOpNode,
  #relative precedence of = and lhs/rhs , are to be inverted.

  #mark parentheses and unary stars that come after lhs commas
  -[LHS_COMMA, (UnaryStarNode|ParenedNode)&~-{:after_comma =>true}, Op('=',true)]>>
    stack_monkey("after_comma",3,(UnaryStarNode|ParenedNode)&-{:after_comma =>true}){|stack| 
      stack[-3].after_comma=true}, 
             #mebbe this should be a lexer hack?

  -[#(OPERATORLIKE_LB&~(MethNameToken|FUNCLIKE_KEYWORD)).lb, 
    '(', Expr, KW(')')&~(-{:callsite? =>true}|-{:not_real? =>true})]>>ParenedNode,
  -[#(OPERATORLIKE_LB&~(MethNameToken|FUNCLIKE_KEYWORD)).lb, 
    '(', KW(')')&~(-{:callsite? =>true}|-{:not_real? =>true})]>>VarLikeNode, #(), alias for nil

  -[#(OPERATORLIKE_LB&~Op('=',true)).lb, 
    Expr, RESCUE_OP, Expr, lower_op]>>RescueOpNode,

  #dot and double-colon
  -[DoubleColonOp, VarNode,  lower_op]>>ConstantNode,#unary ::
  -[Expr, DotOp, CallNode, lower_op]>>DotCall,      #binary .
  -[Expr, DoubleColonOp, CallNode, lower_op]>>DotCall,    #binary ::
  -[Expr, DoubleColonOp, VarNode, lower_op]>>ConstantNode,#binary ::

  -[Expr, "?", Expr, ":", Expr, lower_op]>>TernaryNode,


  -[MethNameToken, '(', Expr.-, ')', BlockNode.-, KW('do').~.la]>>CallNode,
  -[FUNCLIKE_KEYWORD, '(', Expr.-, ')', BlockNode.-, KW('do').~.la]>>KWCallNode,

  -[ValueNode, Op(/,$/,true), ValueNode, lower_op]>>CommaOpNode,

  -[(OPERATORLIKE_LB&dont_postpone_semi).lb, 
    Expr, ';', Expr, lower_op]>>SequenceNode,


  -[#(OPERATORLIKE_LB&~KW(')')).lb, 
    '{', (CommaOpNode|ArrowOpNode).-, '}']>>HashLiteralNode, #-40

  -[KW(')').lb, 'do', BlockFormalsNode.-, Expr.-, 'end']>>BlockNode,
  #this does {} as well... converted to do...end
  #rubylexer handles the 'low precedence' of do...end

  -[GoalPostToken, Expr.-, GoalPostToken]>>BlockFormalsNode,
  #rubylexer disambiguated operator vs keyword '|'

  -[/^(while|until)$/, Expr, /^([:;]|do)$/, Expr.-, 'end']>>LoopNode,

  -[/^(if|unless)$/, Expr, /^(;|then|:)$/, 
    Expr.-, ElsifNode.*, ElseNode.-, 'end'
   ]>>IfNode,

  -['else', Expr.-, KW(/^(ensure|end)$/).la]>>ElseNode,

  -['elsif', Expr, /^(;|then|:)$/, Expr.-,
    KW(/^(end|else|elsif)$/).la
   ]>>ElsifNode,

#     -['module', ConstantNode|VarNode, KW(/^(;|::)$/).~.la]>>
#       stack_monkey(1,KW(';')){|stack| #insert ; at end of module header if none was present
#         stack.push KeywordToken.new(';'), stack.pop
#       },
  -['module', ConstantNode|VarNode, ';', RESCUE_BODY, 'end']>>ModuleNode,
  -['class', Expr, ';', RESCUE_BODY, 'end']>>ClassNode,
  -['class', Expr, Op('<'), Expr, KW(';').~.la]>>:shift,
  -['class', Op('<<'), Expr, ';', RESCUE_BODY, 'end']>>MetaClassNode,  #-30

  -['alias', BareMethod|VarNode, BareMethod|VarNode]>>AliasNode,
  -['undef', BareMethod]>>UndefNode,
  -[UndefNode, Op(',',true), BareMethod]>>UndefNode,

  -['def', CallSiteNode, Op('=').-, KW(';'), RESCUE_BODY,
#        Expr.-, RescueNode.*, ElseNode.-, EnsureNode.-, 
    'end'
  ]>>MethodNode,

  -['begin', RESCUE_BODY,
 #       Expr.-, RescueNode.*, ElseNode.-, EnsureNode.-, 
    'end'
  ]>>BeginNode,

  -[Op('=',true), BEGINAFTEREQUALS, RESCUE_OP.la]>>
    stack_monkey("begin after equals",2,BEGINAFTEREQUALS_MARKED){ |stack| stack[-2].after_equals=true }, 
  #this is bs. all for an extra :begin in the parsetree

  -[(KW(/^(;|begin)$/)|RescueNode).lb, #ParenedNode|RescueOpNode|BeginNode used to be here too
    RESCUE_KW, KW('=>').-, Expr.-, /^([:;]|then)$/,
  ]>>RescueHeaderNode,
  -[ RescueHeaderNode, Expr.-, KW(';').-, (KW(/^(else|ensure|end)$/)|RESCUE_KW).la
  ]>>RescueNode,

  -['ensure', Expr.-, KW('end').la]>>EnsureNode,

  -['[', Expr.-, ']']>>ArrayLiteralNode, #-20

  -[Expr, '[', Expr.-, ']']>>BracketsGetNode,

  -[HereDocNode, StringToken+1, StringToken.~.la]>>StringCatNode,  
  -[(OPERATORLIKE_LB&~(StringToken|HereDocNode)).lb, StringToken+2, StringToken.~.la]>>StringCatNode,  
  -[(OPERATORLIKE_LB&~(StringToken|HereDocNode)).lb, StringToken, StringToken.~.la]>>StringNode,  
    #includes regexp, wordlist, backquotes

  -['case', Expr.-, KW(';').-, WhenNode.*, ElseNode.-, 'end']>>CaseNode,

  -['when', Expr, /^([:;]|then)$/, Expr.-, 
   KW(/^(when|else|end)$/).la
  ]>>WhenNode,            

  -['for', Expr, 'in', Expr, /^([:;]|do)$/, Expr.-, 'end']>>ForNode,

  #semicolon cleanup....
  -[(OPERATORLIKE_LB&dont_postpone_semi).lb,Expr, ';', IGN_SEMI_BEFORE.la] \
                                                   >>delete_monkey(2,"semi_cleanup_before_ISB"),
  -[Expr, ';', KW('then').la]                      >>delete_monkey(2,"semi_cleanup_before_then"),
  -[dont_postpone_semi.lb, Expr, ';', RescueNode]  >>delete_monkey(3,"semi_cleanup_before_rescue"),   #-10
  -[IGN_SEMI_AFTER.lb, ';']                        >>delete_monkey(2,"semi_cleanup_after_oplike"),
  -[(StartToken|RescueHeaderNode).lb, ';' ]        >>delete_monkey(2,"semi_cleanup_after_rescue"),
   #this rule is somewhat more forgiving than matz' parser...
   #not all semicolons after :, (, and { keywords should 
   #be ignored. some should cause syntax errors.

 
  #comma cleanup....
  -[Op(/,$/,true), KW(/^([}\]])$/).la]             >>delete_monkey(2, "comma_cleanup"),
  #likewise, this is somewhat too forgiving.
  #some commas before } or ] should cause syntax errors

  #turn lvalues into rvalues if not followed by an assignop
  -[-{:lvalue =>true}, (Op('=',true)|MODIFYASSIGNOP|LHS_COMMA).~.la]>>
    stack_monkey("lval2rval",2,-{:lvalue =>nil}){|stack| 
       stack[-2].lvalue=nil
    },
  
  #expand the = into a separate token in calls to settors (after . or ::).
  #but not in method headers
  -[(OPERATORLIKE_LB&~KW('def')).lb, Expr, DotOp|DoubleColonOp, 
    (MethNameToken&-{:has_equals=>true}).la]>>
    stack_monkey("expand_equals",1,CallNode){|stack| 
      methname=stack.pop
      methname.ident.chomp!('=')
      offset=methname.offset+methname.ident.size
      stack.push(
        CallNode.new(methname,nil,nil,nil,nil),
        OperatorToken.new('=',offset)
      )
    },

 -[NumberToken|SymbolToken]>>LiteralNode,

 #lexer does the wrong thing with -22**44.5, making the - part
 #of the first number token. it's actually lower precedence than
 #**... this rule fixes that problem.
 #in theory, unary - is lower precedence than ., ::, and [] as well, but
 #that appears not to apply to unary - in numeric tokens
 -[NumberToken&-{:negative=>true}, Op('**').la]>>
    stack_monkey("fix_neg_exp",2,Op("-@",true)){|stack|
      #neg_op.unary=true
      num=stack[-2]
      op=OperatorToken.new("-@",num.offset)
#        op.startline=num.startline
      stack[-2,0]=op
      num.ident.sub!(/\A-/,'')
      num.offset+=1
    },
 
 #treat these keywords like (rvalue) variables.
 -[RubyLexer::VARLIKE_KEYWORDS]>>VarLikeNode,

 #here docs
 -[HerePlaceholderToken]>>HereDocNode,
 -[HereBodyToken.la]>>delete_monkey(1,"delete_here_body"),
 ##this is ridiculous. this should be a lexer hack?

 -[VarNameToken]>>VarNode,


]
end

#sc_juice(m) ⇒ Object

def juice(m)

  case m  #
  when Class
    return [m] unless @subclasses_of
    result=[m]  # and subclasses too
    i=0
    while item=result[i]
      p item
      result.concat @subclasses_of[item] rescue nil
      i += 1
    end
    result
  when String,Regexp; juice(RedParse.KW(m))
  when Reg::And; m.subregs.map{|x| juice(x).flatten.compact}.inject{|sum,rr| sum&rr}
  when Reg::Or; m.subregs.map &method(:juice)
  when Reg::Not
    m=m.subregs[0]
    if Class===m or (Reg::Or===m  and
         m.subregs.find{|x| Class===x })
      juice(m)
    else []
    end
  else []
  end
end


1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
# File 'lib/redparse/compile.rb', line 1201

# Maps a rule-pattern matcher to the list of stackable classes it can
# match: a Class maps to itself, keyword spellings (String/Regexp) to
# KeywordToken, and composite Reg matchers recurse into their
# sub-matchers; anything else contributes nothing.
def sc_juice(m)
  case m
  when Class
    [m]
  when String, Regexp
    [KeywordToken]
  when Reg::And
    juiced=m.subregs.map{|sub| sc_juice(sub) }
    juiced.compact.map{|sub| sub.flatten.compact }.inject{|sum,rr| sum&rr }
  when Reg::Or
    m.subregs.map(&method(:sc_juice))
  when Reg::Not, Reg::LookAhead, Reg::LookBack, Reg::Repeat
    sc_juice(m.subregs[0])
  else
    []
  end
end

#stack_monkey(*args, &block) ⇒ Object



101
# File 'lib/redparse.rb', line 101

# Convenience constructor: wrap an imperative parse-stack fixup (the block)
# in a StackMonkey, forwarding all arguments unchanged.
def stack_monkey(*args,&block)
  StackMonkey.new(*args,&block)
end

#STACKABLE_CLASSESObject

All classes mentioned in the grammar rules, on both the left and right sides.



1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
# File 'lib/redparse/compile.rb', line 1159

# All classes mentioned in the grammar rules, on both the left and right
# sides.  Result is memoized in @sc_result; the subclass table
# (@subclasses_of) exists only for the duration of the computation.
def STACKABLE_CLASSES #
    return @sc_result if defined? @sc_result
    @sc_result=[]
    @subclasses_of=child_relations_among(*vertices)
    left=LEFT().map{|matcher| sc_juice matcher}.flatten.compact
    assert left.grep(nil).empty?
    right=@rules.map{|rule| rule.right }.grep(Class) #classes in productions
    @subclasses_of=nil
    @sc_result.replace((left+right).grep(Class).uniq)
    fail if @sc_result.empty?
    return @sc_result
end

#start_stateObject



1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
# File 'lib/redparse/compile.rb', line 1519

# Build the automaton's start state: evolve the initial state across a
# StartToken, then widen it with any rules that might also apply, and
# label it "start".
def start_state
  seen={}
  seen.default=:dunno_yet
  state=initial_state.evolve StartToken.new, self, seen
  state.perhaps_also_allow all_rules, self
  state.name="start"
  state
end

#state(state_n, n) ⇒ Object

4.2 Hard-coded States. For each automaton state, mule creates code responsible for simulating the action of that state based on the current input token. All transitions into a given state are labeled with the same grammar symbol. States labeled with a token are called shift states, and they require extra code to advance the lexical analyzer. The template of this code for state N is:



162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
# File 'lib/redparse/generate.rb', line 162

# Emit the C source simulating parser state +state_n+ (small integer id
# +n+).  Every transition into a state is labeled with one grammar symbol;
# token-labeled ("shift") states get a GET_TOKEN() prologue to advance the
# lexer before dispatching on the lookahead token identity.
# On failure, returns a diagnostic string instead of raising.
def state(state_n,n)
#n=state_n.small_int
name=state_n.name
"
shift_state_#{name}: 
  GET_TOKEN();  /*modifies token, la_token*/
state_#{name}:  /*state_#{n}:*/
  STACK = #{n};

  RESERVE_STACK_SLOT();
state_action_#{name}: /* Error-recovery entry point.*/
/*state_action_#{n}:*/
  switch (la_identity){
    #{state_n.actions.map do |tok,action|
        %[  case #{str2cname(tok)}: #{action2c action}]
      end.join(%[\n])
    }
    default: #{action2c state_n.actions.default}
  }
"
rescue StandardError=>e
  # was: bare `backtrace.unshift(...)`, a NameError that clobbered the real
  # exception -- the exception's own backtrace was intended.  Also rescue
  # StandardError rather than Exception (don't swallow SystemExit/signals).
  e.backtrace.unshift("exception in state #{name} #{e.class}:#{e}").join("\n")
end

#state_utilsObject



110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
# File 'lib/redparse/generate.rb', line 110

# Returns the C preprocessor boilerplate shared by every generated parser
# state: stack growth (RESERVE_STACK_SLOT doubles the stack via realloc),
# lexer advance (GET_TOKEN pushes the lookahead's semantic value and
# re-fills la_token/la_identity), and the STACK/SEMANTIC_STACK accessor
# macros.  The string is emitted verbatim into the generated C source, so
# it must not be reformatted.  NOTE(review): MALLOC_ERROR() expands to the
# placeholder `huh` -- presumably unfinished error handling; confirm.
def state_utils
"
#define MALLOC_ERROR() huh
#define RESERVE_STACK_SLOT() \\
  if (++i >= stack_size){ \\
    unsigned new_stack_size=stack_size*2; \\
    stack_start=realloc(stack_start,sizeof(StackType)*new_stack_size); \\
    if (stack_start==NULL) MALLOC_ERROR(); \\
    //semantic_stack_start=realloc(semantic_stack_start,sizeof(SemanticStackType)*new_stack_size); \\
    //if (semantic_stack_start==NULL) MALLOC_ERROR(); \\
    stack_size=new_stack_size; \\
  }

#define GET_TOKEN() \\
  do { \\
    SEMANTIC_STACK_SET(la_token); /*Put lexical semantic entry on stack.*/ \\
    la_identity = yylex(&la_token); /* Advance lexical analysis.*/ \\
    yyerrorstatus++; /* Update error-recovery counter.*/ \\
  } while(0)

#define STACK stack_start[i]
#define SEMANTIC_STACK rb_ary_get(semantic_stack,rb_int2fixnum(i))
#define SEMANTIC_STACK_SET(x) rb_ary_set(semantic_stack,rb_int2fixnum(i),x)
#define OLDSTACK stack_start[i-1]
"
end

#str2cname(str) ⇒ Object



364
# File 'lib/redparse/generate.rb', line 364

# Instance-side shim: turn an arbitrary token string into a valid C
# identifier by delegating to RedParse.str2cname.
def str2cname(str)
  RedParse.str2cname(str)
end

#ultimate_goal_nodesObject



1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
# File 'lib/redparse/compile.rb', line 1472

# The node classes that can be the final result of a parse: for every rule
# shaped StartToken <middle> EoiToken, juice the middle pattern.
#
# BUG FIX: the guard read `rule.patterns.size==0 and ...`, which is
# contradictory (an empty pattern list has nil first/last), so the method
# always returned [].  A Start...Eoi rule has at least three pattern
# elements, hence size>=3.
def ultimate_goal_nodes
  result=[]
  all_rules.each{|rule|
    if rule.patterns.size>=3 and
       rule.patterns.first==StartToken and
       rule.patterns.last==EoiToken
      result << juice(rule.patterns[1])
    end
  }
  result.flatten!
  return result
end

#undumpablesObject



1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
# File 'lib/redparse/compile.rb', line 1543

# Collect the objects reachable from the grammar rules that Marshal cannot
# dump: StackMonkeys are registered under their name, Reg::Deferred procs
# under a running integer which is also stamped onto the proc itself as
# +undump_key+ (via a per-object singleton accessor).  Memoized in
# @undumpables.
def undumpables
  return @undumpables if @undumpables
  @rules||=expanded_RULES
  n=-1
  @undumpables={}
  # NOTE(review): the leading ! negates the case result, so the block
  # yields true for objects that did NOT match either branch -- presumably
  # that tells abortable_graphwalk to keep descending; confirm against its
  # definition.
  abortable_graphwalk(@rules){|cntr,o,i,ty|
    !case o
     when StackMonkey
       @undumpables[o.name]=o
     when Reg::Deferred
       @undumpables[n+=1]=o
       # give this one proc an undump_key accessor without polluting Proc
       class<<o
         attr_accessor :undump_key
       end
       o.undump_key=n
     end
  }
end

#unget_token(token) ⇒ Object



1134
1135
1136
# File 'lib/redparse.rb', line 1134

# Push a single token back onto the front of the pending-token queue, so
# it is the next token the parser sees.
def unget_token(tok)
  @moretokens.unshift(tok)
end

#unget_tokens(*tokens) ⇒ Object



1130
1131
1132
# File 'lib/redparse.rb', line 1130

# Push several tokens back at once; they will be re-read in the order
# given, ahead of everything already pending.
def unget_tokens(*toks)
  @moretokens = toks + @moretokens
end

#unruly_rulesObject



1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
# File 'lib/redparse/compile.rb', line 1214

# The subset of grammar rules flagged unruly? (needing special handling by
# the table compiler).  Memoized in @unruly_rules.
#
# FIX: dropped the `p :unruly_rules` / `pp ...` debug prints that spammed
# stdout on the first call.
def unruly_rules
  return @unruly_rules if defined? @unruly_rules

  @unruly_rules=
    all_rules.select{|rule| rule.unruly? }

  @unruly_rules
end

#verticesObject



385
# File 'lib/redparse.rb', line 385

# Every Node and Token class defined as a constant on this parser class --
# the vertices of the grammar graph.
def vertices
  self.class.constants.grep(Node|Token)
end

#wants_semi_contextObject



691
692
693
# File 'lib/redparse.rb', line 691

# Matcher for tokens after which a newline must NOT end the statement:
# the <<, =>, ., and :: operators, plus every WANTS_SEMI keyword.
def wants_semi_context
  kw_alternation=WANTS_SEMI.map{|ws| Regexp.quote ws }.join('|')
  Op(/^(<<|=>|\.|::)$/) | KW(/^(#{kw_alternation})$/)
end