Class: GenericLexer
- Inherits: Object
- Hierarchy: Object → GenericLexer
- Defined in:
- lib/crokus/generic_lexer.rb
Direct Known Subclasses
Instance Method Summary collapse
- #get_token ⇒ Object
- #ignore(pattern) ⇒ Object
-
#initialize ⇒ GenericLexer
constructor
A new instance of GenericLexer.
- #keyword(str) ⇒ Object
- #next_token ⇒ Object
- #open(code) ⇒ Object
- #position ⇒ Object
- #token(h_token_pattern) ⇒ Object
- #tokenize(code) ⇒ Object
Constructor Details
#initialize ⇒ GenericLexer
Returns a new instance of GenericLexer.
7 8 9 10 |
# Builds a lexer preloaded with a single rule that recognizes a
# newline character as a :newline token. Further rules are added
# through #token, #keyword and #ignore.
def initialize
  @rules = [[:newline, /[\n]/]]
end
Instance Method Details
#get_token ⇒ Object
36 37 38 39 40 41 42 43 |
# Scans the input head against every registered rule, in order.
# Returns a Token built from [kind, matched_text, [line, col]] for the
# first rule whose pattern matches; raises with the current line and a
# 10-character preview of the unmatched input otherwise.
def get_token
  linecol = position()
  @rules.each do |kind, pattern|
    text = @ssc.scan(pattern)
    next if text.nil?
    return Token.new([kind, text, linecol])
  end
  raise "lexing error line #{linecol.first} around '#{@ssc.peek(10)}' "
end
#ignore(pattern) ⇒ Object
12 13 14 |
# Registers +pattern+ as input to be silently discarded: matches are
# tagged :skip and filtered out by #next_token.
def ignore(pattern)
  @rules.push([:skip, pattern])
end
#keyword(str) ⇒ Object
16 17 18 |
# Registers +str+ as a reserved word. The rule is prepended so keywords
# win over any more general identifier rule added via #token; the \b
# anchor stops "if" from matching inside "iffy".
def keyword(str)
  rule = [str.to_sym, /#{str}\b/]
  @rules.unshift(rule)
end
#next_token ⇒ Object
30 31 32 33 34 |
# Returns the next significant token from the input, or the sentinel
# triple [nil, nil, nil] once the scanner is exhausted.
#
# Tokens whose kind is :skip (patterns registered via #ignore) are
# discarded. The original implementation recursed once per skipped
# token, so a long run of ignorable input (whitespace, comments) could
# raise SystemStackError; a loop is bounded only by input length.
# StringScanner#empty? is a deprecated alias of #eos?, so the canonical
# name is used here — behavior is identical.
def next_token
  loop do
    return [nil, nil, nil] if @ssc.eos?
    tok = get_token
    return tok unless tok.is? :skip
  end
end
#open(code) ⇒ Object
25 26 27 28 |
# Prepares the lexer to read +code+: resets the line counter and wraps
# the source in a fresh StringScanner. Note this shadows Kernel#open
# for instances of this class.
def open(code)
  @line = 0
  @ssc = StringScanner.new(code)
end
#position ⇒ Object
45 46 47 48 49 50 51 |
# Reports the current scanner position as [line, column] (both
# 1-based). When the scanner sits at the beginning of a line, the line
# counter is advanced and the line's starting offset is recorded so
# later calls on the same line can derive the column from it.
def position
  if @ssc.bol?
    @line += 1
    @old_pos = @ssc.pos # offset where the current line starts
  end
  column = @ssc.pos - @old_pos + 1
  [@line, column]
end
#token(h_token_pattern) ⇒ Object
20 21 22 23 |
# Registers a token rule from a one-entry hash of the form
# {kind => pattern}, e.g. token(ident: /[a-z]+/). Only the first
# entry is used; the rule is appended after existing rules.
def token(h_token_pattern)
  name, pattern = h_token_pattern.first
  @rules << [name, pattern]
end
#tokenize(code) ⇒ Object
53 54 55 56 57 58 |
# Lexes the whole of +code+ in one pass: initializes the scanner via
# #open, then collects tokens from #next_token until the input is
# consumed. Returns the accumulated token list.
def tokenize(code)
  open(code)
  tokens = []
  until @ssc.eos?
    tokens << next_token()
  end
  tokens
end