Module: Regexp::Lexer

Defined in:
lib/regexp_parser/lexer.rb

Overview

A very thin wrapper around the scanner that breaks quantified literal runs, collects emitted tokens into an array, calculates their nesting depth, normalizes tokens for the parser, and checks that they are implemented by the given syntax flavor.

Constant Summary collapse

# Token types that open a new group and therefore increase nesting depth.
OPENING_TOKENS = %i[
  capture options passive atomic named
  lookahead nlookahead lookbehind nlookbehind
].freeze

# Token types that close a group and therefore decrease nesting depth.
CLOSING_TOKENS = %i[close].freeze

Class Method Summary collapse

Class Method Details

.lex(input, syntax = "ruby/#{RUBY_VERSION}", &block) ⇒ Object Also known as: scan



13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# File 'lib/regexp_parser/lexer.rb', line 13

# Scans +input+ with Regexp::Scanner, normalizing each emitted token
# against the given +syntax+ flavor (a "name/version" string, defaulting
# to the running Ruby) and checking that the flavor implements it.
#
# Tokens are wrapped as Regexp::Token instances annotated with the
# current group, set, and conditional nesting depths, linked to their
# neighbors via #next/#previous, and collected into an array.
# Adjacent literals are merged, literal runs followed by a quantifier
# are broken apart first, and conditional condition tokens are folded
# into their predecessor.
#
# Returns the token array; if a block is given, returns the result of
# mapping each token through the block instead.
#
# NOTE(review): state lives in module-level ivars (@tokens, @nesting, …)
# shared with the ascend/descend/break_literal/merge_* helpers, so this
# is not re-entrant — presumably single-threaded use is assumed.
def self.lex(input, syntax = "ruby/#{RUBY_VERSION}", &block)
  syntax = Regexp::Syntax.new(syntax)

  @tokens = []
  @nesting = 0
  @set_nesting = 0
  @conditional_nesting = 0

  last = nil
  Regexp::Scanner.scan(input) do |type, token, text, ts, te|
    type, token = *syntax.normalize(type, token)
    syntax.check! type, token

    ascend(type, token)

    # A quantifier applies only to the final character of a literal run,
    # so split the preceding literal before emitting the quantifier.
    if type == :quantifier && last && last.type == :literal
      break_literal(last)
    end

    current = Regexp::Token.new(type, token, text, ts, te,
              @nesting, @set_nesting, @conditional_nesting)

    # Consecutive literals collapse into a single token.
    if type == :literal && last && last.type == :literal
      current = merge_literal(current)
    end

    # Conditional condition tokens are folded into their predecessor.
    if type == :conditional && [:condition, :condition_close].include?(token)
      current = merge_condition(current)
    end

    if last
      last.next(current)
      current.previous(last)
    end

    @tokens << current
    last = current

    descend(type, token)
  end

  block_given? ? @tokens.map { |token| block.call(token) } : @tokens
end