Class: Twig::Lexer
- Inherits: Object
- Class hierarchy: Object → Twig::Lexer
- Defined in:
- lib/twig/lexer.rb
Constant Summary collapse
- TAG_COMMENT =
%w[{# #}].freeze
- TAG_BLOCK =
%w[{% %}].freeze
- TAG_VARIABLE =
%w[{{ }}].freeze
- WHITESPACE_TRIM =
'-'
- WHITESPACE_LINE_TRIM =
'~'
- WHITESPACE_LINE_CHARS =
" \t\0\x0B"
- INTERPOLATION =
%w[#{ }].freeze
- OPENING_BRACKET =
'([{'.chars
- CLOSING_BRACKET =
')]}'.chars
- PUNCTUATION =
OPENING_BRACKET + CLOSING_BRACKET + '?:.,|'.chars
- REGEX_LNUM =
/[0-9]+(_[0-9]+)*/
- REGEX_FRAC =
/\.#{REGEX_LNUM}/
- REGEX_EXPONENT =
/[eE][+-]?#{REGEX_LNUM}/
- REGEX_DNUM =
/#{REGEX_LNUM}(?:#{REGEX_FRAC})?/
- REGEX_NAME =
/[a-zA-Z_\u{007f}-\u{00ff}][a-zA-Z0-9_\u{007f}-\u{00ff}]*/u
- REGEX_SYMBOL =
/:#{REGEX_NAME}/
- REGEX_CVAR =
/@#{REGEX_NAME}/
- REGEX_STRING =
/\G"([^#"\\]*(?:\\.[^#"\\]*)*)"|'([^'\\]*(?:\\.[^'\\]*)*)'/mu
- REGEX_DQ_STRING_PART =
/\G[^#"\\]*(?:(?:\.|#(?!\{))[^#"\\]*)*/mu
- REGEX_INLINE_COMMENT =
/#[^\n]*/
- REGEX_DQ_STRING_DELIM =
/\G"/
- REGEX_INTERP_START =
/\G#\{[[:space:]]*/
- REGEX_INTERP_END =
/\G[[:space:]]*\}/
- REGEX_NUMBER =
/\G(?:#{REGEX_DNUM}(?:#{REGEX_EXPONENT})?)/x
- STATE_DATA =
0
- STATE_BLOCK =
1
- STATE_VAR =
2
- STATE_STRING =
3
- STATE_INTERPOLATION =
4
- SPECIAL_CHARS =
{ 'f' => "\f", 'n' => "\n", 'r' => "\r", 't' => "\t", 'v' => "\v", }.freeze
Instance Method Summary collapse
- #initialize(environment) ⇒ Lexer (constructor)
  Returns a new instance of Lexer.
- #tokenize(source) ⇒ Object
Constructor Details
#initialize(environment) ⇒ Lexer
Returns a new instance of Lexer.
# File 'lib/twig/lexer.rb', lines 47–49

def initialize(environment)
  @environment = environment
end
Instance Method Details
#tokenize(source) ⇒ Object
# File 'lib/twig/lexer.rb', lines 52–85

def tokenize(source)
  @source = source
  @code = source.code.tr("\r\n", "\n")
  @cursor = 0
  @lineno = 1
  @end = @code.length
  @tokens = []
  @state = STATE_DATA
  @states = []
  @brackets = []
  @position = -1
  @positions = @code.to_enum(:scan, lex_tokens_start).map { Regexp.last_match }

  while @cursor < @end
    case @state
    when STATE_DATA
      lex_data
    when STATE_BLOCK
      lex_block
    when STATE_VAR
      lex_var
    when STATE_STRING
      lex_string
    when STATE_INTERPOLATION
      lex_interpolation
    else
      raise "Unknown state: #{@state}"
    end
  end

  push_token(Token::EOF_TYPE)
  TokenStream.new(@tokens, @source)
end