Class: Puppet::Parser::Lexer::TokenList

Inherits:
Object
Extended by:
Forwardable
Defined in:
lib/puppet/parser/lexer.rb

Overview

Maintain a list of tokens.

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize ⇒ TokenList

Returns a new instance of TokenList.



98
99
100
101
102
103
# File 'lib/puppet/parser/lexer.rb', line 98

def initialize
  @tokens = {}
  @regex_tokens = []
  @string_tokens = []
  @tokens_by_string = {}
end

Instance Attribute Details

#regex_tokens ⇒ Object (readonly)



78
79
80
# File 'lib/puppet/parser/lexer.rb', line 78

def regex_tokens
  @regex_tokens
end

#string_tokens ⇒ Object (readonly)



78
79
80
# File 'lib/puppet/parser/lexer.rb', line 78

def string_tokens
  @string_tokens
end

Instance Method Details

#add_token(name, regex, options = {}, &block) ⇒ Object

Create a new token.

Raises:

  • (ArgumentError)


82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
# File 'lib/puppet/parser/lexer.rb', line 82

def add_token(name, regex, options = {}, &block)
  raise(ArgumentError, "Token #{name} already exists") if @tokens.include?(name)
  token = Token.new(regex, name, options)
  @tokens[token.name] = token
  if token.string
    @string_tokens << token
    @tokens_by_string[token.string] = token
  else
    @regex_tokens << token
  end

  token.meta_def(:convert, &block) if block_given?

  token
end

#add_tokens(hash) ⇒ Object

Define more tokens.



111
112
113
114
115
# File 'lib/puppet/parser/lexer.rb', line 111

def add_tokens(hash)
  hash.each do |regex, name|
    add_token(name, regex)
  end
end

#each ⇒ Object

Yield each token name and value in turn.



124
125
126
# File 'lib/puppet/parser/lexer.rb', line 124

def each
  @tokens.each {|name, value| yield name, value }
end

#lookup(string) ⇒ Object

Look up a token by its value, rather than name.



106
107
108
# File 'lib/puppet/parser/lexer.rb', line 106

def lookup(string)
  @tokens_by_string[string]
end

#sort_tokens ⇒ Object

Sort our tokens by length, so we know once we match, we’re done. This helps us avoid the O(n^2) nature of token matching.



119
120
121
# File 'lib/puppet/parser/lexer.rb', line 119

def sort_tokens
  @string_tokens.sort! { |a, b| b.string.length <=> a.string.length }
end