Class: Gitlab::Ci::Pipeline::Expression::Lexer

Inherits:
Object
  • Object
show all
Includes:
Utils::StrongMemoize
Defined in:
lib/gitlab/ci/pipeline/expression/lexer.rb

Constant Summary collapse

SyntaxError =
Class.new(Expression::ExpressionError)
LEXEMES =
[
  Expression::Lexeme::ParenthesisOpen,
  Expression::Lexeme::ParenthesisClose,
  Expression::Lexeme::Variable,
  Expression::Lexeme::String,
  Expression::Lexeme::Pattern,
  Expression::Lexeme::Null,
  Expression::Lexeme::Equals,
  Expression::Lexeme::Matches,
  Expression::Lexeme::NotEquals,
  Expression::Lexeme::NotMatches,
  Expression::Lexeme::And,
  Expression::Lexeme::Or
].freeze
LEGACY_LEXEMES =

To be removed together with the `ci_if_parenthesis_enabled` feature flag.

[
  Expression::Lexeme::Variable,
  Expression::Lexeme::String,
  Expression::Lexeme::Pattern,
  Expression::Lexeme::Null,
  Expression::Lexeme::Equals,
  Expression::Lexeme::Matches,
  Expression::Lexeme::NotEquals,
  Expression::Lexeme::NotMatches,
  Expression::Lexeme::And,
  Expression::Lexeme::Or
].freeze
MAX_TOKENS =
100

Class Method Summary collapse

Instance Method Summary collapse

Methods included from Utils::StrongMemoize

#clear_memoization, #strong_memoize, #strong_memoized?

Constructor Details

#initialize(statement, max_tokens: MAX_TOKENS) ⇒ Lexer

Returns a new instance of Lexer.


51
52
53
54
# File 'lib/gitlab/ci/pipeline/expression/lexer.rb', line 51

# Builds a lexer over +statement+.
#
# @param statement [String] raw expression text to be scanned
# @param max_tokens [Integer] cap on tokens produced (defaults to MAX_TOKENS)
def initialize(statement, max_tokens: MAX_TOKENS)
  # Cap guards against pathological inputs; scanner walks the raw text.
  @max_tokens = max_tokens
  @scanner = StringScanner.new(statement)
end

Class Method Details

.lexemes ⇒ Object


41
42
43
44
45
46
47
# File 'lib/gitlab/ci/pipeline/expression/lexer.rb', line 41

# Selects the active lexeme set: the full set (with parenthesis support)
# when the `ci_if_parenthesis_enabled` feature flag is on, otherwise the
# legacy set.
def self.lexemes
  ::Gitlab::Ci::Features.ci_if_parenthesis_enabled? ? LEXEMES : LEGACY_LEXEMES
end

Instance Method Details

#lexemes ⇒ Object


60
61
62
# File 'lib/gitlab/ci/pipeline/expression/lexer.rb', line 60

# Maps every scanned token to its lexeme representation.
def lexemes
  tokens.map { |token| token.to_lexeme }
end

#tokens ⇒ Object


56
57
58
# File 'lib/gitlab/ci/pipeline/expression/lexer.rb', line 56

# Tokenizes the statement once and memoizes the result for later calls.
def tokens
  strong_memoize(:tokens) do
    tokenize
  end
end