Module: Sparkql::ParserTools

Included in:
Parser
Defined in:
lib/sparkql/parser_tools.rb

Overview

This module contains the core internals of the parser and is mixed into the parser class for organization.
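
Conceptually, the generated parser class simply pulls these helpers in as a mixin (a minimal sketch; the actual include lives in the racc-generated Sparkql::Parser):

require 'sparkql'

class Sparkql::Parser
  include Sparkql::ParserTools
end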

Constant Summary

DATE_TYPES =
  # Coercible types, from highest precision to lowest
  [:datetime, :date]

NUMBER_TYPES =
  [:decimal, :integer]

Instance Method Summary

Instance Method Details

#coercible_types(type1, type2) ⇒ Object

If both types support coercion with each other, selects the highest-precision of the two to represent both. Returns nil when either type does not support coercion with the other.



# File 'lib/sparkql/parser_tools.rb', line 199

def coercible_types(type1, type2)
  if DATE_TYPES.include?(type1) && DATE_TYPES.include?(type2)
    DATE_TYPES.first
  elsif NUMBER_TYPES.include?(type1) && NUMBER_TYPES.include?(type2)
    NUMBER_TYPES.first
  else
    nil
  end
end
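
For example (an illustrative session; these helpers are public once mixed into the parser):

require 'sparkql'

parser = Sparkql::Parser.new
parser.coercible_types(:datetime, :date)   # => :datetime (highest precision)
parser.coercible_types(:integer, :decimal) # => :decimal
parser.coercible_types(:date, :decimal)    # => nil (not coercible)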

#next_token ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 17

def next_token
  t = @lexer.shift
  # Skip whitespace tokens; the grammar only sees significant tokens
  while t[0] == :SPACE || t[0] == :NEWLINE
    t = @lexer.shift
  end
  t
end
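
Each token from the lexer is a two-element array of [type, value]. Illustratively (token type names here are hypothetical):

# @lexer.shift might yield, in sequence:
#   [:IDENTIFIER, "Baths"], [:SPACE, " "], [:OPERATOR, "Eq"], ...
# next_token skips every [:SPACE, ...] and [:NEWLINE, ...] entry.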

#on_error(error_token_id, error_value, value_stack) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 149

def on_error(error_token_id, error_value, value_stack)
  # Racc error hook: report the offending token as a fatal syntax error
  token_name = token_to_str(error_token_id)
  token_name.downcase!
  tokenizer_error(:token => @lexer.current_token_value,
                  :message => "Error parsing token #{token_name}",
                  :status => :fatal,
                  :syntax => true)
end

#parse(str) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 8

def parse(str)
  @lexer = Sparkql::Lexer.new(str)
  @expression_count = 0
  results = do_parse
  return if results.nil?
  validate_expressions results
  results
end
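
Typical usage, a minimal sketch (field names are illustrative):

require 'sparkql'

parser = Sparkql::Parser.new
expressions = parser.parse("Baths Eq 4 And City Eq 'Fargo'")
# => an array of expression hashes on success, nil on a parse failure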

#tokenize_conjunction(exp1, conj, exp2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 56

def tokenize_conjunction(exp1, conj, exp2)
  exp2.first[:conjunction] = conj
  exp2.first[:conjunction_level] = @lexer.level
  exp1 + exp2
end
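
For example, joining two single-expression arrays with Or records the conjunction on the right-hand side's first expression (hash contents abbreviated):

exp1 = [{ :field => 'Baths', :operator => 'Eq', :conjunction => 'And' }]
exp2 = [{ :field => 'City',  :operator => 'Eq', :conjunction => 'And' }]
tokenize_conjunction(exp1, 'Or', exp2)
# => both hashes in one array, with exp2.first[:conjunction] == 'Or'
#    and exp2.first[:conjunction_level] set from the lexer's nesting level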

#tokenize_expression(field, op, val) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 25

def tokenize_expression(field, op, val)
  operator = get_operator(val,op) unless val.nil?
  field_args = {}
  # Function support for fields is stapled in here. The function information
  # is remapped to the expression
  if field.is_a?(Hash) && field[:type] == :function
    function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[field[:value].to_sym]
    if !function.nil?
      field_args[:field_function] = field[:value]
      field_args[:field_function_type] = function[:return_type]
    else
      tokenizer_error(:token => field[:value], 
        :message => "Unsupported function type", :status => :fatal )
    end
    field = field[:args].first
  end
  custom_field = field.start_with?('"')
  block_group = (@lexer.level == 0) ? 0 : @lexer.block_group_identifier
  expression = {:field => field, :operator => operator, :conjunction => 'And', 
    :level => @lexer.level, :block_group => block_group, :custom_field => custom_field}.merge!(field_args)
  expression = val.merge(expression) unless val.nil?
  expression[:condition] ||= expression[:value]
  validate_level_depth expression
  if operator.nil?
    tokenizer_error(:token => op, :expression => expression,
      :message => "Operator not supported for this type and value string", :status => :fatal )
  end
  @expression_count += 1
  [expression]
end
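
The returned single-element array wraps the expression hash consumed by later stages. For a top-level Baths Eq 4, the result is roughly (literal-derived keys such as :type and :value come from the merged val hash):

[{
  :field => 'Baths', :operator => 'Eq', :conjunction => 'And',
  :level => 0, :block_group => 0, :custom_field => false,
  :type => :integer, :value => '4', :condition => '4'
}]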

#tokenize_field_arg(field) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 116

def tokenize_field_arg(field)
  {
    :type => :field,
    :value => field,
  }
end
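
For instance:

tokenize_field_arg('City') # => { :type => :field, :value => 'City' }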

#tokenize_function(name, f_args) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 123

def tokenize_function(name, f_args)
  @lexer.leveldown
  @lexer.block_group_identifier -= 1

  args = f_args.instance_of?(Array) ? f_args : [f_args]
  validate_multiple_arguments args
  condition_list = []
  args.each do |arg|
    condition_list << arg[:value] # Needs to be pure string value
    arg[:value] = escape_value(arg)
  end
  resolver = Sparkql::FunctionResolver.new(name, args)
  
  resolver.validate
  if resolver.errors?
    tokenizer_error(:token => @lexer.last_field, 
                    :message => "Error parsing function #{resolver.errors.join(',')}",
                    :status => :fatal, 
                    :syntax => true)    
    return nil
  else
    result = resolver.call
    return result.nil? ? result : result.merge(:condition => "#{name}(#{condition_list.join(',')})")
  end
end
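
Conceptually, for a function literal such as days(-7) in a filter (days is one of the supported functions; the argument hash shape is illustrative):

# name   = 'days'
# f_args = { :type => :integer, :value => '-7' }
# On success, the resolver's result hash is merged with
#   :condition => "days(-7)"
# so the original call text is preserved for later reporting.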

#tokenize_function_args(lit1, lit2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 110

def tokenize_function_args(lit1, lit2)
  array = lit1.kind_of?(Array) ? lit1 : [lit1]
  array << lit2
  array
end
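
This accumulates parsed argument literals left to right as the grammar reduces them:

tokenize_function_args(a, b)       # => [a, b]
tokenize_function_args([a, b], c)  # => [a, b, c]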

#tokenize_group(expressions) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 76

def tokenize_group(expressions)
  @lexer.leveldown
  expressions
end

#tokenize_list(list) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 81

def tokenize_list(list)
  validate_multiple_values list[:value]
  list[:condition] ||= list[:value]
  list
end

#tokenize_multiple(lit1, lit2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 87

def tokenize_multiple(lit1, lit2)
  final_type = lit1[:type]
  if lit1[:type] != lit2[:type]
    final_type = coercible_types(lit1[:type],lit2[:type])
    if final_type.nil?
      final_type = lit1[:type]
      tokenizer_error(:token => @lexer.last_field, 
                      :message => "Type mismatch in field list.",
                      :status => :fatal, 
                      :syntax => true)
    end
  end
  array = Array(lit1[:value])
  condition = lit1[:condition] || lit1[:value] 
  array << lit2[:value]
  {
    :type => final_type,
    :value => array,
    :multiple => "true",
    :condition => condition + "," + (lit2[:condition] || lit2[:value])
  }
end
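
For two integer literals, e.g. 5 and 10, this folds them into one multiple-value literal (values are the lexer's string representations):

lit1 = { :type => :integer, :value => '5',  :condition => '5' }
lit2 = { :type => :integer, :value => '10', :condition => '10' }
tokenize_multiple(lit1, lit2)
# => { :type => :integer, :value => ['5', '10'],
#      :multiple => "true", :condition => "5,10" }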

#tokenize_unary_conjunction(conj, exp) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 62

def tokenize_unary_conjunction(conj, exp)

  # Handles the case when a SparkQL filter string
  # begins with a unary operator, and is nested, such as:
  # Not (Not Field Eq 1)
  if @expression_count == 1 && @lexer.level > 0
    exp.first[:conjunction] = conj 
  end

  exp.first[:unary] = conj
  exp.first[:unary_level] = @lexer.level
  exp
end
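
For a top-level Not Baths Eq 4, the unary operator is stamped onto the first expression (abbreviated):

exp = [{ :field => 'Baths', :operator => 'Eq', :conjunction => 'And' }]
tokenize_unary_conjunction('Not', exp)
# => same array, with exp.first[:unary] == 'Not'
#    and exp.first[:unary_level] == 0 at the top level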

#validate_expressions(results) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 167

def validate_expressions(results)
  if results.size > max_expressions 
    compile_error(:token => results[max_expressions][:field], :expression => results[max_expressions],
          :message => "You have exceeded the maximum expression count.  Please limit to no more than #{max_expressions} expressions in a filter.",
          :status => :fatal, :syntax => false, :constraint => true )
    results.slice!(max_expressions..-1)
  end
end

#validate_level_depth(expression) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 159

def validate_level_depth(expression)
  if @lexer.level > max_level_depth
    compile_error(:token => "(", :expression => expression,
          :message => "You have exceeded the maximum nesting level.  Please nest no more than #{max_level_depth} levels deep.",
          :status => :fatal, :syntax => false, :constraint => true )
  end
end

#validate_multiple_arguments(args) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 186

def validate_multiple_arguments(args)
  args = Array(args)
  if args.size > max_values 
    compile_error(:token => args[max_values],
          :message => "You have exceeded the maximum parameter count.  Please limit to #{max_values} parameters to a single function.",
          :status => :fatal, :syntax => false, :constraint => true )
    args.slice!(max_values..-1)
  end
end

#validate_multiple_values(values) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 176

def validate_multiple_values(values)
  values = Array(values)
  if values.size > max_values 
    compile_error(:token => values[max_values],
          :message => "You have exceeded the maximum value count.  Please limit to #{max_values} values in a single expression.",
          :status => :fatal, :syntax => false, :constraint => true )
    values.slice!(max_values..-1)
  end
end