Module: Sparkql::ParserTools

Included in:
Parser
Defined in:
lib/sparkql/parser_tools.rb

Constant Summary

DATE_TYPES =

  Coercible types, from highest precision to lowest.

  %i[datetime date].freeze

NUMBER_TYPES =
  %i[decimal integer].freeze

ARITHMETIC_TYPES =
  %i[decimal integer field arithmetic].freeze

GROUP =
  'Group'.freeze

NEGATION =
  'Negation'.freeze

Instance Method Summary

Instance Method Details

#add_fold(node1, node2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 268

def add_fold(node1, node2)
  return if arithmetic_error?(node1) || arithmetic_error?(node2)

  value = escape_arithmetic_value(node1) + escape_arithmetic_value(node2)
  { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
end
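
Example (illustrative): folding an integer literal into a decimal literal. This assumes escape_value, defined elsewhere in the parser, returns the native Ruby integer for an :integer node; note that literal values are stored as strings.

add_fold({ type: :integer, value: '2' }, { type: :decimal, value: '3.5' })
# => { type: :decimal, value: '5.5' }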

#arithmetic_error?(side) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/sparkql/parser_tools.rb', line 252

def arithmetic_error?(side)
  side_type = side[:type] == :function ? side[:return_type] : side[:type]
  return false if ARITHMETIC_TYPES.include?(side_type)

  compile_error(token: side[:value], expression: side,
                message: "Error attempting arithmetic with type: #{side_type}",
                status: :fatal, syntax: false, constraint: true)
  true
end
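
Example (illustrative): the guard records a fatal constraint error on the parser rather than raising.

arithmetic_error?({ type: :integer, value: '1' })
# => false
arithmetic_error?({ type: :character, value: "'a'" })
# => true, with a compile error recorded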

#arithmetic_field(nested_representation) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 28

def arithmetic_field(nested_representation)
  return if nested_representation.nil?

  return nested_representation[:value] if nested_representation[:type] == :field
  return nested_representation[:field] if nested_representation.key?(:field)

  field = arithmetic_field(nested_representation[:lhs])
  return field unless field.nil?

  field = arithmetic_field(nested_representation[:rhs])
  return field unless field.nil?
end
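
Example (illustrative): the walk descends through lhs and then rhs, returning the first field name found.

arithmetic_field({ type: :arithmetic, op: '+',
                   lhs: { type: :field, value: 'Baths' },
                   rhs: { type: :integer, value: '1' } })
# => 'Baths'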

#arithmetic_type(num1, num2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 307

def arithmetic_type(num1, num2)
  if num1[:type] == :decimal || num2[:type] == :decimal
    :decimal
  else
    :integer
  end
end

#coercible_types(type1, type2) ⇒ Object

If the two types support coercion with each other, returns the higher-precision of the two. If either type does not support coercion with the other, returns nil.



# File 'lib/sparkql/parser_tools.rb', line 397

def coercible_types(type1, type2)
  if DATE_TYPES.include?(type1) && DATE_TYPES.include?(type2)
    DATE_TYPES.first
  elsif NUMBER_TYPES.include?(type1) && NUMBER_TYPES.include?(type2)
    NUMBER_TYPES.first
  else
    nil
  end
end
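
For example, given the constants above:

coercible_types(:date, :datetime)     # => :datetime
coercible_types(:integer, :decimal)   # => :decimal
coercible_types(:integer, :character) # => nil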

#current_timestamp ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 444

def current_timestamp
  @current_timestamp ||= Time.now
end

#div_fold(node1, node2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 289

def div_fold(node1, node2)
  return if arithmetic_error?(node1) ||
            arithmetic_error?(node2) ||
            zero_error?(node2)

  value = escape_arithmetic_value(node1) / escape_arithmetic_value(node2)
  { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
end
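
Note that when both operands are integers the fold uses Ruby integer division, so the result is truncated. Example (illustrative, under the same escape_value assumption as above):

div_fold({ type: :integer, value: '10' }, { type: :integer, value: '4' })
# => { type: :integer, value: '2' }
div_fold({ type: :integer, value: '10' }, { type: :integer, value: '0' })
# => nil, with a divide-by-zero error recorded by zero_error?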

#escape_arithmetic_value(expression) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 315

def escape_arithmetic_value(expression)
  case expression[:type]
  when :decimal
    BigDecimal(expression[:value])
  else
    escape_value(expression)
  end
end
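
Routing :decimal literals through BigDecimal keeps folded arithmetic free of binary floating-point error. Example (illustrative):

escape_arithmetic_value({ type: :decimal, value: '0.1' }) +
  escape_arithmetic_value({ type: :decimal, value: '0.2' })
# => BigDecimal 0.3 exactly; the equivalent Float sum is 0.30000000000000004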

#function_resolver(function_name, function_args = []) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 430

def function_resolver(function_name, function_args = [])
  Sparkql::FunctionResolver.new(function_name,
                                function_args,
                                current_timestamp: current_timestamp)
end

#group_fold(exp) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 262

def group_fold(exp)
  @lexer.leveldown
  @lexer.block_group_identifier -= 1
  exp
end

#lookup_function(function_name) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 440

def lookup_function(function_name)
  Sparkql::FunctionResolver.lookup(function_name)
end

#mod_fold(node1, node2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 298

def mod_fold(node1, node2)
  return if arithmetic_error?(node1) ||
            arithmetic_error?(node2) ||
            zero_error?(node2)

  value = escape_arithmetic_value(node1) % escape_arithmetic_value(node2)
  { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
end

#mul_fold(node1, node2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 282

def mul_fold(node1, node2)
  return if arithmetic_error?(node1) || arithmetic_error?(node2)

  value = escape_arithmetic_value(node1) * escape_arithmetic_value(node2)
  { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
end

#nested_function_depth(expression) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 407

def nested_function_depth(expression)
  return 0 unless expression && expression[:type] == :function

  height = 0
  queue = []
  queue.push(expression)

  loop do
    count = queue.size
    return height if count.zero?

    height += 1

    while count.positive?
      node = queue.shift
      node[:args].each do |child|
        queue.push(child) if child[:type] == :function
      end
      count -= 1
    end
  end
end
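
The traversal is a breadth-first walk over :args that counts levels of function nesting. Example (illustrative node shapes):

expression = {
  type: :function, args: [
    { type: :function, args: [{ type: :field, value: 'ListPrice' }] }
  ]
}
nested_function_depth(expression)
# => 2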

#next_token ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 22

def next_token
  t = @lexer.shift
  t = @lexer.shift while (t[0] == :SPACE) || (t[0] == :NEWLINE)
  t
end

#no_field_error(field, operator) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 41

def no_field_error(field, operator)
  tokenizer_error(token: field,
                  expression: { operator: operator, conjunction: 'And', conjunction_level: 0, level: @lexer.level },
                  message: "Each expression must evaluate a field", status: :fatal)
end

#offset ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 448

def offset
  @offset ||= current_timestamp.strftime('%:z')
end

#on_error(error_token_id, _error_value, _value_stack) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 340

def on_error(error_token_id, _error_value, _value_stack)
  token_name = token_to_str(error_token_id)
  token_name.downcase!
  tokenizer_error(token: @lexer.current_token_value,
                  message: "Error parsing token #{token_name}",
                  status: :fatal,
                  syntax: true)
end

#parse(str) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 12

def parse(str)
  @lexer = Sparkql::Lexer.new(str)
  @expression_count = 0
  results = do_parse
  return if results.nil?

  validate_expressions results
  results
end
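
This is the main entry point, normally reached through Sparkql::Parser, which includes this module. A minimal usage sketch (the filter string is illustrative):

parser = Sparkql::Parser.new
expressions = parser.parse("Baths Eq 2")
# expressions is an array of expression hashes, or nil when parsing fails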

#sub_fold(node1, node2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 275

def sub_fold(node1, node2)
  return if arithmetic_error?(node1) || arithmetic_error?(node2)

  value = escape_arithmetic_value(node1) - escape_arithmetic_value(node2)
  { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
end

#supported_function?(function_name) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/sparkql/parser_tools.rb', line 436

def supported_function?(function_name)
  !lookup_function(function_name).nil?
end

#tokenize_arithmetic(lhs, operator, rhs) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 238

def tokenize_arithmetic(lhs, operator, rhs)
  lhs = { type: :field, value: lhs } if lhs.is_a?(String)
  rhs = { type: :field, value: rhs } if rhs.is_a?(String)

  arithmetic_error?(lhs)
  arithmetic_error?(rhs)
  {
    type: :arithmetic,
    op: operator,
    lhs: lhs,
    rhs: rhs
  }
end
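
Bare field names arrive from the grammar as strings and are wrapped into field nodes; hash nodes pass through unchanged. Example (illustrative, with '+' standing in for the operator token):

tokenize_arithmetic('Baths', '+', { type: :integer, value: '1' })
# => { type: :arithmetic, op: '+',
#      lhs: { type: :field, value: 'Baths' },
#      rhs: { type: :integer, value: '1' } }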

#tokenize_arithmetic_group(lhs) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 126

def tokenize_arithmetic_group(lhs)
  @lexer.leveldown
  @lexer.block_group_identifier -= 1
  lhs = { type: :field, value: lhs } if lhs.is_a?(String)
  {
    type: :arithmetic,
    op: GROUP,
    lhs: lhs
  }
end

#tokenize_arithmetic_negation(lhs) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 137

def tokenize_arithmetic_negation(lhs)
  lhs = { type: :field, value: lhs } if lhs.is_a?(String)
  {
    type: :arithmetic,
    op: NEGATION,
    lhs: lhs
  }
end

#tokenize_conjunction(exp1, conj, exp2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 97

def tokenize_conjunction(exp1, conj, exp2)
  exp2.first[:conjunction] = conj
  exp2.first[:conjunction_level] = @lexer.level
  exp1 + exp2
end

#tokenize_expression(field, op_token, val) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 47

def tokenize_expression(field, op_token, val)
  operator = get_operator(val, op_token) unless val.nil?

  field_manipulations = nil
  if field.is_a?(Hash) && field[:type] == :function
    unless supported_function?(field[:function_name])
      tokenizer_error(token: field[:function_name],
                      message: 'Unsupported function type',
                      status: :fatal)
    end
    field_manipulations = field
    field = field[:field]
  elsif field.is_a?(Hash) && field[:type] == :arithmetic
    field_manipulations = field
    field = arithmetic_field(field)
    no_field_error(field, operator) if field.nil?
  elsif field.is_a?(Hash)
    no_field_error(field, operator)
  end

  custom_field = !field.nil? && field.is_a?(String) && field.start_with?('"')

  block_group = @lexer.level.zero? ? 0 : @lexer.block_group_identifier
  expression = { field: field, operator: operator, conjunction: 'And',
                 conjunction_level: 0, level: @lexer.level,
                 block_group: block_group, custom_field: custom_field }

  if !field_manipulations.nil?
    # Keeping field_function and field_function_type for backward compatibility with datacon
    expression.merge!(field_manipulations: field_manipulations)

    if field_manipulations[:type] == :function
      expression.merge!(field_function: field_manipulations[:function_name],
                        field_function_type: field_manipulations[:return_type],
                        args: field_manipulations[:function_parameters])
    end
  end

  expression = val.merge(expression) unless val.nil?
  expression[:condition] ||= expression[:value]
  validate_level_depth expression
  validate_field_function_depth(expression[:field_manipulations])
  if operator.nil?
    tokenizer_error(token: op_token, expression: expression,
                    message: "Operator not supported for this type and value string", status: :fatal)
  end
  @expression_count += 1
  [expression]
end
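
The result is a one-element array so that conjunction handling can concatenate expression lists. For a simple filter such as Baths Eq 2, the returned structure looks roughly like this (sketch; the exact keys on the value side come from the literal node):

[{ field: 'Baths', operator: 'Eq', conjunction: 'And',
   conjunction_level: 0, level: 0, block_group: 0,
   custom_field: false, type: :integer, value: '2', condition: '2' }]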

#tokenize_field_arg(field) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 202

def tokenize_field_arg(field)
  if field.is_a?(String)
    {
      type: :field,
      value: field
    }
  else
    field
  end
end

#tokenize_function(name, f_args) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 213

def tokenize_function(name, f_args)
  @lexer.leveldown
  @lexer.block_group_identifier -= 1

  args = f_args.instance_of?(Array) ? f_args : [f_args]
  validate_multiple_arguments args
  condition_list = []
  args.each do |arg|
    condition_list << arg[:value] # Needs to be pure string value
    arg[:value] = escape_value(arg)
  end
  resolver = function_resolver(name, args)
  resolver.validate
  if resolver.errors?
    tokenizer_error(token: @lexer.last_field,
                    message: "Error parsing function #{resolver.errors.join(',')}",
                    status: :fatal,
                    syntax: true)
    nil
  else
    result = resolver.call
    result.nil? ? result : result.merge(condition: "#{name}(#{condition_list.join(',')})")
  end
end

#tokenize_function_args(lit1, lit2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 196

def tokenize_function_args(lit1, lit2)
  array = lit1.is_a?(Array) ? lit1 : [lit1]
  array << lit2
  array
end

#tokenize_group(expressions) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 121

def tokenize_group(expressions)
  @lexer.leveldown
  expressions
end

#tokenize_list(list) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 146

def tokenize_list(list)
  return if list.nil?

  validate_multiple_values list[:value]
  list[:condition] ||= list[:value]
  list
end

#tokenize_literal_negation(number_token) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 154

def tokenize_literal_negation(number_token)
  old_val = case number_token[:type]
            when :integer
              number_token[:value].to_i
            when :decimal
              number_token[:value].to_f
            else
              tokenizer_error(token: @lexer.current_token_value,
                              expression: number_token,
                              message: "Negation is only allowed for integer and floats",
                              status: :fatal,
                              syntax: true)
              return number_token
            end
  number_token[:value] = (-1 * old_val).to_s

  number_token
end
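
The literal node is negated in place and returned, with the value kept as a string. Example (illustrative):

tokenize_literal_negation({ type: :integer, value: '5' })
# => { type: :integer, value: '-5' }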

#tokenize_multiple(lit1, lit2) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 173

def tokenize_multiple(lit1, lit2)
  final_type = lit1[:type]
  if lit1[:type] != lit2[:type]
    final_type = coercible_types(lit1[:type], lit2[:type])
    if final_type.nil?
      final_type = lit1[:type]
      tokenizer_error(token: @lexer.last_field,
                      message: "Type mismatch in field list.",
                      status: :fatal,
                      syntax: true)
    end
  end
  array = Array(lit1[:value])
  condition = lit1[:condition] || lit1[:value]
  array << lit2[:value]
  {
    type: final_type,
    value: array,
    multiple: "true",
    condition: "#{condition},#{lit2[:condition] || lit2[:value]}"
  }
end
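
Successive literals fold into one multi-valued node, with the raw strings preserved in :condition. Example (illustrative):

tokenize_multiple({ type: :integer, value: '1' },
                  { type: :integer, value: '2' })
# => { type: :integer, value: ['1', '2'],
#      multiple: "true", condition: '1,2' }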

#tokenize_unary_conjunction(conj, exp) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 103

def tokenize_unary_conjunction(conj, exp)
  # Handles the case when a SparkQL filter string
  # begins with a unary operator, and is nested, such as:
  #   Not (Not Field Eq 1)
  # In this case we treat the outer unary operator as a conjunction, since
  # it would be one with any other expression; this keeps processing
  # consistent.
  if exp.first[:unary] && @lexer.level.zero?
    exp.first[:conjunction] = conj
    exp.first[:conjunction_level] = @lexer.level
  else
    exp.first[:unary] = conj
    exp.first[:unary_level] = @lexer.level
  end

  exp
end

#unescape_arithmetic(value) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 324

def unescape_arithmetic(value)
  if value.is_a?(BigDecimal)
    value.round(20).to_s('F')
  else
    value.to_s
  end
end

#validate_expressions(results) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 365

def validate_expressions(results)
  if results.size > max_expressions
    compile_error(token: results[max_expressions][:field], expression: results[max_expressions],
                  message: "You have exceeded the maximum expression count.  Please limit to no more than #{max_expressions} expressions in a filter.",
                  status: :fatal, syntax: false, constraint: true)
    results.slice!(max_expressions..-1)
  end
end

#validate_field_function_depth(expression) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 357

def validate_field_function_depth(expression)
  if nested_function_depth(expression) > max_function_depth
    compile_error(token: "(", expression: expression,
                  message: "You have exceeded the maximum function nesting level.  Please nest no more than #{max_function_depth} levels deep.",
                  status: :fatal, syntax: false, constraint: true)
  end
end

#validate_level_depth(expression) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 349

def validate_level_depth(expression)
  if @lexer.level > max_level_depth
    compile_error(token: "(", expression: expression,
                  message: "You have exceeded the maximum nesting level.  Please nest no more than #{max_level_depth} levels deep.",
                  status: :fatal, syntax: false, constraint: true)
  end
end

#validate_multiple_arguments(args) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 384

def validate_multiple_arguments(args)
  args = Array(args)
  if args.size > max_values
    compile_error(token: args[max_values],
                  message: "You have exceeded the maximum parameter count.  Please limit to #{max_values} parameters to a single function.",
                  status: :fatal, syntax: false, constraint: true)
    args.slice!(max_values..-1)
  end
end

#validate_multiple_values(values) ⇒ Object



# File 'lib/sparkql/parser_tools.rb', line 374

def validate_multiple_values(values)
  values = Array(values)
  if values.size > max_values
    compile_error(token: values[max_values],
                  message: "You have exceeded the maximum value count.  Please limit to #{max_values} values in a single expression.",
                  status: :fatal, syntax: false, constraint: true)
    values.slice!(max_values..-1)
  end
end

#zero_error?(number) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/sparkql/parser_tools.rb', line 332

def zero_error?(number)
  return unless escape_value(number).zero?

  compile_error(token: (number[:value]).to_s, expression: number,
                message: "Error attempting to divide by zero",
                status: :fatal, syntax: false, constraint: true)
end