Class: Sparkql::Parser

Inherits:
Racc::Parser (which inherits from Object)
Includes:
ParserCompatibility, ParserTools
Defined in:
lib/sparkql/parser.rb

Constant Summary collapse

Racc_arg =
[
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
Racc_token_to_s_table =
[
"$end",
"error",
"UMINUS",
"OPERATOR",
"RANGE_OPERATOR",
"UNARY_CONJUNCTION",
"CONJUNCTION",
"LPAREN",
"RPAREN",
"STANDARD_FIELD",
"CUSTOM_FIELD",
"KEYWORD",
"COMMA",
"INTEGER",
"DECIMAL",
"CHARACTER",
"DATE",
"DATETIME",
"TIME",
"BOOLEAN",
"NULL",
"$start",
"target",
"expressions",
"expression",
"conjunction",
"unary_conjunction",
"field",
"condition",
"range",
"group",
"function",
"literal",
"literal_list",
"function_name",
"function_args",
"function_arg",
"literals",
"rangeable" ]
Racc_debug_parser =
false

Constants included from ParserCompatibility

Sparkql::ParserCompatibility::FILTER_VALUES, Sparkql::ParserCompatibility::MAXIMUM_EXPRESSIONS, Sparkql::ParserCompatibility::MAXIMUM_LEVEL_DEPTH, Sparkql::ParserCompatibility::MAXIMUM_MULTIPLE_VALUES, Sparkql::ParserCompatibility::OPERATORS_SUPPORTING_MULTIPLES

Constants included from ParserTools

Sparkql::ParserTools::DATE_TYPES, Sparkql::ParserTools::NUMBER_TYPES

Instance Method Summary collapse

Methods included from ParserCompatibility

#boolean_escape, #character_escape, #compile, #date_escape, #datetime_escape, #decimal_escape, #dropped_errors?, #errors, #errors?, #escape_value, #escape_value_list, #fatal_errors?, #integer_escape, #max_expressions, #max_level_depth, #max_values, #process_errors, #recovered_errors?, #rules_for_type, #supports_multiple?, #time_escape, #tokenize

Methods included from ParserTools

#coercible_types, #next_token, #on_error, #parse, #tokenize_conjunction, #tokenize_expression, #tokenize_field_arg, #tokenize_function, #tokenize_function_args, #tokenize_group, #tokenize_list, #tokenize_literal_negation, #tokenize_multiple, #tokenize_unary_conjunction, #validate_expressions, #validate_level_depth, #validate_multiple_arguments, #validate_multiple_values

Instance Method Details

#_reduce_10(val, _values, result) ⇒ Object



260
261
262
263
# File 'lib/sparkql/parser.rb', line 260

def _reduce_10(val, _values, result)
 result = tokenize_conjunction(val[0], val[1],val[2]) 
    result
end

#_reduce_11(val, _values, result) ⇒ Object



265
266
267
268
# File 'lib/sparkql/parser.rb', line 265

def _reduce_11(val, _values, result)
 result = tokenize_conjunction(val[0], val[1],val[2]) 
    result
end

#_reduce_12(val, _values, result) ⇒ Object



270
271
272
273
# File 'lib/sparkql/parser.rb', line 270

def _reduce_12(val, _values, result)
 result = tokenize_group(val[1]) 
    result
end

#_reduce_18(val, _values, result) ⇒ Object

reduce 17 omitted



285
286
287
288
# File 'lib/sparkql/parser.rb', line 285

def _reduce_18(val, _values, result)
 result = tokenize_list(val[0]) 
    result
end

#_reduce_19(val, _values, result) ⇒ Object



290
291
292
293
# File 'lib/sparkql/parser.rb', line 290

def _reduce_19(val, _values, result)
 result = tokenize_function(val[0], []) 
    result
end

#_reduce_2(val, _values, result) ⇒ Object

reduce 1 omitted



232
233
234
235
# File 'lib/sparkql/parser.rb', line 232

def _reduce_2(val, _values, result)
 result = 0 
    result
end

#_reduce_20(val, _values, result) ⇒ Object



295
296
297
298
# File 'lib/sparkql/parser.rb', line 295

def _reduce_20(val, _values, result)
 result = tokenize_function(val[0], val[2]) 
    result
end

#_reduce_23(val, _values, result) ⇒ Object

reduce 22 omitted



304
305
306
307
# File 'lib/sparkql/parser.rb', line 304

def _reduce_23(val, _values, result)
 result = tokenize_function_args(val[0], val[2]) 
    result
end

#_reduce_26(val, _values, result) ⇒ Object

reduce 25 omitted



313
314
315
316
# File 'lib/sparkql/parser.rb', line 313

def _reduce_26(val, _values, result)
 result = tokenize_field_arg(val[0]) 
    result
end

#_reduce_29(val, _values, result) ⇒ Object

reduce 28 omitted



322
323
324
325
# File 'lib/sparkql/parser.rb', line 322

def _reduce_29(val, _values, result)
 result = tokenize_multiple(val[0], val[2]) 
    result
end

#_reduce_30(val, _values, result) ⇒ Object



327
328
329
330
# File 'lib/sparkql/parser.rb', line 327

def _reduce_30(val, _values, result)
 result = tokenize_multiple(val[0], val[2]) 
    result
end

#_reduce_31(val, _values, result) ⇒ Object



332
333
334
335
# File 'lib/sparkql/parser.rb', line 332

def _reduce_31(val, _values, result)
 result = tokenize_multiple(val[0], val[2]) 
    result
end

#_reduce_35(val, _values, result) ⇒ Object

reduce 34 omitted



343
344
345
346
# File 'lib/sparkql/parser.rb', line 343

def _reduce_35(val, _values, result)
 result = val[1] 
    result
end

#_reduce_36(val, _values, result) ⇒ Object



348
349
350
351
# File 'lib/sparkql/parser.rb', line 348

def _reduce_36(val, _values, result)
 result = tokenize_literal_negation(val[1]) 
    result
end

#_reduce_6(val, _values, result) ⇒ Object

reduce 5 omitted



243
244
245
246
# File 'lib/sparkql/parser.rb', line 243

def _reduce_6(val, _values, result)
 result = tokenize_expression(val[0], val[1],val[2]) 
    result
end

#_reduce_7(val, _values, result) ⇒ Object



248
249
250
251
# File 'lib/sparkql/parser.rb', line 248

def _reduce_7(val, _values, result)
 result = tokenize_expression(val[0], val[1], val[2]) 
    result
end

#_reduce_9(val, _values, result) ⇒ Object

reduce 8 omitted



255
256
257
258
# File 'lib/sparkql/parser.rb', line 255

def _reduce_9(val, _values, result)
 result = tokenize_unary_conjunction(val[0], val[1]) 
    result
end

#_reduce_none(val, _values, result) ⇒ Object

reduce 47 omitted



375
376
377
# File 'lib/sparkql/parser.rb', line 375

def _reduce_none(val, _values, result)
  val[0]
end