Class: Sparkql::Parser

Inherits:
Racc::Parser
  • Object
Includes:
ParserCompatibility, ParserTools
Defined in:
lib/sparkql/parser.rb
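
Sparkql::Parser is the parser that racc generates for the Spark API filter language. ParserTools supplies the #parse entry point and the tokenize_* helpers invoked by the reduce actions documented below, while ParserCompatibility adds value escaping, validation limits, and error reporting. A minimal usage sketch; the filter string and field names are illustrative assumptions, not part of this class's API:

require 'sparkql'

parser = Sparkql::Parser.new
# #parse comes from Sparkql::ParserTools (see the method list below).
# The field names in this filter are hypothetical examples.
expressions = parser.parse("City Eq 'Fargo' And BathsTotal Ge 2")
puts expressions.inspect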

Constant Summary

Racc_arg =
[
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
Racc_token_to_s_table =
[
"$end",
"error",
"UMINUS",
"OPERATOR",
"RANGE_OPERATOR",
"UNARY_CONJUNCTION",
"CONJUNCTION",
"LPAREN",
"RPAREN",
"STANDARD_FIELD",
"CUSTOM_FIELD",
"KEYWORD",
"COMMA",
"INTEGER",
"DECIMAL",
"CHARACTER",
"DATE",
"DATETIME",
"TIME",
"BOOLEAN",
"NULL",
"$start",
"target",
"expressions",
"expression",
"conjunction",
"unary_conjunction",
"field",
"condition",
"range",
"group",
"function",
"literal",
"literal_list",
"function_name",
"function_args",
"function_arg",
"literals",
"rangeable" ]
Racc_debug_parser =
false
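
The Racc_* constants are the parse tables and bookkeeping emitted by racc when this parser was generated; Racc_token_to_s_table maps racc's internal token ids back to the terminal and nonterminal names of the grammar. A small inspection sketch, assuming the constant is reachable from outside the class, as generated racc constants normally are:

require 'sparkql'

# Print each racc token id next to the grammar symbol it names.
Sparkql::Parser::Racc_token_to_s_table.each_with_index do |name, id|
  puts format('%2d  %s', id, name)
end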

Constants included from ParserCompatibility

Sparkql::ParserCompatibility::FILTER_VALUES, Sparkql::ParserCompatibility::MAXIMUM_EXPRESSIONS, Sparkql::ParserCompatibility::MAXIMUM_LEVEL_DEPTH, Sparkql::ParserCompatibility::MAXIMUM_MULTIPLE_VALUES, Sparkql::ParserCompatibility::OPERATORS_SUPPORTING_MULTIPLES

Constants included from ParserTools

Sparkql::ParserTools::DATE_TYPES, Sparkql::ParserTools::NUMBER_TYPES

Instance Method Summary

Methods included from ParserCompatibility

#boolean_escape, #character_escape, #compile, #date_escape, #datetime_escape, #decimal_escape, #dropped_errors?, #errors, #errors?, #escape_value, #escape_value_list, #fatal_errors?, #integer_escape, #max_expressions, #max_level_depth, #max_values, #process_errors, #recovered_errors?, #rules_for_type, #supports_multiple?, #time_escape, #tokenize
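
Most of these helpers are called internally while a filter is parsed, but the error-reporting methods are useful to callers. A sketch of checking a parse for problems; the filter is an illustrative assumption:

require 'sparkql'

parser = Sparkql::Parser.new
parser.parse("BathsTotal Ge 2")

# #errors? reports whether anything was recorded during the parse;
# #fatal_errors? and #dropped_errors? narrow the list by severity.
parser.errors.each { |err| warn err.inspect } if parser.errors?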

Methods included from ParserTools

#coercible_types, #next_token, #on_error, #parse, #tokenize_conjunction, #tokenize_expression, #tokenize_field_arg, #tokenize_function, #tokenize_function_args, #tokenize_group, #tokenize_list, #tokenize_multiple, #tokenize_unary_conjunction, #validate_expressions, #validate_level_depth, #validate_multiple_arguments, #validate_multiple_values

Instance Method Details

#_reduce_10(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 254

def _reduce_10(val, _values, result)
  result = tokenize_conjunction(val[0], val[1], val[2])
  result
end
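
_reduce_10 and _reduce_11 are the reduce actions for conjunctions: each combines the three values of a conjunction rule (roughly an expression list, the conjunction token, and a further expression) via tokenize_conjunction. A rough illustration, assuming standard Spark API filter syntax:

require 'sparkql'

# The "And" joining the two expressions is reduced through tokenize_conjunction.
Sparkql::Parser.new.parse("City Eq 'Fargo' And BathsTotal Ge 2")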

#_reduce_11(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 259

def _reduce_11(val, _values, result)
  result = tokenize_conjunction(val[0], val[1], val[2])
  result
end

#_reduce_12(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 264

def _reduce_12(val, _values, result)
  result = tokenize_group(val[1])
  result
end
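
_reduce_12 handles a parenthesized group, passing the expressions between LPAREN and RPAREN (val[1]) to tokenize_group. A rough illustration, again with an assumed filter:

require 'sparkql'

# The parenthesized sub-filter is reduced through tokenize_group.
Sparkql::Parser.new.parse("(City Eq 'Fargo' Or City Eq 'Moorhead') And BathsTotal Ge 2")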

#_reduce_18(val, _values, result) ⇒ Object

reduce 17 omitted



# File 'lib/sparkql/parser.rb', line 279

def _reduce_18(val, _values, result)
  result = tokenize_list(val[0])
  result
end

#_reduce_19(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 284

def _reduce_19(val, _values, result)
  result = tokenize_function(val[0], [])
  result
end

#_reduce_2(val, _values, result) ⇒ Object

reduce 1 omitted



# File 'lib/sparkql/parser.rb', line 226

def _reduce_2(val, _values, result)
  result = 0
  result
end

#_reduce_20(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 289

def _reduce_20(val, _values, result)
  result = tokenize_function(val[0], val[2])
  result
end
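
_reduce_19 and _reduce_20 build function calls via tokenize_function, with an empty argument list for a bare call and with the parsed arguments (val[2]) otherwise. A rough illustration, assuming the gem's function syntax such as days():

require 'sparkql'

# days(-7) is reduced through tokenize_function along with its argument list.
Sparkql::Parser.new.parse("OriginalEntryTimestamp Ge days(-7)")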

#_reduce_23(val, _values, result) ⇒ Object

reduce 22 omitted



# File 'lib/sparkql/parser.rb', line 298

def _reduce_23(val, _values, result)
  result = tokenize_function_args(val[0], val[2])
  result
end

#_reduce_26(val, _values, result) ⇒ Object

reduce 25 omitted



# File 'lib/sparkql/parser.rb', line 307

def _reduce_26(val, _values, result)
  result = tokenize_field_arg(val[0])
  result
end

#_reduce_29(val, _values, result) ⇒ Object

reduce 28 omitted



# File 'lib/sparkql/parser.rb', line 316

def _reduce_29(val, _values, result)
  result = tokenize_multiple(val[0], val[2])
  result
end

#_reduce_30(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 321

def _reduce_30(val, _values, result)
  result = tokenize_multiple(val[0], val[2])
  result
end

#_reduce_31(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 326

def _reduce_31(val, _values, result)
  result = tokenize_multiple(val[0], val[2])
  result
end
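
_reduce_29 through _reduce_31 fold comma-separated values into a single condition via tokenize_multiple. A rough illustration, assuming the multi-value filter syntax:

require 'sparkql'

# The comma-separated city list is reduced through tokenize_multiple.
Sparkql::Parser.new.parse("City Eq 'Fargo','Moorhead'")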

#_reduce_6(val, _values, result) ⇒ Object

reduce 5 omitted



# File 'lib/sparkql/parser.rb', line 237

def _reduce_6(val, _values, result)
  result = tokenize_expression(val[0], val[1], val[2])
  result
end

#_reduce_7(val, _values, result) ⇒ Object



# File 'lib/sparkql/parser.rb', line 242

def _reduce_7(val, _values, result)
  result = tokenize_expression(val[0], val[1], val[2])
  result
end
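
_reduce_6 and _reduce_7 each build a single expression from a field, an operator token, and its right-hand side via tokenize_expression. A rough illustration with an assumed field name:

require 'sparkql'

# A plain field/operator/condition triple is reduced through tokenize_expression.
Sparkql::Parser.new.parse("ListPrice Ge 200000")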

#_reduce_9(val, _values, result) ⇒ Object

reduce 8 omitted



# File 'lib/sparkql/parser.rb', line 249

def _reduce_9(val, _values, result)
  result = tokenize_unary_conjunction(val[0], val[1])
  result
end
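
_reduce_9 applies a unary conjunction (the UNARY_CONJUNCTION token, for example Not) to a single expression via tokenize_unary_conjunction. A rough illustration, assuming Not is accepted in this position:

require 'sparkql'

# A leading "Not" is reduced through tokenize_unary_conjunction.
Sparkql::Parser.new.parse("Not City Eq 'Fargo'")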

#_reduce_none(val, _values, result) ⇒ Object

reduce 45 omitted



# File 'lib/sparkql/parser.rb', line 359

def _reduce_none(val, _values, result)
  val[0]
end