Module: Sparkql::ParserTools
Included in: Parser
Defined in: lib/sparkql/parser_tools.rb
Overview
This is the guts of the parser internals and is mixed into the parser for organization.
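ParserTools is mixed into Sparkql::Parser, so in practice it is exercised through the parser's #parse entry point. A minimal usage sketch (assumes the sparkql gem is installed; the field name and value are hypothetical):

require 'sparkql'

parser = Sparkql::Parser.new
expressions = parser.parse("City Eq 'Fargo'")
# expressions is an array of expression hashes built by #tokenize_expression,
# e.g. expressions.first[:field] is expected to be "City" and
# expressions.first[:operator] is expected to be "Eq"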
Constant Summary
- DATE_TYPES =
Coercible types from highest precision to lowest
[:datetime, :date]
- NUMBER_TYPES =
[:decimal, :integer]
Instance Method Summary
- #coercible_types(type1, type2) ⇒ Object
  If both types support coercion with each other, always selects the highest precision type to return as a reflection of the two.
- #next_token ⇒ Object
- #on_error(error_token_id, error_value, value_stack) ⇒ Object
- #parse(str) ⇒ Object
- #tokenize_conjunction(exp1, conj, exp2) ⇒ Object
- #tokenize_expression(field, op, val) ⇒ Object
- #tokenize_field_arg(field) ⇒ Object
- #tokenize_function(name, f_args) ⇒ Object
- #tokenize_function_args(lit1, lit2) ⇒ Object
- #tokenize_group(expressions) ⇒ Object
- #tokenize_list(list) ⇒ Object
- #tokenize_literal_negation(number_token) ⇒ Object
- #tokenize_multiple(lit1, lit2) ⇒ Object
- #tokenize_unary_conjunction(conj, exp) ⇒ Object
- #validate_expressions(results) ⇒ Object
- #validate_level_depth(expression) ⇒ Object
- #validate_multiple_arguments(args) ⇒ Object
- #validate_multiple_values(values) ⇒ Object
Instance Method Details
#coercible_types(type1, type2) ⇒ Object
If both types support coercion with each other, always selects the highest precision type to return as a reflection of the two. Returns nil if either type does not support coercion with the other.
# File 'lib/sparkql/parser_tools.rb', line 225

def coercible_types type1, type2
  if DATE_TYPES.include?(type1) && DATE_TYPES.include?(type2)
    DATE_TYPES.first
  elsif NUMBER_TYPES.include?(type1) && NUMBER_TYPES.include?(type2)
    NUMBER_TYPES.first
  else
    nil
  end
end
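For illustration, the precision selection can be checked directly on a parser instance (a sketch; assumes the sparkql gem and that Sparkql::Parser mixes in this module, as noted above):

require 'sparkql'

parser = Sparkql::Parser.new
parser.coercible_types(:datetime, :date)   # => :datetime (highest precision date type)
parser.coercible_types(:integer, :decimal) # => :decimal  (highest precision number type)
parser.coercible_types(:date, :integer)    # => nil       (the types do not coerce with each other)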
#next_token ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 17

def next_token
  t = @lexer.shift
  while t[0] == :SPACE or t[0] == :NEWLINE
    t = @lexer.shift
  end
  t
end
#on_error(error_token_id, error_value, value_stack) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 175

def on_error(error_token_id, error_value, value_stack)
  token_name = token_to_str(error_token_id)
  token_name.downcase!
  token = error_value.to_s.inspect
  tokenizer_error(:token => @lexer.current_token_value,
                  :message => "Error parsing token #{token_name}",
                  :status => :fatal,
                  :syntax => true)
end
#parse(str) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 8

def parse(str)
  @lexer = Sparkql::Lexer.new(str)
  @expression_count = 0
  results = do_parse
  return if results.nil?
  validate_expressions results
  results
end
#tokenize_conjunction(exp1, conj, exp2) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 59

def tokenize_conjunction(exp1, conj, exp2)
  exp2.first[:conjunction] = conj
  exp2.first[:conjunction_level] = @lexer.level
  exp1 + exp2
end
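For example, when the grammar reduces "A Or B", the conjunction is recorded on the right-hand expression and the two expression lists are concatenated. A sketch via the public #parse entry point (hypothetical field names):

require 'sparkql'

results = Sparkql::Parser.new.parse("BathsTotal Eq 2 Or BedsTotal Eq 3")
# results.size                     # expected => 2
# results.last[:conjunction]       # expected => "Or"
# results.last[:conjunction_level] # expected => 0
# results.first[:conjunction]      # expected => "And" (the default set in #tokenize_expression)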
#tokenize_expression(field, op, val) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 25

def tokenize_expression(field, op, val)
  operator = get_operator(val,op) unless val.nil?
  field_args = {}
  # Function support for fields is stapled in here. The function information
  # is remapped to the expression
  if field.is_a?(Hash) && field[:type] == :function
    function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[field[:value].to_sym]
    if !function.nil?
      field_args[:field_function] = field[:value]
      field_args[:field_function_type] = function[:return_type]
      field_args[:args] = field[:args]
    else
      tokenizer_error(:token => field[:value],
        :message => "Unsupported function type", :status => :fatal )
    end
    field = field[:args].first
  end
  custom_field = field.start_with?('"')
  block_group = (@lexer.level == 0) ? 0 : @lexer.block_group_identifier
  expression = {:field => field, :operator => operator, :conjunction => 'And',
    :conjunction_level => 0, :level => @lexer.level,
    :block_group => block_group, :custom_field => custom_field}.
    merge!(field_args)
  expression = val.merge(expression) unless val.nil?
  expression[:condition] ||= expression[:value]
  validate_level_depth expression
  if operator.nil?
    tokenizer_error(:token => op, :expression => expression,
      :message => "Operator not supported for this type and value string",
      :status => :fatal )
  end
  @expression_count += 1
  [expression]
end
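As a rough illustration of the hash this builds: the attributes below come straight from the code above, the field name and value are hypothetical, and the exact contents depend on the lexer and operator resolution:

require 'sparkql'

expressions = Sparkql::Parser.new.parse("BathsTotal Eq 2")
expressions.first
# expected to look roughly like:
# {
#   :field => "BathsTotal", :operator => "Eq",
#   :conjunction => "And", :conjunction_level => 0,
#   :level => 0, :block_group => 0, :custom_field => false,
#   :type => :integer, :value => "2", :condition => "2"
# }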
#tokenize_field_arg(field) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 142

def tokenize_field_arg(field)
  {
    :type => :field,
    :value => field,
  }
end
#tokenize_function(name, f_args) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 149

def tokenize_function(name, f_args)
  @lexer.leveldown
  @lexer.block_group_identifier -= 1

  args = f_args.instance_of?(Array) ? f_args : [f_args]
  validate_multiple_arguments args
  condition_list = []
  args.each do |arg|
    condition_list << arg[:value] # Needs to be pure string value
    arg[:value] = escape_value(arg)
  end
  resolver = Sparkql::FunctionResolver.new(name, args)

  resolver.validate
  if(resolver.errors?)
    tokenizer_error(:token => @lexer.last_field,
                    :message => "Error parsing function #{resolver.errors.join(',')}",
                    :status => :fatal,
                    :syntax => true)
    return nil
  else
    result = resolver.call()
    result.nil? ? result : result.merge(:condition => "#{name}(#{condition_list.join(',')})")
  end
end
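A sketch of how a function literal flows through this method, assuming days() is among FunctionResolver's supported functions (the field name is hypothetical):

require 'sparkql'

expressions = Sparkql::Parser.new.parse("OnMarketDate Eq days(-7)")
# expressions.first[:condition] is expected to be "days(-7)" -- the original call
# preserved by the :condition merge above -- while :value holds whatever
# Sparkql::FunctionResolver resolved the function to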
#tokenize_function_args(lit1, lit2) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 136

def tokenize_function_args(lit1, lit2)
  array = lit1.kind_of?(Array) ? lit1 : [lit1]
  array << lit2
  array
end
#tokenize_group(expressions) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 83

def tokenize_group(expressions)
  @lexer.leveldown
  expressions
end
#tokenize_list(list) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 88

def tokenize_list(list)
  validate_multiple_values list[:value]
  list[:condition] ||= list[:value]
  list
end
#tokenize_literal_negation(number_token) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 94

def tokenize_literal_negation(number_token)
  old_val = case number_token[:type]
            when :integer
              number_token[:value].to_i
            when :decimal
              number_token[:value].to_f
            else
              tokenizer_error(:token => @lexer.current_token_value,
                              :expression => number_token,
                              :message => "Negation is only allowed for integer and floats",
                              :status => :fatal,
                              :syntax => true)
              return number_token
            end

  number_token[:value] = (-1 * old_val).to_s
  number_token
end
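In isolation the token transformation looks like this (a sketch; only the happy path is shown, since the error branch needs an active lexer):

require 'sparkql'

parser = Sparkql::Parser.new
parser.tokenize_literal_negation({ :type => :integer, :value => "5" })
# => { :type => :integer, :value => "-5" }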
#tokenize_multiple(lit1, lit2) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 113

def tokenize_multiple(lit1, lit2)
  final_type = lit1[:type]
  if lit1[:type] != lit2[:type]
    final_type = coercible_types(lit1[:type],lit2[:type])
    if final_type.nil?
      final_type = lit1[:type]
      tokenizer_error(:token => @lexer.last_field,
                      :message => "Type mismatch in field list.",
                      :status => :fatal,
                      :syntax => true)
    end
  end
  array = Array(lit1[:value])
  condition = lit1[:condition] || lit1[:value]
  array << lit2[:value]
  {
    :type => final_type ,
    :value => array,
    :multiple => "true",
    :condition => condition + "," + (lit2[:condition] || lit2[:value])
  }
end
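For example, combining an integer literal with a decimal literal coerces the pair to the higher-precision :decimal type (a sketch; no lexer is needed on this path):

require 'sparkql'

parser = Sparkql::Parser.new
parser.tokenize_multiple({ :type => :integer, :value => "1" },
                         { :type => :decimal, :value => "2.5" })
# => { :type => :decimal, :value => ["1", "2.5"], :multiple => "true", :condition => "1,2.5" }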
#tokenize_unary_conjunction(conj, exp) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 65

def tokenize_unary_conjunction(conj, exp)
  # Handles the case when a SparkQL filter string
  # begins with a unary operator, and is nested, such as:
  # Not (Not Field Eq 1)
  # In this instance we treat the outer unary as a conjunction. With any other
  # expression this would be the case, so that should make processing
  # consistent.
  if exp.first[:unary] && @lexer.level == 0
    exp.first[:conjunction] = conj
    exp.first[:conjunction_level] = @lexer.level
  else
    exp.first[:unary] = conj
    exp.first[:unary_level] = @lexer.level
  end

  exp
end
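A sketch of the two branches described in the comment above, via the public #parse entry point (hypothetical field name):

require 'sparkql'

# Plain unary case: the Not is recorded on the expression itself.
exp = Sparkql::Parser.new.parse("Not City Eq 'Fargo'")
# exp.first[:unary]       # expected => "Not"
# exp.first[:unary_level] # expected => 0

# Nested case from the comment: the inner Not already marked the expression
# unary, so the outer Not is treated as a conjunction instead.
exp = Sparkql::Parser.new.parse("Not (Not City Eq 'Fargo')")
# exp.first[:conjunction] # expected => "Not"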
#validate_expressions(results) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 193

def validate_expressions results
  if results.size > max_expressions
    compile_error(:token => results[max_expressions][:field], :expression => results[max_expressions],
                  :message => "You have exceeded the maximum expression count. Please limit to no more than #{max_expressions} expressions in a filter.",
                  :status => :fatal, :syntax => false, :constraint => true )
    results.slice!(max_expressions..-1)
  end
end
#validate_level_depth(expression) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 185

def validate_level_depth expression
  if @lexer.level > max_level_depth
    compile_error(:token => "(", :expression => expression,
                  :message => "You have exceeded the maximum nesting level. Please nest no more than #{max_level_depth} levels deep.",
                  :status => :fatal, :syntax => false, :constraint => true )
  end
end
#validate_multiple_arguments(args) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 212

def validate_multiple_arguments args
  args = Array(args)
  if args.size > max_values
    compile_error(:token => args[max_values],
                  :message => "You have exceeded the maximum parameter count. Please limit to #{max_values} parameters to a single function.",
                  :status => :fatal, :syntax => false, :constraint => true )
    args.slice!(max_values..-1)
  end
end
#validate_multiple_values(values) ⇒ Object
# File 'lib/sparkql/parser_tools.rb', line 202

def validate_multiple_values values
  values = Array(values)
  if values.size > max_values
    compile_error(:token => values[max_values],
                  :message => "You have exceeded the maximum value count. Please limit to #{max_values} values in a single expression.",
                  :status => :fatal, :syntax => false, :constraint => true )
    values.slice!(max_values..-1)
  end
end