Class: Druid::Query::Builder
- Inherits: Object
- Hierarchy: Object → Druid::Query::Builder
- Defined in:
- lib/druid/query.rb
Instance Attribute Summary collapse
-
#query ⇒ Object
readonly
Returns the value of attribute query.
Instance Method Summary collapse
- #cardinality(metric, dimensions, by_row = false) ⇒ Object
- #chain_having(having) ⇒ Object
- #data_source(source) ⇒ Object
-
#filter(hash = nil, type = :in, &block) ⇒ Object
filters.
- #filter_from_block(&block) ⇒ Object
- #filter_from_hash(hash, type = :in) ⇒ Object
- #granularity(gran, time_zone = "UTC") ⇒ Object
- #group_by(*dimensions) ⇒ Object
-
#having(hash = nil, &block) ⇒ Object
having.
- #having_from_block(&block) ⇒ Object
- #having_from_hash(h) ⇒ Object
- #histogram(metric, type = "equalBuckets", args = {}) ⇒ Object
- #histograms(metrics) ⇒ Object
-
#initialize ⇒ Builder
constructor
A new instance of Builder.
- #interval(from, to = Time.now) ⇒ Object
- #intervals(is) ⇒ Object
- #js_aggregation(metric, columns, functions) ⇒ Object
- #last(duration) ⇒ Object
-
#limit(limit, columns) ⇒ Object
limit/sort.
-
#metadata ⇒ Object
query types.
-
#postagg(type = :long_sum, &block) ⇒ Object
post aggregations.
- #query_type(type) ⇒ Object
- #search(what = "", dimensions = [], limit = nil) ⇒ Object
- #timeseries ⇒ Object
- #topn(dimension, metric, threshold) ⇒ Object
Constructor Details
Instance Attribute Details
#query ⇒ Object (readonly)
Returns the value of attribute query.
303 304 305 |
# File 'lib/druid/query.rb', line 303 def query @query end |
Instance Method Details
#cardinality(metric, dimensions, by_row = false) ⇒ Object
432 433 434 435 436 437 438 439 440 |
# File 'lib/druid/query.rb', line 432 def cardinality(metric, dimensions, by_row = false) @query.aggregations << Aggregation.new({ type: 'cardinality', name: metric, fieldNames: dimensions, byRow: by_row, }) unless @query.contains_aggregation?(metric) self end |
#chain_having(having) ⇒ Object
502 503 504 505 506 |
# File 'lib/druid/query.rb', line 502 def chain_having(having) having = @query.having.chain(having) if @query.having @query.having = having self end |
#data_source(source) ⇒ Object
316 317 318 319 |
# File 'lib/druid/query.rb', line 316 def data_source(source) @query.dataSource = source.split('/').last self end |
#filter(hash = nil, type = :in, &block) ⇒ Object
filters
466 467 468 469 470 |
# File 'lib/druid/query.rb', line 466 def filter(hash = nil, type = :in, &block) filter_from_hash(hash, type) if hash filter_from_block(&block) if block self end |
#filter_from_block(&block) ⇒ Object
481 482 483 484 |
# File 'lib/druid/query.rb', line 481 def filter_from_block(&block) filter = Filter.new.instance_exec(&block) @query.filter = @query.filter ? @query.filter.&(filter) : filter end |
#filter_from_hash(hash, type = :in) ⇒ Object
472 473 474 475 476 477 478 479 |
# File 'lib/druid/query.rb', line 472 def filter_from_hash(hash, type = :in) last = nil hash.each do |k, values| filter = DimensionFilter.new(dimension: k).__send__(type, values) last = last ? last.&(filter) : filter end @query.filter = @query.filter ? @query.filter.&(last) : last end |
#granularity(gran, time_zone = "UTC") ⇒ Object
338 339 340 341 342 343 344 345 346 347 348 349 350 |
# File 'lib/druid/query.rb', line 338 def granularity(gran, time_zone = "UTC") gran = gran.to_s if %w(all none minute fifteen_minute thirty_minute hour day).include?(gran) @query.granularity = gran else @query.granularity = Granularity.new({ type: 'period', period: gran, timeZone: time_zone }) end self end |
#group_by(*dimensions) ⇒ Object
366 367 368 369 370 371 372 |
# File 'lib/druid/query.rb', line 366 def group_by(*dimensions) query_type(:groupBy) @query.dimensions = dimensions.flatten.map do |dimension| dimension.is_a?(Dimension) ? dimension : Dimension.new(dimension) end self end |
#having(hash = nil, &block) ⇒ Object
having
488 489 490 491 492 |
# File 'lib/druid/query.rb', line 488 def having(hash = nil, &block) having_from_hash(hash) if hash having_from_block(&block) if block self end |
#having_from_block(&block) ⇒ Object
494 495 496 |
# File 'lib/druid/query.rb', line 494 def having_from_block(&block) chain_having(Having.new.instance_exec(&block)) end |
#having_from_hash(h) ⇒ Object
498 499 500 |
# File 'lib/druid/query.rb', line 498 def having_from_hash(h) chain_having(Having.new(h)) end |
#histogram(metric, type = "equalBuckets", args = {}) ⇒ Object
414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 |
# File 'lib/druid/query.rb', line 414 def histogram(metric, type = "equalBuckets", args = {}) @query.aggregations << Aggregation.new({ type: "approxHistogramFold", name: "raw_#{metric}", fieldName: metric, }) type = type.dup type[0] = type[0].upcase options = args.dup.merge({ name: metric, fieldName: "raw_#{metric}" }) @query.postAggregations << ::Druid.const_get("PostAggregationHistogram#{type}").new(options) self end |
#histograms(metrics) ⇒ Object
409 410 411 412 |
# File 'lib/druid/query.rb', line 409 def histograms(metrics) metrics.each{|m| histogram(m) } self end |
#interval(from, to = Time.now) ⇒ Object
321 322 323 |
# File 'lib/druid/query.rb', line 321 def interval(from, to = Time.now) intervals([[from, to]]) end |
#intervals(is) ⇒ Object
325 326 327 328 329 330 331 332 |
# File 'lib/druid/query.rb', line 325 def intervals(is) @query.intervals = is.map do |from, to| from = from.respond_to?(:iso8601) ? from.iso8601 : ISO8601::DateTime.new(from).to_s to = to.respond_to?(:iso8601) ? to.iso8601 : ISO8601::DateTime.new(to).to_s "#{from}/#{to}" end self end |
#js_aggregation(metric, columns, functions) ⇒ Object
442 443 444 445 446 447 448 449 450 451 452 |
# File 'lib/druid/query.rb', line 442 def js_aggregation(metric, columns, functions) @query.aggregations << Aggregation.new({ type: 'javascript', name: metric, fieldNames: columns, fnAggregate: functions[:aggregate], fnCombine: functions[:combine], fnReset: functions[:reset], }) unless @query.contains_aggregation?(metric) self end |
#last(duration) ⇒ Object
334 335 336 |
# File 'lib/druid/query.rb', line 334 def last(duration) interval(Time.now - duration) end |
#limit(limit, columns) ⇒ Object
limit/sort
510 511 512 513 514 515 516 517 518 519 |
# File 'lib/druid/query.rb', line 510 def limit(limit, columns) @query.limitSpec = { type: :default, limit: limit, columns: columns.map do |dimension, direction| { dimension: dimension, direction: direction } end } self end |
#metadata ⇒ Object
query types
354 355 356 357 358 359 |
# File 'lib/druid/query.rb', line 354 def metadata query_type(:segmentMetadata) @query.context.useCache = false @query.context.populateCache = false self end |
#postagg(type = :long_sum, &block) ⇒ Object
post aggregations
456 457 458 459 460 461 462 |
# File 'lib/druid/query.rb', line 456 def postagg(type = :long_sum, &block) post_agg = PostAggregation.new.instance_exec(&block) @query.postAggregations << post_agg # make sure, the required fields are in the query self.method(type).call(post_agg.field_names) self end |
#query_type(type) ⇒ Object
311 312 313 314 |
# File 'lib/druid/query.rb', line 311 def query_type(type) @query.queryType = type.to_s self end |
#search(what = "", dimensions = [], limit = nil) ⇒ Object
382 383 384 385 386 387 388 389 390 391 392 393 |
# File 'lib/druid/query.rb', line 382 def search(what = "", dimensions = [], limit = nil) query_type(:search) @query.searchDimensions = dimensions unless dimensions.empty? @query.limit = limit if limit # for now we always sort lexicographic @query.sort = { type: 'lexicographic' } @query.query = { type: "insensitive_contains", value: what } self end |
#timeseries ⇒ Object
361 362 363 364 |
# File 'lib/druid/query.rb', line 361 def timeseries query_type(:timeseries) self end |
#topn(dimension, metric, threshold) ⇒ Object
374 375 376 377 378 379 380 |
# File 'lib/druid/query.rb', line 374 def topn(dimension, metric, threshold) query_type(:topN) @query.dimension = dimension @query.metric = metric @query.threshold = threshold self end |