Class: Fluent::Plugin::ElasticsearchOutput

Inherits:
Output
Includes:
ElasticsearchIndexTemplate, ElasticsearchConstants
Defined in:
lib/fluent/plugin/out_elasticsearch.rb

Direct Known Subclasses

ElasticsearchOutputDynamic

Defined Under Namespace

Classes: ConnectionFailure, ConnectionRetryFailure, MissingIdFieldError, RequestInfo, RetryStreamError

Constant Summary

DEFAULT_BUFFER_TYPE = "memory"
DEFAULT_ELASTICSEARCH_VERSION = 5  # For compatibility.
DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
DEFAULT_TYPE_NAME = "fluentd".freeze
DEFAULT_RELOAD_AFTER = -1

Constants included from ElasticsearchConstants

Fluent::Plugin::ElasticsearchConstants::BODY_DELIMITER, Fluent::Plugin::ElasticsearchConstants::CREATE_OP, Fluent::Plugin::ElasticsearchConstants::ID_FIELD, Fluent::Plugin::ElasticsearchConstants::INDEX_OP, Fluent::Plugin::ElasticsearchConstants::TIMESTAMP_FIELD, Fluent::Plugin::ElasticsearchConstants::UPDATE_OP, Fluent::Plugin::ElasticsearchConstants::UPSERT_OP

Instance Method Summary

Methods included from ElasticsearchIndexTemplate

#get_custom_template, #get_template, #indexcreation, #retry_install, #template_custom_install, #template_exists?, #template_install, #template_put, #templates_hash_install

Constructor Details

#initialize ⇒ ElasticsearchOutput

Returns a new instance of ElasticsearchOutput.



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 137

def initialize
  super
end

Instance Method Details

#append_record_to_messages(op, meta, header, record, msgs) ⇒ Object

append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch. Records that do not include an ‘_id’ field are skipped when ‘write_operation’ is configured as ‘create’, ‘update’, or ‘upsert’.

Returns ‘true’ if the record was appended to the bulk message, and ‘false’ otherwise.


# File 'lib/fluent/plugin/out_elasticsearch.rb', line 422

def append_record_to_messages(op, meta, header, record, msgs)
  case op
  when UPDATE_OP, UPSERT_OP
    if meta.has_key?(ID_FIELD)
      header[UPDATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
      return true
    end
  when CREATE_OP
    if meta.has_key?(ID_FIELD)
      header[CREATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(record) << BODY_DELIMITER
      return true
    end
  when INDEX_OP
    header[INDEX_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    return true
  end
  return false
end
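
A standalone sketch (not the plugin itself) of the two newline-delimited JSON lines an ‘index’ operation appends per record; JSON.generate stands in here for the Oj or Yajl dump proc the plugin selects at configure time:

require 'json'

# Standalone sketch: an 'index' operation appends two NDJSON lines per record,
# the action header followed by the document source.
dump_proc      = JSON.method(:generate)  # stand-in for the Oj/Yajl @dump_proc
body_delimiter = "\n"                    # mirrors BODY_DELIMITER

meta   = { '_index' => 'fluentd-2024.01.01', '_type' => '_doc' }
record = { 'message' => 'hello', 'level' => 'info' }

msgs = +''
msgs << dump_proc.call('index' => meta) << body_delimiter
msgs << dump_proc.call(record) << body_delimiter

puts msgs
# {"index":{"_index":"fluentd-2024.01.01","_type":"_doc"}}
# {"message":"hello","level":"info"}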

#backend_options ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 259

def backend_options
  case @http_backend
  when :excon
    { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
  when :typhoeus
    require 'typhoeus'
    { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
  end
rescue LoadError
  raise Fluent::ConfigError, "You must install #{@http_backend} gem."
end

#client ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 323

def client
  @_es ||= begin
    adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
    local_reload_connections = @reload_connections
    if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
      local_reload_connections = @reload_after
    end
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
                                                                        options: {
                                                                          reload_connections: local_reload_connections,
                                                                          reload_on_failure: @reload_on_failure,
                                                                          resurrect_after: @resurrect_after,
                                                                          retry_on_failure: 5,
                                                                          logger: @transport_logger,
                                                                          transport_options: {
                                                                            headers: { 'Content-Type' => @content_type.to_s },
                                                                            request: { timeout: @request_timeout },
                                                                            ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
                                                                          },
                                                                          http: {
                                                                            user: @user,
                                                                            password: @password
                                                                          },
                                                                          sniffer_class: @sniffer_class,
                                                                          serializer_class: @serializer_class,
                                                                        }), &adapter_conf)
    es = Elasticsearch::Client.new transport: transport

    begin
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})!" unless es.ping
    rescue *es.transport.host_unreachable_exceptions => e
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})! #{e.message}"
    end

    log.info "Connection opened to Elasticsearch cluster => #{connection_options_description}"
    es
  end
end

#client_library_version ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 276

def client_library_version
  Elasticsearch::VERSION
end

#configure(conf) ⇒ Object

Raises:

  • (Fluent::ConfigError)


# File 'lib/fluent/plugin/out_elasticsearch.rb', line 141

def configure(conf)
  compat_parameters_convert(conf, :buffer)

  super
  raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag

  @time_parser = create_time_parser
  @backend_options = backend_options

  if @remove_keys
    @remove_keys = @remove_keys.split(/\s*,\s*/)
  end

  if @target_index_key && @target_index_key.is_a?(String)
    @target_index_key = @target_index_key.split '.'
  end

  if @target_type_key && @target_type_key.is_a?(String)
    @target_type_key = @target_type_key.split '.'
  end

  if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
    @remove_keys_on_update = @remove_keys_on_update.split ','
  end

  raise Fluent::ConfigError, "'max_retry_putting_template' must be positive number." if @max_retry_putting_template < 0
  
  if @template_name && @template_file
    retry_install(@max_retry_putting_template) do
      if @customize_template
        if @rollover_index
          raise Fluent::ConfigError, "'deflector_alias' must be provided if 'rollover_index' is set true ." if not @deflector_alias
        end
        template_custom_install(@template_name, @template_file, @template_overwrite, @customize_template, @index_prefix, @rollover_index, @deflector_alias, @application_name)
      else
        template_install(@template_name, @template_file, @template_overwrite)
      end
    end
  elsif @templates
    retry_install(@max_retry_putting_template) do
      templates_hash_install(@templates, @template_overwrite)
    end
  end

  # Consider missing the prefix of "$." in nested key specifiers.
  @id_key = convert_compat_id_key(@id_key) if @id_key
  @parent_key = convert_compat_id_key(@parent_key) if @parent_key
  @routing_key = convert_compat_id_key(@routing_key) if @routing_key

  @meta_config_map = create_meta_config_map

  @serializer_class = nil
  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
    if @prefer_oj_serializer
      @serializer_class = Fluent::Plugin::Serializer::Oj
      Elasticsearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
    end
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end

  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end
  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end

  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end

  @last_seen_major_version =
    begin
      detect_es_major_version
    rescue
      log.warn "Could not connect Elasticsearch or obtain version. Assuming Elasticsearch 5."
      DEFAULT_ELASTICSEARCH_VERSION
    end
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
  if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
    @type_name = '_doc'.freeze
  end

  if @validate_client_version
    if @last_seen_major_version != client_library_version.to_i
      raise Fluent::ConfigError, <<-EOC
        Detected ES #{@last_seen_major_version} but you use ES client #{client_library_version}.
        Please consider to use #{@last_seen_major_version}.x series ES client.
      EOC
    end
  end

  if @last_seen_major_version >= 6
    case @ssl_version
    when :SSLv23, :TLSv1, :TLSv1_1
      if @scheme == :https
        log.warn "Detected ES 6.x or above and enabled insecure security:
                  You might have to specify `ssl_version TLSv1_2` in configuration."
      end
    end
  end
  @sniffer_class = nil
  begin
    @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
  rescue Exception => ex
    raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
  end
end

#connection_options_description ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 407

def connection_options_description
  get_connection_options[:hosts].map do |host_info|
    attributes = host_info.dup
    attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
    attributes.inspect
  end.join(', ')
end
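
A standalone sketch of the obfuscation with a hypothetical host entry:

# Standalone sketch: the password is replaced before the host list is logged.
hosts = [{ host: 'logs.example.com', port: 9200, scheme: 'https',
           user: 'john', password: 'secret' }]

description = hosts.map do |host_info|
  attributes = host_info.dup
  attributes[:password] = 'obfuscated' if attributes.key?(:password)
  attributes.inspect
end.join(', ')

puts description
# {:host=>"logs.example.com", :port=>9200, :scheme=>"https", :user=>"john", :password=>"obfuscated"}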

#convert_compat_id_key(key) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 280

def convert_compat_id_key(key)
  if key.include?('.') && !key.start_with?('$[')
    key = "$.#{key}" unless key.start_with?('$.')
  end
  key
end
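
Illustrative inputs and outputs, using a standalone copy of the same logic and hypothetical key names:

def compat_id_key(key)
  if key.include?('.') && !key.start_with?('$[')
    key = "$.#{key}" unless key.start_with?('$.')
  end
  key
end

compat_id_key('log.id')       # => "$.log.id"       dotted key gains the "$." prefix
compat_id_key('$.log.id')     # => "$.log.id"       already prefixed, unchanged
compat_id_key('$["log.id"]')  # => "$[\"log.id\"]"  bracket notation is left alone
compat_id_key('request_id')   # => "request_id"     no dot, left as-is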

#create_meta_config_map ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 287

def create_meta_config_map
  result = []
  result << [record_accessor_create(@id_key), '_id'] if @id_key
  result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
  result << [record_accessor_create(@routing_key), '_routing'] if @routing_key
  result
end

#create_time_parser ⇒ Object

Once Fluentd v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn’t quite do what we want - it gives (sec, nsec), whereas we want something we can call `strftime` on…



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 298

def create_time_parser
  if @time_key_format
    begin
      # Strptime doesn't support all formats, but for those it does it's
      # blazingly fast.
      strptime = Strptime.new(@time_key_format)
      Proc.new { |value| strptime.exec(value).to_datetime }
    rescue
      # Can happen if Strptime doesn't recognize the format; or
      # if strptime couldn't be required (because it's not installed -- it's
      # ruby 2 only)
      Proc.new { |value| DateTime.strptime(value, @time_key_format) }
    end
  else
    Proc.new { |value| DateTime.parse(value) }
  end
end
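
A standalone sketch of the parser this produces, assuming a hypothetical time_key_format of '%Y-%m-%dT%H:%M:%S%z':

require 'date'

time_key_format = '%Y-%m-%dT%H:%M:%S%z'

parser = begin
  require 'strptime'                        # optional native parser (fast path)
  strptime = Strptime.new(time_key_format)
  proc { |value| strptime.exec(value).to_datetime }
rescue LoadError, StandardError
  proc { |value| DateTime.strptime(value, time_key_format) }  # pure-Ruby fallback
end

parser.call('2024-01-01T12:34:56+0000')
# => #<DateTime: 2024-01-01T12:34:56+00:00 ...>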

#detect_es_major_version ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 271

def detect_es_major_version
  @_es_info ||= client.info
  @_es_info["version"]["number"].to_i
end
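
The major version is the leading integer of the cluster's reported version string, for example:

'6.8.23'.to_i   # => 6
'7.10.2'.to_i   # => 7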

#expand_placeholders(metadata) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 484

def expand_placeholders(metadata)
  logstash_prefix = extract_placeholders(@logstash_prefix, metadata)
  index_name = extract_placeholders(@index_name, metadata)
  type_name = extract_placeholders(@type_name, metadata)
  return logstash_prefix, index_name, type_name
end

#flatten_record(record, prefix = []) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 469

def flatten_record(record, prefix=[])
  ret = {}
  if record.is_a? Hash
    record.each { |key, value|
      ret.merge! flatten_record(value, prefix + [key.to_s])
    }
  elsif record.is_a? Array
    # Don't mess with arrays, leave them unprocessed
    ret.merge!({prefix.join(@flatten_hashes_separator) => record})
  else
    return {prefix.join(@flatten_hashes_separator) => record}
  end
  ret
end
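
A standalone sketch of the same flattening, assuming a separator of '.' for flatten_hashes_separator and a hypothetical record:

SEPARATOR = '.'  # assumed flatten_hashes_separator for this sketch

def flatten(record, prefix = [])
  return { prefix.join(SEPARATOR) => record } unless record.is_a?(Hash)
  record.each_with_object({}) do |(key, value), out|
    if value.is_a?(Hash)
      out.merge!(flatten(value, prefix + [key.to_s]))
    else
      # Non-hash leaves (including arrays) are stored under the joined prefix.
      out[(prefix + [key.to_s]).join(SEPARATOR)] = value
    end
  end
end

p flatten('user' => { 'name' => 'alice', 'roles' => %w[admin dev] }, 'msg' => 'login')
# => {"user.name"=>"alice", "user.roles"=>["admin", "dev"], "msg"=>"login"}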

#get_connection_options ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 374

def get_connection_options
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if @hosts
    @hosts.split(',').map do |host_str|
      # Support legacy hosts format host:port,host:port,host:port...
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host:   host_str.split(':')[0],
          port:   (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme.to_s
        }
      else
        # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs.foo.com/elastic
        uri = URI(get_escaped_userinfo(host_str))
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port, scheme: @scheme.to_s}]
  end.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  {
    hosts: hosts
  }
end
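
A standalone sketch of how one URL-style host entry becomes a host hash (the URL is hypothetical):

require 'uri'

uri = URI('https://john:pass@logs.example.com/elastic')

host = { host: uri.host, port: uri.port, scheme: uri.scheme }
%w(user password path).each do |key|
  value = uri.public_send(key)
  host[key.to_sym] = value unless value.nil? || value == ''
end

p host
# => {:host=>"logs.example.com", :port=>443, :scheme=>"https",
#     :user=>"john", :password=>"pass", :path=>"/elastic"}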

#get_escaped_userinfo(host_str) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 362

def get_escaped_userinfo(host_str)
  if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
    m["scheme"] +
      URI.encode_www_form_component(m["user"]) +
      ':' +
      URI.encode_www_form_component(m["password"]) +
      m["path"]
  else
    host_str
  end
end
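
A standalone illustration: credentials wrapped in %{} are URL-encoded so that characters such as ‘@’ or ‘:’ inside them do not break URI parsing (the host string is hypothetical):

require 'uri'

host_str = 'https://%{j@hn}:%{p@ss:word}@logs.example.com/elastic'

if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
  escaped = m['scheme'] +
            URI.encode_www_form_component(m['user']) +
            ':' +
            URI.encode_www_form_component(m['password']) +
            m['path']
  puts escaped
  # https://j%40hn:p%40ss%3Aword@logs.example.com/elastic
end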

#get_parent_of(record, path) ⇒ Object

Returns [parent, child_key] of the child described by the path array in the record’s tree; returns [nil, child_key] if the path doesn’t exist in the record.



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 617

def get_parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end
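
Illustrative calls, using a standalone copy of the same traversal and a hypothetical record:

def parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end

record = { 'kubernetes' => { 'labels' => { 'app' => 'web' } } }

parent_of(record, %w[kubernetes labels app])
# => [{"app"=>"web"}, "app"]   parent hash plus the child key
parent_of(record, %w[kubernetes missing app])
# => [nil, "app"]              path does not exist in the record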

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/out_elasticsearch.rb', line 491

def multi_workers_ready?
  true
end

#parse_time(value, event_time, tag) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 316

def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  return Time.at(event_time).to_datetime
end

#process_message(tag, meta, header, time, record, extracted_values) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 536

def process_message(tag, meta, header, time, record, extracted_values)
  logstash_prefix, index_name, type_name = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = index_name
  end

  # Change target_index to lower-case since Elasticsearch doesn't
  # allow upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version >= 7
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    end
  else
    if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    else
      target_type = type_name
    end
  end

  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type

  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end

  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  return [meta, header, record]
end
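
For the logstash_format branch, a standalone sketch of how the target index is derived (the prefix, separator, and date format shown are the conventional defaults, assumed here for illustration):

require 'date'

logstash_prefix     = 'logstash'
separator           = '-'          # logstash_prefix_separator
logstash_dateformat = '%Y.%m.%d'

dt = Time.at(1_704_067_200).to_datetime.new_offset(0)  # 2024-01-01 00:00:00 UTC
target_index = "#{logstash_prefix}#{separator}#{dt.strftime(logstash_dateformat)}".downcase

p target_index  # => "logstash-2024.01.01"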

#remove_keys(record) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 460

def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end
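
A standalone sketch, assuming remove_keys_on_update_key is set to ‘remove_keys’ and remove_keys_on_update to ['password'] (both hypothetical values):

remove_keys_on_update_key = 'remove_keys'   # hypothetical configuration
remove_keys_on_update     = ['password']

record = { 'id' => 1, 'password' => 'secret', 'name' => 'alice' }

# A per-record key list (record['remove_keys']) would take precedence here.
keys = record[remove_keys_on_update_key] || remove_keys_on_update || []
record.delete(remove_keys_on_update_key)
record = record.dup
keys.each { |key| record.delete(key) }

p record  # => {"id"=>1, "name"=>"alice"}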

#send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index) ⇒ Object

send_bulk sends a specific bulk request, given the original tag, chunk, and bulk_message_count.



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 624

def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index)
  retries = 0
  begin

    log.on_trace { log.trace "bulk request: #{data}" }
    response = client.bulk body: data, index: index
    log.on_trace { log.trace "bulk response: #{response}" }

    if response['errors']
      error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    end
  rescue RetryStreamError => e
    emit_tag = @retry_tag ? @retry_tag : tag
    router.emit_stream(emit_tag, e.retry_stream)
  rescue *client.transport.host_unreachable_exceptions => e
    if retries < 2
      retries += 1
      @_es = nil
      @_es_info = nil
      log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
      sleep 2**retries
      retry
    end
    raise ConnectionRetryFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error
    raise
  end
end

#update_body(record, op) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 447

def update_body(record, op)
  update = remove_keys(record)
  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
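
Illustrative results, assuming UPSERT_OP is the string ‘upsert’ and a hypothetical record:

record = { 'name' => 'alice', 'password' => 'secret' }

# When remove_keys leaves the doc identical to the record, the doc itself
# serves as the upsert document:
{ 'doc' => record, 'doc_as_upsert' => true }

# When remove_keys strips fields (here 'password') from the doc, the full
# record is sent as the separate upsert document:
{ 'doc' => { 'name' => 'alice' }, 'upsert' => record }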

#write(chunk) ⇒ Object



# File 'lib/fluent/plugin/out_elasticsearch.rb', line 495

def write(chunk)
  bulk_message_count = Hash.new { |h,k| h[k] = 0 }
  bulk_message = Hash.new { |h,k| h[k] = '' }
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  extracted_values = expand_placeholders(chunk.metadata)
  @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
      info = if @include_index_in_url
               RequestInfo.new(nil, meta.delete("_index".freeze))
             else
               RequestInfo.new(nil, nil)
             end

      if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
        bulk_message_count[info] += 1;
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end


  bulk_message.each do |info, msgs|
    send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info.index) unless msgs.empty?
    msgs.clear
  end
end