Class: Fluent::Plugin::ElasticsearchOutput
Defined Under Namespace
Classes: MissingIdFieldError, RecoverableRequestFailure, RequestInfo, RetryStreamError, UnrecoverableRequestFailure
Constant Summary
- DEFAULT_BUFFER_TYPE = "memory"
- DEFAULT_ELASTICSEARCH_VERSION = 5
- DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
- DEFAULT_TYPE_NAME = "fluentd".freeze
- DEFAULT_RELOAD_AFTER = -1
Constants included from ElasticsearchConstants
Fluent::Plugin::ElasticsearchConstants::BODY_DELIMITER, Fluent::Plugin::ElasticsearchConstants::CREATE_OP, Fluent::Plugin::ElasticsearchConstants::ID_FIELD, Fluent::Plugin::ElasticsearchConstants::INDEX_OP, Fluent::Plugin::ElasticsearchConstants::TIMESTAMP_FIELD, Fluent::Plugin::ElasticsearchConstants::UPDATE_OP, Fluent::Plugin::ElasticsearchConstants::UPSERT_OP
Instance Method Summary
- #append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
  append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch.
- #backend_options ⇒ Object
- #client ⇒ Object
- #client_library_version ⇒ Object
- #configure(conf) ⇒ Object
- #configure_routing_key_name ⇒ Object
- #connection_options_description ⇒ Object
- #convert_compat_id_key(key) ⇒ Object
- #create_meta_config_map ⇒ Object
- #create_time_parser ⇒ Object
  Once Fluentd v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
- #detect_es_major_version ⇒ Object
- #expand_placeholders(metadata) ⇒ Object
- #flatten_record(record, prefix = []) ⇒ Object
- #get_connection_options ⇒ Object
- #get_escaped_userinfo(host_str) ⇒ Object
- #get_parent_of(record, path) ⇒ Object
  Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
- #initialize ⇒ ElasticsearchOutput (constructor)
  A new instance of ElasticsearchOutput.
- #multi_workers_ready? ⇒ Boolean
- #parse_time(value, event_time, tag) ⇒ Object
- #process_message(tag, meta, header, time, record, extracted_values) ⇒ Object
- #remove_keys(record) ⇒ Object
- #send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index) ⇒ Object
  send_bulk sends a given bulk request, along with the original tag, chunk, and bulk_message_count for error handling.
- #update_body(record, op) ⇒ Object
- #write(chunk) ⇒ Object
Methods included from ElasticsearchIndexTemplate
#get_custom_template, #get_template, #indexcreation, #retry_operate, #template_custom_install, #template_exists?, #template_install, #template_put, #templates_hash_install
Constructor Details
#initialize ⇒ ElasticsearchOutput

Returns a new instance of ElasticsearchOutput.

# File 'lib/fluent/plugin/out_elasticsearch.rb', line 144

def initialize
  super
end
Instance Method Details
#append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch. Records that do not include an `_id` field are skipped when `write_operation` is configured as `create` or `update`.

Returns `true` if the record was appended to the bulk message and `false` otherwise.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 437
def append_record_to_messages(op, meta, header, record, msgs)
  case op
  when UPDATE_OP, UPSERT_OP
    if meta.has_key?(ID_FIELD)
      header[UPDATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
      return true
    end
  when CREATE_OP
    if meta.has_key?(ID_FIELD)
      header[CREATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(record) << BODY_DELIMITER
      return true
    end
  when INDEX_OP
    header[INDEX_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    return true
  end
  return false
end
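To illustrate the payload shape, here is a minimal standalone sketch of the two-line header-then-body layout the method appends for an `index` operation, assuming `BODY_DELIMITER` is a newline and using plain JSON in place of `@dump_proc`:

require 'json'

BODY_DELIMITER = "\n".freeze  # assumed value of the plugin constant

# Append one `index` operation to the bulk buffer: an action/metadata
# line followed by the document source line.
def append_index_op(meta, record, msgs)
  msgs << { 'index' => meta }.to_json << BODY_DELIMITER
  msgs << record.to_json << BODY_DELIMITER
  true
end

msgs = +''
append_index_op({ '_index' => 'fluentd', '_type' => '_doc' },
                { 'message' => 'hello' }, msgs)
puts msgs
# {"index":{"_index":"fluentd","_type":"_doc"}}
# {"message":"hello"}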
#backend_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 275
def backend_options
  case @http_backend
  when :excon
    { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
  when :typhoeus
    require 'typhoeus'
    { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
  end
rescue LoadError
  raise Fluent::ConfigError, "You must install #{@http_backend} gem."
end
#client ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 347
def client
  @_es ||= begin
    adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
    local_reload_connections = @reload_connections
    if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
      local_reload_connections = @reload_after
    end
    headers = { 'Content-Type' => @content_type.to_s }.merge(@custom_headers)
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
      options: {
        reload_connections: local_reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        logger: @transport_logger,
        transport_options: {
          headers: headers,
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        },
        sniffer_class: @sniffer_class,
        serializer_class: @serializer_class,
      }), &adapter_conf)
    Elasticsearch::Client.new transport: transport
  end
end
#client_library_version ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 292
def client_library_version
  Elasticsearch::VERSION
end
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 148
def configure(conf)
  compat_parameters_convert(conf, :buffer)

  super
  raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag

  @time_parser = create_time_parser
  @backend_options = backend_options

  if @remove_keys
    @remove_keys = @remove_keys.split(/\s*,\s*/)
  end

  if @target_index_key && @target_index_key.is_a?(String)
    @target_index_key = @target_index_key.split '.'
  end

  if @target_type_key && @target_type_key.is_a?(String)
    @target_type_key = @target_type_key.split '.'
  end

  if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
    @remove_keys_on_update = @remove_keys_on_update.split ','
  end

  raise Fluent::ConfigError, "'max_retry_putting_template' must be positive number." if @max_retry_putting_template < 0

  if @template_name && @template_file
    retry_operate(@max_retry_putting_template) do
      if @customize_template
        if @rollover_index
          raise Fluent::ConfigError, "'deflector_alias' must be provided if 'rollover_index' is set true." if not @deflector_alias
        end
        template_custom_install(@template_name, @template_file, @template_overwrite, @customize_template, @index_prefix, @rollover_index, @deflector_alias, @application_name, @index_date_pattern)
      else
        template_install(@template_name, @template_file, @template_overwrite)
      end
    end
  elsif @templates
    retry_operate(@max_retry_putting_template) do
      templates_hash_install(@templates, @template_overwrite)
    end
  end

  @id_key = convert_compat_id_key(@id_key) if @id_key
  @parent_key = convert_compat_id_key(@parent_key) if @parent_key
  @routing_key = convert_compat_id_key(@routing_key) if @routing_key

  @meta_config_map = create_meta_config_map

  @serializer_class = nil
  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
    if @prefer_oj_serializer
      @serializer_class = Fluent::Plugin::Serializer::Oj
      Elasticsearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
    end
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end

  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end
  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end

  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end

  @sniffer_class = nil
  begin
    @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
  rescue Exception => ex
    raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
  end

  @last_seen_major_version =
    if @verify_es_version_at_startup
      retry_operate(@max_retry_get_es_version) do
        detect_es_major_version
      end
    else
      @default_elasticsearch_version
    end
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
  if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
    @type_name = '_doc'.freeze
  end

  if @validate_client_version
    if @last_seen_major_version != client_library_version.to_i
      raise Fluent::ConfigError, <<-EOC
        Detected ES #{@last_seen_major_version} but you use ES client #{client_library_version}.
        Please consider to use #{@last_seen_major_version}.x series ES client.
      EOC
    end
  end

  if @last_seen_major_version >= 6
    case @ssl_version
    when :SSLv23, :TLSv1, :TLSv1_1
      if @scheme == :https
        log.warn "Detected ES 6.x or above and enabled insecure security:
                  You might have to specify `ssl_version TLSv1_2` in configuration."
      end
    end
  end

  if @buffer_config.flush_thread_count < 2
    log.warn "To prevent events traffic jam, you should specify 2 or more 'flush_thread_count'."
  end

  @routing_key_name = configure_routing_key_name
end
#configure_routing_key_name ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 296
def configure_routing_key_name
  if @last_seen_major_version >= 7
    'routing'
  else
    '_routing'
  end
end
#connection_options_description ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 422
def connection_options_description
  get_connection_options[:hosts].map do |host_info|
    attributes = host_info.dup
    attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
    attributes.inspect
  end.join(', ')
end
#convert_compat_id_key(key) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 304
def convert_compat_id_key(key)
  if key.include?('.') && !key.start_with?('$[')
    key = "$.#{key}" unless key.start_with?('$.')
  end
  key
end
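A few illustrative inputs and outputs, inferred from the logic above (dot-notation keys are upgraded to record_accessor syntax; bracket notation is left alone):

convert_compat_id_key('request_id')       # => "request_id" (no dot, unchanged)
convert_compat_id_key('log.request_id')   # => "$.log.request_id"
convert_compat_id_key('$.log.request_id') # => "$.log.request_id" (already prefixed)
convert_compat_id_key("$['log']['id']")   # => "$['log']['id']" (bracket form, unchanged)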
#create_meta_config_map ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 311
def create_meta_config_map
  result = []
  result << [record_accessor_create(@id_key), '_id'] if @id_key
  result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
  result << [record_accessor_create(@routing_key), @routing_key_name] if @routing_key
  result
end
#create_time_parser ⇒ Object
Once Fluentd v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 322
def create_time_parser
  if @time_key_format
    begin
      strptime = Strptime.new(@time_key_format)
      Proc.new { |value| strptime.exec(value).to_datetime }
    rescue
      Proc.new { |value| DateTime.strptime(value, @time_key_format) }
    end
  else
    Proc.new { |value| DateTime.parse(value) }
  end
end
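A standalone sketch of the same fallback chain, assuming the `strptime` gem may or may not be installed; the returned proc yields a DateTime, so callers can use `strftime` on the result for index names:

require 'date'

time_key_format = '%Y-%m-%dT%H:%M:%S%z'
parser = begin
  require 'strptime'  # fast path; fall back to DateTime if unavailable
  strptime = Strptime.new(time_key_format)
  Proc.new { |value| strptime.exec(value).to_datetime }
rescue LoadError, ArgumentError
  Proc.new { |value| DateTime.strptime(value, time_key_format) }
end

dt = parser.call('2019-03-01T12:34:56+00:00')
puts dt.strftime('%Y.%m.%d')  # => 2019.03.01 (usable with logstash_dateformat)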
#detect_es_major_version ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 287
def detect_es_major_version
  @_es_info ||= client.info
  @_es_info["version"]["number"].to_i
end
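The major version falls out of `String#to_i`, which parses leading digits and stops at the first dot:

'6.8.2'.to_i  # => 6
'7.1.0'.to_i  # => 7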
#expand_placeholders(metadata) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 499
def expand_placeholders(metadata)
  logstash_prefix = extract_placeholders(@logstash_prefix, metadata)
  index_name = extract_placeholders(@index_name, metadata)
  type_name = extract_placeholders(@type_name, metadata)
  return logstash_prefix, index_name, type_name
end
#flatten_record(record, prefix = []) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 484
def flatten_record(record, prefix=[])
  ret = {}
  if record.is_a? Hash
    record.each { |key, value|
      ret.merge! flatten_record(value, prefix + [key.to_s])
    }
  elsif record.is_a? Array
    ret.merge!({prefix.join(@flatten_hashes_separator) => record})
  else
    return {prefix.join(@flatten_hashes_separator) => record}
  end
  ret
end
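A standalone sketch of the flattening behavior, with "." standing in for `@flatten_hashes_separator`; note that arrays are kept as leaf values rather than flattened:

def flatten(record, prefix = [], sep = '.')
  # Non-hash values (including arrays) terminate the recursion.
  return { prefix.join(sep) => record } unless record.is_a?(Hash)
  record.each_with_object({}) do |(k, v), ret|
    ret.merge!(flatten(v, prefix + [k.to_s], sep))
  end
end

p flatten({ 'user' => { 'name' => 'alice', 'roles' => %w[admin ops] } })
# => {"user.name"=>"alice", "user.roles"=>["admin", "ops"]}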
#get_connection_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 389
def get_connection_options
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if @hosts
    @hosts.split(',').map do |host_str|
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host: host_str.split(':')[0],
          port: (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme.to_s
        }
      else
        uri = URI(get_escaped_userinfo(host_str))
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port, scheme: @scheme.to_s}]
  end.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  {
    hosts: hosts
  }
end
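For the URL-style branch above, a quick standalone illustration of how one host entry is built (host value hypothetical):

require 'uri'

uri = URI('https://john:doe@logs.example.com:9243/elastic')
host = %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
  value = uri.public_send(key)
  hash[key.to_sym] = value unless value.nil? || value == ''
  hash
end
p host
# => {:host=>"logs.example.com", :port=>9243, :scheme=>"https",
#     :user=>"john", :password=>"doe", :path=>"/elastic"}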
#get_escaped_userinfo(host_str) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 377
def get_escaped_userinfo(host_str)
  if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
    m["scheme"] +
      URI.encode_www_form_component(m["user"]) +
      ':' +
      URI.encode_www_form_component(m["password"]) +
      m["path"]
  else
    host_str
  end
end
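An illustrative run with hypothetical credentials containing characters that are illegal in a raw URI userinfo section:

require 'uri'

host_str = 'https://%{j+hn}:%{p@ss/word}@logs.example.com:9200/'
if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
  escaped = m['scheme'] +
            URI.encode_www_form_component(m['user']) + ':' +
            URI.encode_www_form_component(m['password']) + m['path']
  puts escaped
end
# => https://j%2Bhn:p%40ss%2Fword@logs.example.com:9200/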
#get_parent_of(record, path) ⇒ Object
Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 631
def get_parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end
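An illustrative traversal, mirroring the reduce above with a hypothetical record:

record = { 'kubernetes' => { 'labels' => { 'app' => 'web' } } }
path   = ['kubernetes', 'labels', 'app']

parent = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
p [parent, path[-1]]   # => [{"app"=>"web"}, "app"]

# A path that doesn't exist yields [nil, child_key]:
bad = ['kubernetes', 'missing', 'app']
p [bad[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }, bad[-1]]
# => [nil, "app"]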
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 506
def multi_workers_ready?
  true
end
#parse_time(value, event_time, tag) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 340
def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  return Time.at(event_time).to_datetime
end
#process_message(tag, meta, header, time, record, extracted_values) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 550
def process_message(tag, meta, header, time, record, extracted_values)
  logstash_prefix, index_name, type_name = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = index_name
  end

  # Elasticsearch does not allow upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version >= 7
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    end
  else
    if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    else
      target_type = type_name
    end
  end

  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type

  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end

  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  return [meta, header, record]
end
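To make the logstash branch concrete, here is a standalone sketch of the index-name derivation, assuming the plugin defaults (`logstash_prefix "logstash"`, separator `"-"`, `logstash_dateformat "%Y.%m.%d"`, `utc_index true`):

require 'date'

event_time = 1_551_441_296                         # 2019-03-01 UTC
dt = Time.at(event_time).to_datetime.new_offset(0) # utc_index normalizes to UTC
target_index = "logstash" + "-" + dt.strftime('%Y.%m.%d')
puts target_index.downcase  # => logstash-2019.03.01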
#remove_keys(record) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 475
def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end
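A standalone sketch showing the precedence: a per-record key list (stored under `remove_keys_on_update_key`, field name hypothetical here) wins over the statically configured `remove_keys_on_update`:

def remove_keys_sketch(record, dynamic_key, static_keys)
  keys = record[dynamic_key] || static_keys || []
  record.delete(dynamic_key)   # the key-list field itself never reaches ES
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end

p remove_keys_sketch({ 'f' => 1, 'secret' => 'x', 'skip' => ['secret'] }, 'skip', ['f'])
# => {"f"=>1}  (the per-record list ['secret'] wins over ['f'])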
#send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index) ⇒ Object
send_bulk sends a given bulk request, along with the original tag, chunk, and bulk_message_count for error handling.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 638
def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index)
  begin
    log.on_trace { log.trace "bulk request: #{data}" }
    response = client.bulk body: data, index: index
    log.on_trace { log.trace "bulk response: #{response}" }
    if response['errors']
      error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    end
  rescue RetryStreamError => e
    emit_tag = @retry_tag ? @retry_tag : tag
    router.emit_stream(emit_tag, e.retry_stream)
  rescue => e
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error
    raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description}): #{e.message}"
  end
end
#update_body(record, op) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 462
def update_body(record, op)
  update = remove_keys(record)
  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
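The three body shapes this can produce, shown with a standalone sketch (assuming `UPSERT_OP` is the string "upsert" and using the `remove_keys` semantics from above):

def update_body_sketch(record, op, removed_keys = [])
  update = record.reject { |k, _| removed_keys.include?(k) }
  body = { 'doc' => update }
  if op == 'upsert'
    if update == record
      body['doc_as_upsert'] = true
    else
      body['upsert'] = record
    end
  end
  body
end

p update_body_sketch({ 'f' => 1 }, 'update')
# => {"doc"=>{"f"=>1}}
p update_body_sketch({ 'f' => 1 }, 'upsert')
# => {"doc"=>{"f"=>1}, "doc_as_upsert"=>true}
p update_body_sketch({ 'f' => 1, 'secret' => 'x' }, 'upsert', ['secret'])
# => {"doc"=>{"f"=>1}, "upsert"=>{"f"=>1, "secret"=>"x"}}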
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 510
def write(chunk)
  bulk_message_count = Hash.new { |h,k| h[k] = 0 }
  bulk_message = Hash.new { |h,k| h[k] = '' }
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  extracted_values = expand_placeholders(chunk.metadata)

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
      info = if @include_index_in_url
               RequestInfo.new(nil, meta.delete("_index".freeze))
             else
               RequestInfo.new(nil, nil)
             end

      if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
        bulk_message_count[info] += 1;
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end

  bulk_message.each do |info, msgs|
    send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info.index) unless msgs.empty?
    msgs.clear
  end
end