Class: Fluent::Plugin::ElasticsearchOutput
Defined Under Namespace
Classes: ConnectionFailure, MissingIdFieldError, RetryStreamError
Constant Summary
- DEFAULT_BUFFER_TYPE = "memory"
- DEFAULT_ELASTICSEARCH_VERSION = 5
- DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
- DEFAULT_TYPE_NAME = "fluentd".freeze
Constants included from ElasticsearchConstants:
Fluent::Plugin::ElasticsearchConstants::BODY_DELIMITER, Fluent::Plugin::ElasticsearchConstants::CREATE_OP, Fluent::Plugin::ElasticsearchConstants::ID_FIELD, Fluent::Plugin::ElasticsearchConstants::INDEX_OP, Fluent::Plugin::ElasticsearchConstants::TIMESTAMP_FIELD, Fluent::Plugin::ElasticsearchConstants::UPDATE_OP, Fluent::Plugin::ElasticsearchConstants::UPSERT_OP
Instance Method Summary
- #append_record_to_messages(op, meta, header, record, msgs) ⇒ Object: Adds a record to the bulk message payload to be submitted to Elasticsearch.
- #client ⇒ Object
- #configure(conf) ⇒ Object
- #connection_options_description ⇒ Object
- #convert_compat_id_key(key) ⇒ Object
- #create_meta_config_map ⇒ Object
- #create_time_parser ⇒ Object: Once Fluentd v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
- #detect_es_major_version ⇒ Object
- #expand_placeholders(metadata) ⇒ Object
- #flatten_record(record, prefix = []) ⇒ Object
- #get_connection_options ⇒ Object
- #get_escaped_userinfo(host_str) ⇒ Object
- #get_parent_of(record, path) ⇒ Object: Returns [parent, child_key] of the child described by the path array in the record's tree, or [nil, child_key] if the path doesn't exist in the record.
- #initialize ⇒ ElasticsearchOutput (constructor): A new instance of ElasticsearchOutput.
- #multi_workers_ready? ⇒ Boolean
- #parse_time(value, event_time, tag) ⇒ Object
- #process_message(tag, meta, header, time, record, bulk_message, extracted_values) ⇒ Object
- #remove_keys(record) ⇒ Object
- #send_bulk(data, tag, chunk, bulk_message_count, extracted_values) ⇒ Object: Sends a bulk request, given the request body, the original tag, chunk, and bulk_message_count.
- #update_body(record, op) ⇒ Object
- #write(chunk) ⇒ Object
Methods included from ElasticsearchIndexTemplate:
#get_template, #retry_install, #template_exists?, #template_install, #template_put, #templates_hash_install
Constructor Details
#initialize ⇒ ElasticsearchOutput
Returns a new instance of ElasticsearchOutput.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 117
def initialize
  super
end
Instance Method Details
#append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch. Records that do not include an `_id` field are skipped when `write_operation` is configured as `create` or `update`.
Returns `true` if the record was appended to the bulk message and `false` otherwise.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 349
def append_record_to_messages(op, meta, header, record, msgs)
  case op
  when UPDATE_OP, UPSERT_OP
    if meta.has_key?(ID_FIELD)
      header[UPDATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
      return true
    end
  when CREATE_OP
    if meta.has_key?(ID_FIELD)
      header[CREATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(record) << BODY_DELIMITER
      return true
    end
  when INDEX_OP
    header[INDEX_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    return true
  end
  return false
end
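For illustration, a minimal standalone sketch of the newline-delimited bulk payload this method builds for an `index` operation, using plain JSON in place of the plugin's `@dump_proc` and `BODY_DELIMITER` (the field values here are hypothetical):

require 'json'

msgs = ''
meta = { '_index' => 'fluentd-2019.01.01', '_type' => '_doc', '_id' => '42' }
record = { 'message' => 'hello' }
msgs << JSON.dump('index' => meta) << "\n"
msgs << JSON.dump(record) << "\n"
puts msgs
# {"index":{"_index":"fluentd-2019.01.01","_type":"_doc","_id":"42"}}
# {"message":"hello"}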
#client ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 255
def client
  @_es ||= begin
    excon_options = { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
    adapter_conf = lambda { |f| f.adapter :excon, excon_options }
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
      options: {
        reload_connections: @reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        retry_on_failure: 5,
        logger: @transport_logger,
        transport_options: {
          headers: { 'Content-Type' => @content_type.to_s },
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        }
      }), &adapter_conf)
    es = Elasticsearch::Client.new transport: transport

    begin
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})!" unless es.ping
    rescue *es.transport.host_unreachable_exceptions => e
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})! #{e.message}"
    end

    log.info "Connection opened to Elasticsearch cluster => #{connection_options_description}"
    es
  end
end
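As a usage sketch, the same connect-and-ping pattern can be exercised directly against a cluster (the URL and error messages here are hypothetical; assumes the `elasticsearch` gem is installed):

require 'elasticsearch'

es = Elasticsearch::Client.new(url: 'http://localhost:9200')
begin
  raise "Can not reach Elasticsearch cluster!" unless es.ping
rescue *es.transport.host_unreachable_exceptions => e
  raise "Can not reach Elasticsearch cluster! #{e.message}"
end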
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 121
def configure(conf)
  compat_parameters_convert(conf, :buffer)
  super
  raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag

  @time_parser = create_time_parser

  if @remove_keys
    @remove_keys = @remove_keys.split(/\s*,\s*/)
  end

  if @target_index_key && @target_index_key.is_a?(String)
    @target_index_key = @target_index_key.split '.'
  end

  if @target_type_key && @target_type_key.is_a?(String)
    @target_type_key = @target_type_key.split '.'
  end

  if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
    @remove_keys_on_update = @remove_keys_on_update.split ','
  end

  raise Fluent::ConfigError, "'max_retry_putting_template' must be positive number." if @max_retry_putting_template < 0

  if @template_name && @template_file
    retry_install(@max_retry_putting_template) do
      template_install(@template_name, @template_file, @template_overwrite)
    end
  elsif @templates
    retry_install(@max_retry_putting_template) do
      templates_hash_install(@templates, @template_overwrite)
    end
  end

  @id_key = convert_compat_id_key(@id_key) if @id_key
  @parent_key = convert_compat_id_key(@parent_key) if @parent_key
  @routing_key = convert_compat_id_key(@routing_key) if @routing_key

  @meta_config_map = create_meta_config_map

  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end

  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end
  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end

  if @hash_config
    raise Fluent::ConfigError, "@hash_config.hash_id_key and id_key must be equal." unless @hash_config.hash_id_key == @id_key
  end

  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end

  @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
  if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
    @type_name = '_doc'.freeze
  end

  if @last_seen_major_version >= 6
    case @ssl_version
    when :SSLv23, :TLSv1, :TLSv1_1
      if @scheme == :https
        log.warn "Detected ES 6.x or above and enabled insecure security:
                  You might have to specify `ssl_version TLSv1_2` in configuration."
      end
    end
  end
end
#connection_options_description ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 334
def connection_options_description
  get_connection_options[:hosts].map do |host_info|
    attributes = host_info.dup
    attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
    attributes.inspect
  end.join(', ')
end
#convert_compat_id_key(key) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 212
def convert_compat_id_key(key)
  if key.include?('.') && !key.start_with?('$[')
    key = "$.#{key}" unless key.start_with?('$.')
  end
  key
end
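For example, legacy dotted keys are rewritten into Fluentd's record_accessor syntax, while keys already in accessor form are left alone (the sample keys are hypothetical):

convert_compat_id_key('log.id')         # => "$.log.id"
convert_compat_id_key('$.log.id')       # => "$.log.id"
convert_compat_id_key('$["log"]["id"]') # => '$["log"]["id"]'
convert_compat_id_key('id')             # => "id" (no dot, left unchanged)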
#create_meta_config_map ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 219
def create_meta_config_map
  result = []
  result << [record_accessor_create(@id_key), '_id'] if @id_key
  result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
  result << [record_accessor_create(@routing_key), '_routing'] if @routing_key
  result
end
|
#create_time_parser ⇒ Object
Once Fluentd v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 230
def create_time_parser
  if @time_key_format
    begin
      # Strptime doesn't support all formats, but for those it does it's
      # blazingly fast.
      strptime = Strptime.new(@time_key_format)
      Proc.new { |value| strptime.exec(value).to_datetime }
    rescue
      # Fall back to DateTime.strptime when Strptime cannot handle the
      # configured format (or could not be loaded).
      Proc.new { |value| DateTime.strptime(value, @time_key_format) }
    end
  else
    Proc.new { |value| DateTime.parse(value) }
  end
end
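A hedged example of the fast path, assuming the `strptime` gem is available (the timestamp and format are hypothetical); note the result responds to `strftime`, which is what the logstash index naming needs:

require 'strptime'
require 'date'

parser = Strptime.new('%Y-%m-%dT%H:%M:%S%z')
dt = parser.exec('2019-01-01T12:34:56+00:00').to_datetime
dt.strftime('%Y.%m.%d')  # => "2019.01.01"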
#detect_es_major_version ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 207
def detect_es_major_version
  @_es_info ||= client.info
  @_es_info["version"]["number"].to_i
end
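Note that `to_i` on the version string truncates at the first non-digit character, so only the major version survives:

"6.5.4".to_i  # => 6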
#expand_placeholders(metadata) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 411
def expand_placeholders(metadata)
  logstash_prefix = extract_placeholders(@logstash_prefix, metadata)
  index_name = extract_placeholders(@index_name, metadata)
  type_name = extract_placeholders(@type_name, metadata)
  return logstash_prefix, index_name, type_name
end
#flatten_record(record, prefix = []) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 396
def flatten_record(record, prefix=[])
  ret = {}
  if record.is_a? Hash
    record.each { |key, value|
      ret.merge! flatten_record(value, prefix + [key.to_s])
    }
  elsif record.is_a? Array
    # Arrays are treated as leaf values and kept intact.
    ret.merge!({prefix.join(@flatten_hashes_separator) => record})
  else
    return {prefix.join(@flatten_hashes_separator) => record}
  end
  ret
end
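A standalone sketch of the same logic with a hard-coded "." separator (the plugin reads it from the `flatten_hashes_separator` setting); the record is hypothetical:

def flatten(record, prefix = [])
  # Non-hash values (including arrays) are leaves keyed by the joined path.
  return { prefix.join('.') => record } unless record.is_a?(Hash)
  record.each_with_object({}) do |(key, value), ret|
    ret.merge!(flatten(value, prefix + [key.to_s]))
  end
end

flatten('user' => { 'name' => 'alice', 'roles' => ['admin', 'ops'] })
# => {"user.name"=>"alice", "user.roles"=>["admin", "ops"]}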
#get_connection_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 301
def get_connection_options
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if @hosts
    @hosts.split(',').map do |host_str|
      # Support the legacy hosts format: host:port,host:port,...
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host: host_str.split(':')[0],
          port: (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme.to_s
        }
      else
        # Otherwise expect a full URL, e.g. https://user:pass@host:port/path
        uri = URI(get_escaped_userinfo(host_str))
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port, scheme: @scheme.to_s}]
  end.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  {
    hosts: hosts
  }
end
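For the URL branch, Ruby's URI parsing supplies the per-host fields; a hedged example with a hypothetical host and credentials:

require 'uri'

uri = URI('https://john:secret@logs2.example.com:9443/elastic')
{ host: uri.host, port: uri.port, scheme: uri.scheme,
  user: uri.user, password: uri.password, path: uri.path }
# => {:host=>"logs2.example.com", :port=>9443, :scheme=>"https",
#     :user=>"john", :password=>"secret", :path=>"/elastic"}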
#get_escaped_userinfo(host_str) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 289
def get_escaped_userinfo(host_str)
  if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
    m["scheme"] +
      URI.encode_www_form_component(m["user"]) +
      ':' +
      URI.encode_www_form_component(m["password"]) +
      m["path"]
  else
    host_str
  end
end
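For example, credentials wrapped in %{} are form-encoded so characters such as "@" or spaces survive URI parsing (the host and credentials are hypothetical):

get_escaped_userinfo('https://%{john doe}:%{p@ss}@logs.example.com:9200')
# => "https://john+doe:p%40ss@logs.example.com:9200"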
#get_parent_of(record, path) ⇒ Object
Returns [parent, child_key] of the child described by the path array in the record's tree, or [nil, child_key] if the path doesn't exist in the record.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 538
def get_parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end
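For example, with a hypothetical nested record:

record = { 'kubernetes' => { 'labels' => { 'app' => 'web' } } }
get_parent_of(record, ['kubernetes', 'labels', 'app'])
# => [{"app"=>"web"}, "app"]
get_parent_of(record, ['kubernetes', 'missing', 'app'])
# => [nil, "app"]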
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 418
def multi_workers_ready?
  true
end
#parse_time(value, event_time, tag) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 248
def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  return Time.at(event_time).to_datetime
end
#process_message(tag, meta, header, time, record, bulk_message, extracted_values) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 453
def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
  logstash_prefix, index_name, type_name = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  if @hash_config
    record = generate_hash_id_key(record)
  end

  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = index_name
  end

  # Lower-case the target index, since Elasticsearch doesn't allow
  # upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version >= 7
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    end
  else
    if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    else
      target_type = type_name
    end
  end

  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type

  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end

  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  append_record_to_messages(@write_operation, meta, header, record, bulk_message)
end
#remove_keys(record) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 387
def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end
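A sketch of the effect, assuming a hypothetical `remove_keys_on_update password` setting; note the method works on a duplicate, so the original record is not mutated:

record = { 'id' => 1, 'password' => 'x' }
keys = ['password']                      # from remove_keys_on_update
update = record.dup
keys.each { |key| update.delete(key) }
update  # => {"id"=>1}
record  # => {"id"=>1, "password"=>"x"}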
#send_bulk(data, tag, chunk, bulk_message_count, extracted_values) ⇒ Object
Sends a bulk request, given the request body, the original tag, chunk, and bulk_message_count.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 545
def send_bulk(data, tag, chunk, bulk_message_count, extracted_values)
  retries = 0
  begin
    log.on_trace { log.trace "bulk request: #{data}" }
    response = client.bulk body: data
    log.on_trace { log.trace "bulk response: #{response}" }
    if response['errors']
      error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    end
  rescue RetryStreamError => e
    emit_tag = @retry_tag ? @retry_tag : tag
    router.emit_stream(emit_tag, e.retry_stream)
  rescue *client.transport.host_unreachable_exceptions => e
    if retries < 2
      retries += 1
      @_es = nil
      @_es_info = nil
      log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
      sleep 2**retries
      retry
    end
    raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error
    raise
  end
end
#update_body(record, op) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 374
def update_body(record, op)
  update = remove_keys(record)
  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
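For an upsert where remove_keys stripped nothing, the update body and the record coincide and `doc_as_upsert` is set; otherwise the full record rides along under `upsert`. A sketch with hypothetical values:

record = { 'id' => 1, 'password' => 'x' }
update = { 'id' => 1 }            # after remove_keys stripped 'password'
body = { 'doc' => update }
if update == record
  body['doc_as_upsert'] = true
else
  body['upsert'] = record
end
body  # => {"doc"=>{"id"=>1}, "upsert"=>{"id"=>1, "password"=>"x"}}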
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 422
def write(chunk)
  bulk_message_count = 0
  bulk_message = ''
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  extracted_values = expand_placeholders(chunk.metadata)
  @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      if process_message(tag, meta, header, time, record, bulk_message, extracted_values)
        bulk_message_count += 1
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end

  send_bulk(bulk_message, tag, chunk, bulk_message_count, extracted_values) unless bulk_message.empty?

  bulk_message.clear
end