Class: Fluent::Plugin::ElasticsearchOutput
Defined Under Namespace
Classes: ConnectionFailure, MissingIdFieldError, RetryStreamError
Constant Summary
- DEFAULT_BUFFER_TYPE = "memory"
- DEFAULT_ELASTICSEARCH_VERSION = 5
- DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
- DEFAULT_TYPE_NAME = "fluentd".freeze
Constants included from Fluent::Plugin::ElasticsearchConstants:
Fluent::Plugin::ElasticsearchConstants::BODY_DELIMITER, Fluent::Plugin::ElasticsearchConstants::CREATE_OP, Fluent::Plugin::ElasticsearchConstants::ID_FIELD, Fluent::Plugin::ElasticsearchConstants::INDEX_OP, Fluent::Plugin::ElasticsearchConstants::TIMESTAMP_FIELD, Fluent::Plugin::ElasticsearchConstants::UPDATE_OP, Fluent::Plugin::ElasticsearchConstants::UPSERT_OP
Instance Method Summary
- #append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
  Adds a record to the bulk message payload to be submitted to Elasticsearch.
- #client ⇒ Object
- #configure(conf) ⇒ Object
- #connection_options_description ⇒ Object
- #convert_compat_id_key(key) ⇒ Object
- #create_meta_config_map ⇒ Object
- #create_time_parser ⇒ Object
  Once fluent v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
- #detect_es_major_version ⇒ Object
- #expand_placeholders(metadata) ⇒ Object
- #flatten_record(record, prefix = []) ⇒ Object
- #get_connection_options ⇒ Object
- #get_escaped_userinfo(host_str) ⇒ Object
- #get_parent_of(record, path) ⇒ Object
  Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
- #initialize ⇒ ElasticsearchOutput constructor
  A new instance of ElasticsearchOutput.
- #multi_workers_ready? ⇒ Boolean
- #parse_time(value, event_time, tag) ⇒ Object
- #process_message(tag, meta, header, time, record, bulk_message, extracted_values) ⇒ Object
- #remove_keys(record) ⇒ Object
- #send_bulk(data, tag, chunk, bulk_message_count, extracted_values) ⇒ Object
  Sends a given bulk request, along with the original tag, chunk, and bulk_message_count.
- #update_body(record, op) ⇒ Object
- #write(chunk) ⇒ Object
Methods included from ElasticsearchIndexTemplate:
#get_template, #retry_install, #template_exists?, #template_install, #template_put, #templates_hash_install
Constructor Details
Returns a new instance of ElasticsearchOutput.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 117
def initialize
  super
end
Instance Method Details
#append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch. Records that do not include an `_id` field are skipped when `write_operation` is configured for `create` or `update`.
Returns `true` if the record was appended to the bulk message and `false` otherwise.
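For illustration, each appended record contributes an action line followed by a source line to the newline-delimited bulk body; the index name and document below are hypothetical:

{"index":{"_index":"logstash-2019.03.01","_type":"_doc"}}
{"message":"hello","@timestamp":"2019-03-01T12:34:56+00:00"}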
#client ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 245
def client
  @_es ||= begin
    excon_options = { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
    adapter_conf = lambda {|f| f.adapter :excon, excon_options }
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
      options: {
        reload_connections: @reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        retry_on_failure: 5,
        logger: @transport_logger,
        transport_options: {
          headers: { 'Content-Type' => @content_type.to_s },
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        }
      }), &adapter_conf)
    es = Elasticsearch::Client.new transport: transport

    begin
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})!" unless es.ping
    rescue *es.transport.host_unreachable_exceptions => e
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})! #{e.message}"
    end

    log.info "Connection opened to Elasticsearch cluster => #{connection_options_description}"
    es
  end
end
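As a rough guide, the instance variables consumed above map to plugin parameters in the fluentd configuration. A minimal sketch (host names and values are illustrative, not defaults):

<match es.**>
  @type elasticsearch
  host localhost
  port 9200
  scheme https
  ssl_verify true
  ca_file /path/to/ca.pem
  request_timeout 5s
  reload_connections true
  with_transporter_log true
</match>

The client is memoized in @_es; setting @_es to nil (as the retry paths in #send_bulk do) forces the next call to rebuild the connection.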
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 121
def configure(conf)
  compat_parameters_convert(conf, :buffer)

  super
  raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag

  @time_parser = create_time_parser

  if @remove_keys
    @remove_keys = @remove_keys.split(/\s*,\s*/)
  end

  if @target_index_key && @target_index_key.is_a?(String)
    @target_index_key = @target_index_key.split '.'
  end

  if @target_type_key && @target_type_key.is_a?(String)
    @target_type_key = @target_type_key.split '.'
  end

  if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
    @remove_keys_on_update = @remove_keys_on_update.split ','
  end

  raise Fluent::ConfigError, "'max_retry_putting_template' must be positive number." if @max_retry_putting_template < 0

  if @template_name && @template_file
    retry_install(@max_retry_putting_template) do
      template_install(@template_name, @template_file, @template_overwrite)
    end
  elsif @templates
    retry_install(@max_retry_putting_template) do
      templates_hash_install(@templates, @template_overwrite)
    end
  end

  @id_key = convert_compat_id_key(@id_key) if @id_key
  @parent_key = convert_compat_id_key(@parent_key) if @parent_key
  @routing_key = convert_compat_id_key(@routing_key) if @routing_key

  @meta_config_map = create_meta_config_map

  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end

  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end

  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end

  if @hash_config
    raise Fluent::ConfigError, "@hash_config.hash_id_key and id_key must be equal." unless @hash_config.hash_id_key == @id_key
  end

  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end

  @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
  if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
    @type_name = '_doc'.freeze
  end
end
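Note the `%{...}` handling above: credentials wrapped in `%{}` are URL-encoded, which lets special characters survive in the connection URL. A quick sketch of the effect:

require 'uri'
URI.encode_www_form_component('p@ss:word')  # => "p%40ss%3Aword"
URI.encode_www_form_component('my user')    # => "my+user"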
#connection_options_description ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 324
def connection_options_description
  get_connection_options[:hosts].map do |host_info|
    attributes = host_info.dup
    attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
    attributes.inspect
  end.join(', ')
end
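With a single host and basic auth configured, the resulting description would look something like this (values hypothetical); the password is masked before it reaches any log line:

# => {:host=>"localhost", :port=>9200, :scheme=>"http", :user=>"fluentd", :password=>"obfuscated"}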
#convert_compat_id_key(key) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 202
def convert_compat_id_key(key)
  if key.include?('.') && !key.start_with?('$[')
    key = "$.#{key}" unless key.start_with?('$.')
  end
  key
end
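A few hypothetical inputs show the compatibility conversion: dotted keys are upgraded to record_accessor syntax, while keys already in accessor syntax pass through unchanged:

convert_compat_id_key("nested.field")   # => "$.nested.field"
convert_compat_id_key("$.nested.field") # => "$.nested.field"
convert_compat_id_key("plain_key")      # => "plain_key"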
#create_meta_config_map ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 209
def create_meta_config_map
  result = []
  result << [record_accessor_create(@id_key), '_id'] if @id_key
  result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
  result << [record_accessor_create(@routing_key), '_routing'] if @routing_key
  result
end
#create_time_parser ⇒ Object
Once fluent v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 220
def create_time_parser
  if @time_key_format
    begin
      # Use a raw Strptime parser when the format is supported; it is much faster.
      strptime = Strptime.new(@time_key_format)
      Proc.new { |value| strptime.exec(value).to_datetime }
    rescue
      # Fall back to DateTime.strptime for formats Strptime cannot handle.
      Proc.new { |value| DateTime.strptime(value, @time_key_format) }
    end
  else
    Proc.new { |value| DateTime.parse(value) }
  end
end
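A sketch of the resulting parser, assuming time_key_format is "%Y-%m-%dT%H:%M:%S%z" (hypothetical configuration):

parser = create_time_parser
parser.call("2019-03-01T12:34:56+0000")
# => #<DateTime: 2019-03-01T12:34:56+00:00 ...>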
#detect_es_major_version ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 197
def detect_es_major_version
  @_es_info ||= client.info
  @_es_info["version"]["number"].to_i
end
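Only the major version is kept: `String#to_i` stops at the first non-digit, so a cluster reporting, say, version "6.5.4" yields 6:

{"version" => {"number" => "6.5.4"}}["version"]["number"].to_i # => 6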
#expand_placeholders(metadata) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 401
def expand_placeholders(metadata)
  logstash_prefix = extract_placeholders(@logstash_prefix, metadata)
  index_name = extract_placeholders(@index_name, metadata)
  type_name = extract_placeholders(@type_name, metadata)
  return logstash_prefix, index_name, type_name
end
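For example, assuming logstash_prefix "logstash", index_name "fluentd.${tag}", type_name "_doc", and a chunk tagged "app.access" (all hypothetical values), extract_placeholders resolves the embedded placeholders:

expand_placeholders(chunk.metadata)
# => ["logstash", "fluentd.app.access", "_doc"]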
#flatten_record(record, prefix = []) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 386
def flatten_record(record, prefix=[])
  ret = {}
  if record.is_a? Hash
    record.each { |key, value|
      ret.merge! flatten_record(value, prefix + [key.to_s])
    }
  elsif record.is_a? Array
    # Leave arrays intact; Elasticsearch handles them natively.
    ret.merge!({prefix.join(@flatten_hashes_separator) => record})
  else
    return {prefix.join(@flatten_hashes_separator) => record}
  end
  ret
end
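A hypothetical record shows the behavior, assuming @flatten_hashes_separator is ".": nested hashes are flattened into dotted keys, while arrays are kept as-is:

flatten_record({"user" => {"name" => "alice", "roles" => ["admin"]}})
# => {"user.name" => "alice", "user.roles" => ["admin"]}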
#get_connection_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 291
def get_connection_options
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if @hosts
    @hosts.split(',').map do |host_str|
      # Legacy hosts format: host:port,host:port,...
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host: host_str.split(':')[0],
          port: (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme
        }
      else
        # New hosts format: full URLs such as https://user:pass@host:port/path
        uri = URI(get_escaped_userinfo(host_str))
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port, scheme: @scheme}]
  end.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  {
    hosts: hosts
  }
end
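Both hosts formats can be mixed in one list. A hypothetical example, assuming the scheme defaults to "http" and the port to 9200:

# hosts es1:9200,https://user:pass@es2:9243/prefix
get_connection_options
# => { hosts: [
#      {host: "es1", port: 9200, scheme: "http"},
#      {host: "es2", port: 9243, scheme: "https", user: "user", password: "pass", path: "/prefix"}
#    ] }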
#get_escaped_userinfo(host_str) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 279
def get_escaped_userinfo(host_str)
  if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
    m["scheme"] +
      URI.encode_www_form_component(m["user"]) +
      ':' +
      URI.encode_www_form_component(m["password"]) +
      m["path"]
  else
    host_str
  end
end
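The `%{}` syntax marks userinfo that needs escaping before URI parsing; a hypothetical host string illustrates:

get_escaped_userinfo("https://%{j+hn}:%{p@ss}@host:9200")
# => "https://j%2Bhn:p%40ss@host:9200"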
#get_parent_of(record, path) ⇒ Object
Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 528
def get_parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end
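Two hypothetical lookups make the contract concrete:

record = {"a" => {"b" => {"c" => 1}}}
get_parent_of(record, ["a", "b", "c"]) # => [{"c" => 1}, "c"]
get_parent_of(record, ["a", "x", "c"]) # => [nil, "c"]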
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 408
def multi_workers_ready?
  true
end
#parse_time(value, event_time, tag) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 238
def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  return Time.at(event_time).to_datetime
end
#process_message(tag, meta, header, time, record, bulk_message, extracted_values) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 443
def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
  logstash_prefix, index_name, type_name = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  if @hash_config
    record = generate_hash_id_key(record)
  end

  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = index_name
  end

  # Elasticsearch does not allow upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version >= 7
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    end
  else
    if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    else
      target_type = type_name
    end
  end

  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type

  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end

  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  append_record_to_messages(@write_operation, meta, header, record, bulk_message)
end
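As an illustration of the index selection, assume logstash_format is enabled with logstash_prefix "logstash", logstash_dateformat "%Y.%m.%d", and an event timestamped 2019-03-01 (all values hypothetical). Against an ES 7.x cluster the record would be routed with:

# meta => {"_index" => "logstash-2019.03.01", "_type" => "_doc"}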
#remove_keys(record) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 377
def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end
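A hypothetical record, assuming remove_keys_on_update_key is configured as "remove_me": the per-record key list takes precedence over the static configuration, and the marker field itself is always dropped:

record = {"msg" => "hi", "tmp" => 1, "remove_me" => ["tmp"]}
remove_keys(record) # => {"msg" => "hi"}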
#send_bulk(data, tag, chunk, bulk_message_count, extracted_values) ⇒ Object
send_bulk sends a given bulk request, along with the original tag, chunk, and bulk_message_count.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 535
def send_bulk(data, tag, chunk, bulk_message_count, extracted_values)
  retries = 0
  begin
    log.on_trace { log.trace "bulk request: #{data}" }
    response = client.bulk body: data
    log.on_trace { log.trace "bulk response: #{response}" }
    if response['errors']
      error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    end
  rescue RetryStreamError => e
    emit_tag = @retry_tag ? @retry_tag : tag
    router.emit_stream(emit_tag, e.retry_stream)
  rescue *client.transport.host_unreachable_exceptions => e
    if retries < 2
      retries += 1
      @_es = nil
      @_es_info = nil
      log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
      sleep 2**retries
      retry
    end
    raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error
    raise
  end
end
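Note the backoff arithmetic: with at most two retries, `sleep 2**retries` waits 2 seconds after the first unreachable-host failure and 4 seconds after the second, after which ConnectionFailure is raised. Resetting @_es and @_es_info forces #client to rebuild the connection on the next attempt.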
#update_body(record, op) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 364
def update_body(record, op)
  update = remove_keys(record)
  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
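A hypothetical upsert, assuming remove_keys_on_update is configured as "password": the filtered document goes under "doc", and since it differs from the original record, the full record is kept as the "upsert" body:

record = {"name" => "alice", "password" => "s3cret"}
update_body(record, UPSERT_OP)
# => {"doc" => {"name" => "alice"},
#     "upsert" => {"name" => "alice", "password" => "s3cret"}}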
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 412
def write(chunk)
  bulk_message_count = 0
  bulk_message = ''
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  extracted_values = expand_placeholders(chunk.metadata)
  @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      if process_message(tag, meta, header, time, record, bulk_message, extracted_values)
        bulk_message_count += 1
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end

  send_bulk(bulk_message, tag, chunk, bulk_message_count, extracted_values) unless bulk_message.empty?
  bulk_message.clear
end