Class: Fluent::ElasticsearchOutput
Defined Under Namespace
Classes: ConnectionFailure, MissingIdFieldError, RetryStreamError
Constant Summary
DEFAULT_RELOAD_AFTER = -1

Constants included from Fluent::ElasticsearchConstants:
Fluent::ElasticsearchConstants::BODY_DELIMITER, Fluent::ElasticsearchConstants::CREATE_OP, Fluent::ElasticsearchConstants::ID_FIELD, Fluent::ElasticsearchConstants::INDEX_OP, Fluent::ElasticsearchConstants::TIMESTAMP_FIELD, Fluent::ElasticsearchConstants::UPDATE_OP, Fluent::ElasticsearchConstants::UPSERT_OP
Instance Method Summary
- #append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
  Adds a record to the bulk message payload to be submitted to Elasticsearch.
- #client ⇒ Object
- #configure(conf) ⇒ Object
- #connection_options_description ⇒ Object
- #create_meta_config_map ⇒ Object
- #create_time_parser ⇒ Object
  Once Fluent v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
- #flatten_record(record, prefix = []) ⇒ Object
- #get_connection_options ⇒ Object
- #get_escaped_userinfo(host_str) ⇒ Object
- #get_parent_of(record, path) ⇒ Object
  Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
- #initialize ⇒ ElasticsearchOutput (constructor)
  A new instance of ElasticsearchOutput.
- #parse_time(value, event_time, tag) ⇒ Object
- #process_message(tag, meta, header, time, record, bulk_message) ⇒ Object
- #remove_keys(record) ⇒ Object
- #send_bulk(data, tag, chunk, bulk_message_count) ⇒ Object
  Sends a bulk request to Elasticsearch, given the serialized bulk payload, the original tag, the chunk, and the bulk message count.
- #update_body(record, op) ⇒ Object
- #write_objects(tag, chunk) ⇒ Object

Methods included from Fluent::ElasticsearchIndexTemplate:
#get_template, #template_exists?, #template_install, #template_put, #templates_hash_install
Constructor Details
Returns a new instance of ElasticsearchOutput.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 97

def initialize
  super
end
Instance Method Details
#append_record_to_messages(op, meta, header, record, msgs) ⇒ Object
append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch. Records that do not include an `_id` field are skipped when `write_operation` is configured as `create` or `update`.
Returns `true` if the record was appended to the bulk message and `false` otherwise.
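For illustration, a minimal sketch of the two newline-delimited JSON lines an 'index' operation appends; the meta and record values here are hypothetical, and BODY_DELIMITER is "\n" in Fluent::ElasticsearchConstants:

require 'yajl'

meta   = { "_index" => "logstash-2018.01.01", "_type" => "fluentd" }
record = { "message" => "hello" }

msgs = []
msgs << Yajl.dump("index" => meta) << "\n"   # action/metadata line
msgs << Yajl.dump(record) << "\n"            # document source line
msgs.join
# => '{"index":{"_index":"logstash-2018.01.01","_type":"fluentd"}}' + "\n" +
#    '{"message":"hello"}' + "\n"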
#client ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 199

def client
  @_es ||= begin
    excon_options = { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
    adapter_conf = lambda { |f| f.adapter :excon, excon_options }
    local_reload_connections = @reload_connections
    if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
      local_reload_connections = @reload_after
    end
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
      options: {
        reload_connections: local_reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        retry_on_failure: 5,
        logger: @transport_logger,
        transport_options: {
          headers: { 'Content-Type' => 'application/json' },
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        },
        sniffer_class: @sniffer_class,
      }), &adapter_conf)
    es = Elasticsearch::Client.new transport: transport
    begin
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})!" unless es.ping
    rescue *es.transport.host_unreachable_exceptions => e
      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})! #{e.message}"
    end
    log.info "Connection opened to Elasticsearch cluster => #{connection_options_description}"
    es
  end
end
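Note how reload_after is translated above: elasticsearch-transport accepts either a boolean or an integer for its reload_connections option, and an integer means "re-sniff the host list every N requests". A small sketch of the translation with hypothetical values:

reload_connections = true
reload_after = 100            # hypothetical configuration value

local = reload_connections
local = reload_after if local && reload_after > -1   # DEFAULT_RELOAD_AFTER is -1
local # => 100, i.e. reload the connection list every 100 requests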
#configure(conf) ⇒ Object

# File 'lib/fluent/plugin/out_elasticsearch.rb', line 101

def configure(conf)
  super
  @time_parser = create_time_parser
  if @remove_keys
    @remove_keys = @remove_keys.split(/\s*,\s*/)
  end
  if @target_index_key && @target_index_key.is_a?(String)
    @target_index_key = @target_index_key.split '.'
  end
  if @target_type_key && @target_type_key.is_a?(String)
    @target_type_key = @target_type_key.split '.'
  end
  if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
    @remove_keys_on_update = @remove_keys_on_update.split ','
  end
  if @template_name && @template_file
    template_install(@template_name, @template_file, @template_overwrite)
  elsif @templates
    templates_hash_install(@templates, @template_overwrite)
  end
  @meta_config_map = create_meta_config_map
  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end
  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end
  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end
  if @hash_config
    raise Fluent::ConfigError, "@hash_config.hash_id_key and id_key must be equal." unless @hash_config.hash_id_key == @id_key
  end
  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end
  @sniffer_class = nil
  begin
    @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
  rescue Exception => ex
    raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
  end
end
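The %{...} handling above lets credentials containing reserved characters be given in URL-encoded form. A sketch of what the escaping does, with a hypothetical user value:

require 'uri'

user = "%{j+hn}"              # hypothetical value from the configuration
if m = user.match(/%{(?<user>.*)}/)
  user = URI.encode_www_form_component(m["user"])
end
user # => "j%2Bhn"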
#connection_options_description ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 283

def connection_options_description
  get_connection_options[:hosts].map do |host_info|
    attributes = host_info.dup
    attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
    attributes.inspect
  end.join(', ')
end
#create_meta_config_map ⇒ Object

# File 'lib/fluent/plugin/out_elasticsearch.rb', line 163

def create_meta_config_map
  result = []
  result << [@id_key, '_id'] if @id_key
  result << [@parent_key, '_parent'] if @parent_key
  result << [@routing_key, '_routing'] if @routing_key
  result
end
#create_time_parser ⇒ Object
Once Fluent v0.14 is released we might be able to use Fluent::Parser::TimeParser, but it doesn't quite do what we want: it gives [sec, nsec], whereas we want something we can call `strftime` on.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 174

def create_time_parser
  if @time_key_format
    begin
      # Prefer the native strptime gem when it supports the format;
      # it is much faster than DateTime.strptime.
      strptime = Strptime.new(@time_key_format)
      Proc.new { |value| strptime.exec(value).to_datetime }
    rescue
      # Fall back to DateTime if Strptime rejects the format.
      Proc.new { |value| DateTime.strptime(value, @time_key_format) }
    end
  else
    Proc.new { |value| DateTime.parse(value) }
  end
end
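A sketch of the two parse paths the returned Proc can take, with a hypothetical format and value (assuming the strptime gem is available for the fast path):

require 'strptime'
require 'date'

format = '%Y-%m-%dT%H:%M:%S%z'            # hypothetical time_key_format
value  = '2018-01-01T12:00:00+00:00'

Strptime.new(format).exec(value).to_datetime  # fast path
DateTime.strptime(value, format)              # fallback path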
#flatten_record(record, prefix = []) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 345

def flatten_record(record, prefix=[])
  ret = {}
  if record.is_a? Hash
    record.each { |key, value|
      ret.merge! flatten_record(value, prefix + [key.to_s])
    }
  elsif record.is_a? Array
    ret.merge!({prefix.join(@flatten_hashes_separator) => record})
  else
    return {prefix.join(@flatten_hashes_separator) => record}
  end
  ret
end
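Illustrative input and output, assuming flatten_hashes_separator is "." (note that arrays are kept intact under the flattened key rather than flattened element by element):

flatten_record({ "log" => { "level" => "info", "tags" => ["a", "b"] } })
# => { "log.level" => "info", "log.tags" => ["a", "b"] }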
#get_connection_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 250

def get_connection_options
  raise "`password` must be present if `user` is present" if @user && !@password
  hosts = if @hosts
    @hosts.split(',').map do |host_str|
      # Legacy "host:port" entries (no scheme, no userinfo).
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host: host_str.split(':')[0],
          port: (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme.to_s
        }
      else
        # Full URL entries, e.g. https://user:pass@host:port/path.
        uri = URI(get_escaped_userinfo(host_str))
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port, scheme: @scheme.to_s}]
  end.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end
  {
    hosts: hosts
  }
end
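Hypothetical hosts values and the entries they produce, assuming the defaults of scheme "http" and port 9200:

# hosts "es1:9200,es2"
#   => [{host: "es1", port: 9200, scheme: "http"},
#       {host: "es2", port: 9200, scheme: "http"}]
#
# hosts "https://john:secret@es3:9200/sub"
#   => [{host: "es3", port: 9200, scheme: "https",
#        user: "john", password: "secret", path: "/sub"}]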
#get_escaped_userinfo(host_str) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 238

def get_escaped_userinfo(host_str)
  if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
    m["scheme"] +
      URI.encode_www_form_component(m["user"]) +
      ':' +
      URI.encode_www_form_component(m["password"]) +
      m["path"]
  else
    host_str
  end
end
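A sketch with a hypothetical host string whose userinfo is wrapped in %{...}:

get_escaped_userinfo("https://%{j+hn}:%{s:cret}@host.example.com:9200/")
# => "https://j%2Bhn:s%3Acret@host.example.com:9200/"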
#get_parent_of(record, path) ⇒ Object
Returns [parent, child_key] of the child described by the path array in the record's tree; returns [nil, child_key] if the path doesn't exist in the record.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 455

def get_parent_of(record, path)
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end
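Illustrative calls against a hypothetical nested record:

record = { "kubernetes" => { "labels" => { "app" => "web" } } }

get_parent_of(record, ["kubernetes", "labels", "app"])
# => [{ "app" => "web" }, "app"]

get_parent_of(record, ["missing", "key"])
# => [nil, "key"]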
#parse_time(value, event_time, tag) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 192

def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  return Time.at(event_time).to_datetime
end
#process_message(tag, meta, header, time, record, bulk_message) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 386

def process_message(tag, meta, header, time, record, bulk_message)
  if @flatten_hashes
    record = flatten_record(record)
  end
  if @hash_config
    record = generate_hash_id_key(record)
  end
  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = rts unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end
  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{@logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = @index_name
  end
  # Elasticsearch does not allow upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end
  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
  else
    target_type = @type_name
  end
  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type
  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end
  @meta_config_map.each do |record_key, meta_key|
    meta[meta_key] = record[record_key] if record[record_key]
  end
  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end
  append_record_to_messages(@write_operation, meta, header, record, bulk_message)
end
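For illustration, the meta hash this method builds for a record under logstash_format, assuming the defaults of logstash_prefix "logstash", separator "-", dateformat "%Y.%m.%d", and type_name "fluentd":

# event time 2018-01-01T12:00:00Z
meta
# => { "_index" => "logstash-2018.01.01", "_type" => "fluentd" }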
#remove_keys(record) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 336

def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  record = record.dup
  keys.each { |key| record.delete(key) }
  record
end
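Illustrative behaviour, assuming remove_keys_on_update_key is configured as "_removals" (the method deletes that key from the passed record in place, then removes the listed keys from a copy):

remove_keys({ "a" => 1, "b" => 2, "_removals" => ["b"] })
# => { "a" => 1 }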
#send_bulk(data, tag, chunk, bulk_message_count) ⇒ Object
Sends a bulk request to Elasticsearch, given the serialized bulk payload, the original tag, the chunk, and the bulk message count.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 462

def send_bulk(data, tag, chunk, bulk_message_count)
  retries = 0
  begin
    log.on_trace { log.trace "bulk request: #{data}" }
    response = client.bulk body: data
    log.on_trace { log.trace "bulk response: #{response}" }
    if response['errors']
      error = Fluent::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count)
    end
  rescue RetryStreamError => e
    emit_tag = @retry_tag ? @retry_tag : tag
    router.emit_stream(emit_tag, e.retry_stream)
  rescue *client.transport.host_unreachable_exceptions => e
    if retries < 2
      retries += 1
      @_es = nil
      log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
      sleep 2**retries
      retry
    end
    raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    raise
  end
end
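Note that the host-unreachable path above retries at most twice with exponential backoff (sleeping 2 and then 4 seconds) before raising ConnectionFailure, and that it discards the cached client (@_es = nil) so the next attempt rebuilds the connection.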
#update_body(record, op) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 323

def update_body(record, op)
  update = remove_keys(record)
  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
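Illustrative bodies for the upsert path, assuming remove_keys_on_update_key is "_removals" and UPSERT_OP is "upsert". When nothing is removed, the update doc doubles as the upsert doc; otherwise the full record is sent separately:

update_body({ "a" => 1 }, "upsert")
# => { "doc" => { "a" => 1 }, "doc_as_upsert" => true }

update_body({ "a" => 1, "_removals" => ["a"] }, "upsert")
# => { "doc" => {}, "upsert" => { "a" => 1 } }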
#write_objects(tag, chunk) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 360

def write_objects(tag, chunk)
  bulk_message_count = 0
  bulk_message = ''
  header = {}
  meta = {}
  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      if process_message(tag, meta, header, time, record, bulk_message)
        bulk_message_count += 1
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because it's missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end
  send_bulk(bulk_message, tag, chunk, bulk_message_count) unless bulk_message.empty?
  bulk_message.clear
end
|