Class: Fluent::KafkaOutputBuffered

Inherits:
BufferedOutput < Object
Includes:
Fluent::KafkaPluginUtil::SSLSettings, Fluent::KafkaPluginUtil::SaslSettings
Defined in:
lib/fluent/plugin/out_kafka_buffered.rb

Constant Summary

Constants included from Fluent::KafkaPluginUtil::SSLSettings

Fluent::KafkaPluginUtil::SSLSettings::DummyFormatter

Instance Attribute Summary

Instance Method Summary

Methods included from Fluent::KafkaPluginUtil::SaslSettings

included

Methods included from Fluent::KafkaPluginUtil::SSLSettings

included, #pickup_ssl_endpoint, #read_ssl_file

Constructor Details

#initialize ⇒ KafkaOutputBuffered

Returns a new instance of KafkaOutputBuffered.



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 101

def initialize
  super

  require 'kafka'
  require 'fluent/plugin/kafka_producer_ext'

  @kafka = nil
  @producers = {}
  @producers_mutex = Mutex.new
  @field_separator = nil
end
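
For orientation, a minimal sketch of exercising this output through the fluentd v0.12 test harness (the tag, broker address, and configuration values here are illustrative placeholders, not part of this class):

require 'fluent/test'
require 'fluent/plugin/out_kafka_buffered'

Fluent::Test.setup
driver = Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutputBuffered, 'test.tag')
driver.configure("brokers localhost:9092\noutput_data_type json")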

Instance Attribute Details

#field_separator ⇒ Object

Returns the value of attribute field_separator.



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 95

def field_separator
  @field_separator
end

#output_data_type ⇒ Object

Returns the value of attribute output_data_type.



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 94

def output_data_type
  @output_data_type
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 165

def configure(conf)
  super

  log.warn "Support of fluentd v0.12 has ended. Use kafka2 instead. kafka_buffered will be an alias of kafka2"

  if @zookeeper
    require 'zookeeper'
  else
    @seed_brokers = @brokers.split(",")
    log.info "brokers has been set directly: #{@seed_brokers}"
  end

  if conf['ack_timeout_ms']
    log.warn "'ack_timeout_ms' parameter is deprecated. Use second unit 'ack_timeout' instead"
    @ack_timeout = conf['ack_timeout_ms'].to_i / 1000
  end

  @f_separator = case @field_separator
                 when /SPACE/i then ' '
                 when /COMMA/i then ','
                 when /SOH/i then "\x01"
                 else "\t"
                 end

  @formatter_proc = setup_formatter(conf)

  @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks, idempotent: @idempotent}
  @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
  @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec

  if @discard_kafka_delivery_failed
    log.warn "'discard_kafka_delivery_failed' option discards events which cause delivery failure, e.g. invalid topic or something."
    log.warn "If this is unexpected, you need to check your configuration or data."
  end

  if @active_support_notification_regex
    require 'active_support/notifications'
    require 'active_support/core_ext/hash/keys'
    ActiveSupport::Notifications.subscribe(Regexp.new(@active_support_notification_regex)) do |*args|
      event = ActiveSupport::Notifications::Event.new(*args)
      message = event.payload.respond_to?(:stringify_keys) ? event.payload.stringify_keys : event.payload
      @router.emit("fluent_kafka_stats.#{event.name}", Time.now.to_i, message)
    end
  end

  @monitoring_list.each { |m|
    require "kafka/#{m}"
    log.info "#{m} monitoring started"
  }
end
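
As a worked example of the deprecated-parameter handling above, an ack_timeout_ms of 5000 becomes a 5 second ack_timeout (the value is illustrative):

conf = { 'ack_timeout_ms' => '5000' }
ack_timeout = conf['ack_timeout_ms'].to_i / 1000  # => 5 (seconds)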

#deliver_messages(producer, tag) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 287

def deliver_messages(producer, tag)
  if @discard_kafka_delivery_failed
    begin
      producer.deliver_messages
    rescue Kafka::DeliveryFailed => e
      log.warn "DeliveryFailed occurred. Discard broken event:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
      producer.clear_buffer
    end
  else
    producer.deliver_messages
  end
end

#emit(tag, es, chain) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 227

def emit(tag, es, chain)
  # Pass the tag as the buffer chunk key so #write can recover it via chunk.key.
  super(tag, es, chain, tag)
end

#format_stream(tag, es) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 231

def format_stream(tag, es)
  es.to_msgpack_stream
end

#get_producer ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 244

def get_producer
  @producers_mutex.synchronize {
    producer = @producers[Thread.current.object_id]
    unless producer
      producer = @kafka.producer(**@producer_opts)
      @producers[Thread.current.object_id] = producer
    end
    producer
  }
end
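
The method above caches one producer per flush thread, keyed by Thread.current.object_id, so producers are never shared across threads. A minimal, Kafka-free sketch of the same pattern (PerThreadCache is a hypothetical stand-in, not part of this plugin):

class PerThreadCache
  def initialize(&factory)
    @cache = {}
    @mutex = Mutex.new
    @factory = factory
  end

  def get
    # One cached object per thread; the mutex guards the shared Hash.
    @mutex.synchronize { @cache[Thread.current.object_id] ||= @factory.call }
  end
end

cache = PerThreadCache.new { Object.new }
cache.get.equal?(cache.get)  # => true within the same thread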

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 113

def multi_workers_ready?
  true
end

#refresh_client(raise_error = true) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 117

def refresh_client(raise_error = true)
  if @zookeeper
    @seed_brokers = []
    z = Zookeeper.new(@zookeeper)
    z.get_children(:path => @zookeeper_path)[:children].each do |id|
      broker = Yajl.load(z.get(:path => @zookeeper_path + "/#{id}")[:data])
      if @ssl_client_cert
        @seed_brokers.push(pickup_ssl_endpoint(broker))
      else
        @seed_brokers.push("#{broker['host']}:#{broker['port']}")
      end
    end
    z.close
    log.info "brokers has been refreshed via Zookeeper: #{@seed_brokers}"
  end
  begin
    if @seed_brokers.length > 0
      logger = @get_kafka_client_log ? log : nil
      if @scram_mechanism != nil && @username != nil && @password != nil
        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                           sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl,
                           ssl_verify_hostname: @ssl_verify_hostname,
                           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
      elsif @username != nil && @password != nil
        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                           sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
                           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
      else
        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl,  ssl_verify_hostname: @ssl_verify_hostname,
                           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
      end
      log.info "initialized kafka producer: #{@client_id}"
    else
      log.warn "No brokers found on Zookeeper"
    end
  rescue Exception => e
    if raise_error # During startup, error should be reported to engine and stop its phase for safety.
      raise e
    else
      log.error e
    end
  end
end
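
When zookeeper is set, each child of @zookeeper_path (conventionally /brokers/ids) holds a JSON payload that the broker registered for itself; the loop above extracts host and port from it. A typical payload, trimmed to the fields used here (the exact shape is an assumption and varies by Kafka version):

require 'yajl'

data = '{"host":"kafka-1","port":9092,"endpoints":["PLAINTEXT://kafka-1:9092"]}'
broker = Yajl.load(data)
"#{broker['host']}:#{broker['port']}"  # => "kafka-1:9092"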

#setup_formatter(conf) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 255

def setup_formatter(conf)
  if @output_data_type == 'json'
    begin
      require 'oj'
      Oj.default_options = Fluent::DEFAULT_OJ_OPTIONS
      Proc.new { |tag, time, record| Oj.dump(record) }
    rescue LoadError
      require 'yajl'
      Proc.new { |tag, time, record| Yajl::Encoder.encode(record) }
    end
  elsif @output_data_type == 'ltsv'
    require 'ltsv'
    Proc.new { |tag, time, record| LTSV.dump(record) }
  elsif @output_data_type == 'msgpack'
    require 'msgpack'
    Proc.new { |tag, time, record| record.to_msgpack }
  elsif @output_data_type =~ /^attr:(.*)$/
    @custom_attributes = $1.split(',').map(&:strip).reject(&:empty?)
    @custom_attributes.unshift('time') if @output_include_time
    @custom_attributes.unshift('tag') if @output_include_tag
    Proc.new { |tag, time, record|
      @custom_attributes.map { |attr|
        record[attr].nil? ? '' : record[attr].to_s
      }.join(@f_separator)
    }
  else
    @formatter = Fluent::Plugin.new_formatter(@output_data_type)
    @formatter.configure(conf)
    @formatter.method(:format)
  end
end
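
As a worked example of the attr: branch: with output_data_type attr:host,message and the default tab separator, each record is flattened to the named fields joined by @f_separator (the sample record is illustrative):

record = { 'host' => 'web01', 'message' => 'hello' }
attrs  = ['host', 'message']
attrs.map { |a| record[a].nil? ? '' : record[a].to_s }.join("\t")
# => "web01\thello"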

#shutdown ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 221

def shutdown
  super
  shutdown_producers
  @kafka = nil
end

#shutdown_producers ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 235

def shutdown_producers
  @producers_mutex.synchronize {
    @producers.each { |key, producer|
      producer.shutdown
    }
    @producers = {}
  }
end

#start ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 216

def start
  super
  refresh_client
end

#write(chunk) ⇒ Object



# File 'lib/fluent/plugin/out_kafka_buffered.rb', line 300

def write(chunk)
  tag = chunk.key
  def_topic = @default_topic || tag
  producer = get_producer

  records_by_topic = {}
  bytes_by_topic = {}
  messages = 0
  messages_bytes = 0
  record_buf = nil
  record_buf_bytes = nil

  begin
    chunk.msgpack_each { |time, record|
      begin
        if @output_include_time
          if @time_format
            record['time'.freeze] = Time.at(time).strftime(@time_format)
          else
            record['time'.freeze] = time
          end
        end

        record['tag'] = tag if @output_include_tag
        topic = (@exclude_topic_key ? record.delete(@topic_key) : record[@topic_key]) || def_topic
        partition_key = (@exclude_partition_key ? record.delete(@partition_key_key) : record[@partition_key_key]) || @default_partition_key
        partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
        message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key

        records_by_topic[topic] ||= 0
        bytes_by_topic[topic] ||= 0

        record_buf = @formatter_proc.call(tag, time, record)
        record_buf_bytes = record_buf.bytesize
        if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
          log.warn "record size exceeds max_send_limit_bytes. Skip event:", :time => time, :record_size => record_buf_bytes
          log.debug "Skipped event:", :record => record
          next
        end
      rescue StandardError => e
        log.warn "unexpected error during format record. Skip broken event:", :error => e.to_s, :error_class => e.class.to_s, :time => time, :record => record
        next
      end

      if (messages > 0) and (messages_bytes + record_buf_bytes > @kafka_agg_max_bytes) or (@kafka_agg_max_messages && messages >= @kafka_agg_max_messages)
        log.debug { "#{messages} messages send because reaches the limit of batch transmission." }
        deliver_messages(producer, tag)
        messages = 0
        messages_bytes = 0
      end
      log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
      messages += 1
      producer.produce_for_buffered(record_buf, topic: topic, key: message_key, partition_key: partition_key, partition: partition,
                                    create_time: @use_event_time ? Time.at(time) : Time.now)
      messages_bytes += record_buf_bytes

      records_by_topic[topic] += 1
      bytes_by_topic[topic] += record_buf_bytes
    }
    if messages > 0
      log.debug { "#{messages} messages send." }
      deliver_messages(producer, tag)
    end
    log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
  end
rescue Exception => e
  log.warn "Send exception occurred: #{e}"
  log.warn "Exception Backtrace : #{e.backtrace.join("\n")}"
  # For safety, refresh client and its producers
  shutdown_producers
  refresh_client(false)
# Raise exception to retry sending messages
  raise e
end
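
The flush condition inside the loop above, restated on its own: a pending batch is delivered before appending a record that would push it past kafka_agg_max_bytes, or once kafka_agg_max_messages (when set) is reached. With illustrative values:

kafka_agg_max_bytes    = 4 * 1024
kafka_agg_max_messages = nil   # unset by default
messages         = 10
messages_bytes   = 4000
record_buf_bytes = 200

flush = (messages > 0) && (messages_bytes + record_buf_bytes > kafka_agg_max_bytes) ||
        (kafka_agg_max_messages && messages >= kafka_agg_max_messages)
# => true, because 4000 + 200 > 4096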