Class: Fluent::KafkaGroupInput

Inherits:
Input
  • Object
Includes:
Fluent::KafkaPluginUtil::SSLSettings, Fluent::KafkaPluginUtil::SaslSettings
Defined in:
lib/fluent/plugin/in_kafka_group.rb

Defined Under Namespace

Classes: ForShutdown

Constant Summary

# Pick the buffer-overflow error class for the running Fluentd API generation:
# the v0.14+ plugin API defines Fluent::Plugin::Buffer::BufferOverflowError,
# while older releases raise Fluent::BufferQueueLimitError.
BufferError =
  if defined?(Fluent::Plugin::Buffer::BufferOverflowError)
    Fluent::Plugin::Buffer::BufferOverflowError
  else
    Fluent::BufferQueueLimitError
  end
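
This alias lets #emit_events rescue the same buffer-overflow condition on both the v0.14+ plugin API and older Fluentd releases.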

Constants included from Fluent::KafkaPluginUtil::SSLSettings

Fluent::KafkaPluginUtil::SSLSettings::DummyFormatter

Instance Method Summary

Methods included from Fluent::KafkaPluginUtil::SaslSettings

included

Methods included from Fluent::KafkaPluginUtil::SSLSettings

included, #pickup_ssl_endpoint, #read_ssl_file

Constructor Details

#initialize ⇒ KafkaGroupInput

Returns a new instance of KafkaGroupInput.



# File 'lib/fluent/plugin/in_kafka_group.rb', line 85

def initialize
  super
  require 'kafka'

  @time_parser = nil
  @retry_count = 1
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 107

def configure(conf)
  super

  $log.info "Will watch for topics #{@topics} at brokers " \
            "#{@brokers} and '#{@consumer_group}' group"

  @topics = _config_to_array(@topics)

  if conf['max_wait_ms']
    log.warn "'max_wait_ms' parameter is deprecated. Use second unit 'max_wait_time' instead"
    @max_wait_time = conf['max_wait_ms'].to_i / 1000
  end

  @parser_proc = setup_parser

  @consumer_opts = {:group_id => @consumer_group}
  @consumer_opts[:session_timeout] = @session_timeout if @session_timeout
  @consumer_opts[:offset_commit_interval] = @offset_commit_interval if @offset_commit_interval
  @consumer_opts[:offset_commit_threshold] = @offset_commit_threshold if @offset_commit_threshold
  @consumer_opts[:fetcher_max_queue_size] = @fetcher_max_queue_size if @fetcher_max_queue_size

  @fetch_opts = {}
  @fetch_opts[:max_wait_time] = @max_wait_time if @max_wait_time
  @fetch_opts[:min_bytes] = @min_bytes if @min_bytes

  # Backward compatibility: honor the older use_record_time flag.
  @time_source = :record if @use_record_time

  if @time_source == :record and @time_format
    # Fluent::TimeParser is the v0.14+ API; fall back to the v0.12 parser.
    if defined?(Fluent::TimeParser)
      @time_parser = Fluent::TimeParser.new(@time_format)
    else
      @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
    end
  end
end
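
_config_to_array is a helper defined elsewhere in the plugin (not shown on this page). A minimal sketch of the behavior it is assumed to provide, turning the comma-separated topics string into an array:

# Hypothetical sketch of _config_to_array; the real helper lives in the
# plugin's shared code and may differ in details.
def _config_to_array(config)
  config_array = config.split(',').map { |k| k.strip }.reject(&:empty?)
  raise Fluent::ConfigError, "kafka_group: 'topics' is empty" if config_array.empty?
  config_array
end

_config_to_array("app.events, system.logs") #=> ["app.events", "system.logs"]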

#emit_events(tag, es) ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 296

def emit_events(tag, es)
  retries = 0
  begin
    router.emit_stream(tag, es)
  rescue BufferError
    # @consumer is cleared during shutdown; stop retrying and bail out.
    raise ForShutdown if @consumer.nil?

    # Without a retry_emit_limit, retry indefinitely at one-second intervals.
    if @retry_emit_limit.nil?
      sleep 1
      retry
    end

    if retries < @retry_emit_limit
      retries += 1
      sleep 1
      retry
    else
      raise RuntimeError, "Exceeds retry_emit_limit"
    end
  end
end
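
During shutdown, @consumer is cleared before the consumer stops (see #shutdown), so a buffer overflow at that point raises ForShutdown instead of retrying; #run rescues it and lets its loop exit.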

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/in_kafka_group.rb', line 101

def multi_workers_ready?
  true
end
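
Returning true allows the plugin to run under Fluentd's multi-worker mode. Since every worker joins the same consumer_group, Kafka balances the subscribed partitions across the workers.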

#reconnect_consumer ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 218

def reconnect_consumer
  log.warn "Stopping Consumer"
  consumer = @consumer
  @consumer = nil
  consumer.stop if consumer
  log.warn "Could not connect to broker. retry_time:#{@retry_count}. Next retry will be in #{@retry_wait_seconds} seconds"
  @retry_count += 1
  sleep @retry_wait_seconds
  @consumer = setup_consumer
  log.warn "Re-starting consumer #{Time.now}"
  @retry_count = 0
rescue => e
  log.error "unexpected error during re-starting consumer object access", :error => e.to_s
  log.error_backtrace
  # Retry recursively until the limit is exceeded, unless the limit is disabled.
  if @retry_count <= @retry_limit or @disable_retry_limit
    reconnect_consumer
  end
end
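
The retry is recursive: each attempt increments @retry_count, and on failure the method calls itself again until the count exceeds retry_limit, or indefinitely when disable_retry_limit is set, with retry_wait_seconds as the fixed delay between attempts.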

#run ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 239

def run
  while @consumer
    begin
      @consumer.each_batch(@fetch_opts) { |batch|
        es = Fluent::MultiEventStream.new
        tag = batch.topic
        tag = @add_prefix + "." + tag if @add_prefix
        tag = tag + "." + @add_suffix if @add_suffix

        batch.messages.each { |msg|
          begin
            record = @parser_proc.call(msg)
            case @time_source
            when :kafka
              record_time = Fluent::EventTime.from_time(msg.create_time)
            when :now
              record_time = Fluent::Engine.now
            when :record
              if @time_format
                record_time = @time_parser.parse(record[@record_time_key].to_s)
              else
                record_time = record[@record_time_key]
              end
            else
              log.fatal "BUG: invalid time_source: #{@time_source}"
            end
            if @kafka_message_key
              record[@kafka_message_key] = msg.key
            end
            if @add_headers
              msg.headers.each_pair { |k, v|
                record[k] = v
              }
            end
            es.add(record_time, record)
          rescue => e
            log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
            log.debug_backtrace
          end
        }

        unless es.empty?
          emit_events(tag, es)
        end
      }
    rescue ForShutdown
      # Raised by emit_events once shutdown has begun; @consumer is already
      # nil, so the surrounding while loop exits quietly.
    rescue => e
      log.error "unexpected error during consuming events from kafka. Re-fetch events.", :error => e.to_s
      log.error_backtrace
      reconnect_consumer
    end
  end
rescue => e
  log.error "unexpected error during consumer object access", :error => e.to_s
  log.error_backtrace
end
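
When time_source is :record, the event time is taken from the record itself via record_time_key. A minimal sketch of the two branches, assuming the v0.14+ Fluent::TimeParser and an illustrative field value and format:

require 'fluent/time'

# With time_format configured, the string field is parsed into an EventTime.
parser = Fluent::TimeParser.new("%Y-%m-%dT%H:%M:%S%z")
record = { "time" => "2021-06-01T12:00:00+0000" }
record_time = parser.parse(record["time"])  # => Fluent::EventTime

# Without time_format, record[@record_time_key] is used as-is, so the field
# must already hold an EventTime-compatible value (e.g. a Unix timestamp).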

#setup_consumer ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 203

def setup_consumer
  consumer = @kafka.consumer(@consumer_opts)
  @topics.each { |topic|
    # Entries wrapped in slashes (e.g. /pattern/) subscribe by regex.
    if m = /^\/(.+)\/$/.match(topic)
      topic_or_regex = Regexp.new(m[1])
      $log.info "Subscribe to topics matching the regex #{topic}"
    else
      topic_or_regex = topic
      $log.info "Subscribe to topic #{topic}"
    end
    consumer.subscribe(topic_or_regex, start_from_beginning: @start_from_beginning, max_bytes_per_partition: @max_bytes)
  }
  consumer
end
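
As a usage illustration (topic names are hypothetical): a plain entry subscribes to exactly that topic, while a slash-wrapped entry subscribes to every topic matching the pattern, so a topics value of app.events,/^metrics\..+$/ is equivalent to:

# Equivalent subscribe calls for topics "app.events,/^metrics\..+$/";
# start_from_beginning and max_bytes are shown with illustrative values.
consumer.subscribe("app.events",
                   start_from_beginning: true, max_bytes_per_partition: 1048576)
consumer.subscribe(Regexp.new("^metrics\\..+$"),
                   start_from_beginning: true, max_bytes_per_partition: 1048576)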

#setup_parser ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 143

def setup_parser
  case @format
  when 'json'
    begin
      # Prefer Oj for speed; fall back to Yajl if Oj is not installed.
      require 'oj'
      Oj.default_options = Fluent::DEFAULT_OJ_OPTIONS
      Proc.new { |msg| Oj.load(msg.value) }
    rescue LoadError
      require 'yajl'
      Proc.new { |msg| Yajl::Parser.parse(msg.value) }
    end
  when 'ltsv'
    require 'ltsv'
    Proc.new { |msg| LTSV.parse(msg.value, {:symbolize_keys => false}).first }
  when 'msgpack'
    require 'msgpack'
    Proc.new { |msg| MessagePack.unpack(msg.value) }
  when 'text'
    Proc.new { |msg| {@message_key => msg.value} }
  end
end
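
Each branch returns a Proc that maps a raw Kafka message to a record hash. For instance, the 'json' branch with Oj available (the message stub here is illustrative):

require 'oj'
require 'ostruct'

parser = Proc.new { |msg| Oj.load(msg.value) }
msg = OpenStruct.new(value: '{"user":"alice","level":"info"}')
parser.call(msg)  #=> {"user"=>"alice", "level"=>"info"}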

#shutdown ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 190

def shutdown
  # Strictly speaking, this nil assignment should be guarded by a mutex
  # under multithreaded access, but contention here is very low, so we skip
  # the lock for now. If a problem surfaces, we will add a guard around
  # the consumer.
  consumer = @consumer
  @consumer = nil
  consumer.stop

  @thread.join
  @kafka.close
  super
end
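
The ordering matters: clearing @consumer first makes #run's while-loop condition false, consumer.stop unblocks the in-flight fetch, @thread.join waits for the run loop to finish, and only then is the Kafka connection closed.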

#start ⇒ Object



# File 'lib/fluent/plugin/in_kafka_group.rb', line 165

def start
  super

  logger = @get_kafka_client_log ? log : nil
  # SASL/SCRAM authentication: used when a mechanism and credentials are set.
  if @scram_mechanism != nil && @username != nil && @password != nil
    @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                       ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
                       ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
                       sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  # SASL/PLAIN authentication: username/password without a SCRAM mechanism.
  elsif @username != nil && @password != nil
    @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                       ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
                       ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password,
                       sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  else
    # No SASL credentials: pass the GSSAPI (Kerberos) principal/keytab,
    # which are nil unless explicitly configured.
    @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                       ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
                       ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
                       ssl_verify_hostname: @ssl_verify_hostname)
  end

  @consumer = setup_consumer
  @thread = Thread.new(&method(:run))
end