Class: Fluent::KafkaOutput

Inherits:
Output < Object
Includes:
Fluent::KafkaPluginUtil::SSLSettings
Defined in:
lib/fluent/plugin/out_kafka.rb

Instance Attribute Summary

Instance Method Summary

Methods included from Fluent::KafkaPluginUtil::SSLSettings

included, #read_ssl_file

Constructor Details

#initialize ⇒ KafkaOutput



# File 'lib/fluent/plugin/out_kafka.rb', line 70

def initialize
  super

  require 'kafka'

  @kafka = nil
end

Instance Attribute Details

#field_separator ⇒ Object

Returns the value of attribute field_separator.



# File 'lib/fluent/plugin/out_kafka.rb', line 64

def field_separator
  @field_separator
end

#output_data_type ⇒ Object

Returns the value of attribute output_data_type.



# File 'lib/fluent/plugin/out_kafka.rb', line 63

def output_data_type
  @output_data_type
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 102

def configure(conf)
  super

  if @zookeeper
    require 'zookeeper'
  else
    @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
    log.info "brokers has been set directly: #{@seed_brokers}"
  end

  if conf['ack_timeout_ms']
    log.warn "'ack_timeout_ms' parameter is deprecated. Use second unit 'ack_timeout' instead"
    @ack_timeout = conf['ack_timeout_ms'].to_i / 1000
  end

  @f_separator = case @field_separator
                 when /SPACE/i then ' '
                 when /COMMA/i then ','
                 when /SOH/i then "\x01"
                 else "\t"
                 end

  @formatter_proc = setup_formatter(conf)

  @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks}
  @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
  @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
end
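
The following standalone snippet (not part of the plugin, values hypothetical) illustrates two conversions performed above: splitting a comma-separated brokers parameter into seed brokers, and translating the deprecated ack_timeout_ms setting into seconds.

brokers = 'kafka1:9092,kafka2:9092'                      # hypothetical `brokers` setting
seed_brokers = brokers.match(",").nil? ? [brokers] : brokers.split(",")
# => ["kafka1:9092", "kafka2:9092"]

ack_timeout_ms = '1500'                                   # hypothetical deprecated millisecond value
ack_timeout = ack_timeout_ms.to_i / 1000                  # integer division => 1 (second)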

#emit(tag, es, chain) ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 189

def emit(tag, es, chain)
  begin
    chain.next

    # out_kafka is mainly for testing so don't need the performance unlike out_kafka_buffered.
    producer = @kafka.producer(@producer_opts)

    es.each do |time, record|
      record = record.select{|key, value| !key.nil? && !key.empty?}
      if @output_include_time
        if @time_format
          record['time'] = Time.at(time).strftime(@time_format)
        else
          record['time'] = time
        end
      end
      record['tag'] = tag if @output_include_tag
      partition_key = (@exclude_partition_key ? record.delete('partition_key') : record['partition_key']) || @default_partition_key
      partition = (@exclude_partition ? record.delete('partition'.freeze) : record['partition'.freeze]) || @default_partition
      message_key = (@exclude_message_key ? record.delete('message_key') : record['message_key']) || @default_message_key

      value = @formatter_proc.call(tag, time, record)
      topic = (@exclude_topic_key ? record.delete('topic') : record['topic']) || @default_topic || @topic_name

      log.on_trace { log.trace("message will be sent to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{value}.") }
      producer.produce(value, topic: topic, key: message_key, partition: partition, partition_key: partition_key)
    end

    producer.deliver_messages
    producer.shutdown
  rescue Exception => e
    log.warn "Send exception occurred: #{e}"
    producer.shutdown if producer
    refresh_client
    raise e
  end
end
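
A minimal sketch (plain Ruby, no Kafka client, record contents hypothetical) of the per-record key handling above: when the corresponding exclude_* option is enabled the field is removed from the record before formatting, otherwise it is only read, and the configured default applies when the field is absent.

record = { 'message' => 'hello', 'partition_key' => 'user-42' }   # hypothetical event record
exclude_partition_key = true
default_partition_key = nil

partition_key = (exclude_partition_key ? record.delete('partition_key') : record['partition_key']) || default_partition_key
# partition_key => "user-42"
# record        => {"message"=>"hello"}   (the key no longer reaches the formatter)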

#refresh_client ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 78

def refresh_client
  if @zookeeper
    @seed_brokers = []
    z = Zookeeper.new(@zookeeper)
    z.get_children(:path => @zookeeper_path)[:children].each do |id|
      broker = Yajl.load(z.get(:path => @zookeeper_path + "/#{id}")[:data])
      @seed_brokers.push("#{broker['host']}:#{broker['port']}")
    end
    z.close
    log.info "brokers has been refreshed via Zookeeper: #{@seed_brokers}"
  end
  begin
    if @seed_brokers.length > 0
      @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
      log.info "initialized kafka producer: #{@client_id}"
    else
      log.warn "No brokers found on Zookeeper"
    end
  rescue Exception => e
    log.error e
  end
end
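
As a rough illustration (the JSON payload below is a representative broker registration, not taken from the plugin), this is how a node read from ZooKeeper under zookeeper_path is reduced to a "host:port" seed entry:

require 'yajl'

data   = '{"host":"kafka1.example.com","port":9092,"version":4}'  # hypothetical znode data
broker = Yajl.load(data)
seed   = "#{broker['host']}:#{broker['port']}"
# => "kafka1.example.com:9092"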

#setup_formatter(conf) ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 141

def setup_formatter(conf)
  if @output_data_type == 'json'
    require 'yajl'
    Proc.new { |tag, time, record| Yajl::Encoder.encode(record) }
  elsif @output_data_type == 'avro'
    require "avro_turf"
    require 'avro_turf/messaging'
    require "avro/builder"
    Proc.new do |tag, time, record|
      record = record.select{|key, value| !key.nil? && !key.empty?}.map{|k, v| [k.tr('[]-', '_').delete('$'), ((!v.is_a?(Fixnum) && !v.is_a?(Float)) ? v.to_s.force_encoding("UTF-8") : v)]}.to_h
      timestamp = Time.new
      record['enchilada_time_with_format'] = timestamp.strftime("%Y-%m-%dT%H:%M:%S.%LZ")

      fields = record.map{|key, value| {'name' => key, 'type' => (value.is_a?(Fixnum) ? 'int' : (value.is_a?(Float) ? 'float' : 'string'))}}
      record['enchilada_timestamp'] = timestamp.strftime('%s%3N').to_i
      fields << {"name" => "enchilada_timestamp", "type" => "long"}
      @topic_name = schema_name = "#{tag.to_s.tr('.$:', '_')}_#{Digest::MD5.new.hexdigest(fields.to_s)[0..5]}"
      schema_json = {
          "type": "record",
          "name": schema_name,
          "fields": fields
      }.to_json
      schema = Avro::Schema.parse(schema_json)
      avro = AvroTurf::Messaging.new(registry_url: @schema_registry)
      avro.encode(record, schema: schema, subject: "#{schema_name}-value")
    end
  elsif @output_data_type == 'ltsv'
    require 'ltsv'
    Proc.new { |tag, time, record| LTSV.dump(record) }
  elsif @output_data_type == 'msgpack'
    require 'msgpack'
    Proc.new { |tag, time, record| record.to_msgpack }
  elsif @output_data_type =~ /^attr:(.*)$/
    @custom_attributes = $1.split(',').map(&:strip).reject(&:empty?)
    @custom_attributes.unshift('time') if @output_include_time
    @custom_attributes.unshift('tag') if @output_include_tag
    Proc.new { |tag, time, record|
      @custom_attributes.map { |attr|
        record[attr].nil? ? '' : record[attr].to_s
      }.join(@f_separator)
    }
  else
    @formatter = Fluent::Plugin.new_formatter(@output_data_type)
    @formatter.configure(conf)
    @formatter.method(:format)
  end
end
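
A small standalone sketch (plain Ruby, attribute names hypothetical) of the attr: branch above: the listed attributes are extracted from each record and joined with the separator derived from field_separator (tab by default).

custom_attributes = 'attr:host,message'.match(/^attr:(.*)$/)[1].split(',').map(&:strip).reject(&:empty?)
f_separator = "\t"

record = { 'host' => 'web01', 'message' => 'ok', 'extra' => 'not listed, so dropped' }
line   = custom_attributes.map { |attr| record[attr].nil? ? '' : record[attr].to_s }.join(f_separator)
# => "web01\tok"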

#shutdown ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 136

def shutdown
  super
  @kafka = nil
end

#start ⇒ Object



# File 'lib/fluent/plugin/out_kafka.rb', line 131

def start
  super
  refresh_client
end