Class: LogStash::Outputs::Kafka

Inherits:
Base
  • Object
show all
Defined in:
lib/logstash/outputs/kafka.rb

Overview

Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker.

The only required configuration is the topic name. The default codec is json, so events will be persisted on the broker in json format. If you select a codec of plain, Logstash will encode your messages with not only the message but also with a timestamp and hostname. If you do not want anything but your message passing through, you should make the output configuration something like:

[source,ruby]

output

kafka {
  codec => plain {
     format => "%{message}"
  }
}

}

For more information see https://kafka.apache.org/documentation.html#theproducer

Kafka producer configuration: https://kafka.apache.org/documentation.html#newproducerconfigs

Instance Method Summary collapse

Instance Method Details

#closeObject



151
152
153
# File 'lib/logstash/outputs/kafka.rb', line 151

# Shut down this output. Closes the underlying Kafka producer created in
# #register, releasing its network connections and buffers.
def close
  @producer.close
end

#receive(event) ⇒ Object




143
144
145
146
147
148
149
# File 'lib/logstash/outputs/kafka.rb', line 143

# Handle a single event from the pipeline.
#
# The internal shutdown marker is ignored; every other event is passed to
# the codec, whose on_event callback (installed in #register) performs the
# actual publish to Kafka.
def receive(event)
  return if event == LogStash::SHUTDOWN

  @codec.encode(event)
end

#registerObject



101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
# File 'lib/logstash/outputs/kafka.rb', line 101

# Initialize the output: build the Kafka producer from the plugin settings,
# connect it to the brokers, and install the codec callback that publishes
# each encoded payload.
#
# The on_event hook expands @topic_id (and @message_key, when configured)
# with event.sprintf before sending. Errors raised while sending are logged
# and swallowed rather than propagated to the pipeline.
def register
  # Route log4j output from the Java Kafka client through our logger.
  LogStash::Logger.setup_log4j(@logger)

  producer_options = {
    key_serializer: @key_serializer,
    value_serializer: @value_serializer,
    bootstrap_servers: @bootstrap_servers,
    acks: @acks,
    buffer_memory: @buffer_memory,
    compression_type: @compression_type,
    retries: @retries,
    batch_size: @batch_size,
    client_id: @client_id,
    linger_ms: @linger_ms,
    max_request_size: @max_request_size,
    receive_buffer_bytes: @receive_buffer_bytes,
    send_buffer_bytes: @send_buffer_bytes,
    timeout_ms: @timeout_ms,
    block_on_buffer_full: @block_on_buffer_full,
    metadata_fetch_timeout_ms: @metadata_fetch_timeout_ms,
    metadata_max_age_ms: @metadata_max_age_ms,
    reconnect_backoff_ms: @reconnect_backoff_ms,
    retry_backoff_ms: @retry_backoff_ms
  }

  @producer = Kafka::KafkaProducer.new(producer_options)
  @producer.connect

  @logger.info('Registering kafka producer', :topic_id => @topic_id, :bootstrap_servers => @bootstrap_servers)

  @codec.on_event do |event, payload|
    begin
      # Only compute a message key when one was configured.
      message_key = @message_key.nil? ? nil : event.sprintf(@message_key)
      @producer.send_msg(event.sprintf(@topic_id), nil, message_key, payload)
    rescue LogStash::ShutdownSignal
      @logger.info('Kafka producer got shutdown signal')
    rescue => e
      @logger.warn('kafka producer threw exception, restarting',
                   :exception => e)
    end
  end
end