Class: Fluent::Rdkafka2Output
- Inherits: Output
  - Object
  - Output
  - Fluent::Rdkafka2Output
- Defined in: lib/fluent/plugin/out_rdkafka2.rb
Constant Summary
Constants included from KafkaPluginUtil::SSLSettings
KafkaPluginUtil::SSLSettings::DummyFormatter
Instance Method Summary
- #build_config ⇒ Object
- #close_producer(producer) ⇒ Object
- #configure(conf) ⇒ Object
- #enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers) ⇒ Object
- #get_producer ⇒ Object
- #initialize ⇒ Rdkafka2Output (constructor): A new instance of Rdkafka2Output.
- #multi_workers_ready? ⇒ Boolean
- #setup_formatter(conf) ⇒ Object
- #shutdown ⇒ Object
- #shutdown_producers ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Methods included from KafkaPluginUtil::SaslSettings
Methods included from KafkaPluginUtil::SSLSettings
included, #pickup_ssl_endpoint, #read_ssl_file
Constructor Details
#initialize ⇒ Rdkafka2Output
Returns a new instance of Rdkafka2Output.
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 99

def initialize
  super

  @producers = nil
  @producers_mutex = nil
  @shared_producer = nil
end
Instance Method Details
#build_config ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 146

def build_config
  config = {:"bootstrap.servers" => @brokers}

  if @ssl_ca_cert && @ssl_ca_cert[0]
    ssl = true
    config[:"ssl.ca.location"] = @ssl_ca_cert[0]
    config[:"ssl.certificate.location"] = @ssl_client_cert if @ssl_client_cert
    config[:"ssl.key.location"] = @ssl_client_cert_key if @ssl_client_cert_key
    config[:"ssl.key.password"] = @ssl_client_cert_key_password if @ssl_client_cert_key_password
  end

  if @principal
    sasl = true
    config[:"sasl.mechanisms"] = "GSSAPI"
    config[:"sasl.kerberos.principal"] = @principal
    config[:"sasl.kerberos.service.name"] = @service_name if @service_name
    config[:"sasl.kerberos.keytab"] = @keytab if @keytab
  end

  if ssl && sasl
    security_protocol = "SASL_SSL"
  elsif ssl && !sasl
    security_protocol = "SSL"
  elsif !ssl && sasl
    security_protocol = "SASL_PLAINTEXT"
  else
    security_protocol = "PLAINTEXT"
  end
  config[:"security.protocol"] = security_protocol

  config[:"compression.codec"] = @compression_codec if @compression_codec
  config[:"message.send.max.retries"] = @max_send_retries if @max_send_retries
  config[:"request.required.acks"] = @required_acks if @required_acks
  config[:"request.timeout.ms"] = @ack_timeout * 1000 if @ack_timeout
  config[:"queue.buffering.max.ms"] = @rdkafka_buffering_max_ms if @rdkafka_buffering_max_ms
  config[:"queue.buffering.max.messages"] = @rdkafka_buffering_max_messages if @rdkafka_buffering_max_messages
  config[:"message.max.bytes"] = @rdkafka_message_max_bytes if @rdkafka_message_max_bytes
  config[:"batch.num.messages"] = @rdkafka_message_max_num if @rdkafka_message_max_num

  @rdkafka_options.each { |k, v|
    config[k.to_sym] = v
  }

  config
end
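For orientation, here is a minimal sketch (not taken from the plugin source) of the hash build_config returns when, say, only a CA certificate and gzip compression are configured; the broker list and file path are assumed values:

# Assumed example values; every key below mirrors an assignment in build_config.
config = {
  :"bootstrap.servers" => "kafka-1:9092,kafka-2:9092",  # @brokers (assumed)
  :"ssl.ca.location"   => "/etc/fluent/ca.pem",          # @ssl_ca_cert[0] (assumed)
  :"security.protocol" => "SSL",                         # ssl set, sasl not set
  :"compression.codec" => "gzip"                         # @compression_codec (assumed)
}
# configure then passes this hash to Rdkafka::Config.new(config).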
#close_producer(producer) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 231

def close_producer(producer)
  unless producer.close(10)
    log.warn("Queue is forcefully closed after 10 seconds wait")
  end
end
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 107

def configure(conf)
  super
  log.instance_eval {
    def add(level, &block)
      if block
        self.info(block.call)
      end
    end
  }
  Rdkafka::Config.logger = log
  config = build_config
  @rdkafka = Rdkafka::Config.new(config)

  if @default_topic.nil?
    if @chunk_keys.include?(@topic_key) && !@chunk_key_tag
      log.warn "Use '#{@topic_key}' field of event record for topic but no fallback. Recommend to set default_topic or set 'tag' in buffer chunk keys like <buffer #{@topic_key},tag>"
    end
  else
    if @chunk_key_tag
      log.warn "default_topic is set. Fluentd's event tag is not used for topic"
    end
  end

  formatter_conf = conf.elements('format').first
  unless formatter_conf
    raise Fluent::ConfigError, "<format> section is required."
  end
  unless formatter_conf["@type"]
    raise Fluent::ConfigError, "format/@type is required."
  end
  @formatter_proc = setup_formatter(formatter_conf)
  @topic_key_sym = @topic_key.to_sym

  @headers_from_record_accessors = {}
  @headers_from_record.each do |key, value|
    @headers_from_record_accessors[key] = record_accessor_create(value)
  end
end
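The headers_from_record handling at the end of configure builds one record accessor per header name. A small sketch, assuming a hypothetical headers_from_record setting of {"source_host" => "$.host"}:

# Hypothetical setting for illustration: headers_from_record {"source_host" => "$.host"}
accessor = record_accessor_create("$.host")   # same plugin helper configure uses
headers  = {}
headers["source_host"] = accessor.call({"host" => "web-1", "message" => "hello"})
# headers => {"source_host" => "web-1"}; #write repeats this lookup per record.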
#enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 313

def enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
  attempt = 0
  loop do
    begin
      return producer.produce(topic: topic, payload: record_buf, key: message_key, partition: partition, headers: headers)
    rescue Exception => e
      if e.respond_to?(:code) && e.code == :queue_full
        if attempt <= @max_enqueue_retries
          log.warn "Failed to enqueue message; attempting retry #{attempt} of #{@max_enqueue_retries} after #{@enqueue_retry_backoff}s"
          sleep @enqueue_retry_backoff
          attempt += 1
        else
          raise "Failed to enqueue message although tried retry #{@max_enqueue_retries} times"
        end
      else
        raise e
      end
    end
  end
end
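Only the queue_full error code is retried here; any other exception propagates immediately. As a rough worked example with assumed parameter values, the longest a single record can block before the loop gives up is about (max_enqueue_retries + 1) * enqueue_retry_backoff seconds:

# Assumed values for illustration only.
max_enqueue_retries   = 3     # @max_enqueue_retries (assumed)
enqueue_retry_backoff = 0.1   # @enqueue_retry_backoff in seconds (assumed)
worst_case_wait = (max_enqueue_retries + 1) * enqueue_retry_backoff
# => ~0.4 seconds of sleep before the failure is raised to Fluentd's retry logic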
#get_producer ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 237

def get_producer
  if @share_producer
    @shared_producer
  else
    @producers_mutex.synchronize {
      producer = @producers[Thread.current.object_id]
      unless producer
        producer = @rdkafka.producer
        @producers[Thread.current.object_id] = producer
      end
      producer
    }
  end
end
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 203

def multi_workers_ready?
  true
end
#setup_formatter(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 252

def setup_formatter(conf)
  type = conf['@type']
  case type
  when 'ltsv'
    require 'ltsv'
    Proc.new { |tag, time, record| LTSV.dump(record) }
  else
    @formatter = formatter_create(usage: 'rdkafka-plugin', conf: conf)
    @formatter.method(:format)
  end
end
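Whichever branch runs, the return value is a callable taking (tag, time, record), which configure stores as @formatter_proc. A small usage sketch with made-up arguments, assuming the 'ltsv' branch:

# Illustrative call only; the tag, time and record values are made up.
formatter_proc = setup_formatter(formatter_conf)   # e.g. the 'ltsv' branch
payload = formatter_proc.call("app.events", 1700000000, {"level" => "info", "msg" => "ok"})
# payload is the LTSV-serialized record; other @type values delegate to the
# formatter created by formatter_create and its #format method.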
#shutdown ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 207

def shutdown
  super
  shutdown_producers
end
#shutdown_producers ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 212

def shutdown_producers
  if @share_producer
    close_producer(@shared_producer)
    @shared_producer = nil
  else
    @producers_mutex.synchronize {
      shutdown_threads = @producers.map { |key, producer|
        th = Thread.new {
          close_producer(producer)
        }
        th.abort_on_exception = true
        th
      }
      shutdown_threads.each { |th| th.join }
      @producers = {}
    }
  end
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 192

def start
  if @share_producer
    @shared_producer = @rdkafka.producer
  else
    @producers = {}
    @producers_mutex = Mutex.new
  end

  super
end
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 264

def write(chunk)
  tag = chunk.metadata.tag
  topic = (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) || @default_topic || tag

  handlers = []
  record_buf = nil
  record_buf_bytes = nil

  headers = @headers.clone

  begin
    producer = get_producer
    chunk.msgpack_each { |time, record|
      begin
        record = inject_values_to_record(tag, time, record)
        record.delete(@topic_key) if @exclude_topic_key
        partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
        message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key

        @headers_from_record_accessors.each do |key, header_accessor|
          headers[key] = header_accessor.call(record)
        end

        record_buf = @formatter_proc.call(tag, time, record)
        record_buf_bytes = record_buf.bytesize
        if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
          log.warn "record size exceeds max_send_limit_bytes. Skip event:", :time => time, :record => record
          next
        end
      rescue StandardError => e
        log.warn "unexpected error during format record. Skip broken event:", :error => e.to_s, :error_class => e.class.to_s, :time => time, :record => record
        next
      end

      handler = enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
      if @rdkafka_delivery_handle_poll_timeout != 0
        handlers << handler
      end
    }
    handlers.each { |handler|
      handler.wait(max_wait_timeout: @rdkafka_delivery_handle_poll_timeout)
    }
  end
rescue Exception => e
  log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
  # Raise exception to retry sending messages
  raise e
end
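The topic chosen at the top of write follows a fixed precedence: a per-chunk value of the topic_key chunk key, then default_topic, then the event tag. A small sketch restating that precedence, assuming topic_key is "topic" and a buffer section of <buffer topic,tag>:

# Precedence, highest first (assuming topic_key "topic"):
#   1. chunk.metadata.variables[:topic]   # value of the "topic" chunk key, if buffered on it
#   2. @default_topic                     # if configured
#   3. chunk.metadata.tag                 # the Fluentd event tag
topic = (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) ||
        @default_topic ||
        tag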