Class: Kafka::Datadog::ProducerSubscriber
- Inherits:
-
StatsdSubscriber
- Object
- ActiveSupport::Subscriber
- StatsdSubscriber
- Kafka::Datadog::ProducerSubscriber
- Defined in:
- lib/kafka/datadog.rb
Instance Method Summary collapse
- #ack_message(event) ⇒ Object
- #buffer_overflow(event) ⇒ Object
- #deliver_messages(event) ⇒ Object
- #produce_message(event) ⇒ Object
- #topic_error(event) ⇒ Object
Instance Method Details
#ack_message(event) ⇒ Object
273 274 275 276 277 278 279 280 281 282 283 284 |
# File 'lib/kafka/datadog.rb', line 273

# Records Datadog metrics for a message ACK received from Kafka.
#
# Emits a counter of ACK'd messages per client/topic and a histogram of
# the produce-to-ACK delay taken from the instrumentation payload.
#
# @param event [ActiveSupport::Notifications::Event] the `ack_message`
#   notification; payload must contain `:client_id`, `:topic`, and `:delay`.
# @return [void]
def ack_message(event)
  tags = {
    client: event.payload.fetch(:client_id),
    topic: event.payload.fetch(:topic),
  }

  # Number of messages ACK'd for the topic.
  increment("producer.ack.messages", tags: tags)

  # Histogram of delay between a message being produced and it being ACK'd.
  histogram("producer.ack.delay", event.payload.fetch(:delay), tags: tags)
end
#buffer_overflow(event) ⇒ Object
242 243 244 245 246 247 248 249 |
# File 'lib/kafka/datadog.rb', line 242

# Records a produce-error metric when the producer's message buffer is
# full and a message is rejected.
#
# @param event [ActiveSupport::Notifications::Event] the `buffer_overflow`
#   notification; payload must contain `:client_id` and `:topic`.
# @return [void]
def buffer_overflow(event)
  tags = {
    client: event.payload.fetch(:client_id),
    topic: event.payload.fetch(:topic),
  }

  increment("producer.produce.errors", tags: tags)
end
#deliver_messages(event) ⇒ Object
251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 |
# File 'lib/kafka/datadog.rb', line 251

# Records Datadog metrics for a delivery attempt: error count (when the
# payload carries an exception), delivery latency, number of messages
# delivered, and a histogram of delivery attempts.
#
# @param event [ActiveSupport::Notifications::Event] the `deliver_messages`
#   notification; payload must contain `:client_id`,
#   `:delivered_message_count`, and `:attempts`.
# @return [void]
def deliver_messages(event)
  client = event.payload.fetch(:client_id)
  message_count = event.payload.fetch(:delivered_message_count)
  attempts = event.payload.fetch(:attempts)

  tags = {
    client: client,
  }

  # An `:exception` key in the payload signals a failed delivery.
  if event.payload.key?(:exception)
    increment("producer.deliver.errors", tags: tags)
  end

  timing("producer.deliver.latency", event.duration, tags: tags)

  # Messages delivered to Kafka:
  count("producer.deliver.messages", message_count, tags: tags)

  # Number of attempts to deliver messages:
  histogram("producer.deliver.attempts", attempts, tags: tags)
end
#produce_message(event) ⇒ Object
215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 |
# File 'lib/kafka/datadog.rb', line 215

# Records Datadog metrics each time a message is written into the
# producer's buffer: the write rate, the message size, and how full the
# buffer is (absolute size, fill ratio, and fill percentage).
#
# @param event [ActiveSupport::Notifications::Event] the `produce_message`
#   notification; payload must contain `:client_id`, `:topic`,
#   `:message_size`, `:buffer_size`, and `:max_buffer_size`.
# @return [void]
def produce_message(event)
  client = event.payload.fetch(:client_id)
  topic = event.payload.fetch(:topic)
  message_size = event.payload.fetch(:message_size)
  buffer_size = event.payload.fetch(:buffer_size)
  max_buffer_size = event.payload.fetch(:max_buffer_size)
  buffer_fill_ratio = buffer_size.to_f / max_buffer_size.to_f
  buffer_fill_percentage = buffer_fill_ratio * 100.0

  tags = {
    client: client,
    topic: topic,
  }

  # This gets us the write rate.
  increment("producer.produce.messages", tags: tags.merge(topic: topic))

  histogram("producer.produce.message_size", message_size, tags: tags.merge(topic: topic))

  # This gets us the avg/max buffer size per producer.
  histogram("producer.buffer.size", buffer_size, tags: tags)

  # This gets us the avg/max buffer fill ratio per producer.
  histogram("producer.buffer.fill_ratio", buffer_fill_ratio, tags: tags)
  histogram("producer.buffer.fill_percentage", buffer_fill_percentage, tags: tags)
end
#topic_error(event) ⇒ Object
286 287 288 289 290 291 292 293 |
# File 'lib/kafka/datadog.rb', line 286

# Records an ACK-error metric when the broker reports a topic-level error
# in response to a produce request.
#
# @param event [ActiveSupport::Notifications::Event] the `topic_error`
#   notification; payload must contain `:client_id` and `:topic`.
# @return [void]
def topic_error(event)
  tags = {
    client: event.payload.fetch(:client_id),
    topic: event.payload.fetch(:topic)
  }

  increment("producer.ack.errors", tags: tags)
end