Class: Kafka::Datadog::ConsumerSubscriber
- Inherits: StatsdSubscriber
- Ancestor chain:
  - Object
  - ActiveSupport::Subscriber
  - StatsdSubscriber
  - Kafka::Datadog::ConsumerSubscriber
- Defined in:
- lib/kafka/datadog.rb
Instance Method Summary
- #join_group(event) ⇒ Object
- #leave_group(event) ⇒ Object
- #process_batch(event) ⇒ Object
- #process_message(event) ⇒ Object
- #sync_group(event) ⇒ Object
Instance Method Details
#join_group(event) ⇒ Object
172 173 174 175 176 177 178 179 180 181 182 183 |
# File 'lib/kafka/datadog.rb', line 172 def join_group(event) = { client: event.payload.fetch(:client_id), group_id: event.payload.fetch(:group_id), } timing("consumer.join_group", event.duration, tags: ) if event.payload.key?(:exception) increment("consumer.join_group.errors", tags: ) end end |
#leave_group(event) ⇒ Object
198 199 200 201 202 203 204 205 206 207 208 209 |
# File 'lib/kafka/datadog.rb', line 198 def leave_group(event) = { client: event.payload.fetch(:client_id), group_id: event.payload.fetch(:group_id), } timing("consumer.leave_group", event.duration, tags: ) if event.payload.key?(:exception) increment("consumer.leave_group.errors", tags: ) end end |
#process_batch(event) ⇒ Object
151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 |
# File 'lib/kafka/datadog.rb', line 151 def process_batch(event) lag = event.payload.fetch(:offset_lag) = event.payload.fetch(:message_count) = { client: event.payload.fetch(:client_id), group_id: event.payload.fetch(:group_id), topic: event.payload.fetch(:topic), partition: event.payload.fetch(:partition), } if event.payload.key?(:exception) increment("consumer.process_batch.errors", tags: ) else timing("consumer.process_batch.latency", event.duration, tags: ) count("consumer.messages", , tags: ) end gauge("consumer.lag", lag, tags: ) end |
#process_message(event) ⇒ Object
124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 |
# File 'lib/kafka/datadog.rb', line 124 def (event) offset_lag = event.payload.fetch(:offset_lag) create_time = event.payload.fetch(:create_time) time_lag = create_time && ((Time.now - create_time) * 1000).to_i = { client: event.payload.fetch(:client_id), group_id: event.payload.fetch(:group_id), topic: event.payload.fetch(:topic), partition: event.payload.fetch(:partition), } if event.payload.key?(:exception) increment("consumer.process_message.errors", tags: ) else timing("consumer.process_message.latency", event.duration, tags: ) increment("consumer.messages", tags: ) end gauge("consumer.lag", offset_lag, tags: ) # Not all messages have timestamps. if time_lag gauge("consumer.time_lag", time_lag, tags: ) end end |
#sync_group(event) ⇒ Object
185 186 187 188 189 190 191 192 193 194 195 196 |
# File 'lib/kafka/datadog.rb', line 185 def sync_group(event) = { client: event.payload.fetch(:client_id), group_id: event.payload.fetch(:group_id), } timing("consumer.sync_group", event.duration, tags: ) if event.payload.key?(:exception) increment("consumer.sync_group.errors", tags: ) end end |