Class: Kafka::Datadog::ConsumerSubscriber

Inherits:
StatsdSubscriber
  • Object
Defined in:
lib/kafka/datadog.rb
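
ConsumerSubscriber is one of the subscribers installed when the Datadog integration is loaded; it listens to the consumer notifications that ruby-kafka emits via ActiveSupport::Notifications and forwards them to a Datadog Statsd agent. Below is a minimal sketch of enabling it. The Kafka::Datadog configuration setters (host, port, namespace) are assumptions about the gem's API; confirm them against lib/kafka/datadog.rb for your installed version.

require "kafka"
require "kafka/datadog"   # loads the Datadog subscribers, including ConsumerSubscriber

# Assumed configuration setters; verify against your installed ruby-kafka version.
Kafka::Datadog.host = "127.0.0.1"
Kafka::Datadog.port = 8125
Kafka::Datadog.namespace = "ruby_kafka"

kafka = Kafka.new(["kafka1:9092"], client_id: "my-app")
consumer = kafka.consumer(group_id: "my-group")
consumer.subscribe("greetings")

# Each message handled here triggers #process_message below, which reports
# latency, throughput, offset, and lag for this consumer group.
consumer.each_message do |message|
  puts message.value
end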

Instance Method Summary

  #join_group(event) ⇒ Object
  #leave_group(event) ⇒ Object
  #loop(event) ⇒ Object
  #pause_status(event) ⇒ Object
  #process_batch(event) ⇒ Object
  #process_message(event) ⇒ Object
  #sync_group(event) ⇒ Object

Instance Method Details

#join_group(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 183

def join_group(event)
  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
  }

  timing("consumer.join_group", event.duration, tags: tags)

  if event.payload.key?(:exception)
    increment("consumer.join_group.errors", tags: tags)
  end
end
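
Assuming the Datadog integration has been loaded (require "kafka/datadog"), this handler runs whenever ruby-kafka reports a consumer group join. The sketch below exercises it directly through ActiveSupport::Notifications; the event name ("join_group.consumer.kafka") and the hand-built payload are assumptions for illustration, since in normal operation ruby-kafka emits these notifications itself. The #sync_group and #leave_group handlers follow the same pattern with their own metric names.

require "active_support/notifications"

payload = { client_id: "my-app", group_id: "my-group" }

# Successful join: the handler records a "consumer.join_group" timing.
ActiveSupport::Notifications.instrument("join_group.consumer.kafka", payload) do
  # group join work happens here
end

# Failed join: ActiveSupport adds an :exception entry to the payload when the
# block raises, so the handler also increments "consumer.join_group.errors".
begin
  ActiveSupport::Notifications.instrument("join_group.consumer.kafka", payload.dup) do
    raise "join failed"
  end
rescue RuntimeError
  # swallowed for the sake of the example
end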

#leave_group(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 209

def leave_group(event)
  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
  }

  timing("consumer.leave_group", event.duration, tags: tags)

  if event.payload.key?(:exception)
    increment("consumer.leave_group.errors", tags: tags)
  end
end

#loop(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 222

def loop(event)
  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
  }

  histogram("consumer.loop.duration", event.duration, tags: tags)
end
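
Unlike the other handlers, #loop reports the duration as a histogram rather than a timing, so Datadog can derive percentile distributions across consumer poll loops. The sketch below approximates what the subscriber's histogram helper ends up sending; the key:value tag formatting and the "ruby_kafka" default namespace are assumptions about StatsdSubscriber, not guarantees.

require "datadog/statsd"

statsd = Datadog::Statsd.new("127.0.0.1", 8125, namespace: "ruby_kafka")

# Roughly the call the subscriber makes for one consumer loop iteration.
loop_duration_ms = 250.0
statsd.histogram(
  "consumer.loop.duration",
  loop_duration_ms,
  tags: ["client:my-app", "group_id:my-group"]
)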

#pause_status(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 231

def pause_status(event)
  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
    topic: event.payload.fetch(:topic),
    partition: event.payload.fetch(:partition),
  }

  duration = event.payload.fetch(:duration)

  gauge("consumer.pause.duration", duration, tags: tags)
end
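
Note that #pause_status reads the duration from the event payload rather than from event.duration: the value is how long the partition has been paused so far, not how long the notification block took. A sketch of emitting such an event by hand follows; the event name and payload keys are assumptions taken from the handler above.

require "active_support/notifications"

# Report that a partition has been paused for some time.
ActiveSupport::Notifications.instrument(
  "pause_status.consumer.kafka",
  client_id: "my-app",
  group_id: "my-group",
  topic: "greetings",
  partition: 0,
  duration: 30.0   # how long the partition has been paused, passed through unconverted
)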

#process_batch(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 160

def process_batch(event)
  offset = event.payload.fetch(:last_offset)
  lag = event.payload.fetch(:offset_lag)
  messages = event.payload.fetch(:message_count)

  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
    topic: event.payload.fetch(:topic),
    partition: event.payload.fetch(:partition),
  }

  if event.payload.key?(:exception)
    increment("consumer.process_batch.errors", tags: tags)
  else
    timing("consumer.process_batch.latency", event.duration, tags: tags)
    count("consumer.messages", messages, tags: tags)
  end

  gauge("consumer.offset", offset, tags: tags)
  gauge("consumer.lag", lag, tags: tags)
end
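
For a processed batch, the handler emits either a latency timing plus a message count (success) or an error counter (when the payload carries :exception), and always gauges the latest offset and lag. Below is a hedged sketch of a payload that satisfies the fetches above and the metrics that result; the event name is an assumption, and in normal operation ruby-kafka builds this payload itself.

require "active_support/notifications"

payload = {
  client_id: "my-app",
  group_id: "my-group",
  topic: "greetings",
  partition: 3,
  last_offset: 1_042,   # offset of the last message in the batch
  offset_lag: 17,       # how far behind the partition's latest offset the consumer is
  message_count: 50
}

# Successful batch: emits consumer.process_batch.latency, consumer.messages (+50),
# consumer.offset (1042), and consumer.lag (17), all tagged with
# client/group_id/topic/partition.
ActiveSupport::Notifications.instrument("process_batch.consumer.kafka", payload) do
  # batch processing happens here
end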

#process_message(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 131

def process_message(event)
  offset = event.payload.fetch(:offset)
  offset_lag = event.payload.fetch(:offset_lag)
  create_time = event.payload.fetch(:create_time)
  time_lag = create_time && ((Time.now - create_time) * 1000).to_i

  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
    topic: event.payload.fetch(:topic),
    partition: event.payload.fetch(:partition),
  }

  if event.payload.key?(:exception)
    increment("consumer.process_message.errors", tags: tags)
  else
    timing("consumer.process_message.latency", event.duration, tags: tags)
    increment("consumer.messages", tags: tags)
  end

  gauge("consumer.offset", offset, tags: tags)
  gauge("consumer.lag", offset_lag, tags: tags)

  # Not all messages have timestamps.
  if time_lag
    gauge("consumer.time_lag", time_lag, tags: tags)
  end
end
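
The time_lag gauge is the message's end-to-end latency: the difference between the current time and the message's create_time (its producer/broker timestamp), converted to whole milliseconds. Messages without a timestamp yield a nil create_time, so that gauge is skipped. A small sketch of the same computation in isolation, using hypothetical timestamps:

require "time"

create_time = Time.parse("2023-05-01 12:00:00.250 UTC")   # hypothetical message timestamp
now         = Time.parse("2023-05-01 12:00:01.000 UTC")

# Same arithmetic as the handler: seconds difference * 1000, truncated to an integer.
time_lag_ms = ((now - create_time) * 1000).to_i
# => 750

# When the message has no timestamp, create_time is nil and no time_lag gauge is sent.
create_time = nil
time_lag_ms = create_time && ((now - create_time) * 1000).to_i
# => nil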

#sync_group(event) ⇒ Object



# File 'lib/kafka/datadog.rb', line 196

def sync_group(event)
  tags = {
    client: event.payload.fetch(:client_id),
    group_id: event.payload.fetch(:group_id),
  }

  timing("consumer.sync_group", event.duration, tags: tags)

  if event.payload.key?(:exception)
    increment("consumer.sync_group.errors", tags: tags)
  end
end