Class: Phobos::Listener

Inherits: Object
Includes:
Instrumentation
Defined in:
lib/phobos/listener.rb

Constant Summary collapse

KAFKA_CONSUMER_OPTS =
%i(
  session_timeout
  offset_commit_interval
  offset_commit_threshold
  heartbeat_interval
  offset_retention_time
).freeze
DEFAULT_MAX_BYTES_PER_PARTITION =
  1_048_576 # 1 MB
DELIVERY_OPTS =
%w[batch message].freeze

Constants included from Instrumentation

Instrumentation::NAMESPACE

Instance Attribute Summary collapse

Instance Method Summary collapse

Methods included from Instrumentation

#instrument, subscribe, unsubscribe

Constructor Details

#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, delivery: 'batch', max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION, session_timeout: nil, offset_commit_interval: nil, heartbeat_interval: nil, offset_commit_threshold: nil, offset_retention_time: nil) ⇒ Listener

Returns a new instance of Listener.



19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# File 'lib/phobos/listener.rb', line 19

# Builds a listener bound to one topic / consumer-group pair.
#
# @param handler [Class] message handler class; when it mixes in
#   Phobos::Producer, the listener shares its kafka client with the producer
# @param group_id [String] Kafka consumer group id
# @param topic [String] topic to subscribe to
# @param delivery [String, Symbol] 'batch' or 'message'; decides which
#   consume loop #start runs (coerced to String)
# @param force_encoding [String, Symbol, nil] name of a Ruby Encoding
#   constant to force on consumed payloads (nil leaves payloads untouched)
# @param start_from_beginning [Boolean] passed through to the consumer
#   subscription
# @param backoff [Hash, nil] options for the exponential backoff helper
#
# The remaining options are forwarded to the underlying ruby-kafka
# consumer; nil values are dropped so Kafka's own defaults apply.
def initialize(handler:, group_id:, topic:, min_bytes: nil,
               max_wait_time: nil, force_encoding: nil,
               start_from_beginning: true, backoff: nil,
               delivery: 'batch',
               max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION,
               session_timeout: nil, offset_commit_interval: nil,
               heartbeat_interval: nil, offset_commit_threshold: nil,
               offset_retention_time: nil)
  @id = SecureRandom.hex[0...6]
  @handler_class = handler
  @group_id = group_id
  @topic = topic
  @backoff = backoff
  @delivery = delivery.to_s

  # Options handed to Kafka::Consumer#subscribe.
  @subscribe_opts = {
    start_from_beginning: start_from_beginning,
    max_bytes_per_partition: max_bytes_per_partition
  }

  # Only explicitly configured consumer options are forwarded.
  @kafka_consumer_opts = compact(
    session_timeout: session_timeout,
    offset_commit_interval: offset_commit_interval,
    heartbeat_interval: heartbeat_interval,
    offset_retention_time: offset_retention_time,
    offset_commit_threshold: offset_commit_threshold
  )

  @message_processing_opts = compact(min_bytes: min_bytes, max_wait_time: max_wait_time)

  if force_encoding
    @encoding = Encoding.const_get(force_encoding.to_sym)
  end

  @kafka_client = Phobos.create_kafka_client
  @producer_enabled = @handler_class.ancestors.include?(Phobos::Producer)
end

Instance Attribute Details

#encodingObject (readonly)

Returns the value of attribute encoding.



17
18
19
# File 'lib/phobos/listener.rb', line 17

def encoding
  @encoding
end

#group_idObject (readonly)

Returns the value of attribute group_id.



16
17
18
# File 'lib/phobos/listener.rb', line 16

def group_id
  @group_id
end

#handler_classObject (readonly)

Returns the value of attribute handler_class.



17
18
19
# File 'lib/phobos/listener.rb', line 17

def handler_class
  @handler_class
end

#idObject (readonly)

Returns the value of attribute id.



16
17
18
# File 'lib/phobos/listener.rb', line 16

def id
  @id
end

#topicObject (readonly)

Returns the value of attribute topic.



16
17
18
# File 'lib/phobos/listener.rb', line 16

def topic
  @topic
end

Instance Method Details

#consume_each_batchObject



97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/phobos/listener.rb', line 97

# Consumes the topic in batches, handing each batch to
# Phobos::Actions::ProcessBatch, then logs the committed offset.
# Returns early once #stop has flipped the stop signal.
#
# NOTE(review): the extracted source had identifier arguments stripped
# (`listener_metadata: ` and `batch_processor.`); restored as
# +listener_metadata+ / +batch_processor.metadata+ per the canonical
# lib/phobos/listener.rb — confirm against upstream.
def consume_each_batch
  @consumer.each_batch(@message_processing_opts) do |batch|
    batch_processor = Phobos::Actions::ProcessBatch.new(
      listener: self,
      batch: batch,
      listener_metadata: listener_metadata
    )

    batch_processor.execute
    Phobos.logger.info { Hash(message: 'Committed offset').merge(batch_processor.metadata) }
    return if should_stop?
  end
end

#consume_each_messageObject



111
112
113
114
115
116
117
118
119
120
121
122
123
# File 'lib/phobos/listener.rb', line 111

# Consumes the topic one message at a time, handing each message to
# Phobos::Actions::ProcessMessage, then logs the committed offset.
# Returns early once #stop has flipped the stop signal.
#
# NOTE(review): the extracted source had identifier arguments stripped
# (`listener_metadata: ` and `message_processor.`); restored as
# +listener_metadata+ / +message_processor.metadata+ per the canonical
# lib/phobos/listener.rb — confirm against upstream.
def consume_each_message
  @consumer.each_message(@message_processing_opts) do |message|
    message_processor = Phobos::Actions::ProcessMessage.new(
      listener: self,
      message: message,
      listener_metadata: listener_metadata
    )

    message_processor.execute
    Phobos.logger.info { Hash(message: 'Committed offset').merge(message_processor.metadata) }
    return if should_stop?
  end
end

#create_exponential_backoffObject



134
135
136
# File 'lib/phobos/listener.rb', line 134

# Builds a fresh exponential backoff object from this listener's
# +@backoff+ options (set at construction; may be nil, in which case
# Phobos presumably falls back to its global backoff defaults —
# confirm in Phobos.create_exponential_backoff).
def create_exponential_backoff
  Phobos.create_exponential_backoff(@backoff)
end

#should_stop?Boolean

Returns:

  • (Boolean)


138
139
140
# File 'lib/phobos/listener.rb', line 138

# Whether #stop has been requested. Only an explicit +true+ (set by
# #stop) counts; nil/unset means the listener keeps running.
def should_stop?
  true.equal?(@signal_to_stop)
end

#startObject



51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
# File 'lib/phobos/listener.rb', line 51

# Starts the listener: subscribes the consumer, wires the producer's
# kafka client when the handler is a producer, runs the handler's
# start hook, then blocks in the configured consume loop until #stop.
# The ensure section always tears everything down (handler stop hook,
# consumer, producer client, kafka client).
#
# NOTE(review): the extracted source had the metadata argument stripped
# from every instrument/merge call; restored as +listener_metadata+
# (helper defined elsewhere in this file) per the canonical
# lib/phobos/listener.rb — confirm against upstream.
def start
  @signal_to_stop = false
  instrument('listener.start', listener_metadata) do
    @consumer = create_kafka_consumer
    @consumer.subscribe(topic, @subscribe_opts)

    # This is done here because the producer client is bound to the current thread and
    # since "start" blocks a thread might be used to call it
    @handler_class.producer.configure_kafka_client(@kafka_client) if @producer_enabled

    instrument('listener.start_handler', listener_metadata) { @handler_class.start(@kafka_client) }
    Phobos.logger.info { Hash(message: 'Listener started').merge(listener_metadata) }
  end

  begin
    @delivery == 'batch' ? consume_each_batch : consume_each_message

  # Abort is an exception to prevent the consumer from committing the offset.
  # Since "listener" had a message being retried while "stop" was called
  # it's wise to not commit the batch offset to avoid data loss. This will
  # cause some messages to be reprocessed
  #
  rescue Kafka::ProcessingError, Phobos::AbortError
    instrument('listener.retry_aborted', listener_metadata) do
      Phobos.logger.info({ message: 'Retry loop aborted, listener is shutting down' }.merge(listener_metadata))
    end
  end

ensure
  instrument('listener.stop', listener_metadata) do
    instrument('listener.stop_handler', listener_metadata) { @handler_class.stop }

    @consumer&.stop

    if @producer_enabled
      @handler_class.producer.async_producer_shutdown
      @handler_class.producer.configure_kafka_client(nil)
    end

    @kafka_client.close
    if should_stop?
      Phobos.logger.info { Hash(message: 'Listener stopped').merge(listener_metadata) }
    end
  end
end

#stopObject



125
126
127
128
129
130
131
132
# File 'lib/phobos/listener.rb', line 125

# Signals the listener to shut down: stops the underlying consumer and
# flips the stop signal so the consume loops in #start return. Safe to
# call repeatedly — subsequent calls are no-ops.
#
# NOTE(review): the extracted source had the metadata argument stripped
# from the instrument/merge calls; restored as +listener_metadata+ per
# the canonical lib/phobos/listener.rb — confirm against upstream.
def stop
  return if should_stop?
  instrument('listener.stopping', listener_metadata) do
    Phobos.logger.info { Hash(message: 'Listener stopping').merge(listener_metadata) }
    @consumer&.stop
    @signal_to_stop = true
  end
end