Class: Phobos::Listener
- Inherits: Object
  (hierarchy: Object → Phobos::Listener)
- Includes:
- Instrumentation
- Defined in:
- lib/phobos/listener.rb
Constant Summary collapse
- KAFKA_CONSUMER_OPTS =
%i(session_timeout offset_commit_interval offset_commit_threshold heartbeat_interval).freeze
- DEFAULT_MAX_BYTES_PER_PARTITION =
524288 # 512 KB
Constants included from Instrumentation
Instance Attribute Summary collapse
-
#group_id ⇒ Object
readonly
Returns the value of attribute group_id.
-
#id ⇒ Object
readonly
Returns the value of attribute id.
-
#topic ⇒ Object
readonly
Returns the value of attribute topic.
Instance Method Summary collapse
-
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
constructor
A new instance of Listener.
- #start ⇒ Object
- #stop ⇒ Object
Methods included from Instrumentation
#instrument, subscribe, unsubscribe
Constructor Details
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
Returns a new instance of Listener.
10 11 12 13 14 15 16 17 18 19 20 21 22 23 |
# File 'lib/phobos/listener.rb', line 10

# Builds a listener bound to a single topic and consumer group.
#
# @param handler [Class] handler class invoked for each consumed batch
# @param group_id [String] Kafka consumer group id this listener joins
# @param topic [String] topic to subscribe to
# @param min_bytes [Integer, nil] minimum bytes to accumulate before a fetch returns
# @param max_wait_time [Numeric, nil] maximum time to block waiting for min_bytes
# @param force_encoding [String, Symbol, nil] when set, resolved to an Encoding constant
# @param start_from_beginning [Boolean] consume from the earliest offset when none is committed
# @param max_bytes_per_partition [Integer] per-partition fetch cap (defaults to 512 KB)
def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil,
               force_encoding: nil, start_from_beginning: true,
               max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION)
  # Short random id used to correlate log/instrumentation events for this listener
  @id = SecureRandom.hex[0...6]
  @handler_class = handler
  @group_id = group_id
  @topic = topic
  @subscribe_opts = {
    start_from_beginning: start_from_beginning,
    max_bytes_per_partition: max_bytes_per_partition
  }
  # Resolve e.g. :UTF_8 into the ::Encoding constant; raises NameError on unknown names
  @encoding = Encoding.const_get(force_encoding.to_sym) if force_encoding
  # NOTE(review): compact is a Phobos helper — presumably drops nil entries so
  # ruby-kafka defaults still apply; confirm against lib/phobos.
  @consumer_opts = compact(min_bytes: min_bytes, max_wait_time: max_wait_time)
  @kafka_client = Phobos.create_kafka_client
  # Handlers mixing in Phobos::Producer get the shared kafka client wired in #start
  @producer_enabled = @handler_class.ancestors.include?(Phobos::Producer)
end
Instance Attribute Details
#group_id ⇒ Object (readonly)
Returns the value of attribute group_id.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8

# @return [Object] the Kafka consumer group id this listener belongs to
def group_id
  @group_id
end
#id ⇒ Object (readonly)
Returns the value of attribute id.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8

# @return [Object] the short random identifier assigned to this listener
def id
  @id
end
#topic ⇒ Object (readonly)
Returns the value of attribute topic.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8

# @return [Object] the topic this listener consumes from
def topic
  @topic
end
Instance Method Details
#start ⇒ Object
25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 |
# File 'lib/phobos/listener.rb', line 25

# Runs the consume loop: creates the consumer, subscribes, and processes
# batches until #stop is signalled. Blocks the calling thread.
#
# NOTE(review): several metadata arguments were stripped by the doc
# extraction; the identifiers below (listener_metadata / batch_metadata and
# batch.messages.count) are reconstructed — confirm against
# lib/phobos/listener.rb in the gem source.
def start
  @signal_to_stop = false
  instrument('listener.start', listener_metadata) do
    @consumer = create_kafka_consumer
    @consumer.subscribe(topic, @subscribe_opts)

    # This is done here because the producer client is bound to the current thread and
    # since "start" blocks a thread might be used to call it
    @handler_class.producer.configure_kafka_client(@kafka_client) if @producer_enabled

    instrument('listener.start_handler', listener_metadata) { @handler_class.start(@kafka_client) }
    Phobos.logger.info { Hash(message: 'Listener started').merge(listener_metadata) }
  end

  begin
    @consumer.each_batch(@consumer_opts) do |batch|
      batch_metadata = {
        batch_size: batch.messages.count,
        partition: batch.partition,
        offset_lag: batch.offset_lag,
        # the offset of the most recent message in the partition
        highwater_mark_offset: batch.highwater_mark_offset
      }.merge(listener_metadata)

      instrument('listener.process_batch', batch_metadata) do |metadata|
        time_elapsed = measure { process_batch(batch) }
        metadata.merge!(time_elapsed: time_elapsed)
        Phobos.logger.info { Hash(message: 'Committed offset').merge(metadata) }
      end

      return if @signal_to_stop
    end

  # Abort is an exception to prevent the consumer from committing the offset.
  # Since "listener" had a message being retried while "stop" was called
  # it's wise to not commit the batch offset to avoid data loss. This will
  # cause some messages to be reprocessed
  #
  rescue Phobos::AbortError
    instrument('listener.retry_aborted', listener_metadata) do
      Phobos.logger.info do
        {message: 'Retry loop aborted, listener is shutting down'}.merge(listener_metadata)
      end
    end
  end

ensure
  # Shutdown path runs whether the loop exited normally, via return, or via error
  instrument('listener.stop', listener_metadata) do
    instrument('listener.stop_handler', listener_metadata) { @handler_class.stop }
    @consumer&.stop

    if @producer_enabled
      @handler_class.producer.async_producer_shutdown
      @handler_class.producer.configure_kafka_client(nil)
    end

    @kafka_client.close
    if @signal_to_stop
      Phobos.logger.info { Hash(message: 'Listener stopped').merge(listener_metadata) }
    end
  end
end