Class: Phobos::Listener
- Inherits: Object
- Includes:
- Instrumentation
- Defined in:
- lib/phobos/listener.rb
Constant Summary collapse
- KAFKA_CONSUMER_OPTS =
%i(session_timeout offset_commit_interval offset_commit_threshold heartbeat_interval).freeze
- DEFAULT_MAX_BYTES_PER_PARTITION =
524288 # 512 KB
Constants included from Instrumentation
Instance Attribute Summary collapse
-
#encoding ⇒ Object
readonly
Returns the value of attribute encoding.
-
#group_id ⇒ Object
readonly
Returns the value of attribute group_id.
-
#handler_class ⇒ Object
readonly
Returns the value of attribute handler_class.
-
#id ⇒ Object
readonly
Returns the value of attribute id.
-
#topic ⇒ Object
readonly
Returns the value of attribute topic.
Instance Method Summary collapse
- #create_exponential_backoff ⇒ Object
-
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
constructor
A new instance of Listener.
- #should_stop? ⇒ Boolean
- #start ⇒ Object
- #stop ⇒ Object
Methods included from Instrumentation
#instrument, #measure, subscribe, unsubscribe
Constructor Details
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
Returns a new instance of Listener.
11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 |
# File 'lib/phobos/listener.rb', line 11
#
# Builds a listener bound to one topic/consumer-group pair.
#
# @param handler [Class] handler class whose instances process each batch
# @param group_id [String] Kafka consumer group id
# @param topic [String] topic to subscribe to
# @param min_bytes [Integer, nil] minimum bytes to fetch per request (consumer opt)
# @param max_wait_time [Numeric, nil] max time to wait for min_bytes (consumer opt)
# @param force_encoding [String, Symbol, nil] when set, payloads are re-encoded
#   to this Ruby Encoding (e.g. 'UTF_8')
# @param start_from_beginning [Boolean] consume from earliest offset on first run
# @param backoff [Hash, nil] backoff configuration overriding the global one
# @param max_bytes_per_partition [Integer] per-partition fetch cap
def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil,
               force_encoding: nil, start_from_beginning: true, backoff: nil,
               max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION)
  # Short random id used to tell listeners apart in logs/instrumentation.
  @id = SecureRandom.hex[0...6]
  @handler_class = handler
  @group_id = group_id
  @topic = topic
  @backoff = backoff
  @subscribe_opts = {
    start_from_beginning: start_from_beginning,
    max_bytes_per_partition: max_bytes_per_partition
  }
  # Encoding.const_get raises NameError for unknown encodings — surfaced at boot.
  @encoding = Encoding.const_get(force_encoding.to_sym) if force_encoding
  # compact drops nil entries so ruby-kafka receives only explicit options.
  @consumer_opts = compact(min_bytes: min_bytes, max_wait_time: max_wait_time)
  @kafka_client = Phobos.create_kafka_client
  # Handlers that mix in Phobos::Producer get a producer client wired in #start.
  @producer_enabled = @handler_class.ancestors.include?(Phobos::Producer)
end
Instance Attribute Details
#encoding ⇒ Object (readonly)
Returns the value of attribute encoding.
9 10 11 |
# File 'lib/phobos/listener.rb', line 9
# Returns the value of attribute encoding (readonly).
def encoding
  @encoding
end
#group_id ⇒ Object (readonly)
Returns the value of attribute group_id.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8
# Returns the value of attribute group_id (readonly).
def group_id
  @group_id
end
#handler_class ⇒ Object (readonly)
Returns the value of attribute handler_class.
9 10 11 |
# File 'lib/phobos/listener.rb', line 9
# Returns the value of attribute handler_class (readonly).
def handler_class
  @handler_class
end
#id ⇒ Object (readonly)
Returns the value of attribute id.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8
# Returns the value of attribute id (readonly).
def id
  @id
end
#topic ⇒ Object (readonly)
Returns the value of attribute topic.
8 9 10 |
# File 'lib/phobos/listener.rb', line 8
# Returns the value of attribute topic (readonly).
def topic
  @topic
end
Instance Method Details
#create_exponential_backoff ⇒ Object
107 108 109 |
# File 'lib/phobos/listener.rb', line 107
# Builds a fresh exponential backoff object from this listener's backoff
# configuration (falls back to the global config when @backoff is nil —
# delegated to Phobos.create_exponential_backoff).
def create_exponential_backoff
  Phobos.create_exponential_backoff(@backoff)
end
#should_stop? ⇒ Boolean
111 112 113 |
# File 'lib/phobos/listener.rb', line 111
# @return [Boolean] true once #stop has flagged this listener to shut down.
#   The explicit `== true` keeps the return strictly boolean even if the
#   ivar is unset (nil).
def should_stop?
  @signal_to_stop == true
end
#start ⇒ Object
30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 |
# File 'lib/phobos/listener.rb', line 30
#
# Blocks the calling thread consuming batches from Kafka until #stop is
# requested or an unrecoverable error escapes. Emits instrumentation
# events around startup, each processed batch, retry aborts, and shutdown.
#
# NOTE(review): the metadata identifier was stripped from this extraction
# (`instrument('listener.start', )`, `.merge()`, bare ` = {`). It is
# reconstructed below as `listener_metadata` — confirm against upstream
# lib/phobos/listener.rb before relying on the exact name.
def start
  @signal_to_stop = false

  instrument('listener.start', listener_metadata) do
    @consumer = create_kafka_consumer
    @consumer.subscribe(topic, @subscribe_opts)

    # This is done here because the producer client is bound to the current
    # thread and, since "start" blocks, a thread might be used to call it.
    @handler_class.producer.configure_kafka_client(@kafka_client) if @producer_enabled

    instrument('listener.start_handler', listener_metadata) { @handler_class.start(@kafka_client) }
    Phobos.logger.info { Hash(message: 'Listener started').merge(listener_metadata) }
  end

  begin
    @consumer.each_batch(@consumer_opts) do |batch|
      batch_metadata = {
        batch_size: batch.messages.count,
        partition: batch.partition,
        offset_lag: batch.offset_lag,
        # the offset of the most recent message in the partition
        highwater_mark_offset: batch.highwater_mark_offset
      }.merge(listener_metadata)

      instrument('listener.process_batch', batch_metadata) do |batch_metadata|
        time_elapsed = measure do
          Phobos::Actions::ProcessBatch.new(
            listener: self,
            batch: batch,
            listener_metadata: listener_metadata
          ).execute
        end
        batch_metadata.merge!(time_elapsed: time_elapsed)
        Phobos.logger.info { Hash(message: 'Committed offset').merge(batch_metadata) }
      end

      return if should_stop?
    end
  rescue Kafka::ProcessingError, Phobos::AbortError
    # Abort is an exception to prevent the consumer from committing the
    # offset. Since "listener" had a message being retried while "stop"
    # was called it's wise to not commit the batch offset to avoid data
    # loss. This will cause some messages to be reprocessed.
    instrument('listener.retry_aborted', listener_metadata) do
      Phobos.logger.info({ message: 'Retry loop aborted, listener is shutting down' }.merge(listener_metadata))
    end
  end
ensure
  # Runs on both clean exit and failure: stop the handler, tear down the
  # producer binding, and close the shared Kafka client.
  instrument('listener.stop', listener_metadata) do
    instrument('listener.stop_handler', listener_metadata) { @handler_class.stop }
    @consumer&.stop

    if @producer_enabled
      @handler_class.producer.async_producer_shutdown
      @handler_class.producer.configure_kafka_client(nil)
    end

    @kafka_client.close
    Phobos.logger.info { Hash(message: 'Listener stopped').merge(listener_metadata) } if should_stop?
  end
end