Class: Phobos::Listener
- Inherits:
-
Object
- Object
- Phobos::Listener
- Includes:
- Instrumentation
- Defined in:
- lib/phobos/listener.rb
Constant Summary collapse
- KAFKA_CONSUMER_OPTS =
%i(session_timeout offset_commit_interval offset_commit_threshold heartbeat_interval).freeze
- DEFAULT_MAX_BYTES_PER_PARTITION =
524288 # 512 KB
- DELIVERY_OPTS =
%w[batch message].freeze
Constants included from Instrumentation
Instance Attribute Summary collapse
-
#encoding ⇒ Object
readonly
Returns the value of attribute encoding.
-
#group_id ⇒ Object
readonly
Returns the value of attribute group_id.
-
#handler_class ⇒ Object
readonly
Returns the value of attribute handler_class.
-
#id ⇒ Object
readonly
Returns the value of attribute id.
-
#topic ⇒ Object
readonly
Returns the value of attribute topic.
Instance Method Summary collapse
- #consume_each_batch ⇒ Object
- #consume_each_message ⇒ Object
- #create_exponential_backoff ⇒ Object
-
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, delivery: 'batch', max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
constructor
A new instance of Listener.
- #should_stop? ⇒ Boolean
- #start ⇒ Object
- #stop ⇒ Object
Methods included from Instrumentation
#instrument, #measure, subscribe, unsubscribe
Constructor Details
#initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, delivery: 'batch', max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION) ⇒ Listener
Returns a new instance of Listener.
# File 'lib/phobos/listener.rb', line 12
#
# Builds a listener bound to one topic and consumer group.
#
# handler:                  handler class whose instances process messages/batches
# group_id:/topic:          Kafka consumer group and topic to subscribe to
# min_bytes:/max_wait_time: optional fetch tuning, forwarded to the consumer loop
# force_encoding:           optional Encoding constant name (e.g. 'UTF_8') to transcode payloads
# start_from_beginning:     whether a brand-new group starts at the earliest offset
# backoff:                  optional backoff config for retries
# delivery:                 'batch' or 'message' — selects the consume loop in #start
# max_bytes_per_partition:  per-partition fetch cap (defaults to 512 KB)
def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil,
               force_encoding: nil, start_from_beginning: true, backoff: nil,
               delivery: 'batch', max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION)
  # Short random tag so this listener's log lines can be told apart
  @id = SecureRandom.hex[0...6]
  @handler_class = handler
  @group_id = group_id
  @topic = topic
  @backoff = backoff
  @delivery = delivery.to_s
  @subscribe_opts = {
    start_from_beginning: start_from_beginning,
    max_bytes_per_partition: max_bytes_per_partition
  }
  # Resolve e.g. 'UTF_8' to Encoding::UTF_8; nil leaves payloads untouched
  @encoding = Encoding.const_get(force_encoding.to_sym) if force_encoding
  @consumer_opts = compact(min_bytes: min_bytes, max_wait_time: max_wait_time)
  @kafka_client = Phobos.create_kafka_client
  # Handlers mixing in Phobos::Producer get the shared client wired up in #start
  @producer_enabled = @handler_class.ancestors.include?(Phobos::Producer)
end
Instance Attribute Details
#encoding ⇒ Object (readonly)
Returns the value of attribute encoding.
# File 'lib/phobos/listener.rb', line 10
#
# Optional Encoding used to transcode message payloads; nil unless
# force_encoding was passed to the constructor.
def encoding
  @encoding
end
#group_id ⇒ Object (readonly)
Returns the value of attribute group_id.
# File 'lib/phobos/listener.rb', line 9
#
# Kafka consumer group this listener joins.
def group_id
  @group_id
end
#handler_class ⇒ Object (readonly)
Returns the value of attribute handler_class.
# File 'lib/phobos/listener.rb', line 10
#
# Handler class (not an instance) whose objects process each message/batch.
def handler_class
  @handler_class
end
#id ⇒ Object (readonly)
Returns the value of attribute id.
# File 'lib/phobos/listener.rb', line 9
#
# Short random hex tag identifying this listener in logs/instrumentation.
def id
  @id
end
#topic ⇒ Object (readonly)
Returns the value of attribute topic.
# File 'lib/phobos/listener.rb', line 9
#
# Kafka topic this listener is subscribed to.
def topic
  @topic
end
Instance Method Details
#consume_each_batch ⇒ Object
# File 'lib/phobos/listener.rb', line 79
#
# Batch-delivery consume loop: fetches batches from the consumer and hands
# each one to a ProcessBatch action. Returns (without committing further)
# as soon as #stop has been signalled.
#
# NOTE(review): the scraped source had the `listener_metadata:` value and the
# `.metadata` receiver stripped by extraction; reconstructed from the parallel
# message loop — confirm against lib/phobos/listener.rb.
def consume_each_batch
  @consumer.each_batch(@consumer_opts) do |batch|
    batch_processor = Phobos::Actions::ProcessBatch.new(
      listener: self,
      batch: batch,
      listener_metadata: listener_metadata
    )

    batch_processor.execute
    Phobos.logger.info { Hash(message: 'Committed offset').merge(batch_processor.metadata) }
    return if should_stop?
  end
end
#consume_each_message ⇒ Object
# File 'lib/phobos/listener.rb', line 93
#
# Message-delivery consume loop: fetches messages one at a time and hands
# each to a ProcessMessage action. Returns (without committing further)
# as soon as #stop has been signalled.
#
# NOTE(review): extraction stripped every identifier containing "message"
# from this method in the scraped source; names reconstructed by symmetry
# with #consume_each_batch — confirm against lib/phobos/listener.rb.
def consume_each_message
  @consumer.each_message(@consumer_opts) do |message|
    message_processor = Phobos::Actions::ProcessMessage.new(
      listener: self,
      message: message,
      listener_metadata: listener_metadata
    )

    message_processor.execute
    Phobos.logger.info { Hash(message: 'Committed offset').merge(message_processor.metadata) }
    return if should_stop?
  end
end
#create_exponential_backoff ⇒ Object
# File 'lib/phobos/listener.rb', line 116
#
# Builds a fresh exponential-backoff object from this listener's backoff
# configuration (Phobos falls back to its global defaults when @backoff is nil).
def create_exponential_backoff
  Phobos.create_exponential_backoff(@backoff)
end
#should_stop? ⇒ Boolean
# File 'lib/phobos/listener.rb', line 120
#
# True once #stop has flagged the consume loops to wind down.
# Strict `== true` so an unset (nil) flag reads as false.
def should_stop?
  @signal_to_stop == true
end
#start ⇒ Object
# File 'lib/phobos/listener.rb', line 33
#
# Blocking entry point. Creates and subscribes the Kafka consumer, starts the
# handler, then runs the batch or message consume loop until #stop is
# signalled; the ensure block always tears the handler, consumer and clients
# down. Emits 'listener.*' instrumentation events throughout.
#
# NOTE(review): extraction stripped `listener_metadata` arguments and the
# `consume_each_message` branch from the scraped source; reconstructed
# (end-count in the scrape matches this structure) — confirm against
# lib/phobos/listener.rb.
def start
  @signal_to_stop = false
  instrument('listener.start', listener_metadata) do
    @consumer = create_kafka_consumer
    @consumer.subscribe(topic, @subscribe_opts)

    # This is done here because the producer client is bound to the current thread and
    # since "start" blocks a thread might be used to call it
    @handler_class.producer.configure_kafka_client(@kafka_client) if @producer_enabled

    instrument('listener.start_handler', listener_metadata) { @handler_class.start(@kafka_client) }
    Phobos.logger.info { Hash(message: 'Listener started').merge(listener_metadata) }
  end

  begin
    @delivery == 'batch' ? consume_each_batch : consume_each_message

  # Abort is an exception to prevent the consumer from committing the offset.
  # Since "listener" had a message being retried while "stop" was called
  # it's wise to not commit the batch offset to avoid data loss. This will
  # cause some messages to be reprocessed
  #
  rescue Kafka::ProcessingError, Phobos::AbortError
    instrument('listener.retry_aborted', listener_metadata) do
      Phobos.logger.info({ message: 'Retry loop aborted, listener is shutting down' }.merge(listener_metadata))
    end
  end
ensure
  instrument('listener.stop', listener_metadata) do
    instrument('listener.stop_handler', listener_metadata) { @handler_class.stop }

    @consumer&.stop

    # Undo the producer wiring done in the start block above
    if @producer_enabled
      @handler_class.producer.async_producer_shutdown
      @handler_class.producer.configure_kafka_client(nil)
    end

    @kafka_client.close
    if should_stop?
      Phobos.logger.info { Hash(message: 'Listener stopped').merge(listener_metadata) }
    end
  end
end