Module: Phobos
- Defined in:
- lib/phobos/deep_struct.rb,
lib/phobos.rb,
lib/phobos/cli.rb,
lib/phobos/errors.rb,
lib/phobos/handler.rb,
lib/phobos/version.rb,
lib/phobos/executor.rb,
lib/phobos/listener.rb,
lib/phobos/producer.rb,
lib/phobos/cli/start.rb,
lib/phobos/cli/runner.rb,
lib/phobos/test/helper.rb,
lib/phobos/echo_handler.rb,
lib/phobos/instrumentation.rb,
lib/phobos/actions/process_batch.rb,
lib/phobos/actions/process_message.rb
Overview
Please use this with at least the same consideration as you would when using OpenStruct. Right now we only use this to parse our internal configuration files. It is not meant to be used on incoming data.
Defined Under Namespace
Modules: Actions, CLI, Handler, Instrumentation, Producer, Test Classes: AbortError, DeepStruct, EchoHandler, Error, Executor, Listener
Constant Summary collapse
- VERSION =
'1.7.2'
Class Attribute Summary collapse
-
.config ⇒ Object
readonly
Returns the value of attribute config.
-
.logger ⇒ Object
readonly
Returns the value of attribute logger.
-
.silence_log ⇒ Object
Returns the value of attribute silence_log.
Class Method Summary collapse
- .add_listeners(listeners_configuration) ⇒ Object
- .configure(configuration) ⇒ Object
- .configure_logger ⇒ Object
- .create_exponential_backoff(backoff_config = nil) ⇒ Object
- .create_kafka_client ⇒ Object
Class Attribute Details
.config ⇒ Object (readonly)
Returns the value of attribute config.
Source: lib/phobos.rb, lines 30–32
# File 'lib/phobos.rb', line 30
# Reader for the module-level configuration object populated by .configure.
#
# @return [Object] the value of attribute config
def config
  @config
end
.logger ⇒ Object (readonly)
Returns the value of attribute logger.
Source: lib/phobos.rb, lines 30–32
# File 'lib/phobos.rb', line 30
# Reader for the module-level logger built by .configure_logger.
#
# @return [Object] the value of attribute logger
def logger
  @logger
end
.silence_log ⇒ Object
Returns the value of attribute silence_log.
Source: lib/phobos.rb, lines 31–33
# File 'lib/phobos.rb', line 31
# Reader for the silence_log flag; when truthy, .configure_logger drops
# root and ruby-kafka log levels to :fatal.
#
# @return [Object] the value of attribute silence_log
def silence_log
  @silence_log
end
Class Method Details
.add_listeners(listeners_configuration) ⇒ Object
Source: lib/phobos.rb, lines 42–45
# File 'lib/phobos.rb', line 42
# Parses an additional listeners configuration and appends its listeners
# to the already-configured set. Requires .configure to have run first.
#
# @param listeners_configuration [Object] value accepted by fetch_settings
#   (format defined elsewhere in lib/phobos.rb)
# @return [Object] the updated listeners collection
def add_listeners(listeners_configuration)
  parsed = DeepStruct.new(fetch_settings(listeners_configuration))
  @config.listeners += parsed.listeners
end
.configure(configuration) ⇒ Object
Source: lib/phobos.rb, lines 33–40
# File 'lib/phobos.rb', line 33
# Loads the given configuration into a DeepStruct, exposes producer and
# consumer settings as plain hashes, guarantees a listeners array, wires
# up logging, and logs a confirmation line.
#
# @param configuration [Object] value accepted by fetch_settings
#   (format defined elsewhere in lib/phobos.rb)
# @return [Object]
def configure(configuration)
  @config = DeepStruct.new(fetch_settings(configuration))

  # NOTE(review): define_method on @config.class adds these helpers to the
  # whole DeepStruct class, not just this instance — confirm intended.
  @config.class.send(:define_method, :producer_hash) { Phobos.config.producer&.to_hash }
  @config.class.send(:define_method, :consumer_hash) { Phobos.config.consumer&.to_hash }

  @config.listeners ||= []
  configure_logger
  logger.info { Hash(message: 'Phobos configured', env: ENV['RAILS_ENV'] || ENV['RACK_ENV'] || 'N/A') }
end
.configure_logger ⇒ Object
Source: lib/phobos.rb, lines 58–90
# File 'lib/phobos.rb', line 58
# Builds the Logging setup used by Phobos: a STDOUT appender (pattern or
# JSON layout depending on config.logger.stdout_json), an optional JSON
# file appender, and an optional dedicated 'RubyKafka' logger for the
# ruby-kafka library.
#
# @return [Object]
def configure_logger
  log_file = config.logger.file
  ruby_kafka = config.logger.ruby_kafka
  date_pattern = '%Y-%m-%dT%H:%M:%S:%L%zZ'
  json_layout = Logging.layouts.json(date_pattern: date_pattern)

  stdout_layout = if config.logger.stdout_json == true
                    json_layout
                  else
                    Logging.layouts.pattern(date_pattern: date_pattern)
                  end

  appenders = [Logging.appenders.stdout(layout: stdout_layout)]
  Logging.backtrace(true)
  # silence_log forces :fatal so log output can be suppressed (e.g. in tests).
  Logging.logger.root.level = silence_log ? :fatal : config.logger.level

  if log_file
    # Make sure the log directory exists before attaching the file appender.
    FileUtils.mkdir_p(File.dirname(log_file))
    appenders << Logging.appenders.file(log_file, layout: json_layout)
  end

  @ruby_kafka_logger = nil
  if ruby_kafka
    @ruby_kafka_logger = Logging.logger['RubyKafka']
    @ruby_kafka_logger.appenders = appenders
    @ruby_kafka_logger.level = silence_log ? :fatal : ruby_kafka.level
  end

  @logger = Logging.logger[self]
  @logger.appenders = appenders
end
.create_exponential_backoff(backoff_config = nil) ⇒ Object
Source: lib/phobos.rb, lines 51–56
# File 'lib/phobos.rb', line 51
# Creates an ExponentialBackoff from a config hash (defaults to the
# configured Phobos.config.backoff), converting :min_ms/:max_ms to
# seconds and randomizing the factor.
#
# @param backoff_config [Hash, nil] expects :min_ms and :max_ms keys;
#   falls back to Phobos.config.backoff.to_hash when nil
# @return [ExponentialBackoff]
def create_exponential_backoff(backoff_config = nil)
  backoff_config ||= Phobos.config.backoff.to_hash
  min_seconds = backoff_config[:min_ms] / 1000.0
  max_seconds = backoff_config[:max_ms] / 1000.0
  ExponentialBackoff.new(min_seconds, max_seconds).tap do |backoff|
    backoff.randomize_factor = rand
  end
end
.create_kafka_client ⇒ Object
Source: lib/phobos.rb, lines 47–49
# File 'lib/phobos.rb', line 47
# Builds a ruby-kafka client from the configured kafka settings, injecting
# the dedicated ruby-kafka logger set up by .configure_logger (may be nil).
#
# @return [Kafka] a new kafka client
def create_kafka_client
  client_options = config.kafka.to_hash.merge(logger: @ruby_kafka_logger)
  Kafka.new(client_options)
end