Module: Phobos

Defined in:
lib/phobos/deep_struct.rb,
lib/phobos.rb,
lib/phobos/cli.rb,
lib/phobos/errors.rb,
lib/phobos/handler.rb,
lib/phobos/version.rb,
lib/phobos/executor.rb,
lib/phobos/listener.rb,
lib/phobos/producer.rb,
lib/phobos/cli/start.rb,
lib/phobos/cli/runner.rb,
lib/phobos/echo_handler.rb,
lib/phobos/instrumentation.rb

Overview

Please use this with at least as much caution as you would apply to OpenStruct. Right now we only use it to parse our internal configuration files; it is not meant to be used on incoming (untrusted) data.

Defined Under Namespace

Modules: CLI, Handler, Instrumentation, Producer Classes: AbortError, DeepStruct, EchoHandler, Error, Executor, Listener

Constant Summary collapse

VERSION =
'1.3.0'

Class Attribute Summary collapse

Class Method Summary collapse

Class Attribute Details

.config ⇒ Object (readonly)

Returns the value of attribute config.



27
28
29
# File 'lib/phobos.rb', line 27

# Reader for the module-level configuration object (populated by +configure+).
#
# @return [Object, nil] the parsed configuration, or +nil+ before +configure+ runs
def config
  instance_variable_get(:@config)
end

.logger ⇒ Object (readonly)

Returns the value of attribute logger.



27
28
29
# File 'lib/phobos.rb', line 27

# Reader for the module-level logger (populated by +configure_logger+).
#
# @return [Object, nil] the logger, or +nil+ before configuration
def logger
  instance_variable_get(:@logger)
end

.silence_log ⇒ Object

Returns the value of attribute silence_log.



28
29
30
# File 'lib/phobos.rb', line 28

# Reader for the silence flag; when truthy, +configure_logger+ forces
# log levels to +:fatal+.
#
# @return [Object, nil] the flag value, or +nil+ when unset
def silence_log
  instance_variable_get(:@silence_log)
end

Class Method Details

.configure(yml_path) ⇒ Object



30
31
32
33
34
35
36
37
# File 'lib/phobos.rb', line 30

# Loads the YAML configuration file (after ERB expansion), wraps it in a
# DeepStruct, defines convenience hash accessors on it, and boots the logger.
#
# @param yml_path [String] path to the configuration YAML file
# @return [void]
def configure(yml_path)
  # NOTE(review): RAILS_ENV is overwritten with RACK_ENV even if RAILS_ENV was
  # already set — confirm this precedence is intentional.
  ENV['RAILS_ENV'] = ENV['RACK_ENV'] ||= 'development'
  # ERB runs first so the YAML may embed Ruby (e.g. ENV lookups). Plain
  # YAML.load is acceptable only because this is a trusted internal file,
  # never user-supplied data.
  @config = DeepStruct.new(YAML.load(ERB.new(File.read(File.expand_path(yml_path))).result))
  # Expose producer/consumer sections as plain hashes; &. makes each return
  # nil when the section is absent from the file.
  @config.class.send(:define_method, :producer_hash) { Phobos.config.producer&.to_hash }
  @config.class.send(:define_method, :consumer_hash) { Phobos.config.consumer&.to_hash }
  # Logger must be configured before the logger.info call below.
  configure_logger
  logger.info { Hash(message: 'Phobos configured', env: ENV['RACK_ENV']) }
end

.configure_loggerObject



49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
# File 'lib/phobos.rb', line 49

# Sets up the Logging-gem loggers: always appends to STDOUT (pattern layout)
# and, when +config.logger.file+ is set, also to a JSON-formatted log file.
# Optionally creates a dedicated 'RubyKafka' logger consumed later by
# +create_kafka_client+ via @ruby_kafka_logger.
#
# @return [void]
def configure_logger
  log_file = config.logger.file
  ruby_kafka = config.logger.ruby_kafka
  date_pattern = '%Y-%m-%dT%H:%M:%S:%L%zZ'
  # File output is machine-readable JSON; STDOUT stays human-readable.
  file_layout = Logging.layouts.json(date_pattern: date_pattern)
  stdout_layout = Logging.layouts.pattern(date_pattern: date_pattern)
  appenders = [Logging.appenders.stdout(layout: stdout_layout)]

  Logging.backtrace(true)
  # silence_log forces everything to :fatal (used e.g. during tests).
  Logging.logger.root.level = silence_log ? :fatal : config.logger.level

  if log_file
    # Create the log directory if missing so the file appender can open it.
    FileUtils.mkdir_p(File.dirname(log_file))
    appenders << Logging.appenders.file(log_file, layout: file_layout)
  end

  # Reset so a previous configure call's kafka logger does not leak through.
  @ruby_kafka_logger = nil

  if ruby_kafka
    # Separate logger lets ruby-kafka verbosity be tuned independently.
    @ruby_kafka_logger = Logging.logger['RubyKafka']
    @ruby_kafka_logger.appenders = appenders
    @ruby_kafka_logger.level = silence_log ? :fatal : ruby_kafka.level
  end

  @logger = Logging.logger[self]
  @logger.appenders = appenders
end

.create_exponential_backoffObject



43
44
45
46
47
# File 'lib/phobos.rb', line 43

# Builds an ExponentialBackoff from backoff settings.
#
# Generalized to accept an optional settings object so callers (and tests)
# can supply their own backoff configuration; with no argument it behaves
# exactly as before, reading +Phobos.config.backoff+.
#
# @param backoff_config [#min_ms, #max_ms, nil] object exposing +min_ms+ and
#   +max_ms+ (milliseconds); defaults to +Phobos.config.backoff+
# @return [ExponentialBackoff] configured with a random factor so concurrent
#   listeners do not retry in lockstep
def create_exponential_backoff(backoff_config = nil)
  backoff_config ||= Phobos.config.backoff
  # Settings are in milliseconds; ExponentialBackoff expects seconds.
  min = backoff_config.min_ms / 1000.0
  max = backoff_config.max_ms / 1000.0
  ExponentialBackoff.new(min, max).tap { |backoff| backoff.randomize_factor = rand }
end

.create_kafka_clientObject



39
40
41
# File 'lib/phobos.rb', line 39

# Builds a ruby-kafka client from the configured kafka settings, wiring in
# the dedicated 'RubyKafka' logger prepared by +configure_logger+ (may be nil
# when ruby_kafka logging is not configured).
#
# @return [Kafka] a new kafka client instance
def create_kafka_client
  client_options = config.kafka.to_hash.merge(logger: @ruby_kafka_logger)
  Kafka.new(client_options)
end