Class: Fluent::MetricSenseOutput

Inherits:
BufferedOutput
  • Object
show all
Defined in:
lib/fluent/plugin/out_metricsense.rb

Defined Under Namespace

Modules: Backends, UpdateMode Classes: AddUpdater, AggregationKey, AverageUpdater, Backend, CountUpdater, MaxUpdater, SegmentedTotalUpdater

Constant Summary collapse

BACKENDS =
{}

Class Method Summary collapse

Instance Method Summary collapse

Class Method Details

.register_backend(name, klass) ⇒ Object



26
27
28
# File 'lib/fluent/plugin/out_metricsense.rb', line 26

# Registers a backend implementation under the given name so the
# `backend` configuration parameter can resolve it in #configure.
#
# name  - identifier used in the fluentd configuration.
# klass - backend class to instantiate for that identifier.
#
# Returns klass.
def self.register_backend(name, klass)
  BACKENDS.store(name, klass)
end

Instance Method Details

#configure(conf) ⇒ Object



76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
# File 'lib/fluent/plugin/out_metricsense.rb', line 76

# Configures the plugin from the fluentd <match> section.
#
# Normalizes @remove_tag_prefix into an anchored Regexp, parses the
# comma-separated segment-key options into arrays, resolves the backend
# class from BACKENDS, and rounds @aggregate_interval down to a multiple
# of 60 seconds so values can be normalized to per-minute rates.
#
# conf - the Fluent::Config::Element for this section.
#
# Raises ConfigError if @backend does not name a registered backend.
def configure(conf)
  super

  if @remove_tag_prefix
    # Anchor at the start of the tag and optionally consume the
    # trailing dot separator.
    @remove_tag_prefix = Regexp.new('^' + Regexp.escape(@remove_tag_prefix) + "\\.?")
  end

  # 'no_segment_keys' acts as a flag: present with an empty value or the
  # literal string 'true' enables it.
  @no_segment_keys = (conf.has_key?('no_segment_keys') && (conf['no_segment_keys'].empty? || conf['no_segment_keys'] == 'true'))

  if @only_segment_keys
    @only_segment_keys = @only_segment_keys.strip.split(/\s*,\s*/)
  end

  if @exclude_segment_keys
    @exclude_segment_keys = @exclude_segment_keys.strip.split(/\s*,\s*/)
  end

  be = BACKENDS[@backend]
  unless be
    raise ConfigError, "unknown backend: #{@backend.inspect}"
  end

  # aggregate_interval must be a multiple of 60 to normalize values into
  # X per minute. Clamp to at least 60: the original integer division
  # truncated intervals shorter than a minute to 0, which made
  # @normalize_factor zero and caused a ZeroDivisionError when #write
  # aligned timestamps with `time / @aggregate_interval`.
  @aggregate_interval = [@aggregate_interval.to_i / 60, 1].max * 60
  @normalize_factor = @aggregate_interval / 60

  # Replace the backend name with a configured backend instance.
  @backend = be.new
  @backend.log = log
  @backend.configure(conf)
end

#format_stream(tag, es) ⇒ Object



118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
# File 'lib/fluent/plugin/out_metricsense.rb', line 118

# Serializes an event stream into the buffer chunk format.
#
# For each record: rewrites the tag, extracts the numeric value stored
# under @value_key (skipping records whose value is 0, NaN or infinite),
# resolves the per-record update mode from @update_mode_key, collects
# the segment key/value pairs, and appends the msgpack-encoded
# [tag, time, value, segments, update_mode] tuple to the output.
#
# tag - the event tag (String).
# es  - the event stream (yields time, record pairs).
#
# Returns the packed String for the buffer chunk.
def format_stream(tag, es)
  # modify tag
  tag = tag.sub(@remove_tag_prefix, '') if @remove_tag_prefix
  # NOTE: use the ivar form for consistency with the other config
  # parameters (the bare method call relied on config_param's accessor).
  tag = "#{@add_tag_prefix}.#{tag}" if @add_tag_prefix

  out = ''
  es.each do |time,record|
    # dup record to modify
    record = record.dup

    # get value
    value = record.delete(@value_key)

    # ignore record if value is invalid or 0
    begin
      fv = value.to_f
    rescue
      next
    end
    next if fv == 0.0 || fv.nan? || fv.infinite?

    # use integer if value.to_f == value.to_f.to_i
    iv = fv.to_i
    if iv.to_f == fv
      value = iv
    else
      value = fv
    end

    # get update_mode key; default is add
    update_mode = record.delete(@update_mode_key)
    case update_mode
    when "max"
      update_mode = UpdateMode::MAX
    when "average"
      update_mode = UpdateMode::AVERAGE
    when "count"
      update_mode = UpdateMode::COUNT
    else
      # default is add
      update_mode = UpdateMode::ADD
    end

    # get segments: the remaining record fields, optionally restricted
    # to @only_segment_keys and/or pruned by @exclude_segment_keys
    if @no_segment_keys
      segments = {}
    else
      if @only_segment_keys
        segments = {}
        @only_segment_keys.each {|key|
          if v = record[key]
            segments[key] = v
          end
        }
      else
        segments = record
      end
      if @exclude_segment_keys
        @exclude_segment_keys.each {|key|
          segments.delete(key)
        }
      end
    end

    [tag, time, value, segments, update_mode].to_msgpack(out)
  end

  out
end

#shutdown ⇒ Object



113
114
115
116
# File 'lib/fluent/plugin/out_metricsense.rb', line 113

# Shuts the plugin down: runs the superclass teardown first, then stops
# the backend. The ordering is deliberate — presumably super flushes any
# remaining buffered data while the backend can still accept writes;
# confirm against BufferedOutput#shutdown before reordering.
# NOTE(review): assumes @backend was assigned in #configure.
def shutdown
  super
  @backend.shutdown
end

#start ⇒ Object



108
109
110
111
# File 'lib/fluent/plugin/out_metricsense.rb', line 108

# Starts the plugin: the backend is started before super — presumably so
# it is ready to receive writes by the time the buffered-output flush
# machinery spins up; confirm against BufferedOutput#start before
# reordering.
def start
  @backend.start
  super
end

#write(chunk) ⇒ Object



265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
# File 'lib/fluent/plugin/out_metricsense.rb', line 265

# Aggregates a flushed buffer chunk and hands the result to the backend.
#
# Each chunk entry is a [tag, time, value, segments, update_mode] tuple.
# Values are grouped by (tag, aligned time, segment key, segment value)
# and combined with the updater matching their update mode; segmented
# values are additionally rolled up into per-tag totals.
#
# chunk - the buffer chunk (responds to msgpack_each).
#
# Returns whatever the backend's write returns.
def write(chunk)
  simple_counters = {}
  segmented_counters = {}

  # select sum(value) from chunk group by tag, time/60, seg_val, seg_key
  chunk.msgpack_each {|tag, time, value, segments, update_mode|
    # align the timestamp to the aggregation window
    time = time / @aggregate_interval * @aggregate_interval

    updater =
      case update_mode
      when UpdateMode::MAX     then MaxUpdater
      when UpdateMode::AVERAGE then AverageUpdater # averaged by server-side aggregation
      when UpdateMode::COUNT   then CountUpdater
      else                          AddUpdater     # ADD and anything unrecognized
      end

    if segments.empty?
      # simple (unsegmented) value
      key = AggregationKey.new(tag, time, nil, nil)
      (simple_counters[key] ||= updater.new).add(value)
    else
      # one counter per segment key/value pair
      segments.each_pair {|seg_key, seg_val|
        key = AggregationKey.new(tag, time, seg_val, seg_key)
        (segmented_counters[key] ||= updater.new).add(value)
      }
    end
  }

  # roll segmented values up into per-tag totals
  segmented_totals = {}
  segmented_counters.each_pair {|key, counter|
    total_key = AggregationKey.new(key.tag, key.time, nil, nil)
    (segmented_totals[total_key] ||= SegmentedTotalUpdater.new(counter.mode)).add(counter.value)
  }

  # merge in priority order: simple_counters win over segmented_totals
  counters = segmented_totals.merge(segmented_counters).merge(simple_counters)

  data = counters.map {|key, counter|
    [key.tag, key.time, counter.normalized_value(@normalize_factor), key.seg_key, key.seg_val, counter.mode]
  }

  @backend.write(data)
end