Class: Fluent::Plugin::Sumologic

Inherits:
Output
  • Object
show all
Defined in:
lib/fluent/plugin/out_sumologic.rb

Constant Summary collapse

DEFAULT_BUFFER_TYPE =
"memory"
LOGS_DATA_TYPE =
"logs"
METRICS_DATA_TYPE =
"metrics"
DEFAULT_DATA_TYPE =
LOGS_DATA_TYPE
GRAPHITE_METRIC_FORMAT_TYPE =
"graphite"
CARBON2_METRIC_FORMAT_TYPE =
"carbon2"
DEFAULT_METRIC_FORMAT_TYPE =
CARBON2_METRIC_FORMAT_TYPE

Instance Method Summary collapse

Constructor Details

#initialize ⇒ Sumologic

Returns a new instance of Sumologic.



87
88
89
# File 'lib/fluent/plugin/out_sumologic.rb', line 87

# Returns a new instance of Sumologic. All setup is deferred to the
# Fluent::Plugin::Output base class; plugin state is built in #configure.
def initialize
  super
end

Instance Method Details

#configure(conf) ⇒ Object

This method is called before starting.



96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
# File 'lib/fluent/plugin/out_sumologic.rb', line 96

# Called by Fluentd before the output starts. Validates the endpoint URL,
# data_type, log_format and metrics_data_type settings, then constructs
# the SumologicConnection used by #write.
#
# @param conf [Fluent::Config::Element] plugin configuration
# @raise [Fluent::ConfigError] when any setting is invalid
def configure(conf)
  compat_parameters_convert(conf, :buffer)

  endpoint = conf['endpoint']
  raise Fluent::ConfigError, "Invalid SumoLogic endpoint url: #{endpoint}" if (endpoint =~ URI::regexp).nil?

  data_type = conf['data_type']
  unless data_type.nil? || data_type =~ /\A(?:logs|metrics)\z/
    raise Fluent::ConfigError, "Invalid data_type #{data_type} must be logs or metrics"
  end

  # log_format only matters when sending logs (the default data type).
  if data_type.nil? || data_type == LOGS_DATA_TYPE
    log_format = conf['log_format']
    unless log_format.nil? || log_format =~ /\A(?:json|text|json_merge)\z/
      raise Fluent::ConfigError, "Invalid log_format #{log_format} must be text, json or json_merge"
    end
  end

  # metrics_data_type only matters when sending metrics.
  if data_type == METRICS_DATA_TYPE
    metric_format = conf['metrics_data_type']
    unless metric_format.nil? || metric_format =~ /\A(?:graphite|carbon2)\z/
      raise Fluent::ConfigError, "Invalid metrics_data_type #{metric_format} must be graphite or carbon2"
    end
  end

  @sumo_conn = SumologicConnection.new(endpoint, conf['verify_ssl'], conf['open_timeout'].to_i, conf['proxy_uri'])
  super
end

#dump_log(log) ⇒ Object

Strip sumo_metadata and dump to json



155
156
157
158
159
160
161
162
163
164
165
# File 'lib/fluent/plugin/out_sumologic.rb', line 155

# Strip sumo_metadata and dump to json.
# If the payload under @log_key is itself a JSON string it is parsed and
# embedded as a nested object instead of a double-encoded string; any
# parse failure falls back to dumping the record as-is.
def dump_log(log)
  log.delete('_sumo_metadata')
  begin
    # Re-parse the message body so valid JSON nests rather than escapes.
    log[@log_key] = Yajl::Parser.new.parse(log[@log_key])
    Yajl.dump(log)
  rescue
    # Not JSON (or no entry under @log_key) — dump the record unchanged.
    Yajl.dump(log)
  end
end

#expand_param(param, tag, time, record) ⇒ Object



199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
# File 'lib/fluent/plugin/out_sumologic.rb', line 199

# Expands '${...}' placeholders in +param+ using the event's tag, time and
# record. Placeholders are evaluated with Kernel#eval against the local
# binding, so snippets may reference record[...], tag_parts[...] and time.
# If a snippet needs a value that is nil, the raw snippet text is returned.
#
# NOTE(review): eval on configuration-supplied templates executes arbitrary
# Ruby — config files must be trusted input.
def expand_param(param, tag, time, record)
  # Fast path: nothing to expand.
  return param unless param =~ /\${.+}/

  # tag_parts must keep this exact name — eval'd snippets reference it.
  tag_parts = tag.split(@delimiter) if param =~ /tag_parts\[.+\]/ && !tag.nil?

  expanded = param.clone
  while expanded.match(/\${.+}/)
    snippet = expanded.match(/\${(.+?)}/) { $1 }

    # A snippet that needs a missing value short-circuits to its raw text.
    missing = (snippet =~ /record\[.+\]/ && record.nil?) ||
              (snippet =~ /tag_parts\[.+\]/ && tag_parts.nil?) ||
              (snippet =~ /time/ && time.nil?)
    return snippet if missing

    expanded.sub!(/\${.+?}/, eval(snippet))
  end
  expanded
end

#format(tag, time, record) ⇒ Object



167
168
169
170
171
172
173
174
# File 'lib/fluent/plugin/out_sumologic.rb', line 167

# Serializes one event into the buffer chunk as msgpack [time, record].
# Times exposing #nsec (e.g. Fluent::EventTime) are flattened to epoch
# milliseconds; plain integer/second times pass through unchanged.
def format(tag, time, record)
  stamp = if defined?(time.nsec)
            time * 1000 + (time.nsec / 1000000)
          else
            time
          end
  [stamp, record].to_msgpack
end

#formatted_to_msgpack_binary ⇒ Object



176
177
178
# File 'lib/fluent/plugin/out_sumologic.rb', line 176

# Tells Fluentd that #format already emits msgpack-encoded binary, so
# chunks can be read back with msgpack_each in #write.
def formatted_to_msgpack_binary
  true
end

#merge_json(record) ⇒ Object

Used to merge log record into top level json



139
140
141
142
143
144
145
146
147
148
149
150
151
152
# File 'lib/fluent/plugin/out_sumologic.rb', line 139

# Used to merge log record into top level json.
# When the value under @log_key looks like a JSON object it is parsed and
# its keys lifted to the top level (existing record keys win on conflict),
# and the original @log_key entry is dropped. Non-object or malformed
# payloads leave the record untouched.
def merge_json(record)
  if record.has_key?(@log_key)
    raw = record[@log_key].strip
    # Cheap shape check before paying for a full JSON parse.
    if raw.start_with?('{') && raw.end_with?('}')
      begin
        merged = JSON.parse(raw).merge(record)
        merged.delete(@log_key)
        record = merged
      rescue JSON::ParserError
        # Malformed JSON — fall through with the record unchanged.
      end
    end
  end
  record
end

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


91
92
93
# File 'lib/fluent/plugin/out_sumologic.rb', line 91

# Signals to Fluentd that this output may run under multiple worker
# processes.
#
# @return [Boolean] always true
def multi_workers_ready?
  true
end

#shutdown ⇒ Object

This method is called when shutting down.



134
135
136
# File 'lib/fluent/plugin/out_sumologic.rb', line 134

# This method is called when shutting down. No plugin-specific teardown
# is needed; defers to the base class.
def shutdown
  super
end

#start ⇒ Object

This method is called when starting.



129
130
131
# File 'lib/fluent/plugin/out_sumologic.rb', line 129

# This method is called when starting. No plugin-specific startup is
# needed; defers to the base class.
def start
  super
end

#sumo_key(sumo_metadata, record, tag) ⇒ Object



180
181
182
183
184
185
186
187
188
189
190
191
# File 'lib/fluent/plugin/out_sumologic.rb', line 180

# Builds the "name:category:host" routing key for one record. Per-record
# overrides from _sumo_metadata take precedence over the plugin-level
# @source_name / @source_category / @source_host settings, and each value
# is run through expand_param to resolve '${...}' templates.
#
# (The first parameter name was lost in extraction; restored from the
# documented signature sumo_key(sumo_metadata, record, tag).)
def sumo_key(sumo_metadata, record, tag)
  source_name = sumo_metadata['source'] || @source_name
  source_name = expand_param(source_name, tag, nil, record)

  source_category = sumo_metadata['category'] || @source_category
  source_category = expand_param(source_category, tag, nil, record)

  source_host = sumo_metadata['host'] || @source_host
  source_host = expand_param(source_host, tag, nil, record)

  "#{source_name}:#{source_category}:#{source_host}"
end

#sumo_timestamp(time) ⇒ Object

Convert timestamp to 13 digit epoch if necessary



194
195
196
# File 'lib/fluent/plugin/out_sumologic.rb', line 194

# Convert timestamp to 13 digit epoch if necessary.
# Values already 13 digits long (epoch milliseconds) pass through;
# anything else is treated as epoch seconds and scaled by 1000.
def sumo_timestamp(time)
  return time if time.to_s.length == 13
  time * 1000
end

#write(chunk) ⇒ Object

This method is called every flush interval. Write the buffer chunk



228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
# File 'lib/fluent/plugin/out_sumologic.rb', line 228

# This method is called every flush interval. Write the buffer chunk:
# events are grouped by their sumo routing key, rendered according to
# @data_type / log_format, then published one HTTP request per group.
#
# Reconstructed identifiers lost in extraction: chunk.metadata.tag and
# the sumo_metadata local.
def write(chunk)
  tag = chunk.metadata.tag
  messages_list = {}

  # Sort messages into per-source-key batches.
  chunk.msgpack_each do |time, record|
    # Skip non-hash events so one bad record cannot kill the plugin; see
    # https://github.com/uken/fluent-plugin-elasticsearch/commit/8597b5d1faf34dd1f1523bfec45852d380b26601#diff-ae62a005780cc730c558e3e4f47cc544R94
    next unless record.is_a? Hash

    # FIX: the fallback must use the string key 'source' — sumo_key reads
    # sumo_metadata['source'], so a :source symbol default was silently
    # ignored and record[@source_name_key] never took effect.
    sumo_metadata = record.fetch('_sumo_metadata', { 'source' => record[@source_name_key] })
    key           = sumo_key(sumo_metadata, record, tag)
    log_format    = sumo_metadata['log_format'] || @log_format

    # Strip any unwanted trailing newline from the message body.
    record[@log_key].chomp! if record[@log_key] && record[@log_key].respond_to?(:chomp!)

    case @data_type
    when 'logs'
      case log_format
      when 'text'
        log = record[@log_key]
        log.strip! unless log.nil?
      when 'json_merge'
        record = { :timestamp => sumo_timestamp(time) }.merge(record) if @add_timestamp
        log = dump_log(merge_json(record))
      else
        record = { :timestamp => sumo_timestamp(time) }.merge(record) if @add_timestamp
        log = dump_log(record)
      end
    when 'metrics'
      log = record[@log_key]
      log.strip! unless log.nil?
    end

    # Records with no renderable payload are dropped.
    (messages_list[key] ||= []).push(log) unless log.nil?
  end

  # Push logs to sumo, one request per routing key.
  messages_list.each do |key, messages|
    source_name, source_category, source_host = key.split(':')
    # NOTE: these look like keyword arguments but are local-variable
    # assignments — publish receives them positionally.
    @sumo_conn.publish(
        messages.join("\n"),
        source_host         =source_host,
        source_category     =source_category,
        source_name         =source_name,
        data_type           =@data_type,
        metric_data_format  =@metric_data_format
    )
  end

end