Class: LogStash::Outputs::AzureLogAnalytics

Inherits:
Base
  • Object
show all
Includes:
Stud::Buffer
Defined in:
lib/logstash/outputs/azure_loganalytics.rb

Instance Method Summary collapse

Instance Method Details

#flush(events, close = false) ⇒ Object



80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
# File 'lib/logstash/outputs/azure_loganalytics.rb', line 80

# Delivers a batch of buffered events to the Azure Log Analytics Data
# Collector API, grouping documents by their per-event resolved log type.
# Called by Stud::Buffer when the size/interval limits are reached.
#
# @param events [Array] events accumulated via buffer_receive
# @param close [Boolean] true when flushing during shutdown (unused here)
# @return [nil] outcomes are reported through @logger, not the return value
def flush(events, close=false)

  # Map of log_type => list of documents (hashes) to send to Log Analytics.
  documents_by_log_type = {}
  events.each do |event|
    document = {}

    # @log_type may contain sprintf references (e.g. %{type}); resolve per event.
    log_type_for_event = event.sprintf(@log_type)

    event_hash = event.to_hash
    if @key_names.empty?
      # No field whitelist configured: ship the whole event.
      document = event_hash
    else
      # Keep only configured keys that actually exist on this event,
      # converting values whose type was declared in @key_types.
      (@key_names & event_hash.keys).each do |key|
        document[key] = if @key_types.include?(key)
                          convert_value(@key_types[key], event_hash[key])
                        else
                          event_hash[key]
                        end
      end
    end
    # Skip if document doesn't contain any items
    next if document.empty?

    (documents_by_log_type[log_type_for_event] ||= []).push(document)
  end

  # Skip in case there are no candidate documents to deliver
  if documents_by_log_type.empty?
    @logger.debug("No documents in batch. Skipping")
    return
  end

  documents_by_log_type.each do |log_type, docs|
    begin
      @logger.debug("Posting log batch (log count: #{docs.length}) as log type #{log_type} to DataCollector API. First log: " + (docs[0].to_json).to_s)
      res = @client.post_data(log_type, docs, @time_generated_field)
      if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
        @logger.debug("Successfully posted logs as log type #{log_type} with result code #{res.code} to DataCollector API")
      else
        @logger.error("DataCollector API request failure: error code: #{res.code}, data=>" + (docs.to_json).to_s)
      end
    rescue StandardError => ex
      # Rescue StandardError rather than Exception: rescuing Exception would
      # swallow SignalException/SystemExit and block clean pipeline shutdown.
      @logger.error("Exception occurred in posting to DataCollector API: '#{ex}', data=>" + (docs.to_json).to_s)
    end
  end

end

#receive(event) ⇒ Object



73
74
75
76
# File 'lib/logstash/outputs/azure_loganalytics.rb', line 73

# Queues a single event into the Stud::Buffer accumulator; actual delivery
# happens later in #flush once the buffer's size or interval limit trips.
#
# @param event [Object] the Logstash event to buffer
def receive(event)
  buffer_receive(event)
end

#register ⇒ Object



51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# File 'lib/logstash/outputs/azure_loganalytics.rb', line 51

# Plugin initialization hook: validates the configured key-type mappings,
# builds the Data Collector API client, and arms the event buffer.
#
# @raise [ArgumentError] when a @key_types value is not string/boolean/double
def register
  # Loaded lazily so the gem is only required when this output is used.
  require 'azure/loganalytics/datacollectorapi/client'

  # Reject any declared key type outside the supported set (case-insensitive).
  @key_types.each do |k, v|
    unless %w[string double boolean].include?(v.downcase)
      raise ArgumentError, "Key type(#{v}) for key(#{k}) must be either string, boolean, or double"
    end
  end

  # Client used by #flush to post grouped documents.
  @client = Azure::Loganalytics::Datacollectorapi::Client.new(@customer_id, @shared_key, @endpoint)

  # Stud::Buffer setup: #flush fires on either item count or elapsed interval.
  buffer_initialize(
    :max_items => @flush_items,
    :max_interval => @flush_interval_time,
    :logger => @logger
  )
end