Class: LogStash::Outputs::MicrosoftSentinelOutputInternal::LogStashAutoResizeBuffer

Inherits: LogStashEventsBatcher < Object
Includes: CustomSizeBasedBuffer
Defined in: lib/logstash/sentinel/logStashAutoResizeBuffer.rb

Instance Method Summary

Methods included from CustomSizeBasedBuffer

#buffer_flush, #buffer_full?, #buffer_initialize, #buffer_receive

Methods inherited from LogStashEventsBatcher

#batch_event_document

Constructor Details

#initialize(logstashLoganalyticsConfiguration) ⇒ LogStashAutoResizeBuffer

Returns a new instance of LogStashAutoResizeBuffer.



# File 'lib/logstash/sentinel/logStashAutoResizeBuffer.rb', line 15

def initialize(logstashLoganalyticsConfiguration)
    buffer_initialize(
      :max_items => logstashLoganalyticsConfiguration.max_items,
      :max_interval => logstashLoganalyticsConfiguration.plugin_flush_interval,
      :logger => logstashLoganalyticsConfiguration.logger,
      # TODO: there is a small discrepancy between the total size of the
      # documents and the final message body, so flush ~2000 bytes early
      :flush_each => logstashLoganalyticsConfiguration.MAX_SIZE_BYTES - 2000
    )
    super
end
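
A minimal usage sketch (not part of the plugin's docs): the configuration object is expected to expose max_items, plugin_flush_interval, logger and MAX_SIZE_BYTES; the stand-in below is hypothetical and only illustrates how those accessors map onto buffer_initialize.

# Hypothetical configuration stand-in, for illustration only.
require 'logger'

FakeConfig = Struct.new(:max_items, :plugin_flush_interval, :logger) do
  # Upper bound (in bytes) for a single request body.
  def MAX_SIZE_BYTES
    1024 * 1024
  end
end

config = FakeConfig.new(2000, 5, Logger.new($stdout))
buffer = LogStash::Outputs::MicrosoftSentinelOutputInternal::LogStashAutoResizeBuffer.new(config)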

Instance Method Details

#batch_event(event_document) ⇒ Object

Adds an event document to the buffer.



# File 'lib/logstash/sentinel/logStashAutoResizeBuffer.rb', line 30

def batch_event(event_document)        
    buffer_receive(event_document)
end
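
For illustration, events are handed to the buffer one document at a time; buffer_receive (mixed in from CustomSizeBasedBuffer) triggers a flush once max_items, flush_each or max_interval is reached. A hedged sketch reusing the hypothetical buffer from above:

# Queue individual event documents; flushing happens automatically
# once a count, size or interval threshold is crossed.
event_document = { "Message" => "hello from logstash", "Severity" => "info" }
buffer.batch_event(event_document)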

#close ⇒ Object

Flushes any remaining buffer content before the plugin shuts down.



# File 'lib/logstash/sentinel/logStashAutoResizeBuffer.rb', line 61

def close
    buffer_flush(:final => true)
end
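
On pipeline shutdown the plugin is expected to call close, which forces a final flush regardless of the configured thresholds. A brief sketch, continuing the hypothetical buffer:

# Drain whatever is still buffered at shutdown.
buffer.batch_event({ "Message" => "last event" })
buffer.close  # delegates to buffer_flush(:final => true)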

#flush(documents, close = false) ⇒ Object

Flushes all buffer content to Azure Log Analytics. Called from Stud::Buffer#buffer_flush when there are events to flush.



# File 'lib/logstash/sentinel/logStashAutoResizeBuffer.rb', line 36

def flush(documents, close = false)
    # Skip in case there are no candidate documents to deliver
    if documents.length < 1
        @logger.warn("No documents in batch for log type #{@logstashLoganalyticsConfiguration.dcr_stream_name}. Skipping")
        return
    end

    # We send JSON in the REST request
    documents_json = documents.to_json
    documents_byte_size = documents_json.bytesize
    if documents_byte_size <= @logstashLoganalyticsConfiguration.MAX_SIZE_BYTES
        # When amount_resizing is enabled, adjust the batch size limit
        # according to the size and count of the messages received
        if @logstashLoganalyticsConfiguration.amount_resizing == true
            change_message_limit_size(documents.length, documents_byte_size)
        end
        send_message_to_loganalytics(documents_json, documents.length)
    else
        # The serialized batch exceeds the size limit; split it into
        # smaller sublists and send each one separately
        warn_documents_size_over_limitation(documents, documents_byte_size)
        split_documents_lists = split_document_list_to_sublists_by_max_size(documents, documents_byte_size)
        @logger.trace("Number of documents: #{documents.length}, Number of split lists to send separately: #{split_documents_lists.length}")
        send_split_documents_list_to_loganalytics(split_documents_lists)
    end
end
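
The oversize branch is the interesting part: when the serialized batch exceeds MAX_SIZE_BYTES, the document list is split into sublists that each fit under the limit. The standalone sketch below shows the general idea under the assumption of roughly uniform document sizes; it is not the plugin's actual split_document_list_to_sublists_by_max_size implementation.

require 'json'

# Split a document list into chunks whose serialized size should stay
# under max_bytes, assuming documents are roughly uniform in size.
def split_by_max_size(documents, documents_byte_size, max_bytes)
  number_of_lists = (documents_byte_size.to_f / max_bytes).ceil
  chunk_size = (documents.length.to_f / number_of_lists).ceil
  documents.each_slice(chunk_size).to_a
end

docs = Array.new(10) { |i| { "id" => i, "payload" => "x" * 300 } }
sublists = split_by_max_size(docs, docs.to_json.bytesize, 1500)
sublists.each { |list| puts "chunk of #{list.length} docs, #{list.to_json.bytesize} bytes" }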