Class: LogStash::Outputs::Chronix
- Inherits:
-
Base
- Object
- Base
- LogStash::Outputs::Chronix
- Includes:
- Stud::Buffer
- Defined in:
- lib/logstash/outputs/chronix.rb
Instance Method Summary collapse
-
#almostEquals(delta, prevDelta) ⇒ Object
checks if two offsets are almost equal.
-
#connectToChronix ⇒ Object
open the connection to chronix.
- #createChronixPoint(delta, value, type = "") ⇒ Object
-
#createPointHash(events) ⇒ Object
this method iterates through all events and creates a hash with different lists of points sorted by metric.
- #createSolrDocument(metric, phash) ⇒ Object
- #flush(events, close = false) ⇒ Object
-
#noDrift(timestamp, lastStoredDate, timeSinceLastDelta) ⇒ Object
checks if there is a drift.
- #receive(event) ⇒ Object
- #register ⇒ Object
-
#zipAndEncode(points) ⇒ Object
this method zips and base64 encodes the list of points.
Instance Method Details
#almostEquals(delta, prevDelta) ⇒ Object
checks if two offsets are almost equal
158 159 160 161 162 |
# File 'lib/logstash/outputs/chronix.rb', line 158

# Compares two deltas; they count as "almost equal" when their absolute
# difference does not exceed the configured @threshold.
def almostEquals(delta, prevDelta)
  difference = (delta - prevDelta).abs
  difference <= @threshold
end
#connectToChronix ⇒ Object
open the connection to chronix
54 55 56 57 |
# File 'lib/logstash/outputs/chronix.rb', line 54

# Builds the Solr endpoint URL from the plugin's host/port/path settings
# and opens an RSolr connection to the Chronix backend.
def connectToChronix
  @url = "http://#{@host}:#{@port}#{@path}"
  @solr = RSolr.connect :url => @url
end
#createChronixPoint(delta, value, type = "") ⇒ Object
143 144 145 146 147 148 149 |
# File 'lib/logstash/outputs/chronix.rb', line 143

# Wraps a single measurement in the matching protobuf message:
# Chronix::StracePoint for "strace" series, Chronix::Point otherwise.
# :t carries the delta-encoded timestamp, :v the measured value.
def createChronixPoint(delta, value, type = "")
  point_class = type == "strace" ? Chronix::StracePoint : Chronix::Point
  point_class.new( :t => delta, :v => value )
end
#createPointHash(events) ⇒ Object
this method iterates through all events and creates a hash with different lists of points sorted by metric
81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 |
# File 'lib/logstash/outputs/chronix.rb', line 81

# Iterates through all events and builds a hash keyed by metric name.
# Each value tracks the series state needed for delta encoding:
#   "startTime"          - unix timestamp of the first point
#   "lastTimestamp"      - unix timestamp of the most recent point
#   "points"             - Chronix::Points protobuf list
#   "prevDelta"          - delta of the previous point
#   "timeSinceLastDelta" - points stored since the last explicit delta
#   "lastStoredDate"     - timestamp of the last explicitly stored delta
# When consecutive deltas are almost equal and no drift has accumulated,
# a zero delta is stored instead (Chronix's timestamp compaction).
def createPointHash(events)
  pointHash = Hash.new

  # add each event to our hash, sorted by metrics as key
  events.each do |event|
    eventData = event.to_hash()

    # format the timestamp to unix
    timestamp = DateTime.iso8601("#{eventData["@timestamp"]}").to_time.to_i
    metric = eventData["metric"]

    # if there is no list for the current metric -> create a new one
    if pointHash[metric] == nil
      pointHash[metric] = {"startTime" => timestamp, "lastTimestamp" => 0, "points" => Chronix::Points.new, "prevDelta" => 0, "timeSinceLastDelta" => 0, "lastStoredDate" => timestamp}
    end

    # first point of a series always gets delta 0
    if pointHash[metric]["lastTimestamp"] == 0
      delta = 0
    else
      delta = timestamp - pointHash[metric]["lastTimestamp"]
    end

    if (almostEquals(delta, pointHash[metric]["prevDelta"]) && noDrift(timestamp, pointHash[metric]["lastStoredDate"], pointHash[metric]["timeSinceLastDelta"]))
      # insert the current point with a compacted (zero) delta
      pointHash[metric]["points"].p << createChronixPoint(0, eventData["value"], eventData["chronix_type"])
      pointHash[metric]["timeSinceLastDelta"] += 1
    else
      # insert the current point with its real delta and reset drift tracking
      pointHash[metric]["points"].p << createChronixPoint(delta, eventData["value"], eventData["chronix_type"])
      pointHash[metric]["timeSinceLastDelta"] = 1
      pointHash[metric]["lastStoredDate"] = timestamp
    end

    # save current timestamp as lastTimestamp and the previous offset
    pointHash[metric]["lastTimestamp"] = timestamp
    pointHash[metric]["prevDelta"] = delta
  end #end do

  return pointHash
end
#createSolrDocument(metric, phash) ⇒ Object
151 152 153 154 155 |
# File 'lib/logstash/outputs/chronix.rb', line 151

# Assembles the Solr document for one metric: start/end timestamps plus
# the gzipped, base64-encoded point list produced by #zipAndEncode.
def createSolrDocument(metric, phash)
  endTime = phash["lastTimestamp"] # maybe use startTime + delta here?!
  # TODO add more meta-data
  document = {
    :metric => metric,
    :start  => phash["startTime"],
    :end    => endTime,
    :data   => zipAndEncode(phash["points"])
  }
  return document
end
#flush(events, close = false) ⇒ Object
65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
# File 'lib/logstash/outputs/chronix.rb', line 65

# Called by Stud::Buffer with a batch of events: converts them into
# per-metric Solr documents, adds them to Chronix, and issues a commit.
def flush(events, close=false)
  pointHash = createPointHash(events)

  # build one solr document per metric entry in the hash
  documents = pointHash.map do |metric, phash|
    createSolrDocument(metric, phash)
  end

  # send to chronix
  @solr.add documents
  @solr.update :data => '<commit/>'
end
#noDrift(timestamp, lastStoredDate, timeSinceLastDelta) ⇒ Object
checks if there is a drift
165 166 167 168 169 170 |
# File 'lib/logstash/outputs/chronix.rb', line 165

# Checks whether the timestamp compaction has drifted too far.
# Compares the reconstructed time (lastStoredDate plus the maximum offset
# that timeSinceLastDelta compacted points may have accumulated) against
# the actual timestamp; returns true while the drift stays within half
# the threshold, i.e. zero-delta compaction is still safe.
def noDrift(timestamp, lastStoredDate, timeSinceLastDelta)
  calcMaxOffset = @threshold * timeSinceLastDelta
  drift = lastStoredDate + calcMaxOffset - timestamp.to_i
  return (drift <= (@threshold / 2))
end
#receive(event) ⇒ Object
60 61 62 |
# File 'lib/logstash/outputs/chronix.rb', line 60 def receive(event) buffer_receive(event) end |
#register ⇒ Object
42 43 44 45 46 47 48 49 50 51 |
# File 'lib/logstash/outputs/chronix.rb', line 42

# Plugin start-up hook: configures the Stud::Buffer batching (flush when
# @flush_size events are queued or @idle_flush_time elapses) and then
# opens the connection to Chronix.
def register
  # initialize the buffer
  buffer_options = {
    :max_items    => @flush_size,
    :max_interval => @idle_flush_time,
    :logger       => @logger
  }
  buffer_initialize(buffer_options)
  connectToChronix
end
#zipAndEncode(points) ⇒ Object
this method zips and base64 encodes the list of points
128 129 130 131 132 133 134 135 136 137 138 139 140 141 |
# File 'lib/logstash/outputs/chronix.rb', line 128

# Serializes the protobuf point list, gzips it, and returns the result
# base64-encoded without newlines (strict encoding), ready for Solr.
#
# Fix: the original called StringIO.new("w"), which passes "w" as the
# *initial buffer string*, not an open mode; it only worked because the
# gzip header happened to overwrite that byte. Use an empty StringIO.
def zipAndEncode(points)
  # encode protobuf-list
  proto_bytes = points.encode
  string_io = StringIO.new

  # compress the encoded protobuf-list
  gz = Zlib::GzipWriter.new(string_io)
  gz.write(proto_bytes)
  gz.close

  data = string_io.string

  # encode base64 (without \n)
  return Base64.strict_encode64(data)
end