Class: LogStash::Inputs::LogstashInputAzureblob
- Inherits:
-
Base
- Object
- Base
- LogStash::Inputs::LogstashInputAzureblob
- Defined in:
- lib/logstash/inputs/azureblob.rb
Overview
Logstash input plugin for Azure Blobs
This logstash plugin gathers data from Microsoft Azure Blobs
Constant Summary collapse
- MAX =
Constant of max integer
2 ** ([42].pack('i').size * 16 -2 ) -1
Instance Method Summary collapse
-
#acquire_lease(blob_name, retry_times = 30, interval_sec = 1) ⇒ Object
Acquire a lease on a blob item with retries.
-
#cleanup_registry ⇒ Object
Clean up the registry.
-
#create_registry(blob_items) ⇒ Object
Create a registry file to coordinate between multiple azure blob inputs.
-
#deserialize_registry_hash(json_string) ⇒ Object
Deserialize registry hash from json string.
-
#get_jsons(content, batch_size) ⇒ Object
Get first json object out of a string, return the rest of the string.
-
#list_all_blobs ⇒ Object
List all the blobs in the given container.
-
#load_registry ⇒ Object
Load the content of the registry into the registry hash and return it.
-
#process(queue) ⇒ Object
Start processing the next item.
-
#raise_gen(registry_hash, file_path) ⇒ Object
Raise generation for blob in registry.
- #register ⇒ Object
-
#register_for_read ⇒ Object
Return the next blob for reading as well as the start index.
-
#run(queue) ⇒ Object
Main processing loop; polls and processes blobs until the plugin is stopped.
-
#save_registry(registry_hash) ⇒ Object
Serialize the registry hash and save it.
-
#stop ⇒ Object
Stops the plugin and releases this reader's claims in the shared registry.
-
#update_registry(registry_item) ⇒ Object
Update the registry.
Instance Method Details
#acquire_lease(blob_name, retry_times = 30, interval_sec = 1) ⇒ Object
Acquire a lease on a blob item with retries.
By default, it will retry 30 times with 1 second interval.
304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 |
# Acquire a lease on a blob item with retries.
#
# By default, it will retry 30 times with 1 second interval.
#
# @param blob_name [String] name of the blob to lease
# @param retry_times [Integer] maximum retries while the lease is held elsewhere
# @param interval_sec [Numeric] seconds to sleep between retries
# @return [String] the acquired lease id
# @raise [StandardError] any non-lease-conflict error is re-raised immediately;
#   a persistent lease conflict is re-raised once retries are exhausted
def acquire_lease(blob_name, retry_times = 30, interval_sec = 1)
  lease = nil
  retried = 0
  while lease.nil?
    begin
      lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 10 })
    rescue StandardError => e
      # Only a lease conflict is retryable. The original code called `e.type`
      # unconditionally, which raised NoMethodError for plain StandardErrors
      # (masking the real failure) and silently hot-looped forever on Azure
      # errors whose type was not 'LeaseAlreadyPresent'.
      raise unless e.respond_to?(:type) && e.type == 'LeaseAlreadyPresent'
      raise if retried > retry_times
      retried += 1
      sleep interval_sec
    end
  end # while
  lease
end
#cleanup_registry ⇒ Object
Clean up the registry.
410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 |
# Clean up the registry.
#
# Releases this reader's claim on every registry entry so other plugin
# instances can pick those blobs up, then persists the registry. The locker
# blob is leased for the duration to serialize concurrent registry writers.
def cleanup_registry
  lock = nil
  lock = acquire_lease(@registry_locker)
  entries = load_registry
  entries.each_value do |item|
    # Drop ownership only for entries this reader currently holds.
    item.reader = nil if item.reader == @reader
  end
  save_registry(entries)
  @azure_blob.release_blob_lease(@container, @registry_locker, lock)
  lock = nil
rescue StandardError => e
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
ensure
  # Best-effort release when anything failed after the lease was taken.
  @azure_blob.release_blob_lease(@container, @registry_locker, lock) unless lock.nil?
  lock = nil
end
#create_registry(blob_items) ⇒ Object
Create a registry file to coordinate between multiple azure blob inputs.
430 431 432 433 434 435 436 437 438 439 440 441 |
# Create a registry file to coordinate between multiple azure blob inputs.
#
# @param blob_items [Enumerable] blob entries to seed the registry with
# @return [Hash{String => LogStash::Inputs::RegistryItem}] the new registry,
#   keyed by blob name, after it has been persisted
def create_registry(blob_items)
  registry = blob_items.each_with_object({}) do |blob, hash|
    # 'resume' starts reading at the current end of each blob; any other
    # policy replays every blob from offset zero.
    offset = @registry_create_policy == 'resume' ? blob.properties[:content_length] : 0
    hash[blob.name] = LogStash::Inputs::RegistryItem.new(blob.name, blob.properties[:etag], nil, offset, 0)
  end
  save_registry(registry)
  registry
end
#deserialize_registry_hash(json_string) ⇒ Object
Deserialize registry hash from json string.
252 253 254 255 256 257 258 259 |
# Deserialize registry hash from json string.
#
# @param json_string [String] JSON previously produced by #save_registry
# @return [Hash{String => LogStash::Inputs::RegistryItem}] registry items
#   keyed by their file path
def deserialize_registry_hash(json_string)
  result = {}
  JSON.parse(json_string).each_value do |kvp|
    result[kvp['file_path']] = LogStash::Inputs::RegistryItem.new(
      kvp['file_path'], kvp['etag'], kvp['reader'], kvp['offset'], kvp['gen'])
  end
  result
end
#get_jsons(content, batch_size) ⇒ Object
Get first json object out of a string, return the rest of the string
211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 |
# Get first json object out of a string, return the rest of the string.
#
# Scans +content+ for up to +batch_size+ balanced top-level JSON objects by
# brace matching (braces inside string values are not recognized — same
# limitation as the original implementation).
#
# @param content [String, nil] raw text that may contain JSON objects
# @param batch_size [Integer] maximum number of complete objects to take
# @return [Array(String?, String?, Integer)] the batched json text (nil when
#   no complete object is available), the unconsumed remainder, and the
#   number of complete objects found
def get_jsons(content, batch_size)
  return nil, content, 0 if content.nil? || content.length == 0
  return nil, content, 0 if (content.index '{').nil?

  hit = 0
  count = 0
  index = 0
  first = content.index('{')
  end_of_last = nil # index just past the '}' closing the last complete object
  move_opening = true
  move_closing = true
  while hit < batch_size
    # The two searches are memoized: after consuming a '{' only the opening
    # search needs refreshing, and vice versa.
    in_index = content.index('{', index) if move_opening
    out_index = content.index('}', index) if move_closing

    # No further braces at all: stop. (The original kept looping and crashed
    # with a TypeError on unbalanced input such as '{{}'.)
    break if in_index.nil? && out_index.nil?

    index =
      if in_index.nil?
        out_index
      elsif out_index.nil?
        in_index
      else
        [in_index, out_index].min
      end

    if content[index] == '{'
      count += 1
      move_opening = true
      move_closing = false
    elsif content[index] == '}'
      count -= 1
      move_closing = true
      move_opening = false
    end
    index += 1
    if count == 0
      hit += 1
      end_of_last = index
    end
  end

  # Nothing complete was parsed: hand the whole content back untouched so the
  # caller can wait for more data. (The original returned the entire content
  # as the "json event" with hit == 0, making the caller spin forever.)
  return nil, content, 0 if hit == 0

  return content[first..end_of_last - 1], content[end_of_last..-1], hit
end
#list_all_blobs ⇒ Object
List all the blobs in the given container.
262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 |
# List all the blobs in the given container.
#
# Pages through the listing with a continuation token so each request
# returns a bounded number of entries (avoids out-of-memory on very large
# containers).
#
# @return [Set] every blob entry in @container
def list_all_blobs
  blobs = Set.new []
  # A nil marker means "start from the beginning". The original code used the
  # NIL constant, which was deprecated in Ruby 2.4 and removed in Ruby 3.0
  # (NameError on modern rubies).
  continuation_token = nil
  @blob_list_page_size = 100 if @blob_list_page_size <= 0
  loop do
    # Need to limit the number of returned entries to avoid out of memory exception.
    entries = @azure_blob.list_blobs(@container, {
                                       :timeout => 10,
                                       :marker => continuation_token,
                                       :max_results => @blob_list_page_size
                                     })
    entries.each do |entry|
      blobs << entry
    end # each
    continuation_token = entries.continuation_token
    break if continuation_token.empty?
  end # loop
  blobs
end
#load_registry ⇒ Object
Load the content of the registry into the registry hash and return it.
444 445 446 447 448 449 |
# Load the content of the registry into the registry hash and return it.
#
# @return [Hash] the deserialized registry, keyed by blob file path
def load_registry
  # get_blob returns [blob, body]; only the body (the serialized registry
  # JSON) is needed here.
  _registry_blob, registry_body = @azure_blob.get_blob(@container, @registry_path)
  deserialize_registry_hash(registry_body)
end
#process(queue) ⇒ Object
Start processing the next item.
133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 |
# Start processing the next item.
#
# Claims the next readable blob via the registry, reads from the recorded
# offset (re-reading the tail bytes on subsequent passes), decodes the
# content into events pushed onto +queue+, and finally writes the new offset
# back to the registry — even when decoding raised.
#
# @param queue [Queue] the logstash event pipeline queue
def process(queue)
  begin
    blob, start_index, gen = register_for_read

    if !blob.nil?
      begin
        blob_name = blob.name
        # Work-around: After returned by get_blob, the etag will contains quotes.
        new_etag = blob.properties[:etag]
        # ~ Work-around

        # Fetch the head bytes once. `header` is nil here (the parser defines
        # it via the assignment), so the `if header.nil?` modifier runs the
        # fetch exactly once when head bytes are configured.
        blob, header = @azure_blob.get_blob(@container, blob_name, { :end_range => (@file_head_bytes - 1) }) if header.nil? unless @file_head_bytes.nil? or @file_head_bytes <= 0

        if start_index == 0
          # Skip the header since it is already read.
          start_index = @file_head_bytes
        else
          # Adjust the offset when it is other than first time, then read till the end of the file, including the tail.
          start_index = start_index - @file_tail_bytes
          start_index = 0 if start_index < 0
        end

        blob, content = @azure_blob.get_blob(@container, blob_name, { :start_range => start_index })

        # content will be used to calculate the new offset. Create a new variable for processed content.
        processed_content = content

        is_json_codec = (defined?(LogStash::Codecs::JSON) == 'constant') && (@codec.is_a? LogStash::Codecs::JSON)
        if is_json_codec
          # Drop any leading noise before the first json object.
          skip = processed_content.index '{'
          processed_content = processed_content[skip..-1] unless skip.nil?
        end # if

        if is_json_codec && (@break_json_down_policy != 'do_not_break')
          @logger.debug("codec is json and policy is not do_not_break")

          # BUGFIX: the original read the bare local `break_json_batch_count`
          # here (missing '@'), raising NameError whenever this branch ran.
          @break_json_batch_count = 1 if @break_json_batch_count <= 0

          # NOTE(review): when @file_tail_bytes == 0 this slice is
          # [-0..-1] == the entire content — verify that is intended.
          tail = processed_content[-@file_tail_bytes..-1]
          while !processed_content.nil? && processed_content.length > @file_tail_bytes
            json_event, processed_content = get_jsons(processed_content, @break_json_batch_count)
            # BUGFIX: check for nil before logging. The original interpolated
            # json_event[0..50] into the debug message first and crashed with
            # NoMethodError when no complete json was found.
            break if json_event.nil?
            @logger.debug("Got json: ========================")
            @logger.debug("#{json_event[0..50]}...#{json_event[-50..-1]}")
            @logger.debug("End got json: ========================")
            @logger.debug("Processed content: #{processed_content[0..50]}...")
            if @break_json_down_policy == 'with_head_tail'
              @logger.debug("Adding json head/tails.")
              json_event = "#{header}#{json_event}#{tail}"
            end # if
            @codec.decode(json_event) do |event|
              decorate(event)
              queue << event
            end # decode
          end
        else
          @logger.debug("Non-json codec or the policy is do not break")
          # Putting header and content and tail together before pushing into event queue
          processed_content = "#{header}#{processed_content}" unless header.nil? || header.length == 0
          @codec.decode(processed_content) do |event|
            decorate(event)
            queue << event
          end # decode
        end # if
      ensure
        # Making sure the reader is removed from the registry even when there's exception.
        new_offset = start_index
        new_offset = 0 if start_index == @file_head_bytes && content.nil? # Reset the offset when nothing has been read.
        new_offset = new_offset + content.length unless content.nil?
        new_registry_item = LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen)
        update_registry(new_registry_item)
      end # begin
    end # if
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. \nError:#{e}:\nTrace:\n#{e.backtrace}", :exception => e)
  end # begin
end
#raise_gen(registry_hash, file_path) ⇒ Object
Raise generation for blob in registry
279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 |
# Raise generation for blob in registry.
#
# Bumps the generation counter of +file_path+'s registry item, then shifts
# every generation down until the minimum is zero so counters never grow
# without bound while their relative ordering is preserved.
#
# @param registry_hash [Hash] registry items keyed by file path
# @param file_path [String] key of the item whose generation is raised
def raise_gen(registry_hash, file_path)
  item = registry_hash[file_path]
  begin
    item.gen += 1
    # Protect gen from overflow.
    item.gen = item.gen / 2 if item.gen == MAX
  rescue StandardError => e
    @logger.error("Fail to get the next generation for target item #{item}.", :exception => e)
    item.gen = 0
  end

  # Normalize: decrement all generations until the smallest reaches zero.
  min_item = registry_hash.values.min_by { |x| x.gen }
  while min_item.gen > 0
    registry_hash.values.each { |value| value.gen -= 1 }
    min_item = registry_hash.values.min_by { |x| x.gen }
  end
end
#register ⇒ Object
106 107 108 109 110 111 112 113 114 115 116 117 |
# Logstash registration hook: assigns the unique reader id for this plugin
# instance, derives the registry locker blob name, and builds the Azure blob
# service client with an exponential retry filter.
def register
  # Unique identity of this reader instance; recorded in the registry so
  # that concurrent inputs do not process the same blob.
  @reader = SecureRandom.uuid
  @registry_locker = "#{@registry_path}.lock"

  # Setup a specific instance of an Azure::Storage::Client.
  storage_client = Azure::Storage::Client.create(
    :storage_account_name => @storage_account_name,
    :storage_access_key => @storage_access_key,
    :storage_blob_host => "https://#{@storage_account_name}.blob.#{@endpoint}")
  # Get an azure storage blob service object from that client.
  @azure_blob = storage_client.blob_client
  # Add retry filter to the service object.
  @azure_blob.with_filter(Azure::Storage::Core::Filter::ExponentialRetryPolicyFilter.new)
end
#register_for_read ⇒ Object
Return the next blob for reading as well as the start index.
324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 |
# Return the next blob for reading as well as the start index.
#
# Lists the container, (re)creates the registry when missing, then picks the
# unclaimed candidate blob with unread bytes and the lowest generation,
# claims it for this reader, and persists the registry — all while holding a
# lease on the locker blob. Returns [nil, nil, nil] on any error.
#
# @return [Array(Object?, Integer?, Integer?)] the picked blob entry (nil
#   when nothing is readable), the byte offset to start reading at, and the
#   item's generation
def register_for_read
  begin
    all_blobs = list_all_blobs
    # NOTE(review): names are downcased before comparing against the
    # configured paths — assumes @registry_path/@registry_locker are
    # lowercase; verify against the plugin configuration.
    registry = all_blobs.find { |item| item.name.downcase == @registry_path }
    registry_locker = all_blobs.find { |item| item.name.downcase == @registry_locker }
    candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) && ( item.name.downcase != @registry_locker ) }
    start_index = 0
    gen = 0
    lease = nil
    # Put lease on the locker file rather than the registry file to allow update of the registry as a workaround for Azure Storage Ruby SDK issue #16.
    # Workaround: https://github.com/Azure/azure-storage-ruby/issues/16
    registry_locker = @azure_blob.create_block_blob(@container, @registry_locker, @reader) if registry_locker.nil?
    lease = acquire_lease(@registry_locker)
    # ~ Workaround
    if(registry.nil?)
      registry_hash = create_registry(candidate_blobs)
    else
      registry_hash = load_registry
    end #if
    picked_blobs = Set.new []
    # Pick up the next candidate
    picked_blob = nil
    candidate_blobs.each { |candidate_blob|
      registry_item = registry_hash[candidate_blob.name]
      # Appending items that doesn't exist in the hash table
      if registry_item.nil?
        registry_item = LogStash::Inputs::RegistryItem.new(candidate_blob.name, candidate_blob.properties[:etag], nil, 0, 0)
        registry_hash[candidate_blob.name] = registry_item
      end # if
      # Readable = has unread bytes AND is unclaimed (or already ours).
      if ((registry_item.offset < candidate_blob.properties[:content_length]) && (registry_item.reader.nil? || registry_item.reader == @reader))
        picked_blobs << candidate_blob
      end
    }
    # Lowest generation wins, spreading reads across blobs over time.
    picked_blob = picked_blobs.min_by { |b| registry_hash[b.name].gen }
    if !picked_blob.nil?
      # Claim the blob for this reader and bump its generation.
      registry_item = registry_hash[picked_blob.name]
      registry_item.reader = @reader
      registry_hash[picked_blob.name] = registry_item
      start_index = registry_item.offset
      raise_gen(registry_hash, picked_blob.name)
      gen = registry_item.gen
    end #if
    # Save the change for the registry
    save_registry(registry_hash)
    @azure_blob.release_blob_lease(@container, @registry_locker, lease)
    lease = nil;
    return picked_blob, start_index, gen
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
    return nil, nil, nil
  ensure
    # Best-effort release when an error interrupted the happy path above.
    @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
    lease = nil
  end # rescue
end
#run(queue) ⇒ Object
Main processing loop; polls and processes blobs until the plugin is stopped.
119 120 121 122 123 124 125 126 |
# Main plugin loop: keep processing blobs until logstash asks us to stop.
#
# @param queue [Queue] the logstash event pipeline queue
def run(queue)
  # We can abort the loop if stop? becomes true.
  until stop?
    process(queue)
    @logger.debug("Hitting interval of #{@interval}ms . . .")
    # Sleep between polls, waking up early when a stop is requested.
    Stud.stoppable_sleep(@interval) { stop? }
  end
end
#save_registry(registry_hash) ⇒ Object
Serialize the registry hash and save it.
452 453 454 455 456 457 458 |
# Serialize the registry hash and save it.
#
# @param registry_hash [Hash] registry items keyed by blob file path
def save_registry(registry_hash)
  # Serialize hash to json.
  serialized = JSON.generate(registry_hash)
  # Upload registry to blob, overwriting the previous registry content.
  @azure_blob.create_block_blob(@container, @registry_path, serialized)
end
#stop ⇒ Object
Stops the plugin and releases this reader's claims in the shared registry.
128 129 130 |
# File 'lib/logstash/inputs/azureblob.rb', line 128 def stop cleanup_registry end |
#update_registry(registry_item) ⇒ Object
Update the registry
392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 |
# Update the registry.
#
# Replaces the registry entry keyed by +registry_item.file_path+ under a
# lease on the locker blob so concurrent readers do not clobber each other's
# updates.
#
# @param registry_item [LogStash::Inputs::RegistryItem] the entry to store
def update_registry(registry_item)
  lock = nil
  lock = acquire_lease(@registry_locker)
  registry = load_registry
  registry[registry_item.file_path] = registry_item
  save_registry(registry)
  @azure_blob.release_blob_lease(@container, @registry_locker, lock)
  lock = nil
rescue StandardError => e
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
ensure
  # Release the lease if an error interrupted the happy path above.
  @azure_blob.release_blob_lease(@container, @registry_locker, lock) unless lock.nil?
  lock = nil
end