Class: LogStash::Inputs::LogstashInputAzureblob

Inherits:
  Base • Object
Defined in:
  lib/logstash/inputs/azureblob.rb

Overview

Logstash input plugin for Azure Blobs

This Logstash plugin gathers data from Microsoft Azure Blob Storage.

Constant Summary

MAX =

Constant of max integer, used to keep registry generations from overflowing.

2**([42].pack('i').size * 16 - 2) - 1

Instance Method Summary

Instance Method Details

#acquire_lease(blob_name, retry_times = 30, interval_sec = 1) ⇒ Object

Acquire a lease on a blob item, retrying on contention.

By default it retries up to 30 times with a 1-second interval.



# File 'lib/logstash/inputs/azureblob.rb', line 316

def acquire_lease(blob_name, retry_times = 30, interval_sec = 1)
  lease = nil
  retried = 0
  while lease.nil?
    begin
      lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 10, :duration => @registry_lease_duration })
    rescue StandardError => e
      if e.type == 'LeaseAlreadyPresent'
        raise if retried > retry_times
        retried += 1
        sleep interval_sec
      end
    end
  end # while
  return lease
end
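
Everywhere this lease is taken, the plugin pairs it with release_blob_lease and an ensure block. A minimal sketch of that acquire/work/release pattern, assuming the same instance variables as the methods below (the body between acquire and release is a placeholder):

lease = nil
begin
  lease = acquire_lease(@registry_locker)  # retries until granted or retry_times is exceeded
  # ... read, modify and save the registry while holding the lease ...
  @azure_blob.release_blob_lease(@container, @registry_locker, lease)
  lease = nil
ensure
  # Release again only if an exception skipped the normal release above.
  @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
end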

#cleanup_registry ⇒ Object

Clean up the registry.



# File 'lib/logstash/inputs/azureblob.rb', line 422

def cleanup_registry
  begin
    lease = nil
    lease = acquire_lease(@registry_locker)
    registry_hash = load_registry
    registry_hash.each { | key, registry_item|
      registry_item.reader = nil if registry_item.reader == @reader
    }
    save_registry(registry_hash)
    @azure_blob.release_blob_lease(@container, @registry_locker, lease)
    lease = nil
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
  ensure
    @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
    lease = nil
  end #rescue
end

#create_registry(blob_items) ⇒ Object

Create a registry file to coordinate between multiple Azure Blob input instances.



# File 'lib/logstash/inputs/azureblob.rb', line 442

def create_registry(blob_items)
  registry_hash = Hash.new

  blob_items.each do |blob_item|
    initial_offset = 0
    initial_offset = blob_item.properties[:content_length] if @registry_create_policy == 'resume'
    registry_item = LogStash::Inputs::RegistryItem.new(blob_item.name, blob_item.properties[:etag], nil, initial_offset, 0)
    registry_hash[blob_item.name] = registry_item
  end # each
  save_registry(registry_hash)
  return registry_hash
end
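
The registry_create_policy setting decides whether pre-existing blob content is replayed. A small illustration of the offset choice above, assuming a blob that already holds 1024 bytes (the length is made up):

content_length = 1024  # hypothetical existing size of the blob

# Default policy: start at offset 0, so existing content is read and emitted.
initial_offset = 0

# 'resume': start at the current end of the blob, so only data appended later is read.
initial_offset = content_length if @registry_create_policy == 'resume'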

#deserialize_registry_hash(json_string) ⇒ Object

Deserialize the registry hash from a JSON string.



# File 'lib/logstash/inputs/azureblob.rb', line 264

def deserialize_registry_hash(json_string)
  result = Hash.new
  temp_hash = JSON.parse(json_string)
  temp_hash.values.each { |kvp|
    result[kvp['file_path']] = LogStash::Inputs::RegistryItem.new(kvp['file_path'], kvp['etag'], kvp['reader'], kvp['offset'], kvp['gen'])
  }
  return result
end
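
The registry blob is JSON keyed by file path, and each value carries the fields read back here. A hedged round-trip example (the path, etag and offsets are illustrative, not real data):

json_string = <<-JSON
  {
    "logs/2017/01/app.log": {
      "file_path": "logs/2017/01/app.log",
      "etag": "0x8D4C7F3E1A2B3C4",
      "reader": null,
      "offset": 4096,
      "gen": 2
    }
  }
JSON

registry_hash = deserialize_registry_hash(json_string)
registry_hash['logs/2017/01/app.log'].offset # => 4096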

#get_jsons!(content, batch_size) ⇒ Object

Get JSON objects out of a string and return them. Note that content is also updated in place.



# File 'lib/logstash/inputs/azureblob.rb', line 218

def get_jsons!(content, batch_size)
  return nil if content.nil? || content.length == 0
  return nil if (content.index '{').nil?

  hit = 0
  count = 0
  index = 0
  first = content.index('{')
  move_opening = true
  move_closing = true
  while hit < batch_size
    inIndex = content.index('{', index) if move_opening
    outIndex = content.index('}', index) if move_closing

    break if count == 0 && (inIndex.nil? || outIndex.nil?)

    if inIndex.nil?
      index = outIndex
    elsif outIndex.nil?
      index = inIndex
    else
      index = [inIndex, outIndex].min
    end # if

    if content[index] == '{'
      count += 1
      move_opening = true
      move_closing = false
    elsif content[index] == '}'
      count -= 1
      move_closing = true
      move_opening = false
    end # if
    index += 1

    raise 'Malformed json encountered.' if count < 0
    hit += 1 if count == 0
  end
  # Slice left and then right to make sure the leading characters are trimmed.
  content.slice!(0, first) if first > 0
  return content.slice!(0, index - first)
end
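
Because the method only matches braces instead of fully parsing, it can split a buffer of concatenated JSON objects in place. A small usage sketch with a made-up buffer:

content = 'noise{"a":1}{"b":{"c":2}}{"d":3}'

batch = get_jsons!(content, 2)
# batch   => '{"a":1}{"b":{"c":2}}'  (first two balanced objects, leading noise trimmed)
# content => '{"d":3}'               (remainder stays in place for the next call)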

#list_all_blobs ⇒ Object

List all the blobs in the given container.



# File 'lib/logstash/inputs/azureblob.rb', line 274

def list_all_blobs
  blobs = Set.new []
  continuation_token = nil
  @blob_list_page_size = 100 if @blob_list_page_size <= 0
  loop do
    # Limit the number of entries returned per page to avoid an out-of-memory exception.
    entries = @azure_blob.list_blobs(@container, { :timeout => 10, :marker => continuation_token, :max_results => @blob_list_page_size })
    entries.each do |entry|
      blobs << entry
    end # each
    continuation_token = entries.continuation_token
    break if continuation_token.empty?
  end # loop
  return blobs
end

#load_registry ⇒ Object

Load the content of the registry into the registry hash and return it.



# File 'lib/logstash/inputs/azureblob.rb', line 456

def load_registry
  # Get content
  registry_blob, registry_blob_body = @azure_blob.get_blob(@container, @registry_path)
  registry_hash = deserialize_registry_hash(registry_blob_body)
  return registry_hash
end

#process(queue) ⇒ Object

Start processing the next item.



# File 'lib/logstash/inputs/azureblob.rb', line 140

def process(queue)
  begin
    blob, start_index, gen = register_for_read

    if(!blob.nil?)
      begin
        blob_name = blob.name
        # Work-around: after being returned by get_blob, the etag will contain quotes.
        new_etag = blob.properties[:etag]
        # ~ Work-around

        blob, header = @azure_blob.get_blob(@container, blob_name, {:end_range => (@file_head_bytes-1) }) if header.nil? unless @file_head_bytes.nil? or @file_head_bytes <= 0

        if start_index == 0
          # Skip the header since it is already read.
          start_index = @file_head_bytes
        else
          # Adjust the offset when this is not the first read, then read to the end of the file, including the tail.
          start_index = start_index - @file_tail_bytes
          start_index = 0 if start_index < 0
        end

        blob, content = @azure_blob.get_blob(@container, blob_name, {:start_range => start_index} )

        # content will be used to calculate the new offset. Create a new variable for processed content.
        content_length = content.length unless content.nil?

        is_json_codec = (defined?(LogStash::Codecs::JSON) == 'constant') && (@codec.is_a? LogStash::Codecs::JSON)
        if (is_json_codec)
          skip = content.index '{'
          # Drop everything before the first '{' so decoding starts at a JSON object.
          content.slice!(0, skip) unless (skip.nil? || skip == 0)
        end #if

        if is_json_codec && (@break_json_down_policy != 'do_not_break')
          @logger.debug("codec is json and policy is not do_not_break")
          
          @break_json_batch_count = 1 if @break_json_batch_count <= 0
          tail = content[-@file_tail_bytes..-1]
          while (!content.nil? && content.length > @file_tail_bytes)
            json_event = get_jsons!(content, @break_json_batch_count)
            break if json_event.nil?
            @logger.debug("Got json: ========================")
            @logger.debug("#{json_event[0..50]}...#{json_event[-50..-1]}")
            @logger.debug("End got json: ========================")
            @logger.debug("Processed content: #{content[0..50]}...")
            if @break_json_down_policy == 'with_head_tail'
              @logger.debug("Adding json head/tails.")
              json_event = "#{header}#{json_event}#{tail}"
            end #if
            @codec.decode(json_event) do |event|
              decorate(event)
              queue << event
            end # decode
          end
        else
          @logger.debug("Non-json codec or the policy is do not break")
          # Putting header and content and tail together before pushing into event queue
          content = "#{header}#{content}" unless header.nil? || header.length == 0
          @codec.decode(content) do |event|
            decorate(event)
            queue << event
          end # decode
        end #if
      ensure
        # Making sure the reader is removed from the registry even when there's exception.
        new_offset = start_index
        new_offset = 0 if start_index == @file_head_bytes && content.nil? # Reset the offset when nothing has been read.
        new_offset = new_offset + content_length unless content_length.nil?
        new_registry_item = LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen)
        update_registry(new_registry_item)
      end # begin
    end # if
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. \nError:#{e}:\nTrace:\n#{e.backtrace}", :exception => e)
  end # begin
end
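
When break_json_down_policy is 'with_head_tail', each batch pulled out by get_jsons! is re-wrapped with the blob's head and tail so every event is a complete JSON document on its own. A hedged illustration of that reassembly (the head, tail and content strings are made up):

header  = '{"records":['      # first @file_head_bytes of the blob
tail    = ']}'                # last @file_tail_bytes of the blob
content = '{"time":"t1"},{"time":"t2"}]}'

json_event = get_jsons!(content, 1)          # => '{"time":"t1"}'
json_event = "#{header}#{json_event}#{tail}" # => '{"records":[{"time":"t1"}]}'
# json_event can now be decoded by the JSON codec independently of the rest of the blob.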

#raise_gen(registry_hash, file_path) ⇒ Object

Raise the generation for a blob in the registry.



# File 'lib/logstash/inputs/azureblob.rb', line 291

def raise_gen(registry_hash, file_path)
  begin
    target_item = registry_hash[file_path]
    begin
      target_item.gen += 1
      # Protect gen from overflow.
      target_item.gen = target_item.gen / 2 if target_item.gen == MAX
    rescue StandardError => e
      @logger.error("Fail to get the next generation for target item #{target_item}.", :exception => e)
      target_item.gen = 0
    end

    min_gen_item = registry_hash.values.min_by { |x| x.gen }
    while min_gen_item.gen > 0
      registry_hash.values.each { |value| 
        value.gen -= 1
      }
      min_gen_item = registry_hash.values.min_by { |x| x.gen }
    end
  end
end
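
The loop at the end renormalizes generations so the smallest value is always 0, which keeps them bounded while preserving the relative order used by register_for_read. A tiny worked example with made-up values:

# Hypothetical generations after 'a.log' has just been raised to 3.
gens = { 'a.log' => 3, 'b.log' => 1, 'c.log' => 2 }

# Same renormalization as the while loop above, applied to plain integers.
while gens.values.min > 0
  gens.each_key { |k| gens[k] -= 1 }
end
gens # => { 'a.log' => 2, 'b.log' => 0, 'c.log' => 1 }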

#register ⇒ Object



# File 'lib/logstash/inputs/azureblob.rb', line 113

def register
  # This is the reader ID for this specific instance.
  @reader = SecureRandom.uuid
  @registry_locker = "#{@registry_path}.lock"
 
  # Setup a specific instance of an Azure::Storage::Client
  client = Azure::Storage::Client.create(:storage_account_name => @storage_account_name, :storage_access_key => @storage_access_key, :storage_blob_host => "https://#{@storage_account_name}.blob.#{@endpoint}")
  # Get an azure storage blob service object from a specific instance of an Azure::Storage::Client
  @azure_blob = client.blob_client
  # Add retry filter to the service object
  @azure_blob.with_filter(Azure::Storage::Core::Filter::ExponentialRetryPolicyFilter.new)
end
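
The blob host passed to Azure::Storage::Client.create is built from the account name and endpoint settings. A quick illustration with made-up values:

storage_account_name = 'mystorageaccount'  # hypothetical
endpoint = 'core.windows.net'              # hypothetical
"https://#{storage_account_name}.blob.#{endpoint}"
# => "https://mystorageaccount.blob.core.windows.net"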

#register_for_read ⇒ Object

Return the next blob for reading as well as the start index.



# File 'lib/logstash/inputs/azureblob.rb', line 336

def register_for_read
  begin
    all_blobs = list_all_blobs
    registry = all_blobs.find { |item| item.name.downcase == @registry_path  }
    registry_locker = all_blobs.find { |item| item.name.downcase == @registry_locker }

    candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) && ( item.name.downcase != @registry_locker ) }
    
    start_index = 0
    gen = 0
    lease = nil

    # Put the lease on the locker file rather than the registry file so the registry can still be updated; this is a workaround for Azure Storage Ruby SDK issue #16.
    # Workaround: https://github.com/Azure/azure-storage-ruby/issues/16
    registry_locker = @azure_blob.create_block_blob(@container, @registry_locker, @reader) if registry_locker.nil?
    lease = acquire_lease(@registry_locker)
    # ~ Workaround

    if(registry.nil?)
      registry_hash = create_registry(candidate_blobs)
    else
      registry_hash = load_registry
    end #if
      
    picked_blobs = Set.new []
    # Pick up the next candidate
    picked_blob = nil
    candidate_blobs.each { |candidate_blob|
      registry_item = registry_hash[candidate_blob.name]

      # Append items that don't exist in the hash table.
      if registry_item.nil?
        registry_item = LogStash::Inputs::RegistryItem.new(candidate_blob.name, candidate_blob.properties[:etag], nil, 0, 0)
        registry_hash[candidate_blob.name] = registry_item
      end # if
      
      if ((registry_item.offset < candidate_blob.properties[:content_length]) && (registry_item.reader.nil? || registry_item.reader == @reader))
        picked_blobs << candidate_blob
      end
    }

    picked_blob = picked_blobs.min_by { |b| registry_hash[b.name].gen }
    if !picked_blob.nil?
      registry_item = registry_hash[picked_blob.name]
      registry_item.reader = @reader
      registry_hash[picked_blob.name] = registry_item
      start_index = registry_item.offset
      raise_gen(registry_hash, picked_blob.name)
      gen = registry_item.gen
    end #if

    # Save the change to the registry.
    save_registry(registry_hash)
    
    @azure_blob.release_blob_lease(@container, @registry_locker, lease)
    lease = nil

    return picked_blob, start_index, gen
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
    return nil, nil, nil
  ensure
    @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
    lease = nil
  end # rescue
end
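
The selection rule is: a blob is a candidate when its registered offset is still short of its content length and no other reader has claimed it, and among candidates the one with the lowest generation wins. A hedged, self-contained sketch of that rule (Item stands in for LogStash::Inputs::RegistryItem; all values are made up):

Item = Struct.new(:file_path, :etag, :reader, :offset, :gen)
lengths  = { 'a.log' => 2048, 'b.log' => 4096, 'c.log' => 512 }
registry = {
  'a.log' => Item.new('a.log', 'e1', nil, 2048, 0),  # fully read, not picked
  'b.log' => Item.new('b.log', 'e2', nil, 1024, 2),
  'c.log' => Item.new('c.log', 'e3', nil, 0,    1)
}

picked = registry.values.
  select { |i| i.offset < lengths[i.file_path] && i.reader.nil? }.
  min_by(&:gen)
picked.file_path # => "c.log" (lowest generation among unfinished, unclaimed blobs)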

#run(queue) ⇒ Object

Run the main loop, processing blobs until the plugin is asked to stop.



# File 'lib/logstash/inputs/azureblob.rb', line 126

def run(queue)
  # we can abort the loop if stop? becomes true
  while !stop?
    process(queue)
    @logger.debug("Hitting interval of #{@interval}ms . . .")
    Stud.stoppable_sleep(@interval) { stop? }
  end # loop
end

#save_registry(registry_hash) ⇒ Object

Serialize the registry hash and save it.



# File 'lib/logstash/inputs/azureblob.rb', line 464

def save_registry(registry_hash)
  # Serialize hash to json
  registry_hash_json = JSON.generate(registry_hash)

  # Upload registry to blob
  @azure_blob.create_block_blob(@container, @registry_path, registry_hash_json)
end

#stop ⇒ Object

Stop the plugin and release this reader's claims in the registry.



# File 'lib/logstash/inputs/azureblob.rb', line 135

def stop
  cleanup_registry
end

#update_registry(registry_item) ⇒ Object

Update the registry



# File 'lib/logstash/inputs/azureblob.rb', line 404

def update_registry(registry_item)
  begin
    lease = nil
    lease = acquire_lease(@registry_locker)
    registry_hash = load_registry
    registry_hash[registry_item.file_path] = registry_item
    save_registry(registry_hash)
    @azure_blob.release_blob_lease(@container, @registry_locker, lease)
    lease = nil
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
  ensure
    @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
    lease = nil
  end #rescue
end