Class: LogStash::Inputs::LogstashInputAzureblob

Inherits:
Base
  • Object
Defined in:
lib/logstash/inputs/azureblob.rb

Overview

Logstash input plugin for Azure Blob Storage.

This Logstash plugin gathers data from Microsoft Azure Blob Storage.

Constant Summary

MAX =

The maximum integer value.

2**([42].pack('i').size * 16 - 2) - 1
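
For reference, on a platform where [42].pack('i').size is 4 bytes the MAX expression works out to 2**62 - 1; a quick check (the 4-byte size is an assumption about the platform):

[42].pack('i').size      # => 4 on most platforms (assumption)
2**(4 * 16 - 2) - 1      # => 4611686018427387903, i.e. 2**62 - 1
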
UPDATE_REGISTRY_COUNT =

Update the registry offset each time this many entries have been processed.

100

Instance Method Summary

Instance Method Details

#acquire_lease(blob_name, retry_times = 60, interval_sec = 1) ⇒ Object

Acquire a lease on a blob item with retries.

By default, it retries 60 times with a 1-second interval.



# File 'lib/logstash/inputs/azureblob.rb', line 321

def acquire_lease(blob_name, retry_times = 60, interval_sec = 1)
  lease = nil
  retried = 0
  while lease.nil? do
    begin
      lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 60, :duration => @registry_lease_duration })
    rescue StandardError => e
      if (e.class.name.include? 'LeaseAlreadyPresent')
        if (retried > retry_times)
          raise
        end
        retried += 1
        sleep interval_sec
      else
        # Anything other than 'LeaseAlreadyPresent' happened: break the lease. This is a work-around for the behavior that when a
        # timeout exception is hit, somehow an infinite lease is put on the lock file.
        @azure_blob.break_blob_lease(@container, blob_name, { :break_period => 30 })
      end
    end
  end #while
  return lease
end
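
As a usage note, the rest of this class always pairs acquire_lease with an explicit release; a minimal sketch of that pattern (assuming the plugin's instance variables @azure_blob, @container and @registry_path are already initialized):

lease = nil
begin
  lease = acquire_lease(@registry_path)   # blocks until the lease is granted or retries are exhausted
  registry_hash = load_registry           # read shared state while holding the lease
  save_registry(registry_hash, lease)     # write it back under the same lease id
ensure
  # Always release so other plugin instances are not blocked for the full lease duration.
  @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
end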

#cleanup_registry ⇒ Object

Clean up the registry.



# File 'lib/logstash/inputs/azureblob.rb', line 428

def cleanup_registry
  begin
    @logger.debug("azureblob : start cleanup_registry")
    lease = nil
    lease = acquire_lease(@registry_path)
    registry_hash = load_registry
    registry_hash.each { |key, registry_item|
      registry_item.reader = nil if registry_item.reader == @reader
    }
    save_registry(registry_hash, lease)
    @azure_blob.release_blob_lease(@container, @registry_path, lease)
    lease = nil
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
  ensure
    @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
    lease = nil
  end #rescue
  @logger.debug("azureblob : End of cleanup_registry")
end

#create_registry(blob_items) ⇒ Object

Create a registry file to coordinate between multiple Azure Blob inputs.



# File 'lib/logstash/inputs/azureblob.rb', line 450

def create_registry(blob_items)
  @azure_blob.create_block_blob(@container, @registry_path, '')
  lease = acquire_lease(@registry_path)
  registry_hash = Hash.new
  blob_items.each do |blob_item|
    initial_offset = 0
    initial_offset = blob_item.properties[:content_length] if @registry_create_policy == 'resume'
    registry_item = LogStash::Inputs::RegistryItem.new(blob_item.name, blob_item.properties[:etag], nil, initial_offset, 0)
    registry_hash[blob_item.name] = registry_item
  end # each
  save_registry(registry_hash, lease)
  @azure_blob.release_blob_lease(@container, @registry_path, lease)
  registry_hash
end
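
For illustration, this is the kind of entry the registry holds for a single blob; the values below are hypothetical, and the constructor arity matches the call above:

# 'resume' starts at the blob's current content_length; 'start_over' starts at 0.
item = LogStash::Inputs::RegistryItem.new(
  'logs/2017/01/01/app.log',   # file_path: the blob name
  '0x8D4C6B2E5F4D9AB',         # etag: hypothetical value from blob.properties[:etag]
  nil,                         # reader: no plugin instance owns this blob yet
  0,                           # offset: bytes already processed
  0                            # gen: generation counter, see #raise_gen
)
registry_hash = { item.file_path => item }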

#deserialize_registry_hash(json_string) ⇒ Object

Deserialize registry hash from json string.



# File 'lib/logstash/inputs/azureblob.rb', line 258

def deserialize_registry_hash(json_string)
  result = Hash.new
  temp_hash = JSON.parse(json_string)
  temp_hash.values.each { |kvp|
    result[kvp['file_path']] = LogStash::Inputs::RegistryItem.new(kvp['file_path'], kvp['etag'], kvp['reader'], kvp['offset'], kvp['gen'])
  }
  return result
end
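
A hedged round-trip sketch: #save_registry serializes the hash with JSON.generate and this method rebuilds RegistryItem objects from the 'file_path', 'etag', 'reader', 'offset' and 'gen' fields, so a registry written by one plugin instance can be reloaded by another (assuming RegistryItem serializes those accessors as JSON fields):

json_string = JSON.generate(registry_hash)            # what save_registry uploads
restored    = deserialize_registry_hash(json_string)  # what load_registry hands back
restored.each do |blob_name, item|
  puts "#{blob_name}: offset=#{item.offset} gen=#{item.gen} reader=#{item.reader.inspect}"
end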

#enqueue_content(queue, content, header, tail) ⇒ Object

Decode the content (wrapped with the header and tail, if any) and enqueue the resulting events.



# File 'lib/logstash/inputs/azureblob.rb', line 227

def enqueue_content(queue, content, header, tail)
  if (header.nil? || header.length == 0) && (tail.nil? || tail.length == 0)
    #skip some unnecessary copying
    full_content = content
  else
    full_content = ''
    full_content << header unless header.nil? || header.length == 0
    full_content << content
    full_content << tail unless tail.nil? || tail.length == 0
  end

  @codec.decode(full_content) do |event|
    decorate(event)
    queue << event
  end
end
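
A small illustration of how the header and tail wrap each chunk before decoding (the values are made up; in #process they come from ranged get_blob calls, and queue is the pipeline queue passed to #process):

header  = "timestamp,level,message\n"              # first file_head_bytes of the blob
content = "2017-01-01T00:00:00Z,INFO,started\n"    # current chunk
tail    = ""                                       # last file_tail_bytes, empty here

enqueue_content(queue, content, header, tail)
# The codec decodes "timestamp,level,message\n2017-01-01T00:00:00Z,INFO,started\n"
# and every resulting event is decorated and pushed onto the queue.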

#list_all_blobs ⇒ Object

List all the blobs in the given container.



# File 'lib/logstash/inputs/azureblob.rb', line 268

def list_all_blobs
  blobs = Set.new []
  continuation_token = nil
  @blob_list_page_size = 100 if @blob_list_page_size <= 0
  loop do
    # Limit the number of returned entries to avoid an out-of-memory exception.
    entries = @azure_blob.list_blobs(@container, { :timeout => 60, :marker => continuation_token, :max_results => @blob_list_page_size })
    if @path_filters.empty?
      entries.each do |entry|
        blobs << entry
      end # each
    else
      # Add the registry_path to the list of matched blobs (guarded so repeated calls do not keep appending it)
      @path_filters << @registry_path unless @path_filters.include?(@registry_path)
      entries.each do |entry|
        # FNM_PATHNAME is required so that "**/test" can match "test" at the root folder
        # FNM_EXTGLOB allows you to use "test{a,b,c}" to match either "testa", "testb" or "testc" (closer to shell behavior)
        matched = @path_filters.any? {|path| File.fnmatch?(path, entry.name, File::FNM_PATHNAME | File::FNM_EXTGLOB)}
        blobs << entry if matched
      end # each
    end
    continuation_token = entries.continuation_token
    break if continuation_token.empty?
  end # loop
  return blobs
end
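
For reference, the matching semantics of the two fnmatch flags can be checked in isolation; the patterns below are only examples, not plugin defaults:

flags = File::FNM_PATHNAME | File::FNM_EXTGLOB

File.fnmatch?('**/*.log', 'logs/2017/app.log', flags)   # => true  ('**/' spans directories)
File.fnmatch?('*.log',    'logs/2017/app.log', flags)   # => false ('*' does not cross '/')
File.fnmatch?('test{a,b,c}', 'testb', flags)            # => true  (extglob alternation)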

#load_registry ⇒ Object

Load the content of the registry into the registry hash and return it.



# File 'lib/logstash/inputs/azureblob.rb', line 466

def load_registry
  # Get content
  _registry_blob, registry_blob_body = @azure_blob.get_blob(@container, @registry_path)
  registry_hash = deserialize_registry_hash(registry_blob_body)
  registry_hash
end

#on_entry_processed(start_index, content_length, blob_name, new_etag, gen) ⇒ Object



# File 'lib/logstash/inputs/azureblob.rb', line 244

def on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
  @processed_entries += 1
  request_registry_update(start_index, content_length, blob_name, new_etag, gen) if @processed_entries % UPDATE_REGISTRY_COUNT == 0
end

#process(queue) ⇒ Object

Start processing the next item.



# File 'lib/logstash/inputs/azureblob.rb', line 162

def process(queue)
  begin
    @processed_entries = 0
    blob, start_index, gen = register_for_read

    unless blob.nil?
      begin
        blob_name = blob.name
        @logger.debug("Processing blob #{blob.name}")
        blob_size = blob.properties[:content_length]
        # Work-around: after being returned by get_blob, the etag contains quotes.
        new_etag = blob.properties[:etag]
        # ~ Work-around

        blob, header = @azure_blob.get_blob(@container, blob_name, { :end_range => (@file_head_bytes - 1) }) unless @file_head_bytes.nil? || @file_head_bytes <= 0

        blob, tail = @azure_blob.get_blob(@container, blob_name, { :start_range => blob_size - @file_tail_bytes }) unless @file_tail_bytes.nil? || @file_tail_bytes <= 0

        if start_index == 0
          # Skip the header since it is already read.
          start_index = @file_head_bytes
        end

        @logger.debug("start index: #{start_index} blob size: #{blob_size}")

        content_length = 0
        blob_reader = BlobReader.new(@logger, @azure_blob, @container, blob_name, @file_chunk_size_bytes, start_index, blob_size - 1 - @file_tail_bytes)

        is_json_codec = (defined?(LogStash::Codecs::JSON) == 'constant') && (@codec.is_a? LogStash::Codecs::JSON)
        if is_json_codec
          parser = JsonParser.new(@logger, blob_reader)

          parser.parse(->(json_content) {
            content_length += json_content.length

            enqueue_content(queue, json_content, header, tail)

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          }, ->(malformed_json) {
            @logger.debug("Skipping #{malformed_json.length} malformed bytes")
            content_length = content_length + malformed_json.length

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          })
        else
          begin
            content, are_more_bytes_available = blob_reader.read

            content_length += content.length
            enqueue_content(queue, content, header, tail)

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          end until !are_more_bytes_available || content.nil?

        end #if
      ensure
        # Making sure the reader is removed from the registry even when there's exception.
        request_registry_update(start_index, content_length, blob_name, new_etag, gen)
      end # begin
    end # unless
  rescue => e
    @logger.error("Oh My, An error occurred. Error:#{e}: Trace: #{e.backtrace}", :exception => e)
  end # begin
end
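
The head and tail reads above boil down to two ranged get_blob calls; a simplified sketch of the range arithmetic with hypothetical sizes:

# A 1,000-byte blob with a 20-byte header and a 10-byte footer (made-up numbers).
file_head_bytes = 20
file_tail_bytes = 10
blob_size       = 1000

# Header: bytes 0..19 (end_range is inclusive, hence the -1).
_blob, header = @azure_blob.get_blob(@container, blob_name, { :end_range => file_head_bytes - 1 })
# Tail: bytes 990..999.
_blob, tail   = @azure_blob.get_blob(@container, blob_name, { :start_range => blob_size - file_tail_bytes })
# The BlobReader then walks start_index .. blob_size - 1 - file_tail_bytes, i.e. 20..989 on a fresh blob.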

#raise_gen(registry_hash, file_path) ⇒ Object

Raise the generation for a blob in the registry.



# File 'lib/logstash/inputs/azureblob.rb', line 296

def raise_gen(registry_hash, file_path)
  begin
    target_item = registry_hash[file_path]
    begin
      target_item.gen += 1
      # Protect gen from overflow.
      target_item.gen = target_item.gen / 2 if target_item.gen == MAX
    rescue StandardError => e
      @logger.error("Fail to get the next generation for target item #{target_item}.", :exception => e)
      target_item.gen = 0
    end

    min_gen_item = registry_hash.values.min_by { |x| x.gen }
    while min_gen_item.gen > 0
      registry_hash.values.each { |value| 
        value.gen -= 1
      }
      min_gen_item = registry_hash.values.min_by { |x| x.gen }
    end
  end
end
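
A worked toy example of the normalization step, using plain structs instead of RegistryItem: after bumping the picked blob's gen, every entry is shifted down until the smallest gen is 0, which keeps the counters from drifting upward while preserving their ordering.

Item = Struct.new(:gen)
registry = { 'a' => Item.new(2), 'b' => Item.new(1), 'c' => Item.new(1) }

registry['b'].gen += 1                                   # 'b' was just picked: a=2, b=2, c=1
registry.values.each { |i| i.gen -= 1 } while registry.values.map(&:gen).min > 0
registry.transform_values(&:gen)                         # => {"a"=>1, "b"=>1, "c"=>0}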

#register ⇒ Object



# File 'lib/logstash/inputs/azureblob.rb', line 133

def register
  user_agent = 'logstash-input-azureblob'
  user_agent << '/' << Gem.latest_spec_for('logstash-input-azureblob').version.to_s

  # This is the reader id for this specific plugin instance.
  @reader = SecureRandom.uuid

  # Setup a specific instance of an Azure::Storage::Client
  client = Azure::Storage::Client.create(:storage_account_name => @storage_account_name, :storage_access_key => @storage_access_key, :storage_blob_host => "https://#{@storage_account_name}.blob.#{@endpoint}", :user_agent_prefix => user_agent)
  # Get an azure storage blob service object from a specific instance of an Azure::Storage::Client
  @azure_blob = client.blob_client
  # Add retry filter to the service object
  @azure_blob.with_filter(Azure::Storage::Core::Filter::ExponentialRetryPolicyFilter.new)
end

#register_for_read ⇒ Object

Return the next blob for reading, along with its start index and generation.



# File 'lib/logstash/inputs/azureblob.rb', line 345

def register_for_read
  begin
    all_blobs = list_all_blobs
    registry = all_blobs.find { |item| item.name.downcase == @registry_path }
    
    candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) }

    start_index = 0
    gen = 0
    lease = nil

    if registry.nil?
      registry_hash = create_registry(candidate_blobs)
      lease = acquire_lease(@registry_path)
    else
      lease = acquire_lease(@registry_path)
      registry_hash = load_registry
    end #if

    picked_blobs = Set.new []
    # Pick up the next candidate
    picked_blob = nil
    candidate_blobs.each { |candidate_blob|
      @logger.debug("candidate_blob: #{candidate_blob.name} content length: #{candidate_blob.properties[:content_length]}")
      registry_item = registry_hash[candidate_blob.name]

      # Append items that don't exist in the hash table
      if registry_item.nil?
        registry_item = LogStash::Inputs::RegistryItem.new(candidate_blob.name, candidate_blob.properties[:etag], nil, 0, 0)
        registry_hash[candidate_blob.name] = registry_item
      end # if
      @logger.debug("registry_item offset: #{registry_item.offset}")
      if ((registry_item.offset < candidate_blob.properties[:content_length]) && (registry_item.reader.nil? || registry_item.reader == @reader))
        @logger.debug("candidate_blob picked: #{candidate_blob.name} content length: #{candidate_blob.properties[:content_length]}")
        picked_blobs << candidate_blob
      end
    }

    picked_blob = picked_blobs.min_by { |b| registry_hash[b.name].gen }
    unless picked_blob.nil?
      registry_item = registry_hash[picked_blob.name]
      registry_item.reader = @reader
      registry_hash[picked_blob.name] = registry_item
      start_index = registry_item.offset
      raise_gen(registry_hash, picked_blob.name)
      gen = registry_item.gen
    end # unless

    # Save the change for the registry
    save_registry(registry_hash, lease)

    @azure_blob.release_blob_lease(@container, @registry_path, lease)
    lease = nil

    return picked_blob, start_index, gen
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}: #{e.backtrace}", :exception => e)
    return nil, nil, nil
  ensure
    @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
    lease = nil
  end # rescue
end
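
A toy illustration of the selection rule above (hypothetical blobs): only blobs with unread bytes and no competing reader are candidates, and among those the lowest generation wins, which spreads work across blobs and across plugin instances.

# blob    content_length  offset  reader        gen
# a.log   500             500     nil           0    -> skipped: fully read
# b.log   800             100     'other-uuid'  1    -> skipped: owned by another reader
# c.log   900             200     nil           2
# d.log   300             0       nil           1    -> picked: lowest gen among candidates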

#request_registry_update(start_index, content_length, blob_name, new_etag, gen) ⇒ Object



# File 'lib/logstash/inputs/azureblob.rb', line 249

def request_registry_update(start_index, content_length, blob_name, new_etag, gen)
  new_offset = start_index
  new_offset += content_length unless content_length.nil?
  @logger.debug("New registry offset: #{new_offset}")
  new_registry_item = LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen)
  update_registry(new_registry_item)
end

#run(queue) ⇒ Object

Main loop: repeatedly calls #process until the plugin is asked to stop.



# File 'lib/logstash/inputs/azureblob.rb', line 148

def run(queue)
  # we can abort the loop if stop? becomes true
  while !stop?
    process(queue)
    @logger.debug("Hitting interval of #{@interval}s . . .")
    Stud.stoppable_sleep(@interval) { stop? }
  end # loop
end

#save_registry(registry_hash, lease_id) ⇒ Object

Serialize the registry hash and save it.



# File 'lib/logstash/inputs/azureblob.rb', line 474

def save_registry(registry_hash, lease_id)
  # Serialize hash to json
  registry_hash_json = JSON.generate(registry_hash)

  # Upload registry to blob
  @azure_blob.create_block_blob(@container, @registry_path, registry_hash_json, lease_id: lease_id)
end

#stop ⇒ Object

Clean up the registry when the plugin stops.



# File 'lib/logstash/inputs/azureblob.rb', line 157

def stop
  cleanup_registry
end

#update_registry(registry_item) ⇒ Object

Update the registry with the given registry item.



# File 'lib/logstash/inputs/azureblob.rb', line 410

def update_registry(registry_item)
  begin
    lease = nil
    lease = acquire_lease(@registry_path)
    registry_hash = load_registry
    registry_hash[registry_item.file_path] = registry_item
    save_registry(registry_hash, lease)
    @azure_blob.release_blob_lease(@container, @registry_path, lease)
    lease = nil
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
  ensure
    @azure_blob.release_blob_lease(@container, @registry_path, lease) unless lease.nil?
    lease = nil
  end #rescue
end