Class: Fluent::EventLimitedFileBuffer

Inherits: FileBuffer (which inherits from Object)
Defined in:
lib/fluent/plugin/buf_event_limited.rb

Instance Method Summary

Instance Method Details

#emit(key, data, chain) ⇒ Object



63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# File 'lib/fluent/plugin/buf_event_limited.rb', line 63

# Buffers +data+ (MessagePack-formatted events) under +key+, partitioning
# the events into groups no larger than @buffer_chunk_records_limit.
# The first group is sized to the active chunk's remaining capacity; each
# later group is a full slice of the configured limit.
#
# NOTE(review): the writable chunk for every group comes from
# rotate_chunk! — presumably it enqueues a full chunk and hands back a
# fresh/reusable one; confirm against buf_event_limited.rb.
#
# Returns true if any write enqueued into an empty queue (callers use
# this as a flush trigger), false otherwise.
def emit(key, data, chain)
  payload = MessagePackFormattedBufferData.new(data)
  tag = key.to_s
  needs_flush = false

  synchronize do
    # Reuse the active chunk for this tag, or lazily create one.
    active = (@map[tag] ||= new_chunk(tag))

    pending = payload.as_events
    # Head group tops up the active chunk; the remainder is limit-sized.
    groups = [pending.shift(active.remaining_capacity)]
    pending.each_slice(@buffer_chunk_records_limit) { |slice| groups << slice }

    groups.each do |group|
      active, queue_size = rotate_chunk!(active, tag)
      # Only the write that puts the first chunk into the queue triggers
      # a flush; once set, the flag stays set.
      needs_flush = true if queue_size == 0

      chain.next
      packed = group.map { |event| MessagePack.pack(event) }
      active.write(packed.join, group.size)
    end

    return needs_flush
  end
end

#new_chunk(key) ⇒ Object



93
94
95
96
97
98
99
# File 'lib/fluent/plugin/buf_event_limited.rb', line 93

# Creates a brand-new buffer chunk for +key+: encodes the key, derives an
# on-disk path plus time suffix in the 'b' (buffer) state, turns that
# suffix into the chunk's unique id, and opens the chunk in append mode.
def new_chunk(key)
  safe_key = encode_key(key)
  chunk_path, suffix = make_path(safe_key, 'b')

  chunk_factory(key, chunk_path, tsuffix_to_unique_id(suffix), 'a+')
end

#resumeObject



103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
# File 'lib/fluent/plugin/buf_event_limited.rb', line 103

# Rebuilds buffer state from chunk files left on disk.
#
# Scans every file matching the configured prefix/suffix, parses its
# identifier with PATH_MATCH (capture 1: encoded key, capture 2: state
# flag 'b'/'q', capture 3: hex time suffix) and reopens it: 'b' chunks in
# append mode for the active map, 'q' chunks read-only for the queue.
# Files whose identifier does not match are skipped.
#
# Returns [queue, map]: the queued chunks ordered by timestamp, and a
# hash of active chunks keyed by chunk.key — when several 'b' chunks
# share a key, the newest timestamp wins because it is inserted last.
def resume
  backups = []
  queued = []

  Dir.glob("#{@buffer_path_prefix}*#{@buffer_path_suffix}") do |path|
    identifier = chunk_identifier_in_path(path)
    match = PATH_MATCH.match(identifier)
    next if match.nil?

    key = decode_key(match[1])
    state = match[2]
    tsuffix = match[3]
    timestamp = tsuffix.to_i(16)
    unique_id = tsuffix_to_unique_id(tsuffix)

    if state == 'b'
      backups << [timestamp, chunk_factory(key, path, unique_id, 'a+')]
    elsif state == 'q'
      queued << [timestamp, chunk_factory(key, path, unique_id, 'r')]
    end
  end

  map = backups
    .sort_by(&:first)
    .each_with_object({}) { |(_ts, chunk), acc| acc[chunk.key] = chunk }

  queue = queued
    .sort_by(&:first)
    .map(&:last)

  [queue, map]
end