Class: Fluent::Plugin::AzureStorageGen2Output

Inherits:
Output
  • Object
Defined in:
lib/fluent/plugin/out_azurestorage_gen2.rb

Defined Under Namespace

Classes: Compressor, GzipCompressor, JsonCompressor, TextCompressor

Constant Summary

DEFAULT_FORMAT_TYPE = "out_file"
ACCESS_TOKEN_API_VERSION = "2018-02-01"
ABFS_API_VERSION = "2018-11-09"
AZURE_BLOCK_SIZE_LIMIT = 4 * 1024 * 1024 - 1
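AZURE_BLOCK_SIZE_LIMIT is 4 MiB minus one byte, which suggests data is appended to the blob in pieces no larger than that. A minimal sketch of slicing a payload against such a limit (the each_block helper is illustrative only, not part of the plugin):

# Illustrative only: walk a payload in pieces no larger than
# AZURE_BLOCK_SIZE_LIMIT, the way an append-upload loop might consume it.
AZURE_BLOCK_SIZE_LIMIT = 4 * 1024 * 1024 - 1

def each_block(data, limit = AZURE_BLOCK_SIZE_LIMIT)
  offset = 0
  while offset < data.bytesize
    yield data.byteslice(offset, limit)
    offset += limit
  end
end

each_block("x" * 10_000_000) { |piece| puts piece.bytesize }
# => 4194303, 4194303, 1611394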

Instance Method Summary

Constructor Details

#initialize ⇒ AzureStorageGen2Output

Returns a new instance of AzureStorageGen2Output.



# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 18

def initialize
    super
    @compressor = nil
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 70

def configure(conf)
    compat_parameters_convert(conf, :buffer, :formatter, :inject)
    super

    if @store_as.nil? || @store_as == "none"
        log.info "azurestorage_gen2: Compression is disabled (store_as: #{@store_as})"
    else
        begin
            @compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
        rescue => e
            log.warn "#{@store_as} not found. Use 'text' instead"
            @compressor = TextCompressor.new
        end
        @compressor.configure(conf)
    end

    @formatter = formatter_create

    if @azure_container.nil?
      raise Fluent::ConfigError, "azure_container is needed"
    end

    @azure_storage_path = ''
    @last_azure_storage_path = ''
    @current_index = 0

    if @store_as.nil? || @store_as == "none"
        @final_file_extension = @file_extension
    else
        @final_file_extension = @compressor.ext
    end
    @values_for_object_chunk = {}
end
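A hedged sketch (assumed test setup, not part of the plugin source) of driving #configure through fluentd's bundled test driver; azure_container is mandatory, and store_as selects the compressor, with nil or "none" disabling compression entirely:

require "fluent/test"
require "fluent/test/driver/output"
require "fluent/plugin/out_azurestorage_gen2"

Fluent::Test.setup

conf = %[
  azure_container mycontainer
  store_as gzip
  <format>
    @type json
  </format>
]

# Raises Fluent::ConfigError when azure_container is missing; falls back to
# TextCompressor when the store_as value has no registered compressor.
driver = Fluent::Test::Driver::Output.new(Fluent::Plugin::AzureStorageGen2Output)
driver.configure(conf)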

#format(tag, time, record) ⇒ Object



# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 132

def format(tag, time, record)
    r = inject_values_to_record(tag, time, record)
    @formatter.format(tag, time, r)
end
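#format injects any configured <inject> fields into the record and then delegates to the formatter created in #configure. A small illustration (example settings only, not taken from the plugin source) of what ends up in the buffer chunk:

# With an <inject> section of `tag_key fluentd_tag` and a json <format>,
# an event {"message" => "hello"} on tag "app.log" is buffered roughly as:
#   {"message":"hello","fluentd_tag":"app.log"}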

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 104

def multi_workers_ready?
    true
end

#start ⇒ Object



# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 108

def start
    setup_access_token
    if !@skip_container_check
        if @failsafe_container_check
            begin
                if @write_only && @auto_create_container
                    create_container
                else
                    ensure_container
                end
            rescue Exception => e
                log.warn("#{e.message}, container list/create failsafe is enabled. Continue without those operations.")
            end
        else
            if @write_only && @auto_create_container
                create_container
            else
                ensure_container
            end
        end
    end
    super
end
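#start first obtains an access token and then, unless skip_container_check is set, either verifies the container exists or creates it; with failsafe_container_check those container calls only log a warning on failure. A hedged configuration sketch (flag names taken from the method body above) for a write-only principal that is not allowed to list containers:

conf = %[
  azure_container mycontainer
  write_only true
  auto_create_container true
  failsafe_container_check true
]
# With these flags #start calls create_container directly and, because the
# failsafe branch is active, only warns if the create request is rejected.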

#write(chunk) ⇒ Object



# File 'lib/fluent/plugin/out_azurestorage_gen2.rb', line 137

def write(chunk)
    if @store_as.nil? || @store_as == "none"
        generate_log_name(chunk, @current_index)
        if @last_azure_storage_path != @azure_storage_path
            @current_index = 0
            generate_log_name(chunk, @current_index)
        end
        raw_data = chunk.read
        unless raw_data.empty?
            log.debug "azurestorage_gen2: processing raw data", chunk_id: dump_unique_id_hex(chunk.unique_id)
            upload_blob(raw_data, chunk)
        end
        chunk.close rescue nil
        @last_azure_storage_path = @azure_storage_path
    else
        tmp = Tempfile.new("azure-")
        tmp.binmode
        begin
            @compressor.compress(chunk, tmp)
            tmp.rewind
            generate_log_name(chunk, @current_index)
            if @last_azure_storage_path != @azure_storage_path
                @current_index = 0
                generate_log_name(chunk, @current_index)
            end
            log.debug "azurestorage_gen2: Start uploading temp file: #{tmp.path}"
            content = File.open(tmp.path, 'rb') { |file| file.read }
            upload_blob(content, chunk)
            @last_azure_storage_path = @azure_storage_path
        ensure
            tmp.close(true) rescue nil
        end
        @values_for_object_chunk.delete(chunk.unique_id)
    end

end
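A rough usage sketch (assumed test-driver flow, reusing the driver from the #configure example above): feeding events makes the buffer flush into #write, which either uploads the raw chunk content (store_as none) or compresses it to a Tempfile before calling upload_blob:

driver.run(default_tag: "app.log") do
  driver.feed(Fluent::EventTime.now, {"message" => "hello"})
end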