Class: Fluent::S3Output
- Inherits: TimeSlicedOutput
  - Object
  - TimeSlicedOutput
  - Fluent::S3Output
- Includes: Mixin::ConfigPlaceholders
- Defined in:
  - lib/fluent/plugin/out_s3.rb
  - lib/fluent/plugin/s3_compressor_lzo.rb
  - lib/fluent/plugin/s3_compressor_lzma2.rb
  - lib/fluent/plugin/s3_compressor_gzip_command.rb
Defined Under Namespace
Classes: Compressor, GzipCommandCompressor, GzipCompressor, JsonCompressor, LZMA2Compressor, LZOCompressor, TextCompressor
Instance Attribute Summary
Instance Method Summary
Constructor Details
#initialize ⇒ S3Output

Returns a new instance of S3Output.
# File 'lib/fluent/plugin/out_s3.rb', line 8
def initialize
  super
  require 'aws-sdk-resources'
  require 'zlib'
  require 'time'
  require 'tempfile'
  @compressor = nil
end
Instance Attribute Details
#bucket ⇒ Object
Returns the value of attribute bucket.
# File 'lib/fluent/plugin/out_s3.rb', line 59
def bucket
  @bucket
end
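The attribute is populated in #start. A hedged sketch of reading it from outside the plugin (for example, in a unit test); the configure/start steps are assumed to have run:

output = Fluent::S3Output.new
# ... run configure(conf) and start first, so @bucket is set ...
bucket = output.bucket   # the Aws::S3::Bucket built in #start
puts bucket.name         # the configured s3_bucket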
Instance Method Details
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 67
def configure(conf)
  super

  if @s3_endpoint && @s3_endpoint.end_with?('amazonaws.com')
    raise ConfigError, "s3_endpoint parameter is not supported for S3, use s3_region instead. This parameter is for S3 compatible services"
  end

  begin
    @compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
  rescue
    $log.warn "#{@store_as} not found. Use 'text' instead"
    @compressor = TextCompressor.new
  end
  @compressor.configure(conf)

  @formatter = Plugin.new_formatter(@format)
  @formatter.configure(conf)

  if @localtime
    @path_slicer = Proc.new {|path|
      Time.now.strftime(path)
    }
  else
    @path_slicer = Proc.new {|path|
      Time.now.utc.strftime(path)
    }
  end

  @storage_class = "REDUCED_REDUNDANCY" if @reduced_redundancy
  @values_for_s3_object_chunk = {}
end
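The @path_slicer proc is just strftime over the configured path, in local time or UTC depending on the localtime setting. A minimal sketch of the UTC branch (the path pattern below is an assumption):

path_slicer = Proc.new { |path| Time.now.utc.strftime(path) }
path_slicer.call("logs/%Y/%m/%d/app")
# => "logs/2024/01/01/app" (for a flush on 2024-01-01 UTC)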
#format(tag, time, record) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 119
def format(tag, time, record)
  @formatter.format(tag, time, record)
end
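#format delegates entirely to the formatter selected by the format parameter. As an illustration only (assuming `format json` is configured), the json formatter emits roughly one JSON line per record, dropping tag and time:

require 'json'

# Roughly what @formatter.format returns under `format json`;
# this helper is hypothetical, not part of the plugin.
def json_line(tag, time, record)
  record.to_json + "\n"
end

json_line("app.access", Time.now.to_i, { "status" => 200, "path" => "/" })
# => "{\"status\":200,\"path\":\"/\"}\n"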
#placeholders ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 63
def placeholders
  [:percent]
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 99
def start
  super
  options = setup_credentials
  options[:region] = @s3_region if @s3_region
  options[:endpoint] = @s3_endpoint if @s3_endpoint
  options[:http_proxy] = @proxy_uri if @proxy_uri
  options[:s3_server_side_encryption] = @use_server_side_encryption.to_sym if @use_server_side_encryption
  options[:force_path_style] = @force_path_style

  s3_client = Aws::S3::Client.new(options)
  @s3 = Aws::S3::Resource.new(:client => s3_client)
  @bucket = @s3.bucket(@s3_bucket)

  check_apikeys if @check_apikey_on_start
  ensure_bucket

  @hex_random_n = (@hex_random_length + 1) / 2
end
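A standalone sketch of the same client/resource wiring with the AWS SDK for Ruby v2; the region, credentials, and bucket name below are placeholders, and in the plugin the credential options come from setup_credentials:

require 'aws-sdk-resources'

options = {
  :region      => 'us-east-1',                           # stands in for s3_region
  :credentials => Aws::Credentials.new('AKID', 'SECRET') # normally from setup_credentials
}
s3     = Aws::S3::Resource.new(:client => Aws::S3::Client.new(options))
bucket = s3.bucket('my-log-bucket')                      # stands in for s3_bucket
puts bucket.object('logs/example.gz').exists?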
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 123
def write(chunk)
  i = 0
  previous_path = nil

  begin
    path = @path_slicer.call(@path)

    @values_for_s3_object_chunk[chunk.key] ||= {
      "hex_random" => hex_random,
      "uuid_flush" => uuid_random,
    }
    values_for_s3_object_key = {
      "path" => path,
      "time_slice" => chunk.key,
      "file_extension" => @compressor.ext,
      "index" => i,
    }.merge!(@values_for_s3_object_chunk[chunk.key])

    s3path = @s3_object_key_format.gsub(%r(%{[^}]+})) { |expr|
      values_for_s3_object_key[expr[2...expr.size-1]]
    }
    if (i > 0) && (s3path == previous_path)
      if @overwrite
        log.warn "#{s3path} already exists, but will overwrite"
        break
      else
        raise "duplicated path is generated. use %{index} in s3_object_key_format: path = #{s3path}"
      end
    end

    i += 1
    previous_path = s3path
  end while @bucket.object(s3path).exists?

  tmp = Tempfile.new("s3-")
  begin
    @compressor.compress(chunk, tmp)
    tmp.rewind
    log.debug { "out_s3: trying to write {object_id:#{chunk.object_id},time_slice:#{chunk.key}} to s3://#{@s3_bucket}/#{s3path}" }
    @bucket.object(s3path).put(:body => tmp,
                               :content_type => @compressor.content_type,
                               :storage_class => @storage_class)
  ensure
    @values_for_s3_object_chunk.delete(chunk.key)
    tmp.close(true) rescue nil
  end
end
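The s3path computation is plain %{...} substitution over s3_object_key_format, using the same gsub as above. A worked example with illustrative values (the key format below is an assumption close to the plugin's default):

values = {
  "path"           => "logs/20240101",  # @path after strftime
  "time_slice"     => "20240101",       # chunk.key
  "index"          => 0,
  "file_extension" => "gz",
}
key_format = "%{path}/%{time_slice}_%{index}.%{file_extension}"

s3path = key_format.gsub(%r(%{[^}]+})) { |expr|
  values[expr[2...expr.size - 1]]
}
# => "logs/20240101/20240101_0.gz"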