Class: Fluent::S3Output
- Inherits: TimeSlicedOutput
- Ancestor chain: Object → TimeSlicedOutput → Fluent::S3Output
- Includes:
- Mixin::ConfigPlaceholders
- Defined in:
- lib/fluent/plugin/out_s3.rb,
lib/fluent/plugin/s3_compressor_lzo.rb,
lib/fluent/plugin/s3_compressor_lzma2.rb,
lib/fluent/plugin/s3_compressor_gzip_command.rb
Defined Under Namespace
Classes: Compressor, GzipCommandCompressor, GzipCompressor, JsonCompressor, LZMA2Compressor, LZOCompressor, TextCompressor
Instance Attribute Summary collapse
Class Method Summary
collapse
Instance Method Summary
collapse
Constructor Details
Returns a new instance of S3Output.
7
8
9
10
11
12
13
14
15
|
# File 'lib/fluent/plugin/out_s3.rb', line 7
# Returns a new instance of S3Output.
# Dependencies are required here (instantiation time) rather than at file
# load time, so merely loading the plugin file does not pull them in.
def initialize
  super
  require 'aws-sdk'   # AWS::S3 client, used in #start
  require 'zlib'      # presumably used by the gzip compressor — not visible in this excerpt
  require 'time'
  require 'tempfile'  # chunks are staged through a Tempfile in #write
  @compressor = nil   # selected in #configure based on @store_as
end
|
Instance Attribute Details
#bucket ⇒ Object
Returns the value of attribute bucket.
33
34
35
|
# File 'lib/fluent/plugin/out_s3.rb', line 33
# Returns the AWS::S3 bucket handle (assigned in #start from
# <tt>@s3.buckets[@s3_bucket]</tt>).
def bucket
  @bucket
end
|
Class Method Details
.register_compressor(name, compressor) ⇒ Object
239
240
241
|
# File 'lib/fluent/plugin/out_s3.rb', line 239
# Registers a compressor implementation under +name+ so it can be selected
# via the plugin configuration and looked up in #configure.
#
# @param name [Symbol, String] registry key — presumably matches store_as values; confirm against COMPRESSOR_REGISTRY
# @param compressor [Class] compressor class to instantiate (see Compressor subclasses above)
def self.register_compressor(name, compressor)
  COMPRESSOR_REGISTRY.register(name, compressor)
end
|
Instance Method Details
#configure(conf) ⇒ Object
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
|
# File 'lib/fluent/plugin/out_s3.rb', line 41
# Configures the plugin: validates endpoint settings, selects the compressor
# for @store_as (falling back to plain text), builds the record formatter,
# and prepares the time-slice path expander.
#
# @param conf [Fluent::Config::Element] plugin configuration
# @raise [ConfigError] when s3_endpoint points at AWS itself
def configure(conf)
  super
  # s3_endpoint is reserved for S3-compatible services; genuine AWS
  # endpoints must be chosen via s3_region instead.
  if @s3_endpoint && @s3_endpoint.end_with?('amazonaws.com')
    raise ConfigError, "s3_endpoint parameter is not supported for S3, use s3_region instead. This parameter is for S3 compatible services"
  end
  begin
    @compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
  rescue => e
    # Unknown store_as values fall back to plain text. Use the plugin
    # logger (consistent with the lookup call above, which also passes
    # +log+) instead of the global $log, and surface the lookup error
    # rather than silently discarding it.
    log.warn "#{@store_as} not found: #{e.message}. Use 'text' instead"
    @compressor = TextCompressor.new
  end
  @compressor.configure(conf)
  conf['format'] = @format
  @formatter = TextFormatter.create(conf)
  # Pre-bind strftime in local or UTC time for expanding time-slice paths
  # (called per chunk in #write).
  if @localtime
    @path_slicer = Proc.new {|path|
      Time.now.strftime(path)
    }
  else
    @path_slicer = Proc.new {|path|
      Time.now.utc.strftime(path)
    }
  end
end
|
#format(tag, time, record) ⇒ Object
91
92
93
|
# File 'lib/fluent/plugin/out_s3.rb', line 91
# Serializes one event by delegating to the formatter built in #configure.
#
# @param tag [String] event tag
# @param time [Integer, Fluent::EventTime] event timestamp
# @param record [Hash] event record
# @return [String] the formatted record — depends on the configured formatter
def format(tag, time, record)
  @formatter.format(tag, time, record)
end
|
#placeholders ⇒ Object
37
38
39
|
# File 'lib/fluent/plugin/out_s3.rb', line 37
# Placeholder styles enabled for Mixin::ConfigPlaceholders — only the
# :percent style is supported by this plugin (presumably %-prefixed
# expansion; confirm against the mixin's documentation).
def placeholders
  %i[percent]
end
|
#start ⇒ Object
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
|
# File 'lib/fluent/plugin/out_s3.rb', line 71
# Starts the output plugin: builds the AWS::S3 client from the configured
# options, memoizes the target bucket handle, and optionally verifies
# credentials and bucket existence.
def start
  super
  options = {}
  # Explicit credentials are optional; when absent the SDK presumably
  # falls back to its default credential chain — confirm against aws-sdk.
  if @aws_key_id && @aws_sec_key
    options[:access_key_id] = @aws_key_id
    options[:secret_access_key] = @aws_sec_key
  end
  # Only pass optional settings that were actually configured.
  options[:region] = @s3_region if @s3_region
  options[:endpoint] = @s3_endpoint if @s3_endpoint
  options[:proxy_uri] = @proxy_uri if @proxy_uri
  options[:use_ssl] = @use_ssl
  options[:s3_server_side_encryption] = @use_server_side_encryption
  @s3 = AWS::S3.new(options)
  @bucket = @s3.buckets[@s3_bucket]  # exposed via #bucket
  check_apikeys if @check_apikey_on_start  # fail fast on bad credentials; helper not visible in this excerpt
  ensure_bucket
end
|
#write(chunk) ⇒ Object
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
|
# File 'lib/fluent/plugin/out_s3.rb', line 95
# Uploads one buffer chunk to S3.
# Expands @s3_object_key_format into an object key — retrying with an
# incremented %{index} while the key already exists in the bucket — then
# compresses the chunk into a temp file and writes it to the object.
#
# @param chunk [Fluent::BufferChunk] the buffer chunk to upload
# @raise [RuntimeError] when the key format lacks %{index} and a collision occurs
def write(chunk)
  i = 0
  previous_path = nil
  begin
    path = @path_slicer.call(@path)  # strftime-expanded path (see #configure)
    values_for_s3_object_key = {
      "path" => path,
      "time_slice" => chunk.key,
      "file_extension" => @compressor.ext,
      "index" => i
    }
    # Substitute each %{...} token with its value from the table above;
    # expr[2...-1] strips the surrounding "%{" and "}".
    s3path = @s3_object_key_format.gsub(%r(%{[^}]+})) { |expr|
      values_for_s3_object_key[expr[2...expr.size-1]]
    }
    # If incrementing the index did not change the key, the format has no
    # %{index} placeholder and we would loop forever / overwrite objects.
    if (i > 0) && (s3path == previous_path)
      raise "duplicated path is generated. use %{index} in s3_object_key_format: path = #{s3path}"
    end
    i += 1
    previous_path = s3path
  end while @bucket.objects[s3path].exists?  # do-while: builds the key at least once
  tmp = Tempfile.new("s3-")
  begin
    @compressor.compress(chunk, tmp)
    @bucket.objects[s3path].write(Pathname.new(tmp.path), {:content_type => @compressor.content_type,
                                                           :reduced_redundancy => @reduced_redundancy})
  ensure
    tmp.close(true) rescue nil  # close(true) also unlinks the temp file; failures ignored (best effort)
  end
end
|