Class: Fluent::S3Output
- Inherits: TimeSlicedOutput
  - Object
  - TimeSlicedOutput
  - Fluent::S3Output
- Includes: Mixin::ConfigPlaceholders
- Defined in:
  - lib/fluent/plugin/out_s3.rb
  - lib/fluent/plugin/s3_compressor_lzo.rb
  - lib/fluent/plugin/s3_compressor_lzma2.rb
  - lib/fluent/plugin/s3_compressor_gzip_command.rb
Defined Under Namespace
Classes: Compressor, GzipCommandCompressor, GzipCompressor, JsonCompressor, LZMA2Compressor, LZOCompressor, TextCompressor
Instance Attribute Summary
Instance Method Summary
Constructor Details
#initialize ⇒ S3Output
Returns a new instance of S3Output.
# File 'lib/fluent/plugin/out_s3.rb', line 7
def initialize
  super
  require 'aws-sdk'
  require 'zlib'
  require 'time'
  require 'tempfile'
  @compressor = nil
end
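The constructor only loads the libraries the plugin depends on; the S3 client itself is not created until #start. A minimal sketch (not taken from the plugin's documentation) of instantiating the class directly, assuming the fluentd runtime and the aws-sdk v1 gem are installed:

```ruby
# Hypothetical snippet: the plugin file needs the fluentd base classes loaded first.
require 'fluent/load'               # loads the fluentd runtime (assumed entry point)
require 'fluent/plugin/out_s3'      # defines Fluent::S3Output

output = Fluent::S3Output.new       # runs the constructor above; pulls in aws-sdk, zlib, time, tempfile
puts output.class                   # => Fluent::S3Output
```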
Instance Attribute Details
#bucket ⇒ Object
Returns the value of attribute bucket.
# File 'lib/fluent/plugin/out_s3.rb', line 31
def bucket
  @bucket
end
Instance Method Details
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 39
def configure(conf)
  super

  if conf.has_key?('s3_endpoint')
    raise ConfigError, "s3_endpoint parameter is removed. Use s3_region instead"
  end

  begin
    @compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
  rescue => e
    $log.warn "#{@store_as} not found. Use 'text' instead"
    @compressor = TextCompressor.new
  end
  @compressor.configure(conf)

  conf['format'] = @format
  @formatter = TextFormatter.create(conf)

  if @localtime
    @path_slicer = Proc.new {|path|
      Time.now.strftime(path)
    }
  else
    @path_slicer = Proc.new {|path|
      Time.now.utc.strftime(path)
    }
  end
end
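The path slicer set up here is just Time#strftime applied to the configured path, in local time or UTC. A short illustration (the path value is hypothetical) of what the UTC branch of @path_slicer produces:

```ruby
path = "logs/%Y/%m/%d/events"                              # hypothetical `path` setting with strftime specifiers

path_slicer = Proc.new { |p| Time.now.utc.strftime(p) }    # same shape as the UTC branch above

puts path_slicer.call(path)                                # e.g. "logs/2014/07/01/events", depending on the current date
```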
#format(tag, time, record) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 87
def format(tag, time, record)
  @formatter.format(tag, time, record)
end
#placeholders ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 35
def placeholders
  [:percent]
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 69
def start
  super

  options = {}
  if @aws_key_id && @aws_sec_key
    options[:access_key_id] = @aws_key_id
    options[:secret_access_key] = @aws_sec_key
  end
  options[:region] = @s3_region if @s3_region
  options[:proxy_uri] = @proxy_uri if @proxy_uri
  options[:use_ssl] = @use_ssl

  @s3 = AWS::S3.new(options)
  @bucket = @s3.buckets[@s3_bucket]

  check_apikeys if @check_apikey_on_start
  ensure_bucket
end
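#start assembles the options for the v1 aws-sdk client from the credential, region, proxy and SSL settings, then looks up the target bucket. A hedged sketch of the equivalent standalone calls, with placeholder values, assuming the aws-sdk v1 gem:

```ruby
require 'aws-sdk'   # v1 SDK, which provides the AWS::S3 class used above

options = {
  :access_key_id     => 'YOUR_ACCESS_KEY_ID',   # stands in for @aws_key_id
  :secret_access_key => 'YOUR_SECRET_KEY',      # stands in for @aws_sec_key
  :region            => 'us-east-1',            # @s3_region, if configured
  :use_ssl           => true                    # @use_ssl
}

s3     = AWS::S3.new(options)
bucket = s3.buckets['my-log-bucket']            # hypothetical @s3_bucket value
```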
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_s3.rb', line 91
def write(chunk)
  i = 0
  previous_path = nil

  begin
    path = @path_slicer.call(@path)
    values_for_s3_object_key = {
      "path" => path,
      "time_slice" => chunk.key,
      "file_extension" => @compressor.ext,
      "index" => i
    }
    s3path = @s3_object_key_format.gsub(%r(%{[^}]+})) { |expr|
      values_for_s3_object_key[expr[2...expr.size-1]]
    }
    if (i > 0) && (s3path == previous_path)
      raise "duplicated path is generated. use %{index} in s3_object_key_format: path = #{s3path}"
    end

    i += 1
    previous_path = s3path
  end while @bucket.objects[s3path].exists?

  tmp = Tempfile.new("s3-")
  begin
    @compressor.compress(chunk, tmp)
    @bucket.objects[s3path].write(Pathname.new(tmp.path),
                                  {:content_type => @compressor.content_type,
                                   :reduced_redundancy => @reduced_redundancy})
  ensure
    tmp.close(true) rescue nil
  end
end
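The object key comes from substituting %{...} placeholders in s3_object_key_format with the sliced path, the chunk's time slice, the compressor's file extension, and a collision-avoiding index. A small sketch of the same gsub logic with made-up values:

```ruby
# A typical s3_object_key_format value (assumption; check the plugin README for the actual default).
key_format = "%{path}%{time_slice}_%{index}.%{file_extension}"

values = {
  "path"           => "logs/",      # result of @path_slicer.call(@path)
  "time_slice"     => "20140701",   # chunk.key
  "file_extension" => "gz",         # @compressor.ext for the gzip compressor
  "index"          => 0
}

s3path = key_format.gsub(%r(%{[^}]+})) { |expr|
  values[expr[2...expr.size - 1]]
}

puts s3path   # => "logs/20140701_0.gz"
```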