Class: Aws::S3Interface

Inherits:
AwsBase show all
Extended by:
AwsBaseInterface::ClassMethods
Includes:
AwsBaseInterface
Defined in:
lib/s3/s3_interface.rb

Defined Under Namespace

Classes: S3AclParser, S3BucketLocationParser, S3CopyParser, S3HttpResponseBodyParser, S3HttpResponseHeadParser, S3HttpResponseParser, S3ImprovedListBucketParser, S3InitiateMultipartUploadParser, S3ListAllMyBucketsParser, S3ListBucketParser, S3ListMultipartPartsParser, S3LoggingParser, S3UploadPartParser

Constant Summary collapse

USE_100_CONTINUE_PUT_SIZE =
1_000_000
DEFAULT_HOST =
's3.amazonaws.com'
DEFAULT_PORT =
443
DEFAULT_PROTOCOL =
'https'
DEFAULT_SERVICE =
'/'
REQUEST_TTL =
30
DEFAULT_EXPIRES_AFTER =

One day’s worth of seconds

1 * 24 * 60 * 60
ONE_YEAR_IN_SECONDS =
365 * 24 * 60 * 60
AMAZON_HEADER_PREFIX =
'x-amz-'
AMAZON_METADATA_PREFIX =
'x-amz-meta-'
@@bench =
AwsBenchmarkingBlock.new

Constants included from AwsBaseInterface

AwsBaseInterface::DEFAULT_SIGNATURE_VERSION

Constants inherited from AwsBase

AwsBase::AMAZON_PROBLEMS

Instance Attribute Summary

Attributes included from AwsBaseInterface

#aws_access_key_id, #cache, #last_errors, #last_request, #last_request_id, #last_response, #logger, #params, #signature_version

Class Method Summary collapse

Instance Method Summary collapse

Methods included from AwsBaseInterface

#cache_hits?, caching, caching=, #caching?, #close_conn, #close_connection, #connection, #escape_params, #generate_request, #generate_request2, #get_conn, #hash_params, #init, #multi_thread, #on_exception, #request_cache_or_info, #request_info2, #request_info3, #request_info_impl, #request_info_xml_simple, #request_info_xml_simple3, #signed_service_params, #symbolize, #update_cache

Methods inherited from AwsBase

amazon_problems, amazon_problems=

Constructor Details

#initialize(aws_access_key_id = nil, aws_secret_access_key = nil, params = {}) ⇒ S3Interface

Creates new RightS3 instance.

s3 = Aws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<Aws::S3Interface:0xb7b3c27c>

Params is a hash:

{:server       => 's3.amazonaws.com'   # Amazon service host: 's3.amazonaws.com'(default)
 :port         => 443                  # Amazon service port: 80 or 443(default)
 :protocol     => 'https'              # Amazon service protocol: 'http' or 'https'(default)
 :connection_mode  => :default         # options are
                                              :default (will use best known safe (as in won't need explicit close) option, may change in the future)
                                              :per_request (opens and closes a connection on every request)
                                              :single (one thread across entire app)
                                              :per_thread (one connection per thread)
 :logger       => Logger Object}       # Logger instance: logs to STDOUT if omitted }


78
79
80
81
82
83
84
85
86
87
# File 'lib/s3/s3_interface.rb', line 78

# Creates a new S3 interface instance.
#
# The whole endpoint may be overridden via the S3_URL environment variable;
# otherwise the DEFAULT_* constants are used. Credentials fall back to
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY when not passed explicitly.
def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
  # Parse the override URL once instead of once per component.
  s3_url = ENV['S3_URL'] ? URI.parse(ENV['S3_URL']) : nil
  init({:name             => 'S3',
        :default_host     => s3_url ? s3_url.host : DEFAULT_HOST,
        :default_port     => s3_url ? s3_url.port : DEFAULT_PORT,
        :default_service  => s3_url ? s3_url.path : DEFAULT_SERVICE,
        :default_protocol => s3_url ? s3_url.scheme : DEFAULT_PROTOCOL},
       aws_access_key_id || ENV['AWS_ACCESS_KEY_ID'],
       aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'],
       params)
end

Class Method Details

.benchObject



49
50
51
# File 'lib/s3/s3_interface.rb', line 49

# Shared AwsBenchmarkingBlock instance (@@bench) used to collect timing
# data for S3 requests.
def self.bench
  @@bench
end

.bench_s3Object



57
58
59
# File 'lib/s3/s3_interface.rb', line 57

# Benchmark accumulator for the S3 service calls themselves
# (the +service+ side of the shared AwsBenchmarkingBlock).
def self.bench_s3
  @@bench.service
end

.bench_xmlObject



53
54
55
# File 'lib/s3/s3_interface.rb', line 53

# Benchmark accumulator for XML parsing of S3 responses
# (the +xml+ side of the shared AwsBenchmarkingBlock).
def self.bench_xml
  @@bench.xml
end

.connection_nameObject



43
44
45
# File 'lib/s3/s3_interface.rb', line 43

# Symbol under which connections for this interface are keyed/pooled
# by the shared connection management code.
def self.connection_name
  :s3_connection
end

Instance Method Details

#bucket_location(bucket, headers = {}) ⇒ Object

Retrieve bucket location

s3.create_bucket('my-awesome-bucket-us')        #=> true
puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)

s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
puts s3.bucket_location('my-awesome-bucket-eu')            #=> 'EU'


239
240
241
242
243
244
# File 'lib/s3/s3_interface.rb', line 239

# Retrieves the location constraint of +bucket+ (e.g. '' for the default
# region or 'EU'). Returns the parsed value or raises an exception.
def bucket_location(bucket, headers={})
  request = generate_rest_request('GET', headers.merge(:url => "#{bucket}?location"))
  request_info(request, S3BucketLocationParser.new)
rescue
  on_exception
end

#canonical_string(method, path, headers = {}, expires = nil) ⇒ Object


Requests

Produces canonical string for signing.



93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
# File 'lib/s3/s3_interface.rb', line 93

# Produces the canonical string for AWS signature-v2 request signing.
# NOTE(review): header canonicalization and sub-resource handling follow
# the S3 REST authentication scheme; statement order matters here, so the
# logic is left untouched and only comments were added.
def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
  s3_headers = {}
  headers.each do |key, value|
    key = key.downcase
    # Only x-amz-*, content-md5, content-type and date participate in signing.
    s3_headers[key] = value.join("").strip if key[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
  end
  s3_headers['content-type'] ||= ''
  s3_headers['content-md5']  ||= ''
  # An x-amz-date header replaces the Date header in the signature.
  s3_headers['date'] = '' if s3_headers.has_key? 'x-amz-date'
  # For query-string (signed URL) auth the Expires value stands in for Date.
  s3_headers['date'] = expires if expires
  # prepare output string
  out_string = "#{method}\n"
  s3_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
    out_string << (key[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{key}:#{value}\n" : "#{value}\n")
  end
  # ignore everything after the question mark...
  out_string << path.gsub(/\?.*$/, '')
  # ...unless there is an acl or torrent parameter
  out_string << '?acl' if path[/[&?]acl($|&|=)/]
  out_string << '?policy' if path[/[&?]policy($|&|=)/]
  out_string << '?torrent' if path[/[&?]torrent($|&|=)/]
  out_string << '?location' if path[/[&?]location($|&|=)/]
  out_string << '?logging' if path[/[&?]logging($|&|=)/] # this one is beta, no support for now
  #... also deal with params for multipart uploads API
  out_string << '?uploads' if path[/\?uploads$/]
  if path[/\?uploadId=(.*)$/]
    out_string << "?uploadId=#{$1}"
  end
  if path[/\?partNumber=(\d*)&uploadId=(.*)$/]
    out_string << "?partNumber=#{$1}&uploadId=#{$2}"
  end
  out_string
end

#clear_bucket(bucket) ⇒ Object

Removes all keys from bucket. Returns true or an exception.

s3.clear_bucket('my_awesome_bucket') #=> true


925
926
927
928
929
930
931
932
933
# File 'lib/s3/s3_interface.rb', line 925

# Removes all keys from bucket. Returns true or an exception.
#
#  s3.clear_bucket('my_awesome_bucket') #=> true
def clear_bucket(bucket)
  incrementally_list_bucket(bucket) do |results|
    # Delete every key reported in this page of the listing.
    # (Removed stray debugging `p results` / `p key` calls that printed
    # every listing page and key to stdout.)
    results[:contents].each { |key| delete(bucket, key[:key]) }
  end
  true
rescue
  on_exception
end

#complete_multipart(bucket, key, uploadId, manifest_hash, headers = {}) ⇒ Object

Completes a multipart upload, returning true or an exception docs.amazonwebservices.com/AmazonS3/latest/API/mpUploadComplete.html

Clients must specify the uploadId (obtained from the initiate_multipart call) and the reassembly manifest hash which specifies the each partNumber corresponding etag (obtained from the upload_part call):

s3.complete_multipart(‘my_awesome_bucket’, ‘hugeObject’, “WL7dk8sqbtk3Rg641HHWaNeG6RxI”,

{"1"=>"a54357aff0632cce46d942af68356b38", "2"=>"0c78aef83f66abc1fa1e8477f296d394"}) => true

See docs.amazonwebservices.com/AmazonS3/latest/dev/mpuoverview.html



589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
# File 'lib/s3/s3_interface.rb', line 589

# Completes a multipart upload, returning true or an exception.
# +uploadId+ comes from initiate_multipart; +manifest_hash+ maps each
# partNumber to the etag returned by upload_part, e.g.
#   {"1" => "a54357aff0632cce46d942af68356b38", "2" => "0c78..."}
# NOTE(review): heredoc whitespace is part of the XML request body, so the
# string building is left byte-identical.
def complete_multipart(bucket, key, uploadId, manifest_hash, headers={})
  # Render one <Part> element per (partNumber, etag) pair.
  parts_string = manifest_hash.inject("") do |res, (part,etag)|
   res<< <<END_PARTS
    <Part>
      <PartNumber>#{part}</PartNumber>
      <ETag>"#{etag}"</ETag>
    </Part>
END_PARTS
   res
  end
  # Wrap the parts in the CompleteMultipartUpload request body.
  data = <<EOS
  <CompleteMultipartUpload>
    #{parts_string}
  </CompleteMultipartUpload>
EOS
  req_hash = generate_rest_request('POST', headers.merge(:url  => "#{bucket}/#{CGI::escape key}?uploadId=#{uploadId}",
                                                         :data => data))
  request_info(req_hash, RightHttp2xxParser.new)
  rescue
    on_exception
end

#copy(src_bucket, src_key, dest_bucket, dest_key = nil, directive = :copy, headers = {}) ⇒ Object

Copy an object.

directive: :copy    - copy meta-headers from source (default value)
           :replace - replace meta-headers by passed ones

# copy a key with meta-headers
s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}

# copy a key, overwrite meta-headers
s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}

see: docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html

http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html


779
780
781
782
783
784
785
786
787
# File 'lib/s3/s3_interface.rb', line 779

# Copies an object.
#
# directive: :copy    - copy meta-headers from source (default)
#            :replace - replace meta-headers with the passed ones
#
# Returns the parsed copy result (e.g. {:e_tag=>..., :last_modified=>...}).
def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
  dest_key ||= src_key
  # Build the request headers on a merged copy so the caller's +headers+
  # hash is not mutated (the original assigned into it directly).
  request_headers = headers.merge(
    'x-amz-metadata-directive' => directive.to_s.upcase,
    'x-amz-copy-source'        => "#{src_bucket}/#{CGI::escape src_key}",
    :url                       => "#{dest_bucket}/#{CGI::escape dest_key}")
  req_hash = generate_rest_request('PUT', request_headers)
  request_info(req_hash, S3CopyParser.new)
rescue
  on_exception
end

#create_bucket(bucket, headers = {}) ⇒ Object

Creates new bucket. Returns true or an exception.

# create a bucket at American server
s3.create_bucket('my-awesome-bucket-us') #=> true
# create a bucket at European server
s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true


216
217
218
219
220
221
222
223
224
225
226
227
228
229
# File 'lib/s3/s3_interface.rb', line 216

# Creates a new bucket. Returns true or an exception.
#
#  s3.create_bucket('my-awesome-bucket-us')                   #=> true
#  s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
def create_bucket(bucket, headers={})
  data = nil
  unless Aws::Utils.blank?(headers[:location])
    location = headers[:location].to_s
    # AWS expects the legacy :eu shortcut as the literal 'EU' constraint.
    location.upcase! if location == 'eu'
    data = "<CreateBucketConfiguration><LocationConstraint>#{location}</LocationConstraint></CreateBucketConfiguration>"
  end
  req_hash = generate_rest_request('PUT', headers.merge(:url=>bucket, :data => data))
  request_info(req_hash, RightHttp2xxParser.new)
rescue StandardError => e
  # Narrowed from `rescue Exception`, which also swallowed SignalException,
  # SystemExit and NoMemoryError.
  # If the bucket already exists AWS returns an error for the location
  # constraint interface; treat that case as success.
  e.is_a?(Aws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true : on_exception
end

Generates link for ‘CreateBucket’.

s3.create_bucket_link('my_awesome_bucket') #=> url string


1013
1014
1015
1016
1017
# File 'lib/s3/s3_interface.rb', line 1013

# Generates a signed link for 'CreateBucket'.
#
#  s3.create_bucket_link('my_awesome_bucket') #=> url string
def create_bucket_link(bucket, expires=nil, headers={})
  return generate_link('PUT', headers.merge(:url => bucket), expires)
rescue
  on_exception
end

#delete(bucket, key = '', headers = {}) ⇒ Object

Deletes key. Returns true or an exception.

s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true


759
760
761
762
763
764
# File 'lib/s3/s3_interface.rb', line 759

# Deletes a key. Returns true or an exception.
#
#  s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true
def delete(bucket, key='', headers={})
  url = "#{bucket}/#{CGI::escape key}"
  request_info(generate_rest_request('DELETE', headers.merge(:url => url)),
               RightHttp2xxParser.new)
rescue
  on_exception
end

#delete_bucket(bucket, headers = {}) ⇒ Object

Deletes a bucket. The bucket must be empty! Returns true or an exception.

s3.delete_bucket('my_awesome_bucket')  #=> true

See also: force_delete_bucket method



283
284
285
286
287
288
# File 'lib/s3/s3_interface.rb', line 283

# Deletes a bucket (which must be empty). Returns true or an exception.
# See also: force_delete_bucket.
def delete_bucket(bucket, headers={})
  request = generate_rest_request('DELETE', headers.merge(:url => bucket))
  request_info(request, RightHttp2xxParser.new)
rescue
  on_exception
end

Generates link for ‘DeleteBucket’.

s3.delete_bucket_link('my_awesome_bucket') #=> url string


1023
1024
1025
1026
1027
# File 'lib/s3/s3_interface.rb', line 1023

# Generates a signed link for 'DeleteBucket'.
#
#  s3.delete_bucket_link('my_awesome_bucket') #=> url string
def delete_bucket_link(bucket, expires=nil, headers={})
  return generate_link('DELETE', headers.merge(:url => bucket), expires)
rescue
  on_exception
end

#delete_folder(bucket, folder_key, separator = '/') ⇒ Object

Deletes all keys where the ‘folder_key’ may be assumed as ‘folder’ name. Returns an array of string keys that have been deleted.

s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
s3.delete_folder('my_awesome_bucket','test')                       #=> ['test','test/2/34','test/3']


951
952
953
954
955
956
957
958
959
960
961
962
# File 'lib/s3/s3_interface.rb', line 951

# Deletes all keys for which +folder_key+ may be assumed to be a 'folder'
# name. Returns a flat array of the string keys that were deleted.
#
#  s3.delete_folder('my_awesome_bucket','test') #=> ['test','test/2/34','test/3']
def delete_folder(bucket, folder_key, separator='/')
  # Non-destructive chomp: the caller's string must not be mutated.
  folder_key = folder_key.chomp(separator)
  allkeys = []
  incrementally_list_bucket(bucket, {'prefix' => folder_key}) do |results|
    keys = results[:contents].map { |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil }.compact
    keys.each { |key| delete(bucket, key) }
    # concat (not <<) keeps the result the documented flat array of keys;
    # << produced a nested array-of-arrays, one sub-array per listing page.
    allkeys.concat(keys)
  end
  allkeys
rescue
  on_exception
end

Generates link for ‘DeleteObject’.

s3.delete_link('my_awesome_bucket',key) #=> url string


1085
1086
1087
1088
1089
# File 'lib/s3/s3_interface.rb', line 1085

# Generates a signed link for 'DeleteObject'.
#
#  s3.delete_link('my_awesome_bucket',key) #=> url string
def delete_link(bucket, key, expires=nil, headers={})
  url = "#{bucket}/#{Utils::URLencode key}"
  generate_link('DELETE', headers.merge(:url => url), expires)
rescue
  on_exception
end

#fetch_request_params(headers) ⇒ Object

:nodoc:



137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
# File 'lib/s3/s3_interface.rb', line 137

# Resolves the target server, request path and path-to-sign for a request.
# Buckets with DNS-compatible names are addressed virtual-host style
# (bucket.server) unless @params[:virtual_hosting] disables it.
def fetch_request_params(headers) #:nodoc:
  # default server to use
  server  = @params[:server]
  # Dup before the destructive chop!: String#to_s returns self for strings,
  # so the previous in-place chop! silently mutated @params[:service].
  service = @params[:service].to_s.dup
  service.chop! if service[%r{/$}] # remove trailing '/' from service
  # extract bucket name and check it's dns compatibility
  headers[:url].to_s[%r{^([a-z0-9._-]*)(/[^?]*)?(\?.+)?}i]
  bucket_name, key_path, params_list = $1, $2, $3
  # select request model
  if is_dns_bucket?(bucket_name) and !@params[:virtual_hosting]
    # fix a path
    server   = "#{bucket_name}.#{server}"
    key_path ||= '/'
    path     = "#{service}#{key_path}#{params_list}"
  else
    path = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  end
  # The signature always covers the path-style form of the resource.
  path_to_sign = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  [server, path, path_to_sign]
end

#force_delete_bucket(bucket) ⇒ Object

Deletes all keys in bucket then deletes bucket. Returns true or an exception.

s3.force_delete_bucket('my_awesome_bucket')


939
940
941
942
943
944
# File 'lib/s3/s3_interface.rb', line 939

# Deletes every key in +bucket+, then deletes the bucket itself.
# Returns true or an exception.
#
#  s3.force_delete_bucket('my_awesome_bucket')
def force_delete_bucket(bucket)
  clear_bucket(bucket)
  delete_bucket(bucket)
rescue
  on_exception
end

Generates link for QUERY API



979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
# File 'lib/s3/s3_interface.rb', line 979

# Generates a pre-signed QUERY-API link for +method+ against the resource
# described by +headers+. +expires+ may be an absolute time, epoch seconds,
# or a relative offset (< one year) in seconds.
def generate_link(method, headers={}, expires=nil) #:nodoc:
  # calculate request data
  server, path, path_to_sign = fetch_request_params(headers)
  # expiration time
  expires ||= DEFAULT_EXPIRES_AFTER
  # Integer replaces Fixnum (removed in Ruby 3.2). Offsets shorter than one
  # year are treated as relative and converted to an absolute timestamp.
  expires = Time.now.utc + expires if expires.is_a?(Integer) && (expires < ONE_YEAR_IN_SECONDS)
  expires = expires.to_i
  # remove unset(==optional) and symbolic keys
  headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  # generate auth strings
  auth_string = canonical_string(method, path_to_sign, headers, expires)
  signature   = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
  # path building
  addon       = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
  path        += path[/\?/] ? "&#{addon}" : "?#{addon}"
  "#{@params[:protocol]}://#{server}:#{@params[:port]}#{path}"
rescue
  on_exception
end

#generate_rest_request(method, headers) ⇒ Object

Generates request hash for REST API. Assumes that headers is URL encoded (use CGI::escape)



160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
# File 'lib/s3/s3_interface.rb', line 160

# Generates a request hash for the REST API. Assumes +headers+ values are
# already URL encoded (use CGI::escape).
# Returns {:request => Net::HTTPRequest, :server, :port, :protocol}.
# NOTE(review): the passed +headers+ hash is mutated below (nil-valued and
# Symbol keys are deleted) — callers should not reuse it afterwards.
def generate_rest_request(method, headers) # :nodoc:
  # calculate request data
  server, path, path_to_sign = fetch_request_params(headers)
  data = headers[:data]
  # remove unset(==optional) and symbolyc keys
  headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  #
  headers['content-type'] ||= ''
  headers['date']         = Time.now.httpdate
  # create request
  request                 = Net::HTTP.const_get(method.capitalize).new(path)
  request.body = data if data
  # set request headers and meta headers
  headers.each { |key, value| request[key.to_s] = value }
  #generate auth strings
  auth_string              = canonical_string(request.method, path_to_sign, request.to_hash)
  signature                = Utils::sign(@aws_secret_access_key, auth_string)
  # set other headers
  request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
  # prepare output hash
  {:request  => request,
   :server   => server,
   :port     => @params[:port],
   :protocol => @params[:protocol]}
end

#get(bucket, key, headers = {}, &block) ⇒ Object

Retrieves object data from Amazon. Returns a hash or an exception.

s3.get('my_awesome_bucket', 'log/curent/1.log') #=>

    {:object  => "Ola-la!",
     :headers => {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
                  "content-type"      => "",
                  "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
                  "date"              => "Wed, 23 May 2007 09:08:03 GMT",
                  "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
                  "x-amz-meta-family" => "Woho556!",
                  "x-amz-request-id"  => "0000000C246D770C",
                  "server"            => "AmazonS3",
                  "content-length"    => "7"}}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned. Example: foo = File.new(‘./chunder.txt’, File::CREAT|File::RDWR) rhdr = s3.get(‘aws-test’, ‘Cent5V1_7_1.img.part.00’) do |chunk|

foo.write(chunk)

end foo.close



662
663
664
665
666
667
# File 'lib/s3/s3_interface.rb', line 662

# Retrieves object data from Amazon. Returns a hash with :object and
# :headers, or raises an exception. When a block is given, the response
# body is yielded to it in chunks (streaming) instead of being buffered.
def get(bucket, key, headers={}, &block)
  url = "#{bucket}/#{CGI::escape key}"
  request_info(generate_rest_request('GET', headers.merge(:url => url)),
               S3HttpResponseBodyParser.new, &block)
rescue
  on_exception
end

#get_acl(bucket, key = '', headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.

s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
  {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
                "content-type"=>"application/xml;charset=ISO-8859-1",
                "date"=>"Wed, 23 May 2007 09:40:16 GMT",
                "x-amz-request-id"=>"B183FA7AB5FBB4DD",
                "server"=>"AmazonS3",
                "transfer-encoding"=>"chunked"},
   :object  => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
                <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
                <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
                16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
                <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }


830
831
832
833
834
835
836
# File 'lib/s3/s3_interface.rb', line 830

# Retrieves the ACL (access control policy) for a bucket (empty +key+) or
# an object. Returns a hash of response headers plus the raw ACL XML.
def get_acl(bucket, key='', headers={})
  key_part = Aws::Utils.blank?(key) ? '' : "/#{CGI::escape key}"
  request  = generate_rest_request('GET', headers.merge(:url => "#{bucket}#{key_part}?acl"))
  request_info(request, S3HttpResponseBodyParser.new)
rescue
  on_exception
end

Generates link for ‘GetACL’.

s3.get_acl_link('my_awesome_bucket',key) #=> url string


1096
1097
1098
1099
1100
# File 'lib/s3/s3_interface.rb', line 1096

# Generates a signed link for 'GetACL'.
#
#  s3.get_acl_link('my_awesome_bucket',key) #=> url string
def get_acl_link(bucket, key='', headers={})
  generate_link('GET', headers.merge(:url => "#{bucket}/#{Utils::URLencode key}?acl"))
rescue
  on_exception
end

#get_acl_parse(bucket, key = '', headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of :grantees

s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>

{ :grantees=>
  { "16...2a"=>
    { :display_name=>"root",
      :permissions=>["FULL_CONTROL"],
      :attributes=>
       { "xsi:type"=>"CanonicalUser",
         "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
   "http://acs.amazonaws.com/groups/global/AllUsers"=>
     { :display_name=>"AllUsers",
       :permissions=>["READ"],
       :attributes=>
        { "xsi:type"=>"Group",
          "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
 :owner=>
   { :id=>"16..2a",
     :display_name=>"root"}}


860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
# File 'lib/s3/s3_interface.rb', line 860

# Retrieves and parses the ACL for a bucket (empty +key+) or object.
# Returns {:owner => {...}, :grantees => {id_or_uri => {:display_name,
# :permissions, :attributes}}}.
def get_acl_parse(bucket, key='', headers={})
  key               = Aws::Utils.blank?(key) ? '' : "/#{CGI::escape key}"
  req_hash          = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
  acl               = request_info(req_hash, S3AclParser.new(:logger => @logger))
  result            = {}
  result[:owner]    = acl[:owner]
  result[:grantees] = {}
  acl[:grantees].each do |grantee|
    # A grantee is keyed by canonical user id or, for groups, by URI.
    # (Note: this `key` shadows the method parameter reassigned above.)
    key = grantee[:id] || grantee[:uri]
    if result[:grantees].key?(key)
      # NOTE(review): this appends the raw :permissions value, while the
      # initial entry below uses .lines.to_a — the merged list may mix
      # element shapes; confirm against S3AclParser output before relying
      # on it.
      result[:grantees][key][:permissions] << grantee[:permissions]
    else
      result[:grantees][key] =
          {:display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
           :permissions  => grantee[:permissions].lines.to_a,
           :attributes   => grantee[:attributes]}
    end
  end
  result
rescue
  on_exception
end

#get_bucket_acl(bucket, headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.



893
894
895
896
897
# File 'lib/s3/s3_interface.rb', line 893

# Retrieves the ACL for a bucket; delegates to #get_acl with an empty key.
def get_bucket_acl(bucket, headers={})
  get_acl(bucket, '', headers)
rescue
  on_exception
end

Generates link for ‘GetBucketACL’.

s3.get_acl_link('my_awesome_bucket',key) #=> url string


1116
1117
1118
1119
1120
# File 'lib/s3/s3_interface.rb', line 1116

# Generates a signed link for 'GetBucketACL'; delegates to #get_acl_link
# with an empty key.
def get_bucket_acl_link(bucket, headers={})
  get_acl_link(bucket, '', headers)
rescue
  on_exception
end

#get_bucket_policy(bucket) ⇒ Object



906
907
908
909
910
911
# File 'lib/s3/s3_interface.rb', line 906

# Fetches the policy document attached to +bucket+ as a raw response body.
def get_bucket_policy(bucket)
  request = generate_rest_request('GET', {:url => "#{bucket}?policy"})
  request_info(request, S3HttpResponseBodyParser.new)
rescue
  on_exception
end

Generates link for ‘GetObject’.

if a bucket complies with virtual hosting naming then returns a link with the bucket as a part of the host name:

s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...

otherwise returns an old style link (the bucket is a part of path):

s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...

see docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html



1065
1066
1067
1068
1069
# File 'lib/s3/s3_interface.rb', line 1065

# Generates a signed link for 'GetObject'. DNS-compatible bucket names are
# placed in the host name; others go in the path (see fetch_request_params).
def get_link(bucket, key, expires=nil, headers={})
  url = "#{bucket}/#{Utils::URLencode key.to_s}"
  generate_link('GET', headers.merge(:url => url), expires)
rescue
  on_exception
end

#get_logging_parse(params) ⇒ Object

Retrieves the logging configuration for a bucket. Returns a hash of :targetbucket, :targetprefix

s3.interface.get_logging_parse(:bucket => “asset_bucket”)

=> {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}


253
254
255
256
257
258
259
260
261
# File 'lib/s3/s3_interface.rb', line 253

# Retrieves the logging configuration for a bucket.
# Expects params to contain :bucket (mandatory) and optionally :headers.
def get_logging_parse(params)
  Utils.mandatory_arguments([:bucket], params)
  Utils.allow_only([:bucket, :headers], params)
  params[:headers] ||= {}
  request = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}?logging"))
  request_info(request, S3LoggingParser.new)
rescue
  on_exception
end

#get_object(bucket, key, headers = {}) ⇒ Object

Retrieves object data only (headers are omitted). Returns string or an exception.

s3.get('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'


968
969
970
971
972
# File 'lib/s3/s3_interface.rb', line 968

# Retrieves object data only (response headers are omitted).
# Returns the body string or raises an exception.
def get_object(bucket, key, headers={})
  response = get(bucket, key, headers)
  response[:object]
rescue
  on_exception
end

#head(bucket, key, headers = {}) ⇒ Object

Retrieves object metadata. Returns a hash of http_response_headers.

s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
  {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
   "content-type"      => "",
   "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
   "date"              => "Wed, 23 May 2007 09:08:03 GMT",
   "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
   "x-amz-meta-family" => "Woho556!",
   "x-amz-request-id"  => "0000000C246D770C",
   "server"            => "AmazonS3",
   "content-length"    => "7"}


748
749
750
751
752
753
# File 'lib/s3/s3_interface.rb', line 748

# Retrieves object metadata via HTTP HEAD. Returns a hash of the HTTP
# response headers.
def head(bucket, key, headers={})
  url = "#{bucket}/#{CGI::escape key}"
  request_info(generate_rest_request('HEAD', headers.merge(:url => url)),
               S3HttpResponseHeadParser.new)
rescue
  on_exception
end

Generates link for ‘HeadObject’.

s3.head_link('my_awesome_bucket',key) #=> url string


1075
1076
1077
1078
1079
# File 'lib/s3/s3_interface.rb', line 1075

# Generates a signed link for 'HeadObject'.
#
#  s3.head_link('my_awesome_bucket',key) #=> url string
def head_link(bucket, key, expires=nil, headers={})
  url = "#{bucket}/#{Utils::URLencode key}"
  generate_link('HEAD', headers.merge(:url => url), expires)
rescue
  on_exception
end

#incrementally_list_bucket(bucket, options = {}, headers = {}, &block) ⇒ Object

Incrementally list the contents of a bucket. Yields the following hash to a block:

s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) yields
 {
   :name => 'bucketname',
   :prefix => 'subfolder/',
   :marker => 'fileN.jpg',
   :max_keys => 234,
   :delimiter => '/',
   :is_truncated => true,
   :next_marker => 'fileX.jpg',
   :contents => [
     { :key => "file1",
       :last_modified => "2007-05-18T07:00:59.000Z",
       :e_tag => "000000000059075b964b07152d234b70",
       :size => 3,
       :storage_class => "STANDARD",
       :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
       :owner_display_name => "root"
     }, { :key, ...}, ... {:key, ...}
   ]
   :common_prefixes => [
     "prefix1",
     "prefix2",
     ...,
     "prefixN"
   ]
 }


345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
# File 'lib/s3/s3_interface.rb', line 345

# Incrementally lists bucket contents, yielding one parsed listing page to
# the block per request. Keeps requesting while the response is truncated
# and the optional 'max-keys' budget (if given) is not exhausted.
# NOTE(review): relies on the begin...end while post-condition loop and on
# sibling helpers decide_marker / under_max_keys; code left byte-identical.
def incrementally_list_bucket(bucket, options={}, headers={}, &block)
  # Symbolize option keys so callers may pass strings or symbols.
  internal_options = (options.map {|k,v| [k.to_sym, v] }).inject({}) {|h, ar| h[ar[0]] = ar[1]; h}
  begin
    internal_bucket = bucket.dup
    unless internal_options.nil? || internal_options.empty?
      internal_bucket << '?'
      internal_bucket << internal_options.map { |k, v| "#{k.to_s}=#{CGI::escape v.to_s}" }.join('&')
    end
    req_hash            = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
    response            = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
    there_are_more_keys = response[:is_truncated]
    if (there_are_more_keys)
      # Advance the pagination marker and shrink the remaining key budget.
      internal_options[:marker] = decide_marker(response)
      total_results             = response[:contents].length + response[:common_prefixes].length
      internal_options[:'max-keys'] ? (internal_options[:'max-keys'] -= total_results) : nil
    end
    yield response
  end while there_are_more_keys && under_max_keys(internal_options)
  true
rescue
  on_exception
end

#initiate_multipart(bucket, key, headers = {}) ⇒ Object

Initiates a multipart upload and returns an upload ID or an exception docs.amazonwebservices.com/AmazonS3/latest/API/mpUploadInitiate.html

s3.initiate_multipart(‘my_awesome_bucket’, ‘hugeObject’) => WL7dk8sqbtk3Rg641HHWaNeG6RxI4fzS8V0YvuQAfs5Hbk6WNZOU1z_AhGv

The returned uploadId must be retained for use in uploading parts; see docs.amazonwebservices.com/AmazonS3/latest/dev/mpuoverview.html



538
539
540
541
542
543
# File 'lib/s3/s3_interface.rb', line 538

# Initiates a multipart upload. Returns the parsed response containing the
# upload id required by upload_part/complete_multipart, or raises.
def initiate_multipart(bucket, key, headers={})
  url = "#{bucket}/#{CGI::escape key}?uploads"
  request_info(generate_rest_request('POST', headers.merge(:url => url)),
               S3InitiateMultipartUploadParser.new)
rescue
  on_exception
end

#is_dns_bucket?(bucket_name) ⇒ Boolean

Returns:

  • (Boolean)


128
129
130
131
132
133
134
135
# File 'lib/s3/s3_interface.rb', line 128

# Returns true when +bucket_name+ is DNS-compatible (3..63 chars; each
# dot-separated label is lowercase alphanumeric, optionally with interior
# hyphens), false otherwise. Previously returned nil for failures; false
# is equally falsy, so callers are unaffected, but the predicate now
# returns a proper boolean.
def is_dns_bucket?(bucket_name)
  name = bucket_name.to_s
  return false unless (3..63).include?(name.size)
  name.split('.').all? { |label| label[/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/] }
end

#list_all_my_buckets(headers = {}) ⇒ Object

Returns an array of customer’s buckets. Each item is a hash.

s3.list_all_my_buckets #=>
  [{:owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    :owner_display_name => "root",
    :name               => "bucket_name",
    :creation_date      => "2007-04-19T18:47:43.000Z"}, ..., {...}]


202
203
204
205
206
207
# File 'lib/s3/s3_interface.rb', line 202

# Returns an array of hashes describing the account's buckets
# (:owner_id, :owner_display_name, :name, :creation_date).
def list_all_my_buckets(headers={})
  request = generate_rest_request('GET', headers.merge(:url => ''))
  request_info(request, S3ListAllMyBucketsParser.new(:logger => @logger))
rescue
  on_exception
end

Generates link for ‘ListAllMyBuckets’.

s3.list_all_my_buckets_link #=> url string


1003
1004
1005
1006
1007
# File 'lib/s3/s3_interface.rb', line 1003

# Generates a signed link for 'ListAllMyBuckets'.
#
#  s3.list_all_my_buckets_link #=> url string
def list_all_my_buckets_link(expires=nil, headers={})
  return generate_link('GET', headers.merge(:url => ''), expires)
rescue
  on_exception
end

#list_bucket(bucket, options = {}, headers = {}) ⇒ Object

Returns an array of bucket’s keys. Each array item (key data) is a hash.

s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) #=>
  [{:key                => "test1",
    :last_modified      => "2007-05-18T07:00:59.000Z",
    :owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    :owner_display_name => "root",
    :e_tag              => "000000000059075b964b07152d234b70",
    :storage_class      => "STANDARD",
    :size               => 3,
    :service=> {'is_truncated' => false,
                'prefix'       => "t",
                'marker'       => "",
                'name'         => "my_awesome_bucket",
                'max-keys'     => "5"}, ..., {...}]


306
307
308
309
310
311
312
313
314
315
316
# File 'lib/s3/s3_interface.rb', line 306

# Lists keys in a bucket ('GET Bucket'). +options+ may carry the usual
# S3 query parameters ('prefix', 'marker', 'max-keys', 'delimiter', ...),
# which are URL-escaped and appended to the request path. The caller's
# +bucket+ string is never mutated.
def list_bucket(bucket, options={}, headers={})
  url = bucket.dup
  if options && !options.empty?
    query = options.map { |name, value| "#{name}=#{CGI::escape value.to_s}" }.join('&')
    url << '?' << query
  end
  request = generate_rest_request('GET', headers.merge(:url => url))
  request_info(request, S3ListBucketParser.new(:logger => @logger))
rescue
  on_exception
end

Generates link for ‘ListBucket’.

s3.list_bucket_link('my_awesome_bucket') #=> url string


1033
1034
1035
1036
1037
1038
1039
1040
1041
# File 'lib/s3/s3_interface.rb', line 1033

# Generates a pre-signed URL for 'ListBucket'.
#
# Fix: build the query string on a copy of +bucket+. The previous body
# appended ('<<') directly to the caller's string — unlike #list_bucket,
# which dups it first — so each call permanently corrupted the caller's
# bucket-name string.
def list_bucket_link(bucket, options=nil, expires=nil, headers={})
  url = bucket.dup
  unless options.nil? || options.empty?
    url << '?'
    url << options.map { |k, v| "#{k.to_s}=#{CGI::escape v.to_s}" }.join('&')
  end
  generate_link('GET', headers.merge(:url=>url), expires)
rescue
  on_exception
end

#list_parts(bucket, key, uploadId, headers = {}) ⇒ Object

List parts of a multipart upload, returning a hash or an exception docs.amazonwebservices.com/AmazonS3/latest/API/mpUploadListParts.html

response looks like:

{ :bucket=>"mariosFoo_awesome_test_bucket_000A1",
  :key=>"mariosFoosegmented",
  :upload_id=>"jQKX7JdJBTrbvLn9apUPIXkt14FHdp6nMZVg--",
  :parts=>  [   {:part_number=>"1", :last_modified=>"2012-10-30T15:06:28.000Z",
                    :etag=>"\"78f871f6f01673a4aca05b1f8e26df08\"", :size=>"6276589"},
                {:part_number=>"2", :last_modified=>"2012-10-30T15:08:22.000Z",
                    :etag=>"\"e7b94a1e959ca066026da3ec63aad321\"", :size=>"7454095"}] }

Clients must specify the uploadId (obtained from the initiate_multipart call)

s3.list_parts('my_awesome_bucket', 'hugeObject', "WL7dk8sqbtk3Rg641HHWaNeG6RxI")

See docs.amazonwebservices.com/AmazonS3/latest/dev/mpuoverview.html



630
631
632
633
# File 'lib/s3/s3_interface.rb', line 630

# Lists the parts uploaded so far for a multipart upload ('List Parts').
# +uploadId+ is the identifier returned by the initiate-multipart call.
# Returns a hash with :bucket, :key, :upload_id and a :parts array.
#
# Fix: route failures through on_exception like every other request
# method in this class — list_parts was the only one missing the rescue,
# so raw transport errors leaked to callers unconverted.
def list_parts(bucket, key, uploadId, headers={})
  url      = "#{bucket}/#{CGI::escape key}?uploadId=#{uploadId}"
  req_hash = generate_rest_request('GET', headers.merge(:url => url))
  request_info(req_hash, S3ListMultipartPartsParser.new)
rescue
  on_exception
end

#move(src_bucket, src_key, dest_bucket, dest_key = nil, directive = :copy, headers = {}) ⇒ Object

Move an object.

directive: :copy    - copy meta-headers from source (default value)
           :replace - replace meta-headers by passed ones

# move bucket1/key1 to bucket1/key2
s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}

# move bucket1/key1 to bucket2/key2 with new meta-headers assignment
s3.move('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}


799
800
801
802
803
804
# File 'lib/s3/s3_interface.rb', line 799

# Moves an object: server-side copy to the destination, then delete of
# the source — unless source and destination name the same object.
#
# directive: :copy    - carry meta-headers over from the source (default)
#            :replace - use the meta-headers passed in +headers+ instead
def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
  result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
  same_object = (src_bucket == dest_bucket) && (src_key == dest_key)
  delete(src_bucket, src_key) unless same_object
  result
end

#put(bucket, key, data = nil, headers = {}) ⇒ Object

Saves object to Amazon. Returns true or an exception. Any header starting with AMAZON_METADATA_PREFIX is considered user metadata. It will be stored with the object and returned when you retrieve the object. The total size of the HTTP request, not including the body, must be less than 4 KB.

s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true

This method is capable of ‘streaming’ uploads; that is, it can upload data from a file or other IO object without first reading all the data into memory. This is most useful for large PUTs - it is difficult to read a 2 GB file entirely into memory before sending it to S3. To stream an upload, pass an object that responds to ‘read’ (like the read method of IO) and to either ‘lstat’ or ‘size’. For files, this means streaming is enabled by simply making the call:

s3.put(bucket_name, 'S3keyname.forthisfile',  File.open('localfilename.dat'))

If the IO object you wish to stream from responds to the read method but doesn’t implement lstat or size, you can extend the object dynamically to implement these methods, or define your own class which defines these methods. Be sure that your class returns ‘nil’ from read() after having read ‘size’ bytes. Otherwise S3 will drop the socket after ‘Content-Length’ bytes have been uploaded, and HttpConnection will interpret this as an error.

This method now supports very large PUTs, where very large is > 2 GB.

For Win32 users: Files and IO objects should be opened in binary mode. If a text mode IO object is passed to PUT, it will be converted to binary mode.



423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
# File 'lib/s3/s3_interface.rb', line 423

# Stores an object in S3 ('PUT Object'); returns true or raises.
#
# Headers starting with AMAZON_METADATA_PREFIX become user metadata.
# +data+ may be a String (wrapped in a StringIO) or any IO-like object
# responding to #read plus #lstat or #size, which enables streamed
# uploads of bodies too large to hold in memory.
def put(bucket, key, data=nil, headers={})
  # Win32: IO objects opened in text mode must be reset to binary
  # for streaming to work properly.
  data.binmode if data.respond_to?(:binmode)
  data = StringIO.new(data) if data.is_a?(String)

  # Prefer lstat (files) over size; fall back to 0 when neither exists.
  size = if data.respond_to?(:lstat)
           data.lstat.size
         elsif data.respond_to?(:size)
           data.size
         else
           0
         end
  # Ask S3 for a 100-continue handshake before sending large bodies.
  headers['expect'] = '100-continue' if size >= USE_100_CONTINUE_PUT_SIZE

  req_hash = generate_rest_request('PUT',
                                   headers.merge(:url             => "#{bucket}/#{CGI::escape key}",
                                                 :data            => data,
                                                 'Content-Length' => size.to_s))
  request_info(req_hash, RightHttp2xxParser.new)
rescue
  on_exception
end

#put_acl(bucket, key, acl_xml_doc, headers = {}) ⇒ Object

Sets the ACL on a bucket or object.



884
885
886
887
888
889
890
# File 'lib/s3/s3_interface.rb', line 884

# Applies an ACL XML document to an object, or to the bucket itself
# when +key+ is blank.
def put_acl(bucket, key, acl_xml_doc, headers={})
  path = Aws::Utils.blank?(key) ? '' : "/#{CGI::escape key}"
  req  = generate_rest_request('PUT', headers.merge(:url  => "#{bucket}#{path}?acl",
                                                    :data => acl_xml_doc))
  request_info(req, S3HttpResponseBodyParser.new)
rescue
  on_exception
end

Generates link for ‘PutACL’.

s3.put_acl_link('my_awesome_bucket',key) #=> url string


1106
1107
1108
1109
1110
# File 'lib/s3/s3_interface.rb', line 1106

# Builds a pre-signed URL for the 'PutACL' operation on +key+.
def put_acl_link(bucket, key='', headers={})
  generate_link('PUT', headers.merge(:url => "#{bucket}/#{Utils::URLencode key}?acl"))
rescue
  on_exception
end

#put_bucket_acl(bucket, acl_xml_doc, headers = {}) ⇒ Object

Sets the ACL on a bucket only.



900
901
902
903
904
# File 'lib/s3/s3_interface.rb', line 900

# Applies an ACL XML document to the bucket itself (empty object key);
# thin delegation to #put_acl.
def put_bucket_acl(bucket, acl_xml_doc, headers={})
  put_acl(bucket, '', acl_xml_doc, headers)
rescue
  on_exception
end

Generates link for ‘PutBucketACL’.

s3.put_bucket_acl_link('my_awesome_bucket', acl_xml_doc) #=> url string


1126
1127
1128
1129
1130
# File 'lib/s3/s3_interface.rb', line 1126

# Builds a pre-signed URL for setting a bucket's ACL.
#
# Fix: the old body called put_acl_link(bucket, '', acl_xml_doc, headers),
# but put_acl_link takes only (bucket, key='', headers={}) — four
# arguments always raised ArgumentError, which the rescue silently fed
# into on_exception. Pass the headers hash in the headers slot.
# +acl_xml_doc+ stays in the signature for backward compatibility (a
# signed link carries no body; the document is supplied when the link
# is actually PUT).
def put_bucket_acl_link(bucket, acl_xml_doc, headers={})
  put_acl_link(bucket, '', headers)
rescue
  on_exception
end

#put_bucket_policy(bucket, policy) ⇒ Object



913
914
915
916
917
918
919
# File 'lib/s3/s3_interface.rb', line 913

# Attaches a bucket policy document to +bucket+ ('PUT Bucket policy').
#
# Fix: dropped the dead first line, which read an undefined local +key+
# (always nil, so blank? made it '') and assigned a value that was never
# used by the rest of the method.
def put_bucket_policy(bucket, policy)
  req_hash = generate_rest_request('PUT', {:url => "#{bucket}?policy", :data => policy})
  request_info(req_hash, S3HttpResponseBodyParser.new)
rescue
  on_exception
end

Generates link for ‘PutObject’.

s3.put_link('my_awesome_bucket',key, object) #=> url string


1047
1048
1049
1050
1051
# File 'lib/s3/s3_interface.rb', line 1047

# Builds a pre-signed URL for 'PutObject'.
def put_link(bucket, key, data=nil, expires=nil, headers={})
  opts = headers.merge(:url => "#{bucket}/#{Utils::URLencode key}", :data => data)
  generate_link('PUT', opts, expires)
rescue
  on_exception
end

#put_logging(params) ⇒ Object

Sets logging configuration for a bucket from the XML configuration document.

params:
 :bucket
 :xmldoc


267
268
269
270
271
272
273
274
275
# File 'lib/s3/s3_interface.rb', line 267

# Uploads a bucket-logging configuration ('PUT Bucket logging').
#
# params:
#   :bucket  - target bucket (required)
#   :xmldoc  - logging configuration XML document (required)
#   :headers - extra request headers (optional)
def put_logging(params)
  Utils.mandatory_arguments([:bucket, :xmldoc], params)
  Utils.allow_only([:bucket, :xmldoc, :headers], params)
  params[:headers] ||= {}
  req_hash = generate_rest_request('PUT',
                                   params[:headers].merge(:url  => "#{params[:bucket]}?logging",
                                                          :data => params[:xmldoc]))
  request_info(req_hash, S3TrueParser.new)
rescue
  on_exception
end

#rename(src_bucket, src_key, dest_key, headers = {}) ⇒ Object

Rename an object.

# rename bucket1/key1 to bucket1/key2
s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}


811
812
813
# File 'lib/s3/s3_interface.rb', line 811

# Renames an object within the same bucket by delegating to #move
# (server-side copy followed by deletion of the source key).
#
#   s3.rename('bucket1', 'key1', 'key2')
#     #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"..."}
def rename(src_bucket, src_key, dest_key, headers={})
  move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
end

#request_info(request, parser, options = {}, &block) ⇒ Object

Sends request to Amazon and parses the response. Raises AwsError if any banana happened.



188
189
190
191
# File 'lib/s3/s3_interface.rb', line 188

# Sends the prepared request hash to Amazon and feeds the response
# through +parser+. Raises AwsError if anything goes wrong.
def request_info(request, parser, options={}, &block) # :nodoc:
#      request_info2(request, parser, @params, :s3_connection, @logger, @@bench, options, &block)
  request_info3(self, request, parser, options, &block)
end

#retrieve_object(params, &block) ⇒ Object

New experimental API for retrieving objects, introduced in Aws 1.8.1. retrieve_object is similar in function to the older function get. It allows for optional verification of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.

If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values. Mandatory arguments:

:bucket - the bucket in which the object is stored
:key    - the object address (or path) within the bucket

Optional arguments:

 :headers - hash of additional HTTP headers to include with the request
 :md5     - MD5 checksum against which to verify the retrieved object

s3.retrieve_object(:bucket => "foobucket", :key => "foo")
  => {:verified_md5=>false,
      :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                 "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
                 "content-type"=>"",
                 "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                 "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
                 "x-amz-request-id"=>"EE4855DE27A2688C",
                 "server"=>"AmazonS3",
                 "content-length"=>"10"},
      :object=>"polemonium"}

s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
  => {:verified_md5=>true,
      :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                 "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
                 "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                 "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
                 "x-amz-request-id"=>"6E7F317356580599",
                 "server"=>"AmazonS3",
                 "content-length"=>"10"},
      :object=>"polemonium"}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned.



710
711
712
713
714
715
716
717
718
719
720
721
722
723
# File 'lib/s3/s3_interface.rb', line 710

# Fetches an object and its response headers. Arguments come as a hash:
#
#   :bucket  - bucket name (required)
#   :key     - object key (required)
#   :headers - extra request headers (optional)
#   :md5     - expected MD5; when given, :verified_md5 in the result
#              reports whether the returned ETag matched it (optional)
#
# Yields response data incrementally to the block, if one is given,
# allowing large objects to be streamed.
def retrieve_object(params, &block)
  Utils.mandatory_arguments([:bucket, :key], params)
  Utils.allow_only([:bucket, :key, :headers, :md5], params)
  params[:headers] ||= {}
  url      = "#{params[:bucket]}/#{CGI::escape params[:key]}"
  req_hash = generate_rest_request('GET', params[:headers].merge(:url => url))
  response = request_info(req_hash, S3HttpResponseBodyParser.new, &block)
  expected = params[:md5]
  response[:verified_md5] =
    if expected
      response[:headers]['etag'].gsub(/\"/, '') == expected
    else
      false
    end
  response
rescue
  on_exception
end

#retrieve_object_and_verify(params, &block) ⇒ Object

Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the “verified_md5” field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.

Raises:



728
729
730
731
732
733
# File 'lib/s3/s3_interface.rb', line 728

# Like #retrieve_object, but :md5 is mandatory and an ETag mismatch
# raises AwsError instead of merely reporting :verified_md5 => false.
def retrieve_object_and_verify(params, &block)
  Utils.mandatory_arguments([:md5], params)
  resp = retrieve_object(params, &block)
  unless resp[:verified_md5]
    raise AwsError.new("Retrieved object failed MD5 checksum verification: #{resp.inspect}")
  end
  resp
end

#store_object(params) ⇒ Object

New experimental API for uploading objects, introduced in Aws 1.8.1. store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments. The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).

If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values.

s3.store_object(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2954”, :data => “polemonium” )

=> {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
    :verified_md5=>true,
    "x-amz-request-id"=>"63916465939995BA",
    "server"=>"AmazonS3",
    "content-length"=>"0"}

s3.store_object(:bucket => “foobucket”, :key => “foo”, :data => “polemonium” )

=> {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
    :verified_md5=>false,
    "x-amz-request-id"=>"3B25A996BC2CDD3B",
    "server"=>"AmazonS3",
    "content-length"=>"0"}


480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
# File 'lib/s3/s3_interface.rb', line 480

# Uploads an object and returns the full response metadata hash
# (including the Amazon request id and the object ETag).
#
#   :bucket  - destination bucket (required)
#   :key     - object key (required)
#   :data    - String or IO-like body (required)
#   :headers - extra request headers (optional)
#   :md5     - expected MD5; :verified_md5 in the result reports whether
#              the returned ETag matched it (optional)
def store_object(params)
  Utils.allow_only([:bucket, :key, :data, :headers, :md5], params)
  Utils.mandatory_arguments([:bucket, :key, :data], params)
  params[:headers] ||= {}

  body = params[:data]
  # Win32: text-mode IO must be reset to binary for streaming.
  body.binmode if body.respond_to?(:binmode)
  large = (body.respond_to?(:lstat) && body.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
          (body.respond_to?(:size) && body.size >= USE_100_CONTINUE_PUT_SIZE)
  params[:headers]['expect'] = '100-continue' if large

  req_hash = generate_rest_request('PUT',
                                   params[:headers].merge(:url  => "#{params[:bucket]}/#{CGI::escape params[:key]}",
                                                          :data => body))
  resp = request_info(req_hash, S3HttpResponseHeadParser.new)
  resp[:verified_md5] =
    params[:md5] ? (resp['etag'].gsub(/\"/, '') == params[:md5]) : false
  resp
rescue
  on_exception
end

#store_object_and_verify(params) ⇒ Object

Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the “verified_md5” field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.

s3.store_object_and_verify(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2954”, :data => “polemonium” )

=> {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
    :verified_md5=>true,
    "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
    "server"=>"AmazonS3",
    "content-length"=>"0"}

s3.store_object_and_verify(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2953”, :data => “polemonium” )

Aws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
                                                                       "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                                                                       "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
                                                                       :verified_md5=>false,
                                                                       "x-amz-request-id"=>"0D7ADE09F42606F2",
                                                                       "server"=>"AmazonS3",
                                                                       "content-length"=>"0"}


524
525
526
527
528
# File 'lib/s3/s3_interface.rb', line 524

# Like #store_object, but :md5 is mandatory and an ETag mismatch raises
# AwsError instead of merely reporting :verified_md5 => false.
def store_object_and_verify(params)
  Utils.mandatory_arguments([:md5], params)
  resp = store_object(params)
  unless resp[:verified_md5]
    raise AwsError.new("Uploaded object failed MD5 checksum verification: #{resp.inspect}")
  end
  resp
end

#upload_part(bucket, key, uploadId, partNumber, data, headers = {}) ⇒ Object

Uploads a part in a multipart upload - returns an Etag for the part or an exception docs.amazonwebservices.com/AmazonS3/latest/API/mpUploadUploadPart.html

Among the parameters required, clients must supply the uploadId (obtained from the initiate_multipart method call) as well as the partNumber for this part (user-specified, determining the sequence for reassembly).

s3.upload_part(‘my_awesome_bucket’, ‘hugeObject’, “WL7dk8sqbtk3Rg641HHWaNeG6RxI”, “2”, File.open(‘localfilename.dat’))

=> "b54357faf0632cce46e942fa68356b38"

The return Etag must be retained for use in the completion of the multipart upload; see docs.amazonwebservices.com/AmazonS3/latest/dev/mpuoverview.html



557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
# File 'lib/s3/s3_interface.rb', line 557

# Uploads one part of a multipart upload ('Upload Part') and returns its
# ETag. +uploadId+ comes from the initiate-multipart call; +partNumber+
# fixes this part's position for reassembly. The returned ETag must be
# retained for the complete-multipart call.
def upload_part(bucket, key, uploadId, partNumber, data, headers={})
  data.binmode if data.respond_to?(:binmode) # Win32 text-mode safety
  data = StringIO.new(data) if data.is_a?(String)
  size = if data.respond_to?(:lstat)
           data.lstat.size
         elsif data.respond_to?(:size)
           data.size
         else
           0
         end
  headers['expect'] = '100-continue' if size >= USE_100_CONTINUE_PUT_SIZE
  url      = "#{bucket}/#{CGI::escape key}?partNumber=#{partNumber}&uploadId=#{uploadId}"
  req_hash = generate_rest_request('PUT', headers.merge(:url            => url,
                                                        :data           => data,
                                                        'Content-Length' => size.to_s))
  request_info(req_hash, S3UploadPartParser.new)
rescue
  on_exception
end