Class: S3
Constant Summary collapse
- S3_ENDPOINT = "s3.amazonaws.com"
- XMLNS = 'http://s3.amazonaws.com/doc/2006-03-01/'
Instance Method Summary collapse
- #build_post_form(bucket_name, key, options = {}) ⇒ Object
- #build_post_policy(expiration_time, conditions) ⇒ Object
-
#copy_object(source_bucket_name, source_object_key, dest_bucket_name, dest_object_key, acl = nil, new_metadata = nil) ⇒ Object
The copy object feature was added to the S3 API after the release of “Programming Amazon Web Services” so it is not discussed in the book’s text.
- #create_bucket(bucket_name, location = nil) ⇒ Object
- #create_object(bucket_name, object_key, opts = {}) ⇒ Object
- #delete_bucket(bucket_name) ⇒ Object
- #delete_object(bucket_name, object_key) ⇒ Object
- #generate_s3_uri(bucket_name = '', object_name = '', params = []) ⇒ Object
- #get_acl(bucket_name, object_key = '') ⇒ Object
- #get_bucket_location(bucket_name) ⇒ Object
- #get_logging(bucket_name) ⇒ Object
- #get_object(bucket_name, object_key, headers = {}) ⇒ Object
- #get_object_metadata(bucket_name, object_key, headers = {}) ⇒ Object
- #get_owner_id ⇒ Object
- #get_torrent(bucket_name, object_key, output) ⇒ Object
- #list_buckets ⇒ Object
- #list_objects(bucket_name, *params) ⇒ Object
- #set_acl(owner_id, bucket_name, object_key = '', grants = [owner_id=>'FULL_CONTROL']) ⇒ Object
- #set_canned_acl(canned_acl, bucket_name, object_key = '') ⇒ Object
- #set_logging(bucket_name, target_bucket = nil, target_prefix = "#{bucket_name}.") ⇒ Object
- #sign_uri(method, expires, bucket_name, object_key = '', opts = {}) ⇒ Object
- #valid_dns_name(bucket_name) ⇒ Object
Instance Method Details
#build_post_form(bucket_name, key, options = {}) ⇒ Object
499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 |
# File 'lib/OriginalAWS/S3.rb', line 499
# Builds an HTML form for browser-based POST uploads direct to S3.
# options may include :expiration/:conditions (to authenticate the form
# with a signed policy), :fields (extra hidden inputs, or raw <input>
# text when the value is nil) and :text_input (a custom 'file' field).
# Returns the complete <form> markup as a String.
def build_post_form(bucket_name, key, options={})
  fields = []

  # Form is only authenticated if a policy is specified.
  if options[:expiration] or options[:conditions]
    # Generate policy document
    policy = build_post_policy(options[:expiration], options[:conditions])
    puts "POST Policy\n===========\n#{policy}\n\n" if @debug

    # Add the base64-encoded policy document as the 'policy' field
    policy_b64 = encode_base64(policy)
    fields << %{<input type="hidden" name="policy" value="#{policy_b64}">}

    # Add the AWS access key as the 'AWSAccessKeyId' field
    fields << %{<input type="hidden" name="AWSAccessKeyId" value="#{@aws_access_key}">}

    # Add signature for encoded policy document as the 'signature' field
    signature = generate_signature(policy_b64)
    fields << %{<input type="hidden" name="signature" value="#{signature}">}
  end

  # Include any additional fields
  options[:fields].each_pair do |n, v|
    if v.nil?
      # Allow users to provide their own <input> fields as text.
      fields << n
    else
      fields << %{<input type="hidden" name="#{n}" value="#{v}">}
    end
  end if options[:fields]

  # Add the vital 'file' input item, which may be a textarea or file.
  if options[:text_input]
    # Use the text_input option which should specify a textarea or text
    # input field. For example:
    # '<textarea name="file" cols="80" rows="5">Default Text</textarea>'
    fields << options[:text_input]
  else
    fields << %{<input name="file" type="file">}
  end

  # Construct a sub-domain URL to refer to the target bucket. The
  # HTTPS protocol will be used if the secure HTTP option is enabled.
  url = "http#{@secure_http ? 's' : ''}://#{bucket_name}.s3.amazonaws.com/"

  # Construct the entire form.
  form = %{
  <form action="#{url}" method="post" enctype="multipart/form-data">
  <input type="hidden" name="key" value="#{key}">
  #{fields.join("\n")}
  <br>
  <input type="submit" value="Upload to Amazon S3">
  </form>
  }
  puts "POST Form\n=========\n#{form}\n" if @debug
  return form
end
#build_post_policy(expiration_time, conditions) ⇒ Object
466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 |
# File 'lib/OriginalAWS/S3.rb', line 466
# Builds an S3 POST policy JSON document from an expiration Time and a
# Hash of condition rules. Each rule value may be nil (starts-with any),
# a String/Array (exact match), a Hash with :op/:value, or a numeric
# Range. Raises on invalid arguments or unsupported rule types.
def build_post_policy(expiration_time, conditions)
  unless expiration_time.respond_to?(:getutc)
    raise 'Policy document must include a valid expiration Time object'
  end
  unless conditions.instance_of?(Hash)
    raise 'Policy document must include a valid conditions Hash object'
  end

  # Translate each condition mapping into a policy statement string.
  statements = conditions.map do |name, rule|
    case rule
    when nil
      # A nil condition value means allow anything.
      %{["starts-with", "$#{name}", ""]}
    when String
      %{{"#{name}": "#{rule}"}}
    when Array
      %{{"#{name}": "#{rule.join(',')}"}}
    when Hash
      %{["#{rule[:op]}", "$#{name}", "#{rule[:value]}"]}
    when Range
      %{["#{name}", #{rule.begin}, #{rule.end}]}
    else
      raise "Unexpected value type for condition '#{name}': #{rule.class}"
    end
  end

  %{{"expiration": "#{expiration_time.getutc.iso8601}", "conditions": [#{statements.join(",")}]}}
end
#copy_object(source_bucket_name, source_object_key, dest_bucket_name, dest_object_key, acl = nil, new_metadata = nil) ⇒ Object
The copy object feature was added to the S3 API after the release of “Programming Amazon Web Services” so it is not discussed in the book’s text. For more details, see: www.jamesmurty.com/2008/05/06/s3-copy-object-in-beta/
241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 |
# File 'lib/OriginalAWS/S3.rb', line 241
# Copies an existing S3 object to a new bucket/key via the server-side
# x-amz-copy-source mechanism. When new_metadata is nil the original
# object's metadata is copied; otherwise it is replaced with the given
# header hash. acl, when set, applies a canned access policy.
def copy_object(source_bucket_name, source_object_key,
                dest_bucket_name, dest_object_key,
                acl=nil, new_metadata=nil)
  headers = {}

  # Identify the source object
  headers['x-amz-copy-source'] = CGI::escape(
    source_bucket_name + '/' + source_object_key)

  # Copy metadata from original object, or replace the metadata.
  if new_metadata.nil?
    headers['x-amz-metadata-directive'] = 'COPY'
  else
    headers['x-amz-metadata-directive'] = 'REPLACE'
    headers.merge!(new_metadata)
  end

  # The Content-Length header must always be set when data is uploaded.
  headers['Content-Length'] = '0'

  # Set the canned policy, may be: 'private', 'public-read',
  # 'public-read-write', 'authenticated-read'
  headers['x-amz-acl'] = acl if acl

  uri = generate_s3_uri(dest_bucket_name, dest_object_key)
  do_rest('PUT', uri, nil, headers)
  return true
end
#create_bucket(bucket_name, location = nil) ⇒ Object
102 103 104 105 106 107 108 109 110 111 112 113 114 115 |
# File 'lib/OriginalAWS/S3.rb', line 102
# Creates a bucket, optionally in a specific location constraint
# (e.g. 'EU'). Returns true on success.
def create_bucket(bucket_name, location=nil)
  uri = generate_s3_uri(bucket_name)
  unless location
    do_rest('PUT', uri)
    return true
  end
  # Build a CreateBucketConfiguration document naming the location.
  config_doc = REXML::Document.new("<CreateBucketConfiguration/>")
  config_doc.root.add_attribute('xmlns', XMLNS)
  config_doc.root.add_element('LocationConstraint').text = location
  do_rest('PUT', uri, config_doc.to_s, {'Content-Type'=>'text/xml'})
  true
end
#create_object(bucket_name, object_key, opts = {}) ⇒ Object
190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 |
# File 'lib/OriginalAWS/S3.rb', line 190
# Uploads an object. opts may include :data (String or File-like),
# :headers, :metadata (sent as x-amz-meta-* headers) and :policy
# (a canned ACL name). Returns true on success.
def create_object(bucket_name, object_key, opts={})
  # Initialize local variables for the provided option items
  data = (opts[:data] ? opts[:data] : '')
  headers = (opts[:headers] ? opts[:headers].clone : {})
  metadata = (opts[:metadata] ? opts[:metadata].clone : {})

  # The Content-Length header must always be set when data is uploaded.
  headers['Content-Length'] =
    (data.respond_to?(:stat) ? data.stat.size : data.size).to_s

  # Calculate an md5 hash of the data for upload verification
  if data.respond_to?(:stat)
    # Generate MD5 digest from file data one chunk at a time
    md5_digest = Digest::MD5.new
    File.open(data.path, 'rb') do |io|
      buffer = ''
      md5_digest.update(buffer) while io.read(4096, buffer)
    end
    md5_hash = md5_digest.digest
  else
    md5_hash = Digest::MD5.digest(data)
  end
  headers['Content-MD5'] = encode_base64(md5_hash)

  # Set the canned policy, may be: 'private', 'public-read',
  # 'public-read-write', 'authenticated-read'
  headers['x-amz-acl'] = opts[:policy] if opts[:policy]

  # Set an explicit content type if none is provided, otherwise the
  # ruby HTTP library will use its own default type
  # 'application/x-www-form-urlencoded'
  if not headers['Content-Type']
    headers['Content-Type'] =
      data.respond_to?(:to_str) ? 'text/plain' : 'application/octet-stream'
  end

  # Convert metadata items to headers using the
  # S3 metadata header name prefix.
  metadata.each do |n,v|
    headers["x-amz-meta-#{n}"] = v
  end

  uri = generate_s3_uri(bucket_name, object_key)
  do_rest('PUT', uri, data, headers)
  return true
end
#delete_bucket(bucket_name) ⇒ Object
117 118 119 120 121 |
# File 'lib/OriginalAWS/S3.rb', line 117
# Deletes a bucket (which must already be empty). Returns true.
def delete_bucket(bucket_name)
  do_rest('DELETE', generate_s3_uri(bucket_name))
  true
end
#delete_object(bucket_name, object_key) ⇒ Object
270 271 272 273 274 |
# File 'lib/OriginalAWS/S3.rb', line 270
# Deletes a single object from a bucket. Returns true.
def delete_object(bucket_name, object_key)
  do_rest('DELETE', generate_s3_uri(bucket_name, object_key))
  true
end
#generate_s3_uri(bucket_name = '', object_name = '', params = []) ⇒ Object
33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 |
# File 'lib/OriginalAWS/S3.rb', line 33
# Builds a URI for an S3 request. DNS-safe bucket names use the
# sub-domain host style (bucket.s3.amazonaws.com); otherwise the bucket
# appears in the path. params is an array of hashes of query items;
# a nil value emits a bare parameter name (e.g. "?acl").
def generate_s3_uri(bucket_name='', object_name='', params=[])
  # Decide between the default and sub-domain host name formats
  host = valid_dns_name(bucket_name) ? "#{bucket_name}.#{S3_ENDPOINT}" : S3_ENDPOINT

  # Build an initial secure or non-secure URI for the end point.
  request_uri = (@secure_http ? 'https' : 'http') + '://' + host

  # Include the bucket name in the URI except for alternative hostnames
  if host == S3_ENDPOINT
    request_uri << '/' + URI.escape(bucket_name) unless bucket_name == ''
  end

  # Add object name component to URI if present
  request_uri << '/' + URI.escape(object_name) unless object_name == ''

  # Flatten every params hash into "name=value" (or bare "name") pairs.
  pairs = params.flat_map do |hash|
    hash.map do |name, value|
      value.nil? ? name.to_s : "#{name}=#{CGI::escape(value.to_s)}"
    end
  end
  request_uri << '?' + pairs.join('&') unless pairs.empty?

  URI.parse(request_uri)
end
#get_acl(bucket_name, object_key = '') ⇒ Object
360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 |
# File 'lib/OriginalAWS/S3.rb', line 360
# Retrieves the access control list of a bucket (or of an object when
# object_key is given). Returns a hash with :owner_id, :owner_name and
# :grants, where each grant holds a :grantee hash and a :permission.
def get_acl(bucket_name, object_key='')
  uri = generate_s3_uri(bucket_name, object_key, [:acl=>nil])
  xml_doc = REXML::Document.new(do_rest('GET', uri).body)

  grants = xml_doc.elements.to_a('//Grant').map do |grant|
    grantee = {:type => grant.elements['Grantee'].attributes['type']}
    if grantee[:type] == 'Group'
      # Group grantees are identified by URI only.
      grantee[:uri] = grant.elements['Grantee/URI'].text
    else
      grantee[:id] = grant.elements['Grantee/ID'].text
      grantee[:display_name] = grant.elements['Grantee/DisplayName'].text
    end
    {
      :grantee => grantee,
      :permission => grant.elements['Permission'].text
    }
  end

  {
    :owner_id => xml_doc.elements['//Owner/ID'].text,
    :owner_name => xml_doc.elements['//Owner/DisplayName'].text,
    :grants => grants
  }
end
#get_bucket_location(bucket_name) ⇒ Object
123 124 125 126 127 128 129 |
# File 'lib/OriginalAWS/S3.rb', line 123
# Returns the bucket's location constraint text (nil for US standard).
def get_bucket_location(bucket_name)
  location_uri = generate_s3_uri(bucket_name, '', [:location=>nil])
  body = do_rest('GET', location_uri).body
  REXML::Document.new(body).elements['LocationConstraint'].text
end
#get_logging(bucket_name) ⇒ Object
326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 |
# File 'lib/OriginalAWS/S3.rb', line 326
# Fetches the bucket's logging status. Returns a hash with
# :target_bucket and :target_prefix when logging is enabled,
# or nil when it is not.
def get_logging(bucket_name)
  uri = generate_s3_uri(bucket_name, '', [:logging=>nil])
  xml_doc = REXML::Document.new(do_rest('GET', uri).body)

  # Logging is not enabled unless a LoggingEnabled element is present.
  return nil unless xml_doc.elements['//LoggingEnabled']

  {
    :target_bucket => xml_doc.elements['//TargetBucket'].text,
    :target_prefix => xml_doc.elements['//TargetPrefix'].text
  }
end
#get_object(bucket_name, object_key, headers = {}) ⇒ Object
297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 |
# File 'lib/OriginalAWS/S3.rb', line 297
# Downloads an object. When a block is given, body data is streamed to
# the block in segments and :body is omitted from the result; otherwise
# the full body is returned under :body. The result hash also contains
# :metadata (x-amz-meta-* values, prefix stripped) and :headers.
def get_object(bucket_name, object_key, headers={})
  uri = generate_s3_uri(bucket_name, object_key)
  if block_given?
    response = do_rest('GET', uri, nil, headers) {|segment| yield(segment)}
  else
    response = do_rest('GET', uri, nil, headers)
  end

  # Separate S3 metadata headers from the regular response headers.
  response_headers = {}
  metadata = {}
  response.each_header do |name,value|
    if name.index('x-amz-meta-') == 0
      metadata[name['x-amz-meta-'.length..-1]] = value
    else
      response_headers[name] = value
    end
  end

  result = {
    :metadata => metadata,
    :headers => response_headers
  }
  result[:body] = response.body if not block_given?
  return result
end
#get_object_metadata(bucket_name, object_key, headers = {}) ⇒ Object
276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 |
# File 'lib/OriginalAWS/S3.rb', line 276
# Performs a HEAD request for an object and returns its metadata without
# downloading the body. Returns a hash with :metadata (x-amz-meta-*
# values, prefix stripped) and :headers (all other response headers).
def get_object_metadata(bucket_name, object_key, headers={})
  uri = generate_s3_uri(bucket_name, object_key)
  response = do_rest('HEAD', uri, nil, headers)

  # Separate S3 metadata headers from the regular response headers.
  response_headers = {}
  metadata = {}
  response.each_header do |name,value|
    if name.index('x-amz-meta-') == 0
      metadata[name['x-amz-meta-'.length..-1]] = value
    else
      response_headers[name] = value
    end
  end

  return {
    :metadata => metadata,
    :headers => response_headers
  }
end
#get_owner_id ⇒ Object
72 73 74 75 76 77 78 79 |
# File 'lib/OriginalAWS/S3.rb', line 72
# Returns the canonical owner ID of the account by listing buckets and
# reading the Owner/ID element of the response.
# (Removed an unused `buckets = []` local left over from list_buckets.)
def get_owner_id
  uri = generate_s3_uri
  response = do_rest('GET', uri)
  xml_doc = REXML::Document.new(response.body)
  xml_doc.elements['//Owner/ID'].text
end
#get_torrent(bucket_name, object_key, output) ⇒ Object
440 441 442 443 444 |
# File 'lib/OriginalAWS/S3.rb', line 440
# Downloads the BitTorrent file for an object and writes it to the
# given IO-like output.
def get_torrent(bucket_name, object_key, output)
  torrent_uri = generate_s3_uri(bucket_name, object_key, [:torrent=>nil])
  output.write(do_rest('GET', torrent_uri).body)
end
#list_buckets ⇒ Object
81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 |
# File 'lib/OriginalAWS/S3.rb', line 81
# Lists all buckets owned by the account. Returns a hash with
# :owner_id, :display_name, and :buckets (each with :name and
# :creation_date).
def list_buckets
  response = do_rest('GET', generate_s3_uri)
  xml_doc = REXML::Document.new(response.body)

  buckets = xml_doc.elements.to_a('//Buckets/Bucket').map do |bucket|
    {
      :name => bucket.elements['Name'].text,
      :creation_date => bucket.elements['CreationDate'].text
    }
  end

  {
    :owner_id => xml_doc.elements['//Owner/ID'].text,
    :display_name => xml_doc.elements['//Owner/DisplayName'].text,
    :buckets => buckets
  }
end
#list_objects(bucket_name, *params) ⇒ Object
131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 |
# File 'lib/OriginalAWS/S3.rb', line 131
# Lists the objects in a bucket, following truncated listings by
# re-requesting with a :marker parameter until complete. params is a
# splat of query-item hashes (e.g. {:prefix => 'x'}). Returns a hash
# with :bucket_name, :objects and :prefixes (CommonPrefixes values).
def list_objects(bucket_name, *params)
  objects = []
  prefixes = []
  truncated = true

  while truncated
    uri = generate_s3_uri(bucket_name, '', params)
    xml_doc = REXML::Document.new(do_rest('GET', uri).body)

    xml_doc.elements.each('//Contents') do |contents|
      objects << {
        :key => contents.elements['Key'].text,
        :size => contents.elements['Size'].text,
        :last_modified => contents.elements['LastModified'].text,
        :etag => contents.elements['ETag'].text,
        :owner_id => contents.elements['Owner/ID'].text,
        :owner_name => contents.elements['Owner/DisplayName'].text
      }
    end

    xml_doc.elements.each('//CommonPrefixes') do |common_prefix|
      prefixes << common_prefix.elements['Prefix'].text
    end

    # Determine whether listing is truncated
    truncated = ('true' == xml_doc.elements['//IsTruncated'].text)

    # Remove any existing marker value
    params.delete_if {|p| p[:marker]}

    # Set the marker parameter to the NextMarker if possible,
    # otherwise set it to the last key name in the listing
    next_marker_elem = xml_doc.elements['//NextMarker']
    last_key_elem = xml_doc.elements['//Contents/Key[last()]']
    marker =
      if next_marker_elem
        next_marker_elem.text
      elsif last_key_elem
        last_key_elem.text
      else
        ''
      end
    params << {:marker => marker}
  end

  {
    :bucket_name => bucket_name,
    :objects => objects,
    :prefixes => prefixes
  }
end
#set_acl(owner_id, bucket_name, object_key = '', grants = [owner_id=>'FULL_CONTROL']) ⇒ Object
393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 |
# File 'lib/OriginalAWS/S3.rb', line 393
# Sets the access control list on a bucket (or object when object_key
# is given). grants is an array of hashes mapping grantee identifiers
# to permission strings; the grantee type is inferred from the
# identifier: an email address ('@'), a group URI ('://'), or a
# canonical user ID. Defaults to FULL_CONTROL for the owner.
def set_acl(owner_id, bucket_name, object_key='',
            grants=[owner_id=>'FULL_CONTROL'])
  xml_doc = REXML::Document.new("<AccessControlPolicy xmlns='#{XMLNS}'/>")
  xml_doc.root.add_element('Owner').add_element('ID').text = owner_id
  grant_list = xml_doc.root.add_element('AccessControlList')

  grants.each do |hash|
    hash.each do |grantee_id, permission|
      grant = grant_list.add_element('Grant')
      grant.add_element('Permission').text = permission

      # Grantee may be of type email, group, or canonical user
      if grantee_id.index('@')
        # Email grantee
        grantee = grant.add_element('Grantee',
          {'xmlns:xsi'=>'http://www.w3.org/2001/XMLSchema-instance',
           'xsi:type'=>'AmazonCustomerByEmail'})
        grantee.add_element('EmailAddress').text = grantee_id
      elsif grantee_id.index('://')
        # Group grantee
        grantee = grant.add_element('Grantee',
          {'xmlns:xsi'=>'http://www.w3.org/2001/XMLSchema-instance',
           'xsi:type'=>'Group'})
        grantee.add_element('URI').text = grantee_id
      else
        # Canonical user grantee
        grantee = grant.add_element('Grantee',
          {'xmlns:xsi'=>'http://www.w3.org/2001/XMLSchema-instance',
           'xsi:type'=>'CanonicalUser'})
        grantee.add_element('ID').text = grantee_id
      end
    end
  end

  uri = generate_s3_uri(bucket_name, object_key, [:acl=>nil])
  do_rest('PUT', uri, xml_doc.to_s, {'Content-Type'=>'application/xml'})
  return true
end
#set_canned_acl(canned_acl, bucket_name, object_key = '') ⇒ Object
434 435 436 437 438 |
# File 'lib/OriginalAWS/S3.rb', line 434
# Applies a canned ACL ('private', 'public-read', 'public-read-write',
# 'authenticated-read') to a bucket or, when object_key is given, an
# object. Returns true. (Removed an unused `response` local.)
def set_canned_acl(canned_acl, bucket_name, object_key='')
  uri = generate_s3_uri(bucket_name, object_key, [:acl=>nil])
  do_rest('PUT', uri, nil, {'x-amz-acl'=>canned_acl})
  return true
end
#set_logging(bucket_name, target_bucket = nil, target_prefix = "#{bucket_name}.") ⇒ Object
343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 |
# File 'lib/OriginalAWS/S3.rb', line 343
# Enables server access logging for a bucket (when target_bucket is
# given) or disables it (when target_bucket is nil). target_prefix
# defaults to "<bucket_name>.". Returns true.
def set_logging(bucket_name, target_bucket=nil,
                target_prefix="#{bucket_name}.")
  # Build BucketLoggingStatus XML document
  status_doc = REXML::Document.new("<BucketLoggingStatus xmlns='#{XMLNS}'/>")
  if target_bucket
    enabled_elem = status_doc.root.add_element('LoggingEnabled')
    enabled_elem.add_element('TargetBucket').text = target_bucket
    enabled_elem.add_element('TargetPrefix').text = target_prefix
  end

  uri = generate_s3_uri(bucket_name, '', [:logging=>nil])
  do_rest('PUT', uri, status_doc.to_s, {'Content-Type'=>'application/xml'})
  true
end
#sign_uri(method, expires, bucket_name, object_key = '', opts = {}) ⇒ Object
446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 |
# File 'lib/OriginalAWS/S3.rb', line 446
# Generates a pre-signed, time-limited URI string for an S3 request.
# expires is a Unix timestamp; opts may include :parameters, :headers
# and :is_virtual_host (to address the bucket as a CNAME host).
def sign_uri(method, expires, bucket_name, object_key='', opts={})
  parameters = opts[:parameters] || []
  headers = opts[:headers] || {}
  # The expiry time stands in for the Date header when signing.
  headers['Date'] = expires

  uri = generate_s3_uri(bucket_name, object_key, parameters)
  signature = generate_rest_signature(method, uri, headers)

  # Append the signature components to any existing query string.
  prefix = uri.query.nil? ? '' : "#{uri.query}&"
  uri.query = prefix +
    "Signature=#{CGI::escape(signature)}" +
    "&Expires=#{expires}" +
    "&AWSAccessKeyId=#{@aws_access_key}"

  uri.host = bucket_name if opts[:is_virtual_host]
  uri.to_s
end
#valid_dns_name(bucket_name) ⇒ Object
16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 |
# File 'lib/OriginalAWS/S3.rb', line 16
# Returns true if the bucket name may be used as a DNS sub-domain
# (required for virtual-host style S3 URIs): 3-63 characters of
# lower-case letters, digits, dots and hyphens, containing at least
# one letter (which also rejects bare IP addresses), with no
# dot-separated fragment empty or starting/ending with a hyphen.
def valid_dns_name(bucket_name)
  # Length must be between 3 and 63 characters inclusive.
  return false if bucket_name.size > 63 or bucket_name.size < 3

  # Anchor with \A/\z rather than ^/$: the line anchors matched
  # per-line, so a name containing an embedded newline could pass.
  return false unless bucket_name =~ /\A[a-z0-9][a-z0-9.-]+\z/

  # Must contain at least one letter; rejects IP addresses like 1.2.3.4.
  return false unless bucket_name =~ /[a-z]/

  # No fragment may start or end with a hyphen, or be empty.
  bucket_name.split('.').each do |fragment|
    return false if fragment =~ /\A-/ or fragment =~ /-\z/ or fragment.empty?
  end
  true
end