Class: Backup::CloudIO::S3

Inherits: Base < Object
Defined in:
lib/backup/cloud_io/s3.rb

Defined Under Namespace

Classes: Error, Object

Constant Summary

MAX_FILE_SIZE =
  1024**3 * 5        # 5 GiB

MAX_MULTIPART_SIZE =
  1024**4 * 5        # 5 TiB

Instance Attribute Summary

Attributes inherited from Base

#max_retries, #retry_waitsec

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ S3

Returns a new instance of S3.



# File 'lib/backup/cloud_io/s3.rb', line 20

def initialize(options = {})
  super

  @access_key_id      = options[:access_key_id]
  @secret_access_key  = options[:secret_access_key]
  @use_iam_profile    = options[:use_iam_profile]
  @region             = options[:region]
  @bucket             = options[:bucket]
  @chunk_size         = options[:chunk_size]
  @encryption         = options[:encryption]
  @storage_class      = options[:storage_class]
  @fog_options        = options[:fog_options]
end
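
A minimal usage sketch, assuming illustrative option values (credentials, region and bucket below are not taken from the source). :chunk_size is given in MiB, and :max_retries / :retry_waitsec are handled by the inherited Base constructor:

cloud_io = Backup::CloudIO::S3.new(
  :access_key_id     => 'MY_ACCESS_KEY_ID',     # illustrative credentials
  :secret_access_key => 'MY_SECRET_ACCESS_KEY',
  :use_iam_profile   => false,
  :region            => 'us-east-1',
  :bucket            => 'my-backup-bucket',     # hypothetical bucket name
  :chunk_size        => 5,                      # MiB; 0 disables multipart uploads
  :encryption        => :aes256,
  :storage_class     => :standard,
  :max_retries       => 10,                     # passed through to Base
  :retry_waitsec     => 30                      # passed through to Base
)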

Instance Attribute Details

#access_key_id ⇒ Object (readonly)

Returns the value of attribute access_key_id.



# File 'lib/backup/cloud_io/s3.rb', line 16

def access_key_id
  @access_key_id
end

#bucket ⇒ Object (readonly)

Returns the value of attribute bucket.



# File 'lib/backup/cloud_io/s3.rb', line 16

def bucket
  @bucket
end

#chunk_size ⇒ Object (readonly)

Returns the value of attribute chunk_size.



# File 'lib/backup/cloud_io/s3.rb', line 16

def chunk_size
  @chunk_size
end

#encryption ⇒ Object (readonly)

Returns the value of attribute encryption.



# File 'lib/backup/cloud_io/s3.rb', line 16

def encryption
  @encryption
end

#fog_options ⇒ Object (readonly)

Returns the value of attribute fog_options.



# File 'lib/backup/cloud_io/s3.rb', line 16

def fog_options
  @fog_options
end

#region ⇒ Object (readonly)

Returns the value of attribute region.



# File 'lib/backup/cloud_io/s3.rb', line 16

def region
  @region
end

#secret_access_key ⇒ Object (readonly)

Returns the value of attribute secret_access_key.



# File 'lib/backup/cloud_io/s3.rb', line 16

def secret_access_key
  @secret_access_key
end

#storage_class ⇒ Object (readonly)

Returns the value of attribute storage_class.



# File 'lib/backup/cloud_io/s3.rb', line 16

def storage_class
  @storage_class
end

#use_iam_profile ⇒ Object (readonly)

Returns the value of attribute use_iam_profile.



# File 'lib/backup/cloud_io/s3.rb', line 16

def use_iam_profile
  @use_iam_profile
end

Instance Method Details

#delete(objects_or_keys) ⇒ Object

Delete object(s) from the bucket.

  • Called by the Storage (with objects) and the Syncer (with keys)

  • Deletes up to 1000 objects per request.

  • Missing objects will be ignored.



# File 'lib/backup/cloud_io/s3.rb', line 101

def delete(objects_or_keys)
  keys = Array(objects_or_keys).dup
  keys.map!(&:key) if keys.first.is_a?(Object)

  opts = { :quiet => true } # only report Errors in DeleteResult
  until keys.empty?
    _keys = keys.slice!(0, 1000)
    with_retries('DELETE Multiple Objects') do
      resp = connection.delete_multiple_objects(bucket, _keys, opts.dup)
      unless resp.body['DeleteResult'].empty?
        errors = resp.body['DeleteResult'].map do |result|
          error = result['Error']
          "Failed to delete: #{ error['Key'] }\n" +
          "Reason: #{ error['Code'] }: #{ error['Message'] }"
        end.join("\n")
        raise Error, "The server returned the following:\n#{ errors }"
      end
    end
  end
end
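
A hedged usage sketch: the Storage passes Object instances (as returned by #objects) while the Syncer passes plain keys, and either a single item or an array is accepted. The keys below are hypothetical:

# Delete by key, as the Syncer does:
cloud_io.delete('backups/trigger/2015.01.01.01.00.00/archive.tar')

# Delete the Object instances returned by #objects, as the Storage does:
cloud_io.delete(cloud_io.objects('backups/trigger/2015.01.01.01.00.00'))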

#head_object(object) ⇒ Object

Used by Object to fetch metadata if needed.



# File 'lib/backup/cloud_io/s3.rb', line 88

def head_object(object)
  resp = nil
  with_retries("HEAD '#{ bucket }/#{ object.key }'") do
    resp = connection.head_object(bucket, object.key)
  end
  resp
end
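
A hedged sketch of fetching metadata for a listed object. The return value is the Fog/Excon response from head_object, so response headers such as ETag should be accessible via resp.headers (header name shown for illustration):

obj  = cloud_io.objects('backups/trigger').first
resp = cloud_io.head_object(obj)
resp.headers['ETag']   # metadata for the stored object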

#objects(prefix) ⇒ Object

Returns all objects in the bucket with the given prefix.

  • #get_bucket returns a max of 1000 objects per request.

  • Returns objects in alphabetical order.

  • If marker is given, only objects after the marker are in the response.



# File 'lib/backup/cloud_io/s3.rb', line 68

def objects(prefix)
  objects = []
  resp = nil
  prefix = prefix.chomp('/')
  opts = { 'prefix' => prefix + '/' }

  while resp.nil? || resp.body['IsTruncated']
    opts.merge!('marker' => objects.last.key) unless objects.empty?
    with_retries("GET '#{ bucket }/#{ prefix }/*'") do
      resp = connection.get_bucket(bucket, opts)
    end
    resp.body['Contents'].each do |obj_data|
      objects << Object.new(self, obj_data)
    end
  end

  objects
end
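
A hedged usage sketch: pagination is handled internally (1000 keys per #get_bucket request), so the caller receives the complete, alphabetically ordered array. The prefix is hypothetical:

cloud_io.objects('backups/trigger').each do |object|
  puts object.key   # keys arrive in alphabetical order
end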

#upload(src, dest) ⇒ Object

The Syncer may call this method in multiple threads. However, #objects is always called prior to multithreading.



# File 'lib/backup/cloud_io/s3.rb', line 36

def upload(src, dest)
  file_size = File.size(src)
  chunk_bytes = chunk_size * 1024**2
  if chunk_bytes > 0 && file_size > chunk_bytes
    raise FileSizeError, <<-EOS if file_size > MAX_MULTIPART_SIZE
      File Too Large
      File: #{ src }
      Size: #{ file_size }
      Max Multipart Upload Size is #{ MAX_MULTIPART_SIZE } (5 TiB)
    EOS

    chunk_bytes = adjusted_chunk_bytes(chunk_bytes, file_size)
    upload_id = initiate_multipart(dest)
    parts = upload_parts(src, dest, upload_id, chunk_bytes, file_size)
    complete_multipart(dest, upload_id, parts)
  else
    raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
      File Too Large
      File: #{ src }
      Size: #{ file_size }
      Max File Size is #{ MAX_FILE_SIZE } (5 GiB)
    EOS

    put_object(src, dest)
  end
end
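
A hedged sketch of the size logic above: chunk_size is configured in MiB, so any file larger than chunk_size * 1024**2 bytes is sent as a multipart upload (limited to MAX_MULTIPART_SIZE), while a chunk_size of 0 forces a single PUT (limited to MAX_FILE_SIZE). The paths are illustrative:

# With :chunk_size => 5, files over 5 MiB are uploaded in parts:
cloud_io.upload('/tmp/backups/archive.tar', 'backups/trigger/archive.tar')

# The limits, derived from the constants above:
Backup::CloudIO::S3::MAX_FILE_SIZE      # => 5_368_709_120      (5 GiB, single PUT)
Backup::CloudIO::S3::MAX_MULTIPART_SIZE # => 5_497_558_138_880  (5 TiB, multipart)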