Class: ObjectStorage::DirectUpload

Inherits:
Object
Includes:
Gitlab::Utils::StrongMemoize
Defined in:
lib/object_storage/direct_upload.rb

Overview

The DirectUpload class generates a set of presigned URLs that can be used to upload data to object storage from an untrusted component, such as Workhorse or Runner.

For Google, it assumes that the platform supports a variable Content-Length.

For AWS, it initiates a Multipart Upload and presigns a set of part uploads.

The class calculates the best part size so that uploads up to the requested maximum size are possible.
The number of generated parts never exceeds 100,
and the class always tries to minimize the number of generated parts.
The part size is rounded up to a multiple of 5 MB.
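
For illustration, a minimal caller-side sketch. It is hedged: the config object is assumed to be a configuration wrapper exposing #credentials, #bucket and the other settings used by the constructor, and the object name is made up.

# Hypothetical usage sketch; config and the object name are assumptions.
upload = ObjectStorage::DirectUpload.new(
  config,
  "tmp/uploads/#{SecureRandom.uuid}", # object_name
  has_length: false,
  maximum_size: 5.gigabytes           # required because the length is unknown
)

# Hash handed to the untrusted component: GetURL, StoreURL, DeleteURL and,
# when needed, a MultipartUpload section with presigned part URLs.
upload.to_hash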

Constant Summary

TIMEOUT = 4.hours
EXPIRE_OFFSET = 15.minutes
MAXIMUM_MULTIPART_PARTS = 100
MINIMUM_MULTIPART_SIZE = 5.megabytes

Instance Attribute Summary

Instance Method Summary

Methods included from Gitlab::Utils::StrongMemoize

#clear_memoization, #strong_memoize, #strong_memoized?

Constructor Details

#initialize(config, object_name, has_length:, maximum_size: nil) ⇒ DirectUpload

Returns a new instance of DirectUpload.


# File 'lib/object_storage/direct_upload.rb', line 28

def initialize(config, object_name, has_length:, maximum_size: nil)
  unless has_length
    raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
  end

  @config = config
  @credentials = config.credentials
  @bucket_name = config.bucket
  @object_name = object_name
  @has_length = has_length
  @maximum_size = maximum_size
end
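
A brief sketch of the guard clause above (config is an assumed configuration object):

# Raises ArgumentError: 'maximum_size has to be specified if length is unknown'
ObjectStorage::DirectUpload.new(config, 'tmp/uploads/object', has_length: false)

# Valid: the length is unknown, so maximum_size is supplied
ObjectStorage::DirectUpload.new(config, 'tmp/uploads/object', has_length: false, maximum_size: 5.gigabytes)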

Instance Attribute Details

#bucket_name ⇒ Object (readonly)

Returns the value of attribute bucket_name


# File 'lib/object_storage/direct_upload.rb', line 25

def bucket_name
  @bucket_name
end

#config ⇒ Object (readonly)

Returns the value of attribute config


# File 'lib/object_storage/direct_upload.rb', line 25

def config
  @config
end

#credentials ⇒ Object (readonly)

Returns the value of attribute credentials


# File 'lib/object_storage/direct_upload.rb', line 25

def credentials
  @credentials
end

#has_length ⇒ Object (readonly)

Returns the value of attribute has_length


# File 'lib/object_storage/direct_upload.rb', line 26

def has_length
  @has_length
end

#maximum_size ⇒ Object (readonly)

Returns the value of attribute maximum_size


# File 'lib/object_storage/direct_upload.rb', line 26

def maximum_size
  @maximum_size
end

#object_name ⇒ Object (readonly)

Returns the value of attribute object_name


# File 'lib/object_storage/direct_upload.rb', line 25

def object_name
  @object_name
end

Instance Method Details

#azure_gocloud_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 108

def azure_gocloud_url
  url = "azblob://#{bucket_name}"
  url += "?domain=#{config.azure_storage_domain}" if config.azure_storage_domain.present?
  url
end
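
For example, with a bucket named "uploads" and an azure_storage_domain of "blob.core.windows.net" (both values purely illustrative), the method returns:

azure_gocloud_url
# => "azblob://uploads?domain=blob.core.windows.net"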

#delete_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 137

def delete_url
  connection.delete_object_url(bucket_name, object_name, expire_at)
end

#get_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 128

def get_url
  if config.google?
    connection.get_object_https_url(bucket_name, object_name, expire_at)
  else
    connection.get_object_url(bucket_name, object_name, expire_at)
  end
end

#multipart_abort_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 175

def multipart_abort_url
  connection.signed_url({
    method: 'DELETE',
    bucket_name: bucket_name,
    object_name: object_name,
    query: { 'uploadId' => upload_id }
  }, expire_at)
end

#multipart_complete_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 164

def multipart_complete_url
  connection.signed_url({
    method: 'POST',
    bucket_name: bucket_name,
    object_name: object_name,
    query: { 'uploadId' => upload_id },
    headers: { 'Content-Type' => 'application/xml' }
  }, expire_at)
end

#multipart_part_upload_url(part_number) ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 153

def multipart_part_upload_url(part_number)
  connection.signed_url({
    method: 'PUT',
    bucket_name: bucket_name,
    object_name: object_name,
    query: { 'uploadId' => upload_id, 'partNumber' => part_number },
    headers: upload_options
  }, expire_at)
end

#multipart_part_urls ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 146

def multipart_part_urls
  Array.new(number_of_multipart_parts) do |part_index|
    multipart_part_upload_url(part_index + 1)
  end
end
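
The number_of_multipart_parts helper is private and not shown in this listing; per the overview it never exceeds 100. A hedged sketch of the result, with URL shapes that depend on the provider and the underlying connection:

multipart_part_urls
# => ["https://<bucket>/<object>?partNumber=1&uploadId=...&X-Amz-Signature=...",
#     "https://<bucket>/<object>?partNumber=2&uploadId=...&X-Amz-Signature=...",
#     ...]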

#multipart_upload_hash ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 53

def multipart_upload_hash
  return unless requires_multipart_upload?

  {
    PartSize: rounded_multipart_part_size,
    PartURLs: multipart_part_urls,
    CompleteURL: multipart_complete_url,
    AbortURL: multipart_abort_url
  }
end
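
A hedged sketch of what the caller receives when a multipart upload is required (requires_multipart_upload? and rounded_multipart_part_size are private helpers not shown in this listing; the values are illustrative placeholders):

multipart_upload_hash
# => {
#      PartSize: 50.megabytes,
#      PartURLs: ["https://...partNumber=1...", "https://...partNumber=2...", "..."],
#      CompleteURL: "https://...uploadId=...",
#      AbortURL: "https://...uploadId=..."
#    }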

#provider ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 123

def provider
  credentials[:provider].to_s
end

#store_url ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 142

def store_url
  connection.put_object_url(bucket_name, object_name, expire_at, upload_options)
end

#to_hash ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 41

def to_hash
  {
    Timeout: TIMEOUT,
    GetURL: get_url,
    StoreURL: store_url,
    DeleteURL: delete_url,
    MultipartUpload: multipart_upload_hash,
    CustomPutHeaders: true,
    PutHeaders: upload_options
  }.merge(workhorse_client_hash).compact
end
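
For orientation, a hedged sketch of the merged result for an AWS-style configuration. The keys follow the code above; the values are illustrative placeholders, and the final .compact drops any nil entries, such as a missing MultipartUpload:

to_hash
# => {
#      Timeout: 4.hours,
#      GetURL: "https://...",
#      StoreURL: "https://...",
#      DeleteURL: "https://...",
#      MultipartUpload: { PartSize: ..., PartURLs: [...], CompleteURL: "...", AbortURL: "..." },
#      CustomPutHeaders: true,
#      PutHeaders: { ... },          # upload_options, e.g. encryption headers
#      UseWorkhorseClient: true,     # from workhorse_client_hash
#      RemoteTempObjectID: "tmp/uploads/...",
#      ObjectStorage: { Provider: "AWS", S3Config: { ... } }
#    }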

#use_workhorse_s3_client? ⇒ Boolean

Returns:

  • (Boolean)

# File 'lib/object_storage/direct_upload.rb', line 114

def use_workhorse_s3_client?
  return false unless Feature.enabled?(:use_workhorse_s3_client, default_enabled: true)
  return false unless config.use_iam_profile? || config.consolidated_settings?
  # The Golang AWS SDK does not support V2 signatures
  return false unless credentials.fetch(:aws_signature_version, 4).to_i >= 4

  true
end

#workhorse_aws_hash ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 74

def workhorse_aws_hash
  {
    UseWorkhorseClient: use_workhorse_s3_client?,
    RemoteTempObjectID: object_name,
    ObjectStorage: {
      Provider: 'AWS',
      S3Config: {
        Bucket: bucket_name,
        Region: credentials[:region],
        Endpoint: credentials[:endpoint],
        PathStyle: config.use_path_style?,
        UseIamProfile: config.use_iam_profile?,
        ServerSideEncryption: config.server_side_encryption,
        SSEKMSKeyID: config.server_side_encryption_kms_key_id
      }.compact
    }
  }
end

#workhorse_azure_hash ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 93

def workhorse_azure_hash
  {
    # Azure requires Workhorse client because direct uploads can't
    # use pre-signed URLs without buffering the whole file to disk.
    UseWorkhorseClient: true,
    RemoteTempObjectID: object_name,
    ObjectStorage: {
      Provider: 'AzureRM',
      GoCloudConfig: {
        URL: azure_gocloud_url
      }
    }
  }
end

#workhorse_client_hash ⇒ Object


# File 'lib/object_storage/direct_upload.rb', line 64

def workhorse_client_hash
  if config.aws?
    workhorse_aws_hash
  elsif config.azure?
    workhorse_azure_hash
  else
    {}
  end
end
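
As a hedged usage note: for providers other than AWS and Azure (for example, Google), this returns an empty hash, so #to_hash merges nothing extra and the caller falls back to the plain presigned URLs:

# Assuming a Google-provider config (illustrative):
workhorse_client_hash
# => {}
to_hash.key?(:UseWorkhorseClient)
# => false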