Module: ObjectStorage::Concern

Defined Under Namespace

Classes: OpenFile

Instance Method Summary collapse

Methods included from Gitlab::Utils::Override

extended, extensions, included, method_added, override, prepended, queue_verification, verify!

Instance Method Details

#cache!(new_file = sanitized_file) ⇒ Object



423
424
425
426
427
428
429
430
431
# File 'app/uploaders/object_storage.rb', line 423

# Caches the given file, intercepting "accelerated" uploads.
#
# An ::UploadedFile carrying a remote_id was already stored on remote
# storage by the direct-upload workflow, so we cache it by remote id
# instead of going through CarrierWave's regular (local) caching.
def cache!(new_file = sanitized_file)
  accelerated_upload = new_file.is_a?(::UploadedFile) && new_file.remote_id.present?
  return cache_remote_file!(new_file.remote_id, new_file.original_filename) if accelerated_upload

  super
end

#delete_migrated_file(migrated_file) ⇒ Object



380
381
382
# File 'app/uploaders/object_storage.rb', line 380

# Deletes the old copy of the file after it has been migrated to a
# different store. Simply delegates to the file object's #delete.
def delete_migrated_file(migrated_file)
  migrated_file.delete
end

#delete_tmp_file_after_storageObject



447
448
449
450
451
452
453
# File 'app/uploaders/object_storage.rb', line 447

# Tells CarrierWave whether the temporary file should be deleted after
# storing. When a direct-upload final path is present the file was never
# at a temporary location, so there is nothing for CarrierWave to delete.
def delete_tmp_file_after_storage
  if direct_upload_final_path.present?
    false
  else
    super
  end
end

#exclusive_lease_keyObject



442
443
444
# File 'app/uploaders/object_storage.rb', line 442

# Lease key guarding migrations for this model instance; scoped by model
# class and id so different records can migrate concurrently.
def exclusive_lease_key
  ['object_storage_migrate', model.class, model.id].join(':')
end

#exists?Boolean

Returns:

  • (Boolean)


384
385
386
# File 'app/uploaders/object_storage.rb', line 384

# Whether an underlying file object is attached (local or remote).
def exists?
  file.present?
end

#file_cache_storage?Boolean

Returns:

  • (Boolean)


293
294
295
# File 'app/uploaders/object_storage.rb', line 293

# Whether CarrierWave's *cache* storage backend is the local filesystem.
def file_cache_storage?
  cache_storage.is_a?(CarrierWave::Storage::File)
end

#file_storage?Boolean

Returns:

  • (Boolean)


289
290
291
# File 'app/uploaders/object_storage.rb', line 289

# Whether the primary storage backend is the local filesystem
# (as opposed to fog/object storage).
def file_storage?
  storage.is_a?(CarrierWave::Storage::File)
end

#filenameObject

allow to configure and overwrite the filename



281
282
283
# File 'app/uploaders/object_storage.rb', line 281

# Allow the filename to be configured and overwritten: an explicitly
# assigned @filename wins, then CarrierWave's own filename, then the
# stored file object's name.
def filename
  return @filename if @filename # rubocop:disable Gitlab/ModuleWithInstanceVariables

  super || file&.filename
end

#filename=(filename) ⇒ Object



285
286
287
# File 'app/uploaders/object_storage.rb', line 285

# Overrides the filename reported by the uploader (see #filename).
def filename=(name)
  @filename = name # rubocop:disable Gitlab/ModuleWithInstanceVariables
end

#fog_attributesObject



369
370
371
# File 'app/uploaders/object_storage.rb', line 369

# Memoized attributes passed to fog when uploading, taken from this
# uploader class' object store configuration.
def fog_attributes
  @fog_attributes ||= self.class.object_store_config.fog_attributes
end

#fog_credentialsObject



365
366
367
# File 'app/uploaders/object_storage.rb', line 365

# Credentials used by the fog storage backend; delegates to the
# uploader class' object_store_credentials.
def fog_credentials
  self.class.object_store_credentials
end

#fog_directoryObject



361
362
363
# File 'app/uploaders/object_storage.rb', line 361

# Remote directory (bucket) used by the fog storage backend; delegates
# to the uploader class' remote_store_path.
def fog_directory
  self.class.remote_store_path
end

#fog_publicObject

Set ACL of uploaded objects to not-public (fog-aws) or no ACL at all (fog-google). Value is ignored by fog-aliyun [1]: github.com/fog/fog-aws/blob/daa50bb3717a462baf4d04d0e0cbfc18baacb541/lib/fog/aws/models/storage/file.rb#L152-L159



376
377
378
# File 'app/uploaders/object_storage.rb', line 376

# Set ACL of uploaded objects to not-public (fog-aws) or no ACL at all
# (fog-google). The value is ignored by fog-aliyun; see
# https://github.com/fog/fog-aws/blob/daa50bb3717a462baf4d04d0e0cbfc18baacb541/lib/fog/aws/models/storage/file.rb#L152-L159
def fog_public
  nil
end

#migrate!(new_store) ⇒ Object

Move the file to another store

new_store: Enum (Store::LOCAL, Store::REMOTE)


355
356
357
358
359
# File 'app/uploaders/object_storage.rb', line 355

# Move the file to another store while holding the exclusive lease, so
# concurrent migrations of the same record cannot race.
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
def migrate!(new_store)
  with_exclusive_lease { unsafe_migrate!(new_store) }
end

#object_storeObject



297
298
299
300
# File 'app/uploaders/object_storage.rb', line 297

# Current store for the mounted file, read (memoized) from the model's
# <mounted_as>_store column.
def object_store
  # We use Store::LOCAL as null value indicates the local storage
  @object_store ||= model.try(store_serialization_column) || Store::LOCAL
end

#object_store=(value) ⇒ Object

rubocop:disable Gitlab/ModuleWithInstanceVariables



303
304
305
306
# File 'app/uploaders/object_storage.rb', line 303

# Sets the current store, normalizing nil to Store::LOCAL, and swaps the
# backing storage object to match. Order matters: @object_store must be
# assigned before storage_for reads it (via #object_store).
def object_store=(value)
  @object_store = value || Store::LOCAL
  @storage = storage_for(object_store)
end

#persist_object_store!Object

Save the current @object_store to the model <mounted_as>_store column



316
317
318
319
320
321
# File 'app/uploaders/object_storage.rb', line 316

# Save the current @object_store to the model's <mounted_as>_store column.
# No-op when the model has no such column. Uses update_column (skips
# validations/callbacks) and raises if the update reports failure.
def persist_object_store!
  return unless persist_object_store?

  updated = model.update_column(store_serialization_column, object_store)
  raise 'Failed to update object store' unless updated
end

#persist_object_store?Boolean

Return true if the current file is part of the model (i.e. is mounted in the model)

Returns:

  • (Boolean)


311
312
313
# File 'app/uploaders/object_storage.rb', line 311

# Return true if the current file is part of the model, i.e. the model
# exposes a writer for the <mounted_as>_store column.
def persist_object_store?
  store_setter = :"#{store_serialization_column}="
  model.respond_to?(store_setter)
end

#retrieve_from_store!(identifier) ⇒ Object



455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
# File 'app/uploaders/object_storage.rb', line 455

# Loads the stored file for `identifier` (the value of the `file` column),
# rejecting path-traversal sequences in the identifier before delegating
# to CarrierWave.
def retrieve_from_store!(identifier)
  Gitlab::PathTraversal.check_path_traversal!(identifier)

  # We need to force assign the value of @filename so that we will still
  # get the original_filename in cases wherein the file points to a random generated
  # path format. This happens for direct uploaded files to final location.
  #
  # If we don't set @filename value here, the result of uploader.filename (see ObjectStorage#filename) will result
  # to the value of uploader.file.filename which will then contain the random generated path.
  # The `identifier` variable contains the value of the `file` column which is the original_filename.
  #
  # In cases wherein we are not uploading to final location, it is still fine to set the
  # @filename with the `identifier` value because it still contains the original filename from the `file` column,
  # which is what we want in either case.
  @filename = identifier # rubocop: disable Gitlab/ModuleWithInstanceVariables

  super
end

#store!(new_file = nil) ⇒ Object



433
434
435
436
437
438
439
440
# File 'app/uploaders/object_storage.rb', line 433

# Stores the file, forcing remote storage whenever direct upload to
# object storage is enabled for this uploader class.
def store!(new_file = nil)
  self.object_store = Store::REMOTE if self.class.direct_upload_to_object_store?

  super
end

#store_dir(store = nil) ⇒ Object



388
389
390
# File 'app/uploaders/object_storage.rb', line 388

# Directory for the given store; defaults to the current object_store.
def store_dir(store = nil)
  selected_store = store || object_store
  store_dirs[selected_store]
end

#store_dirsObject



392
393
394
395
396
397
# File 'app/uploaders/object_storage.rb', line 392

# Mapping of each store to its directory: local files live under
# base_dir, while remote objects are keyed by the dynamic segment alone.
def store_dirs
  local_dir = File.join(base_dir, dynamic_segment)
  remote_dir = File.join(dynamic_segment)

  {
    Store::LOCAL => local_dir,
    Store::REMOTE => remote_dir
  }
end

#store_path(*args) ⇒ Object



399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
# File 'app/uploaders/object_storage.rb', line 399

# Full storage path for the file. Local storage keeps CarrierWave's
# default behaviour; remote storage prefers the direct-upload final path
# and applies the configured bucket prefix.
def store_path(*args)
  return super unless self.object_store == Store::REMOTE

  # Prefer the direct-upload final location; otherwise fall back to the
  # regular CarrierWave store path.
  path = direct_upload_final_path || super

  # We allow administrators to create "sub buckets" by setting a prefix.
  # This makes it possible to deploy GitLab with only one object storage
  # bucket. Because the prefix is configuration data we do not want to
  # store it in the uploads table via RecordsUploads. That means that the
  # prefix cannot be part of store_dir. This is why we chose to implement
  # the prefix support here in store_path.
  self.class.with_bucket_prefix(path)
end

#upload_paths(identifier) ⇒ Object

Returns all the possible paths for an upload. The `upload.path` is a lookup parameter, and it may change depending on the `store` param.



419
420
421
# File 'app/uploaders/object_storage.rb', line 419

# Returns all the possible paths for an upload. The `upload.path` is a
# lookup parameter, and it may change depending on the `store` param.
#
# identifier - the upload's filename.
#
# Returns an Array with one candidate path per store.
def upload_paths(identifier)
  # Only the directory values matter here; iterating over `.values`
  # avoids an unused block parameter for the store key.
  store_dirs.values.map { |path| File.join(path, identifier) }
end

#use_file(&blk) ⇒ Object



323
324
325
326
327
# File 'app/uploaders/object_storage.rb', line 323

# Yields the file to the given block while holding the exclusive lease,
# protecting callers from concurrent migrations.
def use_file(&blk)
  with_exclusive_lease { unsafe_use_file(&blk) }
end

#use_open_file(unlink_early: true) ⇒ Object



329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
# File 'app/uploaders/object_storage.rb', line 329

# Yields the file's contents wrapped in an OpenFile, always backed by a
# local Tempfile regardless of where the file is stored.
#
# unlink_early: when true (the default) the tempfile is unlinked right
# after creation — on POSIX the data stays readable until the handle is
# closed; when false, the tempfile is unlinked after the block returns.
def use_open_file(unlink_early: true)
  Tempfile.open(path) do |file|
    file.unlink if unlink_early
    file.binmode

    if file_storage?
      # Local storage: copy straight from disk.
      IO.copy_stream(path, file)
    else
      # Remote storage: stream the object over HTTP chunk by chunk.
      Faraday.get(url) do |req|
        req.options.on_data = proc { |chunk, _| file.write(chunk) }
      end
    end

    # Rewind so the consumer reads from the beginning.
    file.seek(0, IO::SEEK_SET)

    yield OpenFile.new(file)
  ensure
    file.unlink unless unlink_early
  end
end