Module: ObjectStorage::Concern
- Extended by:
- ActiveSupport::Concern
- Included in:
- AttachmentUploader, AvatarUploader, Ci::PipelineArtifactUploader, Ci::SecureFileUploader, DeletedObjectUploader, DependencyProxy::FileUploader, DesignManagement::DesignV432x230Uploader, ExternalDiffUploader, FileUploader, JobArtifactUploader, LfsObjectUploader, MetricImageUploader, Packages::Composer::CacheUploader, Packages::Debian::ComponentFileUploader, Packages::Debian::DistributionReleaseFileUploader, Packages::PackageFileUploader, Pages::DeploymentUploader, Terraform::StateUploader
- Defined in:
- app/uploaders/object_storage.rb
Defined Under Namespace
Classes: OpenFile
Instance Method Summary
- #cache!(new_file = sanitized_file) ⇒ Object
- #delete_migrated_file(migrated_file) ⇒ Object
- #exclusive_lease_key ⇒ Object
- #exists? ⇒ Boolean
- #file_cache_storage? ⇒ Boolean
- #file_storage? ⇒ Boolean
- #filename ⇒ Object
  Allows configuring and overwriting the filename.
- #filename=(filename) ⇒ Object
- #fog_attributes ⇒ Object
- #fog_credentials ⇒ Object
- #fog_directory ⇒ Object
- #fog_public ⇒ Object
  Sets the ACL of uploaded objects to not-public (fog-aws) or no ACL at all (fog-google).
- #migrate!(new_store) ⇒ Object
  Move the file to another store.
- #object_store ⇒ Object
- #object_store=(value) ⇒ Object
- #persist_object_store! ⇒ Object
  Save the current @object_store to the model's <mounted_as>_store column.
- #persist_object_store? ⇒ Boolean
  Return true if the current file is part of the model (i.e. is mounted in the model).
- #schedule_background_upload(*args) ⇒ Object
- #store!(new_file = nil) ⇒ Object
- #store_dir(store = nil) ⇒ Object
- #store_dirs ⇒ Object
- #upload_paths(identifier) ⇒ Object
  Returns all the possible paths for an upload.
- #use_file(&blk) ⇒ Object
- #use_open_file(&blk) ⇒ Object
Instance Method Details
#cache!(new_file = sanitized_file) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 363

def cache!(new_file = sanitized_file)
  # We intercept ::UploadedFile which might be stored on remote storage
  # We use that for "accelerated" uploads, where we store result on remote storage
  if new_file.is_a?(::UploadedFile) && new_file.remote_id.present?
    return cache_remote_file!(new_file.remote_id, new_file.original_filename)
  end

  super
end
#delete_migrated_file(migrated_file) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 337

def delete_migrated_file(migrated_file)
  migrated_file.delete
end
#exclusive_lease_key ⇒ Object
# File 'app/uploaders/object_storage.rb', line 382

def exclusive_lease_key
  "object_storage_migrate:#{model.class}:#{model.id}"
end
#exists? ⇒ Boolean
# File 'app/uploaders/object_storage.rb', line 341

def exists?
  file.present?
end
#file_cache_storage? ⇒ Boolean
# File 'app/uploaders/object_storage.rb', line 242

def file_cache_storage?
  cache_storage.is_a?(CarrierWave::Storage::File)
end
#file_storage? ⇒ Boolean
# File 'app/uploaders/object_storage.rb', line 238

def file_storage?
  storage.is_a?(CarrierWave::Storage::File)
end
#filename ⇒ Object
Allows configuring and overwriting the filename.
# File 'app/uploaders/object_storage.rb', line 230

def filename
  @filename || super || file&.filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
#filename=(filename) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 234

def filename=(filename)
  @filename = filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
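For illustration, a minimal sketch of how the writer above interacts with the #filename fallback chain; the AvatarUploader/user pair is only an example of a mounted uploader and is not prescribed by this module:

# Illustrative sketch: any includer of ObjectStorage::Concern behaves the same;
# AvatarUploader (listed under "Included in" above) is just one example.
uploader = AvatarUploader.new(user, :avatar)

uploader.filename                      # falls back to super or file&.filename
uploader.filename = 'custom-name.png'
uploader.filename                      # => "custom-name.png" (the explicit override wins)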
#fog_attributes ⇒ Object
# File 'app/uploaders/object_storage.rb', line 325

def fog_attributes
  @fog_attributes ||= self.class.object_store_config.fog_attributes
end
#fog_credentials ⇒ Object
# File 'app/uploaders/object_storage.rb', line 321

def fog_credentials
  self.class.object_store_credentials
end
#fog_directory ⇒ Object
# File 'app/uploaders/object_storage.rb', line 317

def fog_directory
  self.class.remote_store_path
end
#fog_public ⇒ Object
Set the ACL of uploaded objects to not-public (fog-aws) [1] or no ACL at all (fog-google). The value is ignored by the other supported backends (fog-aliyun, fog-openstack, fog-rackspace).
[1]: https://github.com/fog/fog-aws/blob/daa50bb3717a462baf4d04d0e0cbfc18baacb541/lib/fog/aws/models/storage/file.rb#L152-L159
# File 'app/uploaders/object_storage.rb', line 333

def fog_public
  nil
end
#migrate!(new_store) ⇒ Object
Move the file to another store
new_store: Enum (Store::LOCAL, Store::REMOTE)
# File 'app/uploaders/object_storage.rb', line 302

def migrate!(new_store)
  with_exclusive_lease do
    unsafe_migrate!(new_store)
  end
end
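A minimal usage sketch, assuming a model whose uploader is already mounted; the model and association names are illustrative only:

# Illustrative: `artifact.file` stands for any mounted uploader that includes
# this concern (for example JobArtifactUploader).
uploader = artifact.file

# Move the underlying file from local disk to the configured remote bucket.
# The migration runs under an exclusive lease (see #exclusive_lease_key), so
# concurrent calls for the same record do not race each other.
uploader.migrate!(ObjectStorage::Store::REMOTE)

uploader.object_store  # => ObjectStorage::Store::REMOTE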
#object_store ⇒ Object
# File 'app/uploaders/object_storage.rb', line 246

def object_store
  # We use Store::LOCAL as null value indicates the local storage
  @object_store ||= model.try(store_serialization_column) || Store::LOCAL
end
#object_store=(value) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 252

def object_store=(value)
  @object_store = value || Store::LOCAL
  @storage = storage_for(object_store)
end
#persist_object_store! ⇒ Object
Save the current @object_store to the model's <mounted_as>_store column.
# File 'app/uploaders/object_storage.rb', line 265

def persist_object_store!
  return unless persist_object_store?

  updated = model.update_column(store_serialization_column, object_store)
  raise 'Failed to update object store' unless updated
end
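As a sketch of how the store column is persisted, assuming an uploader mounted as `file` (so the column follows the <mounted_as>_store pattern and is named `file_store`):

# Hypothetical mounted-as-`file` example; only the column-naming pattern is
# taken from the documentation above.
uploader.object_store = ObjectStorage::Store::REMOTE

uploader.persist_object_store?  # => true when the model responds to #file_store=
uploader.persist_object_store!  # writes file_store via update_column, raises if the update fails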
#persist_object_store? ⇒ Boolean
Return true if the current file is part of the model (i.e. is mounted in the model).
# File 'app/uploaders/object_storage.rb', line 260

def persist_object_store?
  model.respond_to?(:"#{store_serialization_column}=")
end
#schedule_background_upload(*args) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 308

def schedule_background_upload(*args)
  return unless schedule_background_upload?

  ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
                                                    model.class.name,
                                                    mounted_as,
                                                    model.id)
end
#store!(new_file = nil) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 373

def store!(new_file = nil)
  # when direct upload is enabled, always store on remote storage
  if self.class.object_store_enabled? && self.class.direct_upload_enabled?
    self.object_store = Store::REMOTE
  end

  super
end
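A short sketch of the effect when object storage and direct upload are enabled for the uploader class (the predicates are the ones used in the code above; `some_file` is illustrative):

# Sketch: when both predicates below are true, store! targets remote storage
# directly instead of keeping the file on local disk.
if uploader.class.object_store_enabled? && uploader.class.direct_upload_enabled?
  uploader.store!(some_file)   # `some_file` is any storable input (illustrative)
  uploader.object_store        # => ObjectStorage::Store::REMOTE
end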
#store_dir(store = nil) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 345

def store_dir(store = nil)
  store_dirs[store || object_store]
end
#store_dirs ⇒ Object
# File 'app/uploaders/object_storage.rb', line 349

def store_dirs
  {
    Store::LOCAL => File.join(base_dir, dynamic_segment),
    Store::REMOTE => File.join(dynamic_segment)
  }
end
#upload_paths(identifier) ⇒ Object
Returns all the possible paths for an upload. The `upload.path` is a lookup parameter, and it may change depending on the `store` param.
# File 'app/uploaders/object_storage.rb', line 359

def upload_paths(identifier)
  store_dirs.map { |store, path| File.join(path, identifier) }
end
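To make the two layouts concrete, a sketch with made-up `base_dir` and `dynamic_segment` values (the real values depend on the including uploader):

# All path values below are illustrative, not produced by this module as-is.
uploader.store_dirs
# => { ObjectStorage::Store::LOCAL  => "uploads/-/system/user/avatar/1",
#      ObjectStorage::Store::REMOTE => "user/avatar/1" }

uploader.upload_paths("avatar.png")
# => ["uploads/-/system/user/avatar/1/avatar.png", "user/avatar/1/avatar.png"]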
#use_file(&blk) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 272

def use_file(&blk)
  with_exclusive_lease do
    unsafe_use_file(&blk)
  end
end
#use_open_file(&blk) ⇒ Object
# File 'app/uploaders/object_storage.rb', line 278

def use_open_file(&blk)
  Tempfile.open(path) do |file|
    file.unlink
    file.binmode

    if file_storage?
      IO.copy_stream(path, file)
    else
      Faraday.get(url) do |req|
        req.options.on_data = proc { |chunk, _| file.write(chunk) }
      end
    end

    file.seek(0, IO::SEEK_SET)

    yield OpenFile.new(file)
  end
end
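A usage sketch: the block receives an OpenFile wrapping an unlinked, rewound Tempfile regardless of whether the data came from local disk or was streamed from object storage. The read call assumes OpenFile delegates IO reads to the underlying Tempfile:

uploader.use_open_file do |open_file|
  # The same block works for local and remote files; the data has already been
  # copied or streamed into a temporary file by use_open_file.
  first_chunk = open_file.read(1024)
end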