Module: AwsHelper
- Defined in:
- lib/distributed_resque_worker/aws_helper.rb
Overview
AwsHelper provides helper methods for working with AWS S3 via the aws-sdk v1 (AWS::S3) client: storing, downloading, and deleting objects, generating long-lived read URLs, and mapping content types to file extensions, with basic retry handling for request timeouts.
Constant Summary
- CONTENT_TYPE_TO_EXT =
  {
    'audio/amr'          => '.amr',
    'audio/acc'          => '.mp4',
    'audio/mp4'          => '.mp4',
    'audio/mpeg'         => '.mp3',
    'audio/ogg'          => '.ogg',
    'image/jpeg'         => '.jpg',
    'image/png'          => '.png',
    'image/gif'          => '.gif',
    'text/plain'         => '.txt',
    'text/rtf'           => '.rtf',
    'application/zip'    => '.zip',
    'application/pdf'    => '.pdf',
    'application/msword' => '.doc'
  }.freeze
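A minimal sketch of using the mapping directly; the looked-up MIME types are illustrative, and unmapped types simply return nil:

AwsHelper::CONTENT_TYPE_TO_EXT['image/png']  # => '.png'
AwsHelper::CONTENT_TYPE_TO_EXT['video/mp4']  # => nil (not in the mapping)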
Class Method Summary
- .bucket(bucket) ⇒ Object
- .content_ext(content_type) ⇒ Object
- .content_type(ext) ⇒ Object
- .doomsday ⇒ Object
- .run_with_retry ⇒ Object
- .s3_delete(name, bucket_name) ⇒ Object
- .s3_download_file(name, filename, bucket_name) ⇒ Object
- .s3_get_object(name, bucket_name) ⇒ Object
- .s3_get_object_url(name, bucket_name, _opt = {}) ⇒ Object
- .s3_store(name, content, bucket_name, opt = {}) ⇒ Object
- .s3_store_file(name, file, bucket_name, opt = {}) ⇒ Object
Class Method Details
.bucket(bucket) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 9

def bucket(bucket)
  @bucket = AWS::S3.new.buckets[bucket]
end
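A hedged usage sketch: the bucket name is illustrative, and the call assumes AWS credentials are already configured for the aws-sdk v1 client.

bucket = AwsHelper.bucket('my-upload-bucket')  # hypothetical bucket name
bucket.objects['some/key']                     # AWS::S3::Bucket handle, queried lazily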
.content_ext(content_type) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 106

def content_ext(content_type)
  CONTENT_TYPE_TO_EXT[content_type] || ''
end
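A small sketch of the lookup; an unmapped content type falls back to an empty string.

AwsHelper.content_ext('application/pdf')  # => '.pdf'
AwsHelper.content_ext('video/mp4')        # => ''  (not in CONTENT_TYPE_TO_EXT)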
.content_type(ext) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 100

def content_type(ext)
  CONTENT_TYPE_TO_EXT.each do |ct, cext|
    return ct if ext == cext
  end
end
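A sketch of the reverse lookup. Note that when no extension matches, Hash#each returns the hash itself, so the method returns the whole CONTENT_TYPE_TO_EXT hash rather than nil in that case.

AwsHelper.content_type('.pdf')  # => 'application/pdf'
AwsHelper.content_type('.mp4')  # => 'audio/acc' (first matching entry wins)
AwsHelper.content_type('.xyz')  # => CONTENT_TYPE_TO_EXT (no match found)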
.doomsday ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 45

def doomsday
  expiration = Time.zone.now + 20.years
  # TODO: Update after AWS changes this limit. Will likely be a while
  # since it depends on global transition to 64-bit systems
  #
  # AWS sets 01/20/2038 as an upper limit threshold on expiration date
  # due to https://en.wikipedia.org/wiki/Year_2038_problem
  aws_max_date = Time.zone.parse('2038-01-18')
  expiration = aws_max_date if expiration > aws_max_date
  expiration
end
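A sketch of the capping behaviour; it assumes ActiveSupport is loaded and a time zone is set, since the method relies on Time.zone and 20.years.

Time.zone = 'UTC'
AwsHelper.doomsday  # => roughly 20 years from now, but never later than 2038-01-18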
.run_with_retry ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 28

def run_with_retry
  maxtry = 3
  ntry = 0
  begin
    yield
  rescue AWS::S3::Errors::RequestTimeout
    ntry += 1
    if ntry > maxtry
      Resque.logger.info($ERROR_INFO)
      raise
    end
    print "Error: #{$ERROR_INFO}, retrying\n"
    @bucket = nil # So that we create a new bucket
    retry
  end
end
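A hedged sketch of wrapping an arbitrary S3 call; the object key and bucket name are illustrative. The block is retried up to three times on AWS::S3::Errors::RequestTimeout before the error is logged and re-raised.

AwsHelper.run_with_retry do
  obj = AwsHelper.s3_get_object('uploads/report.pdf', 'my-upload-bucket')  # illustrative names
  obj.read
end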
.s3_delete(name, bucket_name) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 65

def s3_delete(name, bucket_name)
  run_with_retry { s3_get_object(name, bucket_name).delete }
end
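A minimal sketch; the object key and bucket name are illustrative.

AwsHelper.s3_delete('uploads/report.pdf', 'my-upload-bucket')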
.s3_download_file(name, filename, bucket_name) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 74

def s3_download_file(name, filename, bucket_name)
  run_with_retry do
    data = s3_get_object(name, bucket_name).read
    File.open(filename, 'wb') do |file|
      file.write(data)
    end
    nil
  end
end
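A minimal sketch that downloads an object to a local path; all names are illustrative. Note the whole object is read into memory before being written to disk.

AwsHelper.s3_download_file('uploads/report.pdf', '/tmp/report.pdf', 'my-upload-bucket')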
.s3_get_object(name, bucket_name) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 69

def s3_get_object(name, bucket_name)
  bucket = bucket(bucket_name)
  bucket.objects[name]
end
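A sketch of fetching an object handle; the returned AWS::S3::S3Object is lazy, so checking exists? before reading is a reasonable pattern. Names are illustrative.

obj = AwsHelper.s3_get_object('uploads/report.pdf', 'my-upload-bucket')
obj.read if obj.exists?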
.s3_get_object_url(name, bucket_name, _opt = {}) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 57

def s3_get_object_url(name, bucket_name, _opt = {})
  obj = s3_get_object(name, bucket_name)
  return nil unless obj&.exists?
  secure = true
  obj.url_for(:read, secure: secure, expires: doomsday).to_s
end
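A sketch of generating a long-lived, HTTPS, signed read URL (expiry is capped by doomsday); nil is returned when the object does not exist. Names are illustrative.

url = AwsHelper.s3_get_object_url('uploads/report.pdf', 'my-upload-bucket')
url  # => "https://..." or nil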
.s3_store(name, content, bucket_name, opt = {}) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 20

def s3_store(name, content, bucket_name, opt = {})
  run_with_retry do
    obj = s3_get_object(name, bucket_name)
    obj.write(content, opt)
    obj.exists? ? s3_get_object_url(name, bucket_name) : nil
  end
end
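A hedged sketch of storing in-memory content; the key, bucket, and the :content_type option are illustrative (the options hash is passed straight through to the aws-sdk v1 write call). The object URL is returned on success, nil otherwise.

url = AwsHelper.s3_store('uploads/notes.txt', 'hello world', 'my-upload-bucket',
                         content_type: 'text/plain')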
.s3_store_file(name, file, bucket_name, opt = {}) ⇒ Object
# File 'lib/distributed_resque_worker/aws_helper.rb', line 13

def s3_store_file(name, file, bucket_name, opt = {})
  # Stream the content for storage
  File.open(file, 'rb') do |f|
    return s3_store(name, f, bucket_name, opt)
  end
end
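A minimal sketch of streaming a local file to S3; the key, local path, bucket name, and option are illustrative.

AwsHelper.s3_store_file('uploads/report.pdf', '/tmp/report.pdf', 'my-upload-bucket',
                        content_type: 'application/pdf')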