Class: AssetSync::Storage

Inherits:
Object
  • Object
show all
Defined in:
lib/asset_sync/storage.rb

Defined Under Namespace

Classes: BucketNotFound

Constant Summary collapse

# Matches fingerprinted asset paths of the form "dir/name-<digest>.ext",
# capturing directory, basename and extension.
REGEXP_FINGERPRINTED_FILES =
/\A(.*)\/(.+)-[^\.]+\.([^\.]+)\z/m
# Basenames ending in a 32+ character hex digest receive far-future
# cache-control headers (see #upload_file).
# NOTE(review): `$` is a line anchor, not end-of-string — acceptable here
# since basenames contain no newlines, but `\z` would be stricter.
REGEXP_ASSETS_TO_CACHE_CONTROL =
/-[0-9a-fA-F]{32,}$/
# Maps compressed-file extensions to their HTTP Content-Encoding values.
CONTENT_ENCODING =
{
  'gz' => 'gzip',
  'br' => 'br',
}.freeze

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(cfg) ⇒ Storage

Returns a new instance of Storage.



20
21
22
# File 'lib/asset_sync/storage.rb', line 20

# @param cfg [AssetSync::Config] configuration driving all storage operations
def initialize(cfg)
  @config = cfg
end

Instance Attribute Details

#config ⇒ Object

Returns the value of attribute config.



18
19
20
# File 'lib/asset_sync/storage.rb', line 18

# Reader for the configuration object supplied to #initialize.
def config
  @config
end

Instance Method Details

#always_upload_filesObject



128
129
130
# File 'lib/asset_sync/storage.rb', line 128

# Files that must be uploaded on every sync, plus the manifest path when
# include_manifest is configured (see #get_manifest_path).
def always_upload_files
  forced = expand_file_names(self.config.always_upload)
  forced + get_manifest_path
end

#bucketObject



28
29
30
31
32
33
34
35
36
37
# File 'lib/asset_sync/storage.rb', line 28

# Memoized fog directory handle for the configured bucket.
# NOTE(review): the :prefix option is skipped for Backblaze — presumably
# unsupported by that provider; confirm against fog-backblaze.
def bucket
  # fixes: https://github.com/rumblelabs/asset_sync/issues/18
  return @bucket if @bucket

  @bucket =
    if self.config.backblaze?
      connection.directories.get(self.config.fog_directory)
    else
      connection.directories.get(self.config.fog_directory, :prefix => self.config.assets_prefix)
    end
end

#connectionObject



24
25
26
# File 'lib/asset_sync/storage.rb', line 24

# Memoized fog storage connection built from the configured fog options.
def connection
  @connection ||= Fog::Storage.new(self.config.fog_options)
end

#delete_extra_remote_filesObject



195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
# File 'lib/asset_sync/storage.rb', line 195

# Removes remote files that are no longer present locally and are neither
# ignored nor force-uploaded. Uses AWS bulk deletion (500 keys per call)
# when available, otherwise deletes sequentially via the fog listing.
def delete_extra_remote_files
  log "Fetching files to flag for delete"
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  candidates = get_remote_files - local_files - ignored_files - always_upload_files

  log "Flagging #{candidates.size} file(s) for deletion"
  # Delete unneeded remote files, if we are on aws delete in bulk else use sequential delete
  if self.config.aws? && connection.respond_to?(:delete_multiple_objects)
    candidates.each_slice(500) do |batch|
      connection.delete_multiple_objects(config.fog_directory, batch)
    end
  else
    bucket.files.each do |remote|
      delete_file(remote, candidates)
    end
  end
end

#delete_file(f, remote_files_to_delete) ⇒ Object



188
189
190
191
192
193
# File 'lib/asset_sync/storage.rb', line 188

# Destroys the remote file +f+ only when its key appears in the
# deletion list; otherwise leaves it untouched.
def delete_file(f, remote_files_to_delete)
  return unless remote_files_to_delete.include?(f.key)

  log "Deleting: #{f.key}"
  f.destroy
end

#files_to_invalidateObject



136
137
138
# File 'lib/asset_sync/storage.rb', line 136

# Absolute CDN paths to invalidate, built from the configured invalidation
# list with the assets prefix prepended.
def files_to_invalidate
  prefix = self.config.assets_prefix
  self.config.invalidate.map { |name| File.join("/", prefix, name) }
end

#files_with_custom_headersObject



132
133
134
# File 'lib/asset_sync/storage.rb', line 132

# Maps each configured custom-header path (with the assets prefix
# prepended) to its header hash.
def files_with_custom_headers
  self.config.custom_headers.each_with_object({}) do |(key, headers), result|
    result[File.join(self.config.assets_prefix, key)] = headers
  end
end

#get_asset_files_from_manifestObject



143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
# File 'lib/asset_sync/storage.rb', line 143

# Returns the asset file list derived from the Sprockets manifest
# (each entry prefixed with assets_prefix), or nil when manifest usage
# is disabled or no manifest can be located.
#
# @return [Array<String>, nil]
def get_asset_files_from_manifest
  if self.config.manifest
    if ActionView::Base.respond_to?(:assets_manifest)
      log "Using: Rails 4.0 manifest access"
      manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir, self.config.manifest_path)
      return manifest.assets.values.map { |f| File.join(self.config.assets_prefix, f) }
    elsif File.exist?(self.config.manifest_path)
      log "Using: Manifest #{self.config.manifest_path}"
      yml = AssetSync.load_yaml(IO.read(self.config.manifest_path))

      # FIX: use `.uniq`, not `.uniq!` — `uniq!` returns nil when nothing was
      # removed, which made this method return nil for duplicate-free
      # manifests and forced callers back to a directory search.
      return yml.map do |original, compiled|
        # Upload font originals and compiled
        if original =~ /^.+(eot|svg|ttf|woff)$/
          [original, compiled]
        else
          compiled
        end
      end.flatten.map { |f| File.join(self.config.assets_prefix, f) }.uniq
    else
      log "Warning: Manifest could not be found"
    end
  end
end

#get_local_filesObject



167
168
169
170
171
172
173
174
175
176
177
# File 'lib/asset_sync/storage.rb', line 167

# Local asset paths: taken from the manifest when available, otherwise
# from a recursive directory search under the assets prefix.
def get_local_files
  manifest_files = get_asset_files_from_manifest
  return manifest_files if manifest_files

  log "Using: Directory Search of #{path}/#{self.config.assets_prefix}"
  Dir.chdir(path) do
    pattern = self.config.assets_prefix.present? ? "#{self.config.assets_prefix}/**/**" : '**/**'
    Dir[pattern]
  end
end

#get_manifest_pathObject



63
64
65
66
67
68
69
70
71
72
73
# File 'lib/asset_sync/storage.rb', line 63

# Manifest path relative to the public path, as a one-element array,
# or [] when include_manifest is disabled.
def get_manifest_path
  return [] unless self.config.include_manifest

  manifest_path =
    if ActionView::Base.respond_to?(:assets_manifest)
      sprockets_manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir, self.config.manifest_path)
      sprockets_manifest.filename
    else
      self.config.manifest_path
    end
  [manifest_path.sub(/^#{path}\//, "")] # full path to relative path
end

#get_remote_filesObject

Raises:

  • (BucketNotFound)


179
180
181
182
183
184
185
186
# File 'lib/asset_sync/storage.rb', line 179

# Every file key currently in the remote bucket.
#
# @return [Array<String>]
# @raise [BucketNotFound] when the configured directory does not exist
def get_remote_files
  raise BucketNotFound.new("#{self.config.fog_provider} Bucket: #{self.config.fog_directory} not found.") unless bucket
  # fixes: https://github.com/rumblelabs/asset_sync/issues/16
  #        (work-around for https://github.com/fog/fog/issues/596)
  # Deliberately uses #each (not #map) so fog's paginating enumeration
  # walks every page of the listing.
  keys = []
  bucket.files.each { |remote| keys << remote.key }
  keys
end

#ignored_filesObject



59
60
61
# File 'lib/asset_sync/storage.rb', line 59

# Local files matching the configured ignore patterns; these are never
# uploaded and never flagged for remote deletion.
def ignored_files
  expand_file_names(self.config.ignored_files)
end

#keep_existing_remote_files? ⇒ Boolean

Returns:

  • (Boolean)


43
44
45
# File 'lib/asset_sync/storage.rb', line 43

# Whether remote files missing locally should be kept (skips the
# deletion phase of #sync).
def keep_existing_remote_files?
  self.config.existing_remote_files?
end

#local_filesObject



75
76
77
78
# File 'lib/asset_sync/storage.rb', line 75

# Memoized, de-duplicated list of local asset paths: the manifest /
# directory scan plus any configured additional file paths.
def local_files
  @local_files ||=
    (get_local_files + config.additional_local_file_paths).uniq
end

#log(msg) ⇒ Object



39
40
41
# File 'lib/asset_sync/storage.rb', line 39

# Delegates logging to AssetSync.log.
def log(msg)
  AssetSync.log(msg)
end

#pathObject



47
48
49
# File 'lib/asset_sync/storage.rb', line 47

# Local public path against which asset file names are resolved.
def path
  self.config.public_path
end

#remote_file_list_cache_file_pathObject



51
52
53
# File 'lib/asset_sync/storage.rb', line 51

# Local path of the cached JSON listing of remote files (may be nil).
def remote_file_list_cache_file_path
  self.config.remote_file_list_cache_file_path
end

#remote_file_list_remote_pathObject



55
56
57
# File 'lib/asset_sync/storage.rb', line 55

# Remote key under which the file-list cache is stored in the bucket
# (may be nil).
def remote_file_list_remote_path
  self.config.remote_file_list_remote_path
end

#remote_filesObject



80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# File 'lib/asset_sync/storage.rb', line 80

# Remote file keys, memoized. Prefers a locally cached JSON listing
# (optionally refreshed from the bucket first); falls back to a full
# remote enumeration when no usable cache exists.
def remote_files
  return [] if ignore_existing_remote_files?
  return @remote_files if @remote_files

  if remote_file_list_remote_path && remote_file_list_cache_file_path
    log "Downloading file list file from remote"
    cached_listing = bucket.files.get(remote_file_list_remote_path)
    if cached_listing
      File.open(remote_file_list_cache_file_path, 'w') do |cache|
        cache.write(cached_listing.body)
      end
    end
  end

  if remote_file_list_cache_file_path && File.file?(remote_file_list_cache_file_path)
    begin
      return @remote_files = JSON.parse(File.read(remote_file_list_cache_file_path))
    rescue JSON::ParserError
      warn "Failed to parse #{remote_file_list_cache_file_path} as json"
    end
  end

  @remote_files = get_remote_files
end

#syncObject



358
359
360
361
362
363
364
# File 'lib/asset_sync/storage.rb', line 358

# Full sync: upload new/changed local files, then (unless configured to
# keep them) delete remote files that no longer exist locally.
def sync
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  log "AssetSync: Syncing."
  upload_files
  delete_extra_remote_files unless keep_existing_remote_files?
  log "AssetSync: Done."
end

#update_remote_file_list_cache(local_files_to_upload) ⇒ Object



106
107
108
109
110
111
112
113
114
# File 'lib/asset_sync/storage.rb', line 106

# Writes the current remote listing (files just uploaded plus previously
# known remote files) to the local cache file as JSON.
def update_remote_file_list_cache(local_files_to_upload)
  return unless remote_file_list_cache_file_path
  return if ignore_existing_remote_files?

  File.open(self.remote_file_list_cache_file_path, 'w') do |cache|
    listing = local_files_to_upload + remote_files
    cache.write(listing.to_json)
  end
end

#update_remote_file_list_in_remoteObject



116
117
118
119
120
121
122
123
124
125
126
# File 'lib/asset_sync/storage.rb', line 116

# Uploads the local file-list cache to the bucket so later runs can skip
# a full remote enumeration.
def update_remote_file_list_in_remote
  return if ignore_existing_remote_files?
  return unless remote_file_list_remote_path
  return unless remote_file_list_cache_file_path

  log "Updating file list file in remote"
  bucket.files.new(
    :key  => remote_file_list_remote_path,
    :body => File.open(remote_file_list_cache_file_path)
  ).save
end

#upload_file(f) ⇒ Object



214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
# File 'lib/asset_sync/storage.rb', line 214

# Uploads a single asset (a path relative to #path) to the bucket.
#
# Applies, in order: fog_public / AWS ACL flags, far-future cache headers
# for fingerprinted assets, custom headers (exact match first, then regexp
# match), compression handling (uploads file.css.gz in place of file.css
# when it is smaller; skips the .gz twin itself), Content-Encoding for
# pre-compressed files when compression mode is off, and AWS
# reduced-redundancy storage when configured.
#
# FIX: file handles are now closed in an ensure block so they no longer
# leak when bucket.files.create (or any earlier step) raises.
def upload_file(f)
  # TODO output files in debug logs as asset filename only.
  one_year = 31557600
  ext = File.extname(f)[1..-1]
  mime = MultiMime.lookup(ext)
  compressed_file_handle = nil
  file_handle = File.open("#{path}/#{f}")

  begin
    file = {
      :key => f,
      :body => file_handle,
      :content_type => mime
    }

    # region fog_public

    if config.aws? && config.aws_acl
      file[:acl] = config.aws_acl
    elsif config.fog_public.use_explicit_value?
      file[:public] = config.fog_public.to_bool
    end

    # endregion fog_public

    uncompressed_filename = f.sub(/\.gz\z/, '')
    basename = File.basename(uncompressed_filename, File.extname(uncompressed_filename))

    # Fingerprinted assets (digest in the basename, or any configured extra
    # pattern) are immutable, so they get far-future cache headers.
    assets_to_cache_control = Regexp.union([REGEXP_ASSETS_TO_CACHE_CONTROL] | config.cache_asset_regexps).source
    if basename.match(Regexp.new(assets_to_cache_control)).present?
      file.merge!({
        :cache_control => "public, max-age=#{one_year}",
        :expires => CGI.rfc1123_date(Time.now + one_year)
      })
    end

    # overwrite headers if applicable, you probably shouldn't specific key/body, but cache-control headers etc.

    if files_with_custom_headers.has_key? f
      file.merge! files_with_custom_headers[f]
      log "Overwriting #{f} with custom headers #{files_with_custom_headers[f].to_s}"
    elsif key = self.config.custom_headers.keys.detect {|k| f.match(Regexp.new(k))}
      headers = {}
      self.config.custom_headers[key].each do |k, value|
        headers[k.to_sym] = value
      end
      file.merge! headers
      log "Overwriting matching file #{f} with custom headers #{headers.to_s}"
    end

    ignore = false
    if config.compression
      compressed_name = "#{path}/#{f}.#{config.compression}"

      # `File.extname` returns value with `.` prefix, `config.compression` contains value without `.`
      if File.extname(f)[1..-1] == config.compression
        # Don't bother uploading compressed assets if we are in compression mode
        # as we will overwrite file.css with file.css.gz if it exists.
        log "Ignoring: #{f}"
        ignore = true
      elsif File.exist?(compressed_name)
        original_size = File.size("#{path}/#{f}")
        compressed_size = File.size(compressed_name)

        if compressed_size < original_size
          percentage = ((compressed_size.to_f/original_size.to_f)*100).round(2)
          compressed_file_handle = File.open(compressed_name)
          file.merge!({
                        :key => f,
                        :body => compressed_file_handle,
                        :content_encoding => CONTENT_ENCODING[config.compression]
                      })
          log "Uploading: #{compressed_name} in place of #{f} saving #{percentage}%"
        else
          percentage = ((original_size.to_f/compressed_size.to_f)*100).round(2)
          log "Uploading: #{f} instead of #{compressed_name} (compression increases this file by #{percentage}%)"
        end
      end
    else
      compressed_encoding = CONTENT_ENCODING[File.extname(f).delete('.')]
      if compressed_encoding
        # set content encoding for compressed files this allows cloudfront to properly handle requests with Accept-Encoding
        # http://docs.amazonwebservices.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html
        uncompressed_filename = f[0..-4]
        ext = File.extname(uncompressed_filename)[1..-1]
        mime = MultiMime.lookup(ext)
        file.merge!({
          :content_type     => mime,
          :content_encoding => compressed_encoding
        })
      end
      log "Uploading: #{f}"
    end

    if config.aws? && config.aws_rrs?
      file.merge!({
        :storage_class => 'REDUCED_REDUNDANCY'
      })
    end

    bucket.files.create( file ) unless ignore
  ensure
    # Close handles even when the upload raises; previously they leaked.
    file_handle.close
    compressed_file_handle.close if compressed_file_handle
  end
end

#upload_filesObject



318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
# File 'lib/asset_sync/storage.rb', line 318

# Uploads every local file that is new or missing remotely (plus the
# always-upload set), optionally with a pool of worker threads, then
# invalidates configured CDN paths and refreshes the remote file-list cache.
def upload_files
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  local_files_to_upload = local_files - ignored_files - remote_files + always_upload_files
  # NOTE(review): get_non_fingerprinted is defined elsewhere in this file;
  # presumably it re-adds non-fingerprinted counterparts — confirm.
  local_files_to_upload = (local_files_to_upload + get_non_fingerprinted(local_files_to_upload)).uniq
  # Only files.
  local_files_to_upload = local_files_to_upload.select { |f| File.file? "#{path}/#{f}" }

  if self.config.concurrent_uploads
    jobs = Queue.new
    local_files_to_upload.each { |f| jobs.push(f) }
    jobs.close

    # Never spawn more threads than there are files to upload.
    num_threads = [self.config.concurrent_uploads_max_threads, local_files_to_upload.length].min
    # Upload new files
    workers = Array.new(num_threads) do
      Thread.new do
        # Queue#pop returns nil once the closed queue drains, ending the loop.
        while f = jobs.pop
          upload_file(f)
        end
      end
    end
    workers.map(&:join)
  else
    # Upload new files
    local_files_to_upload.each do |f|
      upload_file f
    end
  end

  if self.config.cdn_distribution_id && files_to_invalidate.any?
    log "Invalidating Files"
    cdn ||= Fog::CDN.new(self.config.fog_options.except(:region))
    data = cdn.post_invalidation(self.config.cdn_distribution_id, files_to_invalidate)
    log "Invalidation id: #{data.body["Id"]}"
  end

  update_remote_file_list_cache(local_files_to_upload)
  update_remote_file_list_in_remote
end