Class: AssetSync::Storage

Inherits: Object

Defined in: lib/asset_sync/storage.rb

Defined Under Namespace

Classes: BucketNotFound

Constant Summary

REGEXP_FINGERPRINTED_FILES =
/\A(.*)\/(.+)-[^\.]+\.([^\.]+)\z/m
REGEXP_ASSETS_TO_CACHE_CONTROL =
/-[0-9a-fA-F]{32,}$/
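
For orientation, a quick sketch of what these patterns match. The paths are illustrative, not taken from the gem:

require "asset_sync"

# REGEXP_FINGERPRINTED_FILES splits a fingerprinted path into
# directory, base name and extension.
m = "assets/application-4dd5b109ee3439da54f5bdfd78a80473.css"
      .match(AssetSync::Storage::REGEXP_FINGERPRINTED_FILES)
m[1]  # => "assets"
m[2]  # => "application"
m[3]  # => "css"

# REGEXP_ASSETS_TO_CACHE_CONTROL recognises a digest of 32 or more hex
# digits at the end of a base name, i.e. a fingerprinted asset that can
# safely be cached with a far-future expiry.
"application-4dd5b109ee3439da54f5bdfd78a80473"
  .match?(AssetSync::Storage::REGEXP_ASSETS_TO_CACHE_CONTROL)  # => true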

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(cfg) ⇒ Storage

Returns a new instance of Storage.



# File 'lib/asset_sync/storage.rb', line 15

def initialize(cfg)
  @config = cfg
end
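
In practice the instance is usually built from the gem's global configuration. A minimal sketch, assuming AssetSync has been configured in the usual way:

storage = AssetSync::Storage.new(AssetSync.config)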

Instance Attribute Details

#config ⇒ Object

Returns the value of attribute config.



# File 'lib/asset_sync/storage.rb', line 13

def config
  @config
end

Instance Method Details

#always_upload_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 123

def always_upload_files
  expand_file_names(self.config.always_upload) + get_manifest_path
end

#bucket ⇒ Object



# File 'lib/asset_sync/storage.rb', line 23

def bucket
  # fixes: https://github.com/rumblelabs/asset_sync/issues/18

  @bucket ||= if self.config.backblaze?
                connection.directories.get(self.config.fog_directory)
              else
                connection.directories.get(self.config.fog_directory, :prefix => self.config.assets_prefix)
              end

end

#connection ⇒ Object



# File 'lib/asset_sync/storage.rb', line 19

def connection
  @connection ||= Fog::Storage.new(self.config.fog_options)
end

#delete_extra_remote_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 190

def delete_extra_remote_files
  log "Fetching files to flag for delete"
  remote_files = get_remote_files
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  from_remote_files_to_delete = remote_files - local_files - ignored_files - always_upload_files

  log "Flagging #{from_remote_files_to_delete.size} file(s) for deletion"
  # Delete unneeded remote files; on AWS use bulk delete, otherwise delete one file at a time
  if self.config.aws? && connection.respond_to?(:delete_multiple_objects)
    from_remote_files_to_delete.each_slice(500) do |slice|
      connection.delete_multiple_objects(config.fog_directory, slice)
    end
  else
    bucket.files.each do |f|
      delete_file(f, from_remote_files_to_delete)
    end
  end
end

#delete_file(f, remote_files_to_delete) ⇒ Object



# File 'lib/asset_sync/storage.rb', line 183

def delete_file(f, remote_files_to_delete)
  if remote_files_to_delete.include?(f.key)
    log "Deleting: #{f.key}"
    f.destroy
  end
end

#files_to_invalidate ⇒ Object



# File 'lib/asset_sync/storage.rb', line 131

def files_to_invalidate
  self.config.invalidate.map { |filename| File.join("/", self.config.assets_prefix, filename) }
end
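
A hypothetical illustration: with the default "assets" prefix and config.invalidate = ["application.js"], this produces the absolute paths handed to the CDN invalidation in #upload_files:

storage.files_to_invalidate  # => ["/assets/application.js"]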

#files_with_custom_headers ⇒ Object



# File 'lib/asset_sync/storage.rb', line 127

def files_with_custom_headers
  self.config.custom_headers.inject({}) { |h,(k, v)| h[File.join(self.config.assets_prefix, k)] = v; h; }
end
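
To show the shape of the returned hash, a hypothetical initializer setting (keys are re-rooted under the assets prefix):

# config.custom_headers = {
#   "application.js" => { :cache_control => "no-cache" }
# }

storage.files_with_custom_headers
# => { "assets/application.js" => { :cache_control => "no-cache" } }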

#get_asset_files_from_manifest ⇒ Object



# File 'lib/asset_sync/storage.rb', line 138

def get_asset_files_from_manifest
  if self.config.manifest
    if ActionView::Base.respond_to?(:assets_manifest)
      log "Using: Rails 4.0 manifest access"
      manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir)
      return manifest.assets.values.map { |f| File.join(self.config.assets_prefix, f) }
    elsif File.exist?(self.config.manifest_path)
      log "Using: Manifest #{self.config.manifest_path}"
      yml = AssetSync.load_yaml(IO.read(self.config.manifest_path))

      return yml.map do |original, compiled|
        # Upload font originals and compiled
        if original =~ /^.+(eot|svg|ttf|woff)$/
          [original, compiled]
        else
          compiled
        end
      end.flatten.map { |f| File.join(self.config.assets_prefix, f) }.uniq # uniq (not uniq!), since uniq! returns nil when there are no duplicates
    else
      log "Warning: Manifest could not be found"
    end
  end
end

#get_local_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 162

def get_local_files
  if from_manifest = get_asset_files_from_manifest
    return from_manifest
  end

  log "Using: Directory Search of #{path}/#{self.config.assets_prefix}"
  Dir.chdir(path) do
    to_load = self.config.assets_prefix.present? ? "#{self.config.assets_prefix}/**/**" : '**/**'
    Dir[to_load]
  end
end

#get_manifest_path ⇒ Object



# File 'lib/asset_sync/storage.rb', line 58

def get_manifest_path
  return [] unless self.config.include_manifest

  if ActionView::Base.respond_to?(:assets_manifest)
    manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir)
    manifest_path = manifest.filename
  else
    manifest_path = self.config.manifest_path
  end
  [manifest_path.sub(/^#{path}\//, "")] # full path to relative path
end

#get_remote_files ⇒ Object

Raises:

  • (BucketNotFound)


# File 'lib/asset_sync/storage.rb', line 174

def get_remote_files
  raise BucketNotFound.new("#{self.config.fog_provider} Bucket: #{self.config.fog_directory} not found.") unless bucket
  # fixes: https://github.com/rumblelabs/asset_sync/issues/16
  #        (work-around for https://github.com/fog/fog/issues/596)
  files = []
  bucket.files.each { |f| files << f.key }
  return files
end

#ignored_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 54

def ignored_files
  expand_file_names(self.config.ignored_files)
end

#keep_existing_remote_files? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/asset_sync/storage.rb', line 38

def keep_existing_remote_files?
  self.config.existing_remote_files?
end

#local_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 70

def local_files
  @local_files ||=
    (get_local_files + config.additional_local_file_paths).uniq
end

#log(msg) ⇒ Object



# File 'lib/asset_sync/storage.rb', line 34

def log(msg)
  AssetSync.log(msg)
end

#path ⇒ Object



# File 'lib/asset_sync/storage.rb', line 42

def path
  self.config.public_path
end

#remote_file_list_cache_file_path ⇒ Object



# File 'lib/asset_sync/storage.rb', line 46

def remote_file_list_cache_file_path
  self.config.remote_file_list_cache_file_path
end

#remote_file_list_remote_path ⇒ Object



# File 'lib/asset_sync/storage.rb', line 50

def remote_file_list_remote_path
  self.config.remote_file_list_remote_path
end

#remote_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 75

def remote_files
  return [] if ignore_existing_remote_files?
  return @remote_files if @remote_files

  if remote_file_list_remote_path && remote_file_list_cache_file_path
    log "Downloading file list file from remote"
    remote_cache_file = bucket.files.get(remote_file_list_remote_path)
    if remote_cache_file
      File.open(remote_file_list_cache_file_path, 'w') do |local_file|
        local_file.write(remote_cache_file.body)
      end
    end
  end

  if remote_file_list_cache_file_path && File.file?(remote_file_list_cache_file_path)
    begin
      content = File.read(remote_file_list_cache_file_path)
      return @remote_files = JSON.parse(content)
    rescue JSON::ParserError
      warn "Failed to parse #{remote_file_list_cache_file_path} as json"
    end
  end

  @remote_files = get_remote_files
end
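
The file list cache avoids a full bucket listing on every run: when both paths are configured, the JSON list is downloaded from the bucket and parsed, and only on a cache miss or parse failure does the method fall back to #get_remote_files. A minimal sketch of the two settings involved, in an AssetSync initializer (paths are illustrative):

AssetSync.configure do |config|
  # local file the JSON list is written to and read from
  config.remote_file_list_cache_file_path = "tmp/remote_file_list.json"
  # bucket key under which the list is stored remotely
  config.remote_file_list_remote_path = "remote_file_list.json"
end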

#sync ⇒ Object



# File 'lib/asset_sync/storage.rb', line 349

def sync
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  log "AssetSync: Syncing."
  upload_files
  delete_extra_remote_files unless keep_existing_remote_files?
  log "AssetSync: Done."
end
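
This is the high-level entry point: upload anything new, then prune remote files that no longer exist locally, unless existing remote files are being kept. A sketch of direct use, assuming the standard configuration:

AssetSync::Storage.new(AssetSync.config).sync
# prunes remote-only files afterwards unless
# config.existing_remote_files is set to "keep"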

#update_remote_file_list_cache(local_files_to_upload) ⇒ Object



# File 'lib/asset_sync/storage.rb', line 101

def update_remote_file_list_cache(local_files_to_upload)
  return unless remote_file_list_cache_file_path
  return if ignore_existing_remote_files?

  File.open(self.remote_file_list_cache_file_path, 'w') do |file|
    uploaded = local_files_to_upload + remote_files
    file.write(uploaded.to_json)
  end
end

#update_remote_file_list_in_remote ⇒ Object



# File 'lib/asset_sync/storage.rb', line 111

def update_remote_file_list_in_remote
  return if ignore_existing_remote_files?
  return unless remote_file_list_remote_path
  return unless remote_file_list_cache_file_path
  log "Updating file list file in remote"
  remote_file = bucket.files.new({
    :key    => remote_file_list_remote_path,
    :body   => File.open(remote_file_list_cache_file_path)
  })
  remote_file.save
end

#upload_file(f) ⇒ Object



# File 'lib/asset_sync/storage.rb', line 209

def upload_file(f)
  # TODO output files in debug logs as asset filename only.
  one_year = 31557600
  ext = File.extname(f)[1..-1]
  mime = MultiMime.lookup(ext)
  gzip_file_handle = nil
  file_handle = File.open("#{path}/#{f}")
  file = {
    :key => f,
    :body => file_handle,
    :content_type => mime
  }

  # region fog_public

  if config.aws? && config.aws_acl
    file[:acl] = config.aws_acl
  elsif config.fog_public.use_explicit_value?
    file[:public] = config.fog_public.to_bool
  end

  # endregion fog_public

  uncompressed_filename = f.sub(/\.gz\z/, '')
  basename = File.basename(uncompressed_filename, File.extname(uncompressed_filename))

  assets_to_cache_control = Regexp.union([REGEXP_ASSETS_TO_CACHE_CONTROL] | config.cache_asset_regexps).source
  if basename.match(Regexp.new(assets_to_cache_control)).present?
    file.merge!({
      :cache_control => "public, max-age=#{one_year}",
      :expires => CGI.rfc1123_date(Time.now + one_year)
    })
  end

  # Overwrite headers if applicable. You probably shouldn't override :key or :body here, but cache-control headers etc. are fine.

  if files_with_custom_headers.has_key? f
    file.merge! files_with_custom_headers[f]
    log "Overwriting #{f} with custom headers #{files_with_custom_headers[f].to_s}"
  elsif key = self.config.custom_headers.keys.detect {|k| f.match(Regexp.new(k))}
    headers = {}
    self.config.custom_headers[key].each do |k, value|
      headers[k.to_sym] = value
    end
    file.merge! headers
    log "Overwriting matching file #{f} with custom headers #{headers.to_s}"
  end


  gzipped = "#{path}/#{f}.gz"
  ignore = false

  if config.gzip? && File.extname(f) == ".gz"
    # Don't bother uploading gzipped assets if we are in gzip_compression mode
    # as we will overwrite file.css with file.css.gz if it exists.
    log "Ignoring: #{f}"
    ignore = true
  elsif config.gzip? && File.exist?(gzipped)
    original_size = File.size("#{path}/#{f}")
    gzipped_size = File.size(gzipped)

    if gzipped_size < original_size
      percentage = ((gzipped_size.to_f/original_size.to_f)*100).round(2)
      gzip_file_handle = File.open(gzipped)
      file.merge!({
                    :key => f,
                    :body => gzip_file_handle,
                    :content_encoding => 'gzip'
                  })
      log "Uploading: #{gzipped} in place of #{f} saving #{percentage}%"
    else
      percentage = ((original_size.to_f/gzipped_size.to_f)*100).round(2)
      log "Uploading: #{f} instead of #{gzipped} (compression increases this file by #{percentage}%)"
    end
  else
    if !config.gzip? && File.extname(f) == ".gz"
      # set content encoding for gzipped files this allows cloudfront to properly handle requests with Accept-Encoding
      # http://docs.amazonwebservices.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html
      uncompressed_filename = f[0..-4]
      ext = File.extname(uncompressed_filename)[1..-1]
      mime = MultiMime.lookup(ext)
      file.merge!({
        :content_type     => mime,
        :content_encoding => 'gzip'
      })
    end
    log "Uploading: #{f}"
  end

  if config.aws? && config.aws_rrs?
    file.merge!({
      :storage_class => 'REDUCED_REDUNDANCY'
    })
  end

  bucket.files.create( file ) unless ignore
  file_handle.close
  gzip_file_handle.close if gzip_file_handle
end
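
For a fingerprinted stylesheet with a smaller sibling .gz file and gzip compression enabled, the attribute hash passed to bucket.files.create would end up roughly like this (values are illustrative, not taken from a real run):

{
  :key              => "assets/application-4dd5b109ee3439da54f5bdfd78a80473.css",
  :body             => File.open("public/assets/application-4dd5b109ee3439da54f5bdfd78a80473.css.gz"),
  :content_type     => "text/css",
  :content_encoding => "gzip",
  :cache_control    => "public, max-age=31557600",
  :expires          => "Tue, 01 Jul 2026 00:00:00 GMT"
}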

#upload_files ⇒ Object



# File 'lib/asset_sync/storage.rb', line 309

def upload_files
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  local_files_to_upload = local_files - ignored_files - remote_files + always_upload_files
  local_files_to_upload = (local_files_to_upload + get_non_fingerprinted(local_files_to_upload)).uniq
  # Only files.
  local_files_to_upload = local_files_to_upload.select { |f| File.file? "#{path}/#{f}" }

  if self.config.concurrent_uploads
    jobs = Queue.new
    local_files_to_upload.each { |f| jobs.push(f) }
    jobs.close

    num_threads = [self.config.concurrent_uploads_max_threads, local_files_to_upload.length].min
    # Upload new files
    workers = Array.new(num_threads) do
      Thread.new do
        while f = jobs.pop
          upload_file(f)
        end
      end
    end
    workers.map(&:join)
  else
    # Upload new files
    local_files_to_upload.each do |f|
      upload_file f
    end
  end

  if self.config.cdn_distribution_id && files_to_invalidate.any?
    log "Invalidating Files"
    cdn ||= Fog::CDN.new(self.config.fog_options.except(:region))
    data = cdn.post_invalidation(self.config.cdn_distribution_id, files_to_invalidate)
    log "Invalidation id: #{data.body["Id"]}"
  end

  update_remote_file_list_cache(local_files_to_upload)
  update_remote_file_list_in_remote
end
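
Concurrent uploads are opt-in. A hypothetical initializer snippet enabling them (the thread count is illustrative):

AssetSync.configure do |config|
  config.concurrent_uploads = true
  config.concurrent_uploads_max_threads = 10
end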