Class: AssetSync::Storage

Inherits:
Object
  • Object
show all
Defined in:
lib/asset_sync/storage.rb

Defined Under Namespace

Classes: BucketNotFound

Constant Summary collapse

REGEXP_FINGERPRINTED_FILES =
/^(.*)\/([^-]+)-[^\.]+\.([^\.]+)$/
REGEXP_ASSETS_TO_CACHE_CONTROL =
/-[0-9a-fA-F]{32,}$/

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(cfg) ⇒ Storage

Returns a new instance of Storage.



15
16
17
# File 'lib/asset_sync/storage.rb', line 15

# Builds a storage wrapper around the given AssetSync configuration object.
def initialize(asset_sync_config)
  @config = asset_sync_config
end

Instance Attribute Details

#configObject

Returns the value of attribute config.



13
14
15
# File 'lib/asset_sync/storage.rb', line 13

# The AssetSync configuration supplied at construction time.
def config
  return @config
end

Instance Method Details

#always_upload_filesObject



91
92
93
# File 'lib/asset_sync/storage.rb', line 91

# Paths that must be uploaded on every sync (config.always_upload,
# expanded to concrete file names) plus the manifest file, if included.
def always_upload_files
  configured = expand_file_names(config.always_upload)
  configured + get_manifest_path
end

#bucketObject



23
24
25
26
# File 'lib/asset_sync/storage.rb', line 23

# Memoized handle to the remote directory/bucket, scoped to assets_prefix.
# Scoping by prefix fixes: https://github.com/rumblelabs/asset_sync/issues/18
def bucket
  @bucket ||= connection.directories.get(
    config.fog_directory,
    :prefix => config.assets_prefix
  )
end

#connectionObject



19
20
21
# File 'lib/asset_sync/storage.rb', line 19

# Memoized Fog storage connection built from the configured fog options.
def connection
  @connection = Fog::Storage.new(config.fog_options) unless @connection
  @connection
end

#delete_extra_remote_filesObject



158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
# File 'lib/asset_sync/storage.rb', line 158

# Deletes remote objects that no longer correspond to a local asset,
# skipping ignored files and always-upload files.
# fixes: https://github.com/rumblelabs/asset_sync/issues/19
def delete_extra_remote_files
  log "Fetching files to flag for delete"
  obsolete = get_remote_files - local_files - ignored_files - always_upload_files

  log "Flagging #{obsolete.size} file(s) for deletion"
  # On AWS, delete in bulk (batches of 500); otherwise delete sequentially.
  bulk_capable = config.aws? && connection.respond_to?(:delete_multiple_objects)
  if bulk_capable
    obsolete.each_slice(500) do |batch|
      connection.delete_multiple_objects(config.fog_directory, batch)
    end
  else
    bucket.files.each { |remote| delete_file(remote, obsolete) }
  end
end

#delete_file(f, remote_files_to_delete) ⇒ Object



151
152
153
154
155
156
# File 'lib/asset_sync/storage.rb', line 151

# Destroys the remote file +f+ when its key appears in the deletion list;
# otherwise does nothing.
def delete_file(f, remote_files_to_delete)
  return unless remote_files_to_delete.include?(f.key)
  log "Deleting: #{f.key}"
  f.destroy
end

#files_to_invalidateObject



99
100
101
# File 'lib/asset_sync/storage.rb', line 99

# Absolute asset paths ("/prefix/file") to invalidate on the CDN,
# built from config.invalidate.
def files_to_invalidate
  prefix = config.assets_prefix
  config.invalidate.map { |file_name| File.join("/", prefix, file_name) }
end

#files_with_custom_headersObject



95
96
97
# File 'lib/asset_sync/storage.rb', line 95

# Maps each custom-header entry to its prefixed asset path:
# { "prefix/file" => { header options } }.
def files_with_custom_headers
  config.custom_headers.each_with_object({}) do |(file_name, headers), mapped|
    mapped[File.join(config.assets_prefix, file_name)] = headers
  end
end

#get_asset_files_from_manifestObject



106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/asset_sync/storage.rb', line 106

# Reads the list of compiled asset paths from the Sprockets manifest,
# each prefixed with assets_prefix. Returns nil when manifest usage is
# disabled or no manifest can be found (callers fall back to a directory
# scan — see #get_local_files).
def get_asset_files_from_manifest
  if self.config.manifest
    if ActionView::Base.respond_to?(:assets_manifest)
      log "Using: Rails 4.0 manifest access"
      manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir)
      return manifest.assets.values.map { |f| File.join(self.config.assets_prefix, f) }
    elsif File.exist?(self.config.manifest_path)
      log "Using: Manifest #{self.config.manifest_path}"
      yml = YAML.load(IO.read(self.config.manifest_path))

      # FIX: use `uniq`, not `uniq!` — `uniq!` returns nil when the array
      # contains no duplicates, which silently discarded the manifest and
      # forced the slower directory-scan fallback.
      return yml.map do |original, compiled|
        # Upload font originals and compiled
        if original =~ /^.+(eot|svg|ttf|woff)$/
          [original, compiled]
        else
          compiled
        end
      end.flatten.map { |f| File.join(self.config.assets_prefix, f) }.uniq
    else
      log "Warning: Manifest could not be found"
    end
  end
end

#get_local_filesObject



130
131
132
133
134
135
136
137
138
139
140
# File 'lib/asset_sync/storage.rb', line 130

# Collects local asset paths, preferring the Sprockets manifest when one
# is available and falling back to a recursive directory glob.
def get_local_files
  manifest_files = get_asset_files_from_manifest
  return manifest_files if manifest_files

  log "Using: Directory Search of #{path}/#{self.config.assets_prefix}"
  Dir.chdir(path) do
    pattern = self.config.assets_prefix.present? ? "#{self.config.assets_prefix}/**/**" : '**/**'
    Dir[pattern]
  end
end

#get_manifest_pathObject



48
49
50
51
52
53
54
55
56
57
58
# File 'lib/asset_sync/storage.rb', line 48

# Returns the manifest file path relative to the public path, as a
# one-element array (empty array when include_manifest is off).
def get_manifest_path
  return [] unless self.config.include_manifest

  if ActionView::Base.respond_to?(:assets_manifest)
    # Rails 4+ exposes the manifest directly on ActionView.
    manifest = Sprockets::Manifest.new(ActionView::Base.assets_manifest.environment, ActionView::Base.assets_manifest.dir)
    manifest_path = manifest.filename
  else
    manifest_path = self.config.manifest_path
  end
  # FIX: escape `path` before interpolating it into the regex — public
  # paths routinely contain regex metacharacters (e.g. "."), which made
  # the prefix-strip unreliable. Also anchor with \A (whole-string start).
  [manifest_path.sub(/\A#{Regexp.escape(path.to_s)}\//, "")] # full path to relative path
end

#get_remote_filesObject

Raises:



142
143
144
145
146
147
148
149
# File 'lib/asset_sync/storage.rb', line 142

# Lists every object key currently in the bucket.
# Raises BucketNotFound when the configured bucket does not exist.
def get_remote_files
  raise BucketNotFound.new("#{self.config.fog_provider} Bucket: #{self.config.fog_directory} not found.") unless bucket
  # fixes: https://github.com/rumblelabs/asset_sync/issues/16
  #        (work-around for https://github.com/fog/fog/issues/596)
  bucket.files.each_with_object([]) { |remote, keys| keys << remote.key }
end

#ignored_filesObject



44
45
46
# File 'lib/asset_sync/storage.rb', line 44

# Concrete file names matching the configured ignored_files patterns.
def ignored_files
  expand_file_names(config.ignored_files)
end

#keep_existing_remote_files?Boolean

Returns:

  • (Boolean)


32
33
34
# File 'lib/asset_sync/storage.rb', line 32

# Whether remote files absent locally should be kept (delegates to config).
def keep_existing_remote_files?
  config.existing_remote_files?
end

#local_filesObject



60
61
62
63
# File 'lib/asset_sync/storage.rb', line 60

# Memoized, de-duplicated union of discovered local assets and any
# additional configured local file paths.
def local_files
  return @local_files if @local_files
  @local_files = (get_local_files + config.additional_local_file_paths).uniq
end

#log(msg) ⇒ Object



28
29
30
# File 'lib/asset_sync/storage.rb', line 28

# Forwards a log message to AssetSync's module-level logger.
def log(msg)
  AssetSync.log msg
end

#pathObject



36
37
38
# File 'lib/asset_sync/storage.rb', line 36

# The local public path assets are served from (delegates to config).
def path
  config.public_path
end

#remote_file_list_cache_file_pathObject



40
41
42
# File 'lib/asset_sync/storage.rb', line 40

# Location of the on-disk cache of remote file keys (delegates to config).
def remote_file_list_cache_file_path
  config.remote_file_list_cache_file_path
end

#remote_filesObject



65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
# File 'lib/asset_sync/storage.rb', line 65

# Memoized list of remote file keys. Prefers the on-disk JSON cache when
# configured and readable; falls back to listing the bucket. Returns []
# when existing remote files are being ignored entirely.
def remote_files
  return [] if ignore_existing_remote_files?
  return @remote_files if @remote_files

  cache_path = remote_file_list_cache_file_path
  if cache_path && File.file?(cache_path)
    begin
      return @remote_files = JSON.parse(File.read(cache_path))
    rescue JSON::ParserError
      # A corrupt cache is non-fatal; fall through to a live listing.
      warn "Failed to parse #{remote_file_list_cache_file_path} as json"
    end
  end

  @remote_files = get_remote_files
end

#syncObject



314
315
316
317
318
319
320
# File 'lib/asset_sync/storage.rb', line 314

# Entry point: uploads new/changed assets, then (unless configured to keep
# them) deletes remote files that no longer exist locally.
# fixes: https://github.com/rumblelabs/asset_sync/issues/19
def sync
  log "AssetSync: Syncing."
  upload_files
  unless keep_existing_remote_files?
    delete_extra_remote_files
  end
  log "AssetSync: Done."
end

#update_remote_file_list_cache(local_files_to_upload) ⇒ Object



81
82
83
84
85
86
87
88
89
# File 'lib/asset_sync/storage.rb', line 81

# Persists the union of freshly uploaded files and known remote files to
# the JSON cache file, when a cache path is configured.
def update_remote_file_list_cache(local_files_to_upload)
  return unless remote_file_list_cache_file_path
  return if ignore_existing_remote_files?

  combined = local_files_to_upload + remote_files
  File.write(remote_file_list_cache_file_path, combined.to_json)
end

#upload_file(f) ⇒ Object



177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
# File 'lib/asset_sync/storage.rb', line 177

# Uploads a single asset +f+ (path relative to #path) to the bucket,
# applying content type, cache-control/expires for fingerprinted assets,
# any configured custom headers, gzip handling, and AWS RRS storage class.
#
# FIX: file handles are now closed in an +ensure+ block so they no longer
# leak when the upload (or any intermediate step) raises.
def upload_file(f)
  # TODO output files in debug logs as asset filename only.
  one_year = 31557600
  ext = File.extname(f)[1..-1]
  mime = MultiMime.lookup(ext)
  gzip_file_handle = nil
  file_handle = File.open("#{path}/#{f}")
  begin
    file = {
      :key => f,
      :body => file_handle,
      :content_type => mime
    }

    # region fog_public

    if config.fog_public.use_explicit_value?
      file[:public] = config.fog_public.to_bool
    end

    # endregion fog_public

    uncompressed_filename = f.sub(/\.gz\z/, '')
    basename = File.basename(uncompressed_filename, File.extname(uncompressed_filename))

    # Fingerprinted assets (md5-suffixed or matching configured patterns)
    # get far-future cache headers.
    assets_to_cache_control = Regexp.union([REGEXP_ASSETS_TO_CACHE_CONTROL] | config.cache_asset_regexps).source
    if basename.match(Regexp.new(assets_to_cache_control)).present?
      file.merge!({
        :cache_control => "public, max-age=#{one_year}",
        :expires => CGI.rfc1123_date(Time.now + one_year)
      })
    end

    # overwrite headers if applicable, you probably shouldn't specific key/body, but cache-control headers etc.

    if files_with_custom_headers.has_key? f
      file.merge! files_with_custom_headers[f]
      log "Overwriting #{f} with custom headers #{files_with_custom_headers[f].to_s}"
    elsif key = self.config.custom_headers.keys.detect {|k| f.match(Regexp.new(k))}
      headers = {}
      self.config.custom_headers[key].each do |k, value|
        headers[k.to_sym] = value
      end
      file.merge! headers
      log "Overwriting matching file #{f} with custom headers #{headers.to_s}"
    end

    gzipped = "#{path}/#{f}.gz"
    ignore = false

    if config.gzip? && File.extname(f) == ".gz"
      # Don't bother uploading gzipped assets if we are in gzip_compression mode
      # as we will overwrite file.css with file.css.gz if it exists.
      log "Ignoring: #{f}"
      ignore = true
    elsif config.gzip? && File.exist?(gzipped)
      original_size = File.size("#{path}/#{f}")
      gzipped_size = File.size(gzipped)

      if gzipped_size < original_size
        # Upload the gzipped variant under the original key when it is smaller.
        percentage = ((gzipped_size.to_f/original_size.to_f)*100).round(2)
        gzip_file_handle = File.open(gzipped)
        file.merge!({
                      :key => f,
                      :body => gzip_file_handle,
                      :content_encoding => 'gzip'
                    })
        log "Uploading: #{gzipped} in place of #{f} saving #{percentage}%"
      else
        percentage = ((original_size.to_f/gzipped_size.to_f)*100).round(2)
        log "Uploading: #{f} instead of #{gzipped} (compression increases this file by #{percentage}%)"
      end
    else
      if !config.gzip? && File.extname(f) == ".gz"
        # set content encoding for gzipped files this allows cloudfront to properly handle requests with Accept-Encoding
        # http://docs.amazonwebservices.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html
        uncompressed_filename = f[0..-4]
        ext = File.extname(uncompressed_filename)[1..-1]
        mime = MultiMime.lookup(ext)
        file.merge!({
          :content_type     => mime,
          :content_encoding => 'gzip'
        })
      end
      log "Uploading: #{f}"
    end

    if config.aws? && config.aws_rrs?
      file.merge!({
        :storage_class => 'REDUCED_REDUNDANCY'
      })
    end

    bucket.files.create( file ) unless ignore
  ensure
    # Always release handles, even when the upload raises.
    file_handle.close
    gzip_file_handle.close if gzip_file_handle
  end
end

#upload_filesObject



275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
# File 'lib/asset_sync/storage.rb', line 275

# Uploads every local file that is new or must always be uploaded, either
# concurrently (bounded thread pool fed by a closed Queue) or sequentially,
# then triggers CDN invalidation and refreshes the remote-file-list cache.
def upload_files
  # fixes: https://github.com/rumblelabs/asset_sync/issues/19
  local_files_to_upload = local_files - ignored_files - remote_files + always_upload_files
  local_files_to_upload = (local_files_to_upload + get_non_fingerprinted(local_files_to_upload)).uniq
  # Only files.
  local_files_to_upload = local_files_to_upload.select { |f| File.file? "#{path}/#{f}" }

  if self.config.concurrent_uploads
    jobs = Queue.new
    local_files_to_upload.each { |f| jobs.push(f) }
    jobs.close # closed queue: pop returns nil once drained, ending each worker

    num_threads = [self.config.concurrent_uploads_max_threads, local_files_to_upload.length].min
    # Upload new files
    workers = Array.new(num_threads) do
      Thread.new do
        while f = jobs.pop
          upload_file(f)
        end
      end
    end
    workers.map(&:join)
  else
    # Upload new files
    local_files_to_upload.each do |f|
      upload_file f
    end
  end

  if self.config.cdn_distribution_id && files_to_invalidate.any?
    log "Invalidating Files"
    # FIX: plain assignment — `cdn` is a fresh local here, so `||=` was a
    # no-op conditional that only obscured intent.
    cdn = Fog::CDN.new(self.config.fog_options.except(:region))
    data = cdn.post_invalidation(self.config.cdn_distribution_id, files_to_invalidate)
    log "Invalidation id: #{data.body["Id"]}"
  end

  update_remote_file_list_cache(local_files_to_upload)
end