Class: CKnifeAws

Inherits: Thor (which inherits from Object) — show all
Defined in:
lib/cknife/cknife_aws.rb

Constant Summary collapse

FILE_BUFFER_SIZE =
10.megabytes
LOCAL_MOD_KEY =
"x-amz-meta-mtime"
EPSILON =
1.second

Instance Method Summary collapse

Instance Method Details

#afew(bucket_name) ⇒ Object



247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
# File 'lib/cknife/cknife_aws.rb', line 247

# Lists up to options[:count] files (filtered by options[:glob]) in the
# given bucket as a table of key, human-readable size, content type,
# and last-modified time.
#
# @param bucket_name [String] name of the bucket to inspect
# @return [void]
def afew(bucket_name)
  d = get_bucket(bucket_name)
  return if d.nil?

  found = n_file_heads(d, options[:glob], options[:count].to_i)

  # Binary multipliers for rendering content_length with a unit suffix.
  unit_to_mult = {
    'B' => 1,
    'K' => 2**10,
    'M' => 2**20,
    'G' => 2**30
  }

  found.map { |f|
    # Largest unit whose multiplier the file size reaches.
    matching = unit_to_mult.keys.select { |k|
      f.content_length >= unit_to_mult[k]
    }.last

    [f.key,
     "#{f.content_length == 0 ? 0 : (f.content_length.to_f / unit_to_mult[matching]).round(2)}#{matching}",
     f.content_type,
     f.last_modified
    ]
  }.tap do |tabular|
    # Fix: Thor's print_table reads :indent; the original :ident was
    # silently ignored, so the table printed flush-left.
    print_table(tabular, :indent => 2)
  end

end

#create(bucket_name = nil) ⇒ Object



574
575
576
577
578
579
580
581
582
583
584
585
586
587
# File 'lib/cknife/cknife_aws.rb', line 574

# Creates a new bucket in the configured region, then re-lists all
# buckets as confirmation.
#
# @param bucket_name [String, nil] name for the new bucket
def create(bucket_name = nil)
  unless bucket_name
    puts "No bucket name given."
    return
  end

  fog_storage.directories.create(:key => bucket_name,
                                 :location => options[:region])

  puts "Created bucket #{bucket_name}."
  show_buckets
end

#create_cloudfront(bucket_id) ⇒ Object



213
214
215
216
217
218
219
220
221
222
# File 'lib/cknife/cknife_aws.rb', line 213

# Creates an enabled CloudFront distribution whose origin is the given
# S3 bucket, then lists all distributions.
#
# @param bucket_id [String] bucket name used to build the S3 origin DNS name
def create_cloudfront(bucket_id)
  origin = { 'DNSName' => "#{bucket_id}.s3.amazonaws.com" }
  fog_cdn.post_distribution('S3Origin' => origin, 'Enabled' => true)

  show_cdns
end

#delete(bucket_name) ⇒ Object



537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
# File 'lib/cknife/cknife_aws.rb', line 537

# Deletes the named bucket after confirmation (skipped with
# --noprompt). A non-empty bucket is refused unless --deep is given,
# in which case its files are destroyed in batches first.
#
# @param bucket_name [String] bucket to destroy
def delete(bucket_name)
  bucket = fog_storage.directories.select { |dir| dir.key == bucket_name }.first

  if bucket.nil?
    say("Found no bucket by name #{bucket_name}")
    return
  end

  unless options[:noprompt] || yes?("Are you sure you want to delete this bucket #{bucket.key}?", :red)
    say "No action taken."
    return
  end

  if bucket.files.length > 0
    unless options[:deep]
      say "Bucket has #{bucket.files.length} files. Please empty before destroying."
      return
    end

    # n_file_heads pages results, so keep deleting until none remain.
    loop do
      batch = n_file_heads(bucket)
      break if batch.empty?
      batch.each do |f|
        f.destroy
        say("Deleted file #{f.key}.")
      end
    end
  end

  bucket.destroy
  say "Destroyed bucket named #{bucket_name}."
  show_buckets
end

#download(bucket_name) ⇒ Object



279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
# File 'lib/cknife/cknife_aws.rb', line 279

# Downloads bucket contents into the current working directory. With
# --one KEY, fetches just that object; otherwise prompts and downloads
# every file in the bucket.
#
# @param bucket_name [String] bucket to download from
def download(bucket_name)
  with_bucket bucket_name do |d|
    wanted = options[:one]

    if wanted.nil?
      unless yes?("Are you sure you want to download all files into the CWD?", :red)
        say("No action taken.")
        next
      end
      d.files.each do |s3_file|
        say("Creating path for and downloading #{s3_file.key}")
        s3_download(s3_file)
      end
    else
      s3_file = d.files.get(wanted)
      if s3_file.nil?
        say("Could not find #{wanted}. No action taken.")
      else
        s3_download(s3_file)
      end
    end
  end
end

#fdelete(bucket_name, file_name) ⇒ Object



492
493
494
495
496
497
498
499
500
501
502
# File 'lib/cknife/cknife_aws.rb', line 492

# Deletes a single file from a bucket, prompting for confirmation
# unless --noprompt is set.
#
# @param bucket_name [String] bucket containing the file
# @param file_name [String] key of the file to delete
def fdelete(bucket_name, file_name)
  d, f = get_bucket_and_file(bucket_name, file_name)
  return if d.nil? || f.nil?

  confirmed = options[:noprompt] || yes?("Are you sure you want to delete #{f.key} in #{d.key}?", :red)
  unless confirmed
    say "No action taken."
    return
  end

  f.destroy
  say "Destroyed #{f.key} in #{d.key}."
end

#fupload(bucket_name, file_name) ⇒ Object



507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
# File 'lib/cknife/cknife_aws.rb', line 507

# Uploads a local file into the named bucket under its basename,
# asking before overwriting an existing object with the same key.
#
# @param bucket_name [String] target bucket
# @param file_name [String] path to the local file
def fupload(bucket_name, file_name)
  d = fog_storage.directories.select { |dir| dir.key == bucket_name }.first

  if d.nil?
    say("Found no bucket by name #{bucket_name}")
    return
  end

  # File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
  if !File.exist?(file_name)
    say("Found no such file #{file_name} on the local disk.")
    return
  end

  key = File.basename(file_name)
  f = d.files.select { |file| file.key == key }.first
  if !f.nil? && !yes?("There is already a file named #{key} in #{d.key}. Do you want to overwrite it with this upload?", :red)
    say("No action taken.")
    return
    # NOTE: removed unreachable f.destroy / say lines that followed this
    # return in the original; fresh_file_upload overwrites the key anyway.
  end

  fresh_file_upload(file_name, d, key, options[:public])
  say "Uploaded #{key} to #{d.key}."
end

#listObject



226
227
228
# File 'lib/cknife/cknife_aws.rb', line 226

# Prints all buckets for the configured account (delegates to show_buckets).
def list
  show_buckets
end

#list_cloudfrontObject



208
209
210
# File 'lib/cknife/cknife_aws.rb', line 208

# Prints all CloudFront distributions (delegates to show_cdns).
def list_cloudfront
  show_cdns
end

#list_serversObject



180
181
182
# File 'lib/cknife/cknife_aws.rb', line 180

# Prints all compute servers (delegates to show_servers).
def list_servers
  show_servers
end

#show(bucket_name = nil) ⇒ Object



591
592
593
594
595
596
597
598
599
600
601
# File 'lib/cknife/cknife_aws.rb', line 591

# Prints the given bucket and its location (region).
#
# @param bucket_name [String, nil] bucket to describe
def show(bucket_name = nil)
  unless bucket_name
    puts "No bucket name given."
    return
  end

  with_bucket(bucket_name) do |d|
    say "#{d}: "
    say d.location
  end
end

#start_server(server_id) ⇒ Object



185
186
187
188
189
190
191
192
193
194
# File 'lib/cknife/cknife_aws.rb', line 185

# Starts (or resumes) the compute server with the given id, then lists
# servers. Prints a notice and does nothing if no such server exists.
#
# @param server_id [String] id of the server to start
def start_server(server_id)
  server = fog_compute.servers.select { |srv| srv.id == server_id }.first
  if server.nil?
    say("no server with that id found. nothing done.")
  else
    say("found server. starting/resuming. #{server.id}")
    server.start
    show_servers
  end
end

#stop_server(server_id) ⇒ Object



197
198
199
200
201
202
203
204
205
# File 'lib/cknife/cknife_aws.rb', line 197

# Stops the compute server with the given id. Prints a notice and does
# nothing if no such server exists.
#
# @param server_id [String] id of the server to stop
def stop_server(server_id)
  server = fog_compute.servers.select { |srv| srv.id == server_id }.first
  if server.nil?
    say("no server with that id found. nothing done.")
  else
    say("found server. stopping. #{server.id}")
    server.stop
  end
end

#upsync(bucket_name, directory) ⇒ Object



311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
# File 'lib/cknife/cknife_aws.rb', line 311

# Synchronizes a local directory up to a bucket, optionally applying
# backup-retention rules (daily/weekly/monthly time marks) both
# locally (skip uploads doomed to deletion) and remotely (delete
# unretained keys matching the glob).
#
# The remote object's x-amz-meta-mtime metadata entry (LOCAL_MOD_KEY)
# records the local mtime; a file is re-uploaded only when that
# timestamp differs by more than EPSILON AND the content hash differs.
#
# NOTE(review): the extraction-garbled `existing_head.[...]` /
# `existing_head. = ...` tokens have been restored to
# `existing_head.metadata`, matching the Fog::Storage file metadata
# API implied by LOCAL_MOD_KEY usage elsewhere in this method.
#
# @param bucket_name [String] target bucket
# @param directory [String] local directory to sync from
def upsync(bucket_name, directory)

  say("This is a dry run.") if options[:dry_run]

  # File.exists? was removed in Ruby 3.2; also fixed the unbalanced
  # quote in the error message.
  if !File.exist?(directory) || !File.directory?(directory)
    say("'#{directory}' does not exist or is not a directory.")
    return
  end

  target_root = Pathname.new(directory)

  files = Dir.glob(target_root.join(options[:glob])).select { |f| !File.directory?(f) }.map(&:to_s)
  if !options[:backups_retain] && files.count == 0
    say("No files to upload and no backups retain requested.")
    return
  end

  say("Found #{files.count} candidate file upload(s).")

  # Counters: cn created, un updated, sn skipped locally,
  # dn deleted remotely, spn retained remotely.
  spn = dn = sn = un = cn = 0
  with_bucket bucket_name do |d|

    # Any non-nil --noprompt value (even false) suppresses the prompt;
    # preserved from the original `!= nil` check.
    go = options[:noprompt].nil? ? yes?("Proceed?", :red) : true

    if go
      time_marks = []
      immediate_successors = {}
      if options[:backups_retain]
        # Build retention time marks: the start of each of the last N
        # days, weeks, and months (inclusive lower bound, exclusive
        # upper bound).
        time_marks = []
        Time.now.beginning_of_day.tap do |start|
          options[:days_retain].times do |i|
            time_marks.push(start - i.days)
          end
        end

        Time.now.beginning_of_week.tap do |start|
          options[:weeks_retain].times do |i|
            time_marks.push(start - i.weeks)
          end
        end

        Time.now.beginning_of_month.tap do |start|
          options[:months_retain].times do |i|
            time_marks.push(start - i.months)
          end
        end

        # For each mark, keep the local file whose mtime is the
        # earliest at-or-after that mark (its "immediate successor").
        time_marks.each do |tm|
          files.each do |to_upload|
            File.open(to_upload) do |localfile|
              if localfile.mtime >= tm && (immediate_successors[tm].nil? || localfile.mtime < immediate_successors[tm][:last_modified])
                immediate_successors[tm] = { :local_path => to_upload, :last_modified => localfile.mtime }
              end
            end
          end
        end
      end

      # don't pointlessly upload large files if we already know we're going to delete them!
      if options[:backups_retain]
        immediate_successors.values.map { |h| h[:local_path] }.tap do |kept_files|
          before_reject = files.count
          files.reject! { |to_upload| !kept_files.include?(to_upload) }
          sn += before_reject - files.count

          say("Found #{files.count} file(s) that meet backups retention criteria for upload. Comparing against bucket...")

        end
      end

      files.each do |to_upload|
        say("#{to_upload} (no output if skipped)...")
        k = File.basename(to_upload)

        existing_head = d.files.head(k)

        # Stale when the recorded mtime is missing or differs from the
        # local mtime by more than EPSILON; only then compare hashes.
        time_mismatch = false
        content_hash_mismatched = false
        File.open(to_upload) do |localfile|
          time_mismatch = !existing_head.nil? && (existing_head.metadata[LOCAL_MOD_KEY].nil? || (Time.parse(existing_head.metadata[LOCAL_MOD_KEY]) - localfile.mtime).abs > EPSILON)
          if time_mismatch
            content_hash_mismatched = existing_head.etag != content_hash(localfile)
          end
        end

        if existing_head && time_mismatch && content_hash_mismatched
          # Content changed: re-upload body and refresh the mtime tag.
          if !options[:dry_run]
            File.open(to_upload) do |localfile|
              existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
              existing_head.body = localfile
              existing_head.multipart_chunk_size = FILE_BUFFER_SIZE # creates multipart_save
              existing_head.save
            end
          end
          say("updated.")
          un += 1
        elsif existing_head && time_mismatch
          # Same content, different mtime: just refresh the mtime tag.
          if !options[:dry_run]
            File.open(to_upload) do |localfile|
              existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
              existing_head.save
            end
          end
          say("updated.")
          un += 1
        elsif existing_head.nil?
          if !options[:dry_run]
            fresh_file_upload(to_upload, d, k, options[:public])
          end
          say("created.")
          cn += 1
        else
          sn += 1
          # skipped
        end
      end


      if options[:backups_retain]

        # This array of hashes is computed because we need to do
        # nested for loops of M*N complexity, where M=time_marks
        # and N=files.  We also need to do an remote get call to
        # fetch the metadata of all N remote files (d.files.each
        # will not do this). so, for performance sanity, we cache
        # all the meta data for all the N files.
        file_keys_modtimes = []
        d.files.each { |f|
          if File.fnmatch(options[:glob], f.key)
            existing_head = d.files.head(f.key)
            md = existing_head.metadata
            file_keys_modtimes.push({
                                      :key => f.key,
                                      :last_modified => md[LOCAL_MOD_KEY] ? Time.parse(md[LOCAL_MOD_KEY]) : f.last_modified,
                                      :existing_head => existing_head
                                    })
          end
        }

        say("#{file_keys_modtimes.length} file(s) found to consider for remote retention or remote deletion.")

        # this generates as many 'kept files' as there are time marks...which seems wrong.
        immediate_successors = {}
        time_marks.each do |tm|
          file_keys_modtimes.each do |fkm|
            if fkm[:last_modified] >= tm && (immediate_successors[tm].nil? || fkm[:last_modified] < immediate_successors[tm][:last_modified])
              immediate_successors[tm] = fkm
            end
          end
        end

        # Keys chosen as some mark's immediate successor are retained;
        # every other remote key matching the glob is deleted.
        immediate_successors.values.map { |v| v[:key] }.tap do |kept_keys|
          file_keys_modtimes.each do |fkm|
            if kept_keys.include?(fkm[:key])
              say("Remote retained #{fkm[:key]}.")
              spn += 1
            else
              fkm[:existing_head].destroy if !options[:dry_run]
              say("Remote deleted #{fkm[:key]}.")
              dn += 1
            end
          end
        end
      end
    else
      say("No action taken.")
    end
  end
  say("Done. #{cn} created. #{un} updated. #{sn} local skipped. #{dn} deleted remotely. #{spn} retained remotely.")
end

#url(bucket_name, file_name) ⇒ Object



233
234
235
236
237
238
239
240
241
242
# File 'lib/cknife/cknife_aws.rb', line 233

# Prints a signed, time-limited URL for a file, expiring after
# options[:duration] minutes.
#
# @param bucket_name [String] bucket containing the file
# @param file_name [String] key of the file
def url(bucket_name, file_name)
  d, f = get_bucket_and_file(bucket_name, file_name)
  return if d.nil? || f.nil?

  duration_minutes = options[:duration].to_i
  expires_at = (Time.now + duration_minutes.minutes).to_i
  signed = f.url(expires_at, path_style: true)
  say("URL created.")
  say(signed)
end