Class: Octopress::Deploy::S3
- Inherits:
-
Object
- Object
- Octopress::Deploy::S3
- Defined in:
- lib/octopress-deploy/s3.rb
Class Method Summary collapse
-
.default_config(options = {}) ⇒ Object
Return default configuration options for this deployment type.
Instance Method Summary collapse
-
#add_bucket ⇒ Object
Create a new S3 bucket.
- #configure_bucket ⇒ Object
-
#connect ⇒ Object
Connect to S3 using the AWS SDK. Returns an AWS bucket.
-
#deletable_files ⇒ Object
Files from the bucket which are deletable Only deletes files beneath the remote_path if specified.
-
#delete_files ⇒ Object
Delete files from the bucket, to ensure a 1:1 match with site files.
- #delete_files? ⇒ Boolean
- #get_file_with_metadata(file, s3_filename) ⇒ Object
-
#initialize(options) ⇒ S3
constructor
A new instance of S3.
- #invalidate_cache(files) ⇒ Object
- #pluralize(str, num) ⇒ Object
-
#progress(str) ⇒ Object
Print consecutive characters.
- #pull ⇒ Object
- #push ⇒ Object
-
#remote_path(file) ⇒ Object
Replace local path with remote path.
-
#site_files ⇒ Object
Local site files.
-
#site_files_dest ⇒ Object
Destination paths for local site files.
-
#status_message ⇒ Object
List written and deleted file counts.
-
#write_files ⇒ Object
Write site files to the selected bucket.
Constructor Details
#initialize(options) ⇒ S3
Returns a new instance of S3.
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 |
# File 'lib/octopress-deploy/s3.rb', line 8

# Set up an S3 deployment from an options hash. Credential/region
# options fall back to the usual AWS_* environment variables.
# Aborts with an install hint unless the aws-sdk-v1 gem is present,
# then connects to S3.
#
# (Restored: the doc extraction had stripped every `options` token.)
def initialize(options)
  begin
    require 'aws-sdk-v1'
  rescue LoadError
    abort "Deploying to S3 requires the aws-sdk-v1 gem. Install with `gem install aws-sdk-v1`."
  end
  @options     = options
  @local       = options[:site_dir]          || '_site'
  @bucket_name = options[:bucket_name]
  @access_key  = options[:access_key_id]     || ENV['AWS_ACCESS_KEY_ID']
  @secret_key  = options[:secret_access_key] || ENV['AWS_SECRET_ACCESS_KEY']
  @region      = options[:region]            || ENV['AWS_DEFAULT_REGION'] || 'us-east-1'
  @distro_id   = options[:distribution_id]   || ENV['AWS_DISTRIBUTION_ID']
  @remote_path = (options[:remote_path] || '/').sub(/^\//, '')
  @verbose     = options[:verbose]
  @incremental = options[:incremental]
  @delete      = options[:delete]
  @headers     = options[:headers] || []
  @remote_path = @remote_path.sub(/^\//, '') # remove leading slash (redundant with the sub above; kept for parity)
  @pull_dir    = options[:dir]
  connect
end
Class Method Details
.default_config(options = {}) ⇒ Object
Return default configuration options for this deployment type
# File 'lib/octopress-deploy/s3.rb', line 261

# Return default configuration options for this deployment type,
# rendered as commented YAML lines for the user's deploy config.
#
# Fix: the `region:` line previously interpolated options[:remote_path]
# (copy-paste error); it now uses options[:region].
def self.default_config(options = {})
  <<-CONFIG
#{"bucket_name: #{options[:bucket_name]}".ljust(40)} # Name of the S3 bucket where these files will be stored.
#{"access_key_id: #{options[:access_key_id]}".ljust(40)} # Get this from your AWS console at aws.amazon.com.
#{"secret_access_key: #{options[:secret_access_key]}".ljust(40)} # Keep it safe; keep it secret. Keep this file in your .gitignore.
#{"distribution_id: #{options[:distribution_id]}".ljust(40)} # Get this from your CloudFront page at https://console.aws.amazon.com/cloudfront/
#{"remote_path: #{options[:remote_path] || '/'}".ljust(40)} # relative path on bucket where files should be copied.
#{"region: #{options[:region] || 'us-east-1'}".ljust(40)} # Region where your bucket is located.
#{"verbose: #{options[:verbose] || 'false'}".ljust(40)} # Print out all file operations.
#{"incremental: #{options[:incremental] || 'false'}".ljust(40)} # Only upload new/changed files
#{"delete: #{options[:delete] || 'false'}".ljust(40)} # Remove files from destination which do not match source files.
  CONFIG
end
Instance Method Details
#add_bucket ⇒ Object
Create a new S3 bucket
# File 'lib/octopress-deploy/s3.rb', line 188

# Create a new S3 bucket named @bucket_name, then set it up for
# static-website hosting via #configure_bucket.
def add_bucket
  puts @bucket_name # NOTE(review): looks like leftover debug output — confirm before removing
  new_bucket = @s3.buckets.create(@bucket_name)
  @bucket = new_bucket
  puts "Created new bucket '%s' in region '%s'." % [@bucket_name, @region]
  configure_bucket
end
#configure_bucket ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 195

# Configure the bucket for website hosting, using the configured
# index/error pages or the conventional index.html / 404.html.
#
# (Restored: the doc extraction had stripped the `@options` receiver.
# Also removed the unused `config` local the original assigned.)
def configure_bucket
  error_page = @options['error_page'] || remote_path('404.html')
  index_page = @options['index_page'] || remote_path('index.html')

  @bucket.configure_website do |cfg|
    cfg.index_document_suffix = index_page
    cfg.error_document_key    = error_page
  end

  puts "Bucket configured with index_document: #{index_page} and error_document: #{error_page}."
end
#connect ⇒ Object
Connect to S3 using the AWS SDK. Returns an AWS bucket.
# File 'lib/octopress-deploy/s3.rb', line 68

# Configure the AWS SDK with our credentials/region, then grab
# handles to S3 and the CloudFront client.
def connect
  AWS.config(
    :access_key_id     => @access_key,
    :secret_access_key => @secret_key,
    :region            => @region
  )
  @s3         = AWS.s3
  @cloudfront = AWS.cloud_front.client
end
#deletable_files ⇒ Object
Files from the bucket which are deletable Only deletes files beneath the remote_path if specified
# File 'lib/octopress-deploy/s3.rb', line 229

# Bucket keys which may be deleted: everything on the bucket that has
# no matching local site file, limited to keys under @remote_path.
# Memoized; returns [] unless the :delete option is enabled.
def deletable_files
  return [] unless delete_files?
  @deletable ||= begin
    orphans = @bucket.objects.map(&:key) - site_files_dest
    orphans.select { |key| key =~ /^#{@remote_path}/ }
  end
end
#delete_files ⇒ Object
Delete files from the bucket, to ensure a 1:1 match with site files
# File 'lib/octopress-deploy/s3.rb', line 172

# Delete files from the bucket, to ensure a 1:1 match with site files.
# Prints "- path" per file when verbose, otherwise a '-' progress tick.
def delete_files
  doomed = deletable_files
  return unless doomed.size > 0
  puts "Deleting #{pluralize('file', doomed.size)}:" if @verbose
  doomed.each do |key|
    @bucket.objects.delete(key)
    @verbose ? puts("- #{key}") : progress('-')
  end
end
#delete_files? ⇒ Boolean
# File 'lib/octopress-deploy/s3.rb', line 206

# Whether remote files missing locally should be deleted
# (driven by the :delete option; coerced to a strict boolean).
def delete_files?
  @delete ? true : false
end
#get_file_with_metadata(file, s3_filename) ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 125

# Build the options hash handed to S3 when writing +file+: the file
# itself, a public-read ACL, plus any expires / content_type /
# cache_control / content_encoding headers whose 'filename' pattern
# matches +s3_filename+. Expires supports relative "+N year(s)" and
# "+N day(s)" forms, converted to an HTTP date.
#
# (Restored: the doc extraction had stripped the method name and the
# `file_with_options` local.)
def get_file_with_metadata(file, s3_filename)
  file_with_options = {
    :file => file,
    :acl  => :public_read
  }

  @headers.each do |conf|
    if conf.has_key? 'filename' and s3_filename.match(conf['filename'])
      if @verbose
        puts "+ #{remote_path(file)} matched pattern #{conf['filename']}"
      end

      if conf.has_key? 'expires'
        expireDate = conf['expires']

        relative_years = /^\+(\d+) year(s)?$/.match(conf['expires'])
        if relative_years
          expireDate = (Time.now + (60 * 60 * 24 * 365 * relative_years[1].to_i)).httpdate
        end

        relative_days = /^\+(\d+) day(s)?$/.match(conf['expires'])
        if relative_days
          expireDate = (Time.now + (60 * 60 * 24 * relative_days[1].to_i)).httpdate
        end

        file_with_options[:expires] = expireDate
      end

      file_with_options[:content_type]     = conf['content_type']     if conf.has_key? 'content_type'
      file_with_options[:cache_control]    = conf['cache_control']    if conf.has_key? 'cache_control'
      file_with_options[:content_encoding] = conf['content_encoding'] if conf.has_key? 'content_encoding'
    end
  end

  return file_with_options
end
#invalidate_cache(files) ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 110

# Ask CloudFront to invalidate the given files on the distribution.
# Does nothing when +files+ is empty (the verbose message still prints,
# matching the original behavior).
def invalidate_cache(files)
  puts "Invalidating cache for #{pluralize('file', site_files.size)}" if @verbose
  return if files.empty?

  # String of 8 random chars to uniquely id this invalidation
  reference = Array.new(8) { ('a'..'z').to_a[rand(26)] }.join
  remote_items = files.map { |file| '/' + remote_path(file) }

  @cloudfront.create_invalidation(
    distribution_id: @distro_id,
    invalidation_batch: {
      paths: {
        quantity: files.size,
        items: remote_items
      },
      caller_reference: reference
    }
  )
end
#pluralize(str, num) ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 255

# Pluralize +str+ when +num+ is anything other than exactly 1.
#
# Fix: the original appended with `str << 's'`, mutating the caller's
# string in place — corrupting reused strings and raising FrozenError
# under frozen string literals. Now returns a new string instead.
def pluralize(str, num)
  num == 1 ? str : "#{str}s"
end
#progress(str) ⇒ Object
Print consecutive characters
# File 'lib/octopress-deploy/s3.rb', line 250

# Print consecutive characters with no newline, flushing stdout so
# the progress ticker renders in real time.
def progress(str)
  $stdout.print(str)
  $stdout.flush
end
#pull ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 44

# Download every object from the bucket into @pull_dir, recreating
# the remote directory structure locally. Aborts if the bucket does
# not exist.
def pull
  @bucket = @s3.buckets[@bucket_name]
  unless @bucket.exists?
    abort "Bucket not found: '#{@bucket_name}'. Check your configuration or create a bucket using: `octopress deploy add-bucket`"
  end

  puts "Syncing from S3 bucket: '#{@bucket_name}' to #{@pull_dir}."
  @bucket.objects.each do |object|
    local_path = File.join(@pull_dir, object.key)
    if local_path.end_with?('/')
      # Key denotes a directory, not a file
      FileUtils.mkdir_p(local_path) unless File.directory?(local_path)
    else
      parent = File.dirname(local_path)
      FileUtils.mkdir_p(parent) unless File.directory?(parent)
      File.open(local_path, 'w') { |f| f.write(object.read) }
    end
  end
end
#push ⇒ Object
# File 'lib/octopress-deploy/s3.rb', line 31

# Upload the local site to the configured bucket, optionally deleting
# remote files that no longer exist locally. Aborts if the bucket does
# not exist.
def push
  #abort "Seriously, you should. Quitting..." unless Deploy.check_gitignore
  @bucket = @s3.buckets[@bucket_name]
  unless @bucket.exists?
    abort "Bucket not found: '#{@bucket_name}'. Check your configuration or create a bucket using: `octopress deploy add-bucket`"
  end

  puts "Syncing #{@local} files to #{@bucket_name} on S3."
  write_files
  delete_files if delete_files?
end
#remote_path(file) ⇒ Object
Replace local path with remote path
# File 'lib/octopress-deploy/s3.rb', line 223

# Map a local file path to its bucket key: strip the local site dir
# prefix, prepend @remote_path, and drop any leading slash.
def remote_path(file)
  relative = file.sub(@local, '')
  File.join(@remote_path, relative).sub(/^\//, '')
end
#site_files ⇒ Object
Local site files.
# File 'lib/octopress-deploy/s3.rb', line 211

# Local site files beneath @local (memoized); directories excluded.
def site_files
  @site_files ||= Find.find(@local).reject { |path| File.directory?(path) }
end
#site_files_dest ⇒ Object
Destination paths for local site files.
# File 'lib/octopress-deploy/s3.rb', line 218

# Destination bucket keys for the local site files (memoized).
def site_files_dest
  @site_files_dest ||= site_files.map { |local_file| remote_path(local_file) }
end
#status_message ⇒ Object
List written and deleted file counts
# File 'lib/octopress-deploy/s3.rb', line 239

# List written and deleted file counts, then ensure the bucket has
# website hosting configured.
#
# (Restored: the doc extraction had stripped the method name —
# `status_message`, per the YARD summary — and the `message` local.)
def status_message
  uploaded = site_files.size
  deleted  = deletable_files.size

  # `.green` presumably comes from a String color extension loaded
  # elsewhere in octopress-deploy — not stdlib.
  message = "\nSuccess:".green + " #{uploaded} #{pluralize('file', uploaded)} uploaded"
  message << ", #{deleted} #{pluralize('file', deleted)} deleted."
  puts message

  configure_bucket unless @bucket.website?
end
#write_files ⇒ Object
Write site files to the selected bucket
# File 'lib/octopress-deploy/s3.rb', line 76

# Write site files to the selected bucket. With :incremental enabled,
# files whose MD5 matches the remote ETag are skipped ('='); uploaded
# files print '+'. Uploaded files are CloudFront-invalidated when a
# distribution id is configured.
#
# (Restored: the doc extraction had stripped the `file_with_options`
# local, leaving `o.write()` with no argument.)
def write_files
  puts "Writing #{pluralize('file', site_files.size)}:" if @verbose
  files_to_invalidate = []

  site_files.each do |file|
    s3_filename = remote_path(file)
    o = @bucket.objects[s3_filename]
    file_with_options = get_file_with_metadata(file, s3_filename)

    begin
      s3sum = o.etag.tr('"', '') if o.exists?
    rescue AWS::S3::Errors::NoSuchKey
      s3sum = ""
    end

    if @incremental && (s3sum == Digest::MD5.file(file).hexdigest)
      if @verbose
        puts "= #{remote_path(file)}"
      else
        progress('=')
      end
    else
      o.write(file_with_options)
      files_to_invalidate.push(file)
      if @verbose
        puts "+ #{remote_path(file)}"
      else
        progress('+')
      end
    end
  end

  invalidate_cache(files_to_invalidate) unless @distro_id.nil?
end