Class: Dockly::Docker
- Inherits: Object
- Includes: Util::DSL, Util::Logger::Mixin
- Defined in: lib/dockly/docker.rb
Defined Under Namespace
Classes: Registry
Instance Method Summary
- #add_build_env(image) ⇒ Object
- #add_git_archive(image) ⇒ Object
- #build_env(hash = nil) ⇒ Object
- #build_image(image) ⇒ Object
- #cleanup(images) ⇒ Object
- #copy_from_s3(sha) ⇒ Object
- #ensure_tar(file_name) ⇒ Object
- #exists? ⇒ Boolean
- #export_filename ⇒ Object
- #export_image(image) ⇒ Object
- #export_image_diff(container, output) ⇒ Object
- #export_image_whole(container, output) ⇒ Object
- #export_only ⇒ Object
- #fetch_import ⇒ Object
- #find_image_by_repotag ⇒ Object
- #generate! ⇒ Object
- #generate_build ⇒ Object
- #git_archive_dir ⇒ Object
- #git_archive_path ⇒ Object
- #git_archive_tar ⇒ Object
- #import_base(docker_tar) ⇒ Object
- #make_git_archive ⇒ Object
- #push_to_registry(image) ⇒ Object
- #registry_import(img_name = nil, opts = {}) ⇒ Object
- #repo ⇒ Object
- #repository(value = nil) ⇒ Object
- #run_build_caches(image) ⇒ Object
- #s3_object ⇒ Object
- #s3_object_for(sha) ⇒ Object
- #s3_url ⇒ Object
- #tar_path ⇒ Object
Instance Method Details
#add_build_env(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 190

def add_build_env(image)
  return image if build_env.empty?
  info "Setting the following environment variables in the docker image: #{build_env.keys}"
  dockerfile = [
    "FROM #{image.id}",
    *build_env.map { |key, val| "ENV #{key.to_s.shellescape}=#{val.to_s.shellescape}" }
  ].join("\n")
  out_image = ::Docker::Image.build(dockerfile)
  info "Successfully set the environment variables in the dockerfile"
  out_image
end
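For illustration, a build_env of { 'RAILS_ENV' => 'production' } and an incoming image id of abc123 (both values illustrative) would yield this generated Dockerfile:

FROM abc123
ENV RAILS_ENV=production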
#add_git_archive(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 202

def add_git_archive(image)
  return image if git_archive.nil?
  info "adding the git archive"
  new_image = image.insert_local(
    'localPath' => git_archive_tar,
    'outputPath' => '/'
  )
  info "successfully added the git archive"
  new_image
end
#build_env(hash = nil) ⇒ Object
# File 'lib/dockly/docker.rb', line 31

def build_env(hash = nil)
  (@build_env ||= {}).tap { |env| env.merge!(hash) if hash.is_a?(Hash) }
end
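A minimal usage sketch, assuming the dockly DSL entry point provided by Util::DSL (the block name :app and the variable values are illustrative); successive calls merge into the same hash:

docker :app do
  build_env 'RAILS_ENV' => 'production'
  build_env 'LOG_LEVEL' => 'info'
  build_env   # => { 'RAILS_ENV' => 'production', 'LOG_LEVEL' => 'info' }
end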
#build_image(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 213

def build_image(image)
  ensure_present! :name, :build
  info "running custom build steps, starting with id: #{image.id}"
  out_image = ::Docker::Image.build("from #{image.id}\n#{build}")
  info "finished running custom build steps, result id: #{out_image.id}"
  out_image.tap { |img| img.tag(repo: repo, tag: tag, force: true) }
end
#cleanup(images) ⇒ Object
# File 'lib/dockly/docker.rb', line 102

def cleanup(images)
  info 'Cleaning up intermediate images'
  images ||= []
  images = images.compact
  ::Docker::Container.all(:all => true).each do |container|
    image_id = container.json['Image']
    if images.any? { |image| image.id.start_with?(image_id) || image_id.start_with?(image.id) }
      container.kill
      container.delete
    end
  end
  images.each { |image| image.remove rescue nil }
  info 'Done cleaning images'
end
#copy_from_s3(sha) ⇒ Object
# File 'lib/dockly/docker.rb', line 35

def copy_from_s3(sha)
  return if s3_bucket.nil?
  object = s3_object_for(sha)
  info "Copying s3://#{s3_bucket}/#{object} to #{s3_bucket}/#{s3_object}"
  Dockly.s3.copy_object(
    copy_source: File.join(s3_bucket, object),
    bucket: s3_bucket,
    key: s3_object
  )
  info "Successfully copied s3://#{s3_bucket}/#{object} to s3://#{s3_bucket}/#{s3_object}"
end
#ensure_tar(file_name) ⇒ Object
# File 'lib/dockly/docker.rb', line 135

def ensure_tar(file_name)
  if Dockly::Util::Tar.is_tar?(file_name)
    file_name
  elsif Dockly::Util::Tar.is_gzip?(file_name)
    file_name
  else
    raise "Expected a (possibly gzipped) tar: #{file_name}"
  end
end
#exists? ⇒ Boolean
# File 'lib/dockly/docker.rb', line 355

def exists?
  return false unless s3_bucket
  debug "#{name}: checking for package: #{s3_url}"
  Dockly.s3.head_object(bucket: s3_bucket, key: s3_object)
  info "#{name}: found package: #{s3_url}"
  true
rescue Aws::S3::Errors::NotFound, Aws::S3::Errors::NoSuchKey
  info "#{name}: could not find package: #{s3_url}"
  false
end
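A minimal sketch of using exists? to skip a rebuild when the current git sha has already been exported; docker_config stands for a configured Dockly::Docker instance (hypothetical variable name):

docker_config.generate! unless docker_config.exists?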
#export_filename ⇒ Object
# File 'lib/dockly/docker.rb', line 117

def export_filename
  "#{name}-image.tgz"
end
#export_image(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 236

def export_image(image)
  ensure_present! :name
  if registry.nil?
    ensure_present! :build_dir
    info "Exporting the image with id #{image.id} to file #{File.expand_path(tar_path)}"
    container = image.run('true')
    info "created the container: #{container.id}"

    if s3_bucket.nil?
      output = File.open(tar_path, 'wb')
    else
      output = Dockly::S3Writer.new(Dockly.s3, s3_bucket, s3_object)
    end

    gzip_output = Zlib::GzipWriter.new(output)

    if tar_diff
      export_image_diff(container, gzip_output)
    else
      export_image_whole(container, gzip_output)
    end
  else
    push_to_registry(image)
  end
rescue
  if output && !s3_bucket.nil?
    output.abort_unless_closed
  end
  raise
ensure
  container.tap(&:wait).remove if container
  gzip_output.close if gzip_output
end
#export_image_diff(container, output) ⇒ Object
# File 'lib/dockly/docker.rb', line 276

def export_image_diff(container, output)
  rd, wr = IO.pipe(Encoding::ASCII_8BIT)
  rd.binmode
  wr.binmode

  thread = Thread.new do
    begin
      if Dockly::Util::Tar.is_tar?(fetch_import)
        base = File.open(fetch_import, 'rb')
      else
        base = Zlib::GzipReader.new(File.open(fetch_import, 'rb'))
      end

      td = Dockly::TarDiff.new(base, rd, output)
      td.process
      info "done writing the docker tar: #{export_filename}"
    ensure
      base.close if base
      rd.close
    end
  end

  begin
    container.export do |chunk, remaining, total|
      wr.write(chunk)
    end
  ensure
    wr.close
    thread.join
  end
end
#export_image_whole(container, output) ⇒ Object
# File 'lib/dockly/docker.rb', line 270

def export_image_whole(container, output)
  container.export do |chunk, remaining, total|
    output.write(chunk)
  end
end
#export_only ⇒ Object
# File 'lib/dockly/docker.rb', line 54

def export_only
  if image = find_image_by_repotag
    info "Found image by repo:tag: #{repo}:#{tag} - #{image.inspect}"
    export_image(image)
  else
    raise "Could not find image"
  end
end
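A sketch of re-exporting an image that has already been built and tagged locally, without rerunning the build pipeline; docker_config stands for a configured Dockly::Docker instance (hypothetical variable name):

docker_config.export_only   # looks up "#{repo}:#{tag}" via find_image_by_repotag and exports it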
#fetch_import ⇒ Object
# File 'lib/dockly/docker.rb', line 327

def fetch_import
  ensure_present! :import
  path = "/tmp/dockly-docker-import.#{name}.#{File.basename(import)}"

  if File.exist?(path)
    debug "already fetched #{import}"
  else
    debug "fetching #{import}"
    File.open("#{path}.tmp", 'wb') do |file|
      case import
      when /^s3:\/\/(?<bucket>.+?)\/(?<key>.+)$/
        bucket, key = Regexp.last_match[:bucket], Regexp.last_match[:key]
        Dockly.s3.get_object(bucket: bucket, key: key) do |chunk|
          file.write(chunk)
        end
      when /^https?:\/\//
        Excon.get(import, :response_block => lambda { |chunk, remaining, total| file.write(chunk) })
      else
        raise "You can only import from S3 or a public url"
      end
    end
    FileUtils.mv("#{path}.tmp", path, :force => true)
  end
  path
end
#find_image_by_repotag ⇒ Object
# File 'lib/dockly/docker.rb', line 63

def find_image_by_repotag
  Docker::Image.all.find do |image|
    image.info["RepoTags"].include?("#{repo}:#{tag}")
  end
end
#generate! ⇒ Object
# File 'lib/dockly/docker.rb', line 47

def generate!
  image = generate_build
  export_image(image)
ensure
  cleanup([image]) if cleanup_images
end
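A configuration sketch for the pipeline that generate! drives, assuming the standard dockly DSL entry point; every name, path, and tag below is illustrative:

docker :app do
  registry_import 'ubuntu', tag: '14.04'   # base image to pull
  git_archive '/opt/app'                   # place the repo's git archive under /opt/app
  build "run apt-get update -qq"           # custom build steps appended after FROM
  build_env 'RAILS_ENV' => 'production'
  build_dir 'build'
  s3_bucket 'my-artifact-bucket'           # optional: stream the export to S3
end

Calling generate! on that instance runs generate_build (import or pull, build_env, git archive, build caches, custom build) and then export_image, which either writes the gzipped tar or pushes to a configured registry.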
#generate_build ⇒ Object
# File 'lib/dockly/docker.rb', line 69

def generate_build
  Docker.options = {
    :read_timeout => timeout,
    :write_timeout => timeout
  }
  images = {}
  if registry_import.nil?
    docker_tar = File.absolute_path(ensure_tar(fetch_import))
    images[:one] = import_base(docker_tar)
  else
    registry.authenticate! unless registry.nil?
    full_name = "#{registry_import[:name]}:#{registry_import[:tag]}"
    info "Pulling #{full_name}"
    images[:one] = ::Docker::Image.create('fromImage' => registry_import[:name], 'tag' => registry_import[:tag])
    info "Successfully pulled #{full_name}"
  end
  images[:two] = add_build_env(images[:one])
  images[:three] = add_git_archive(images[:two])
  images[:four] = run_build_caches(images[:three])
  build_image(images[:four])
ensure
  cleanup(images.values.compact) if cleanup_images
end
#git_archive_dir ⇒ Object
# File 'lib/dockly/docker.rb', line 162

def git_archive_dir
  @git_archive_dir ||= File.join(build_dir, "gitarc")
end
#git_archive_path ⇒ Object
# File 'lib/dockly/docker.rb', line 166

def git_archive_path
  "#{git_archive_dir}/#{name}.tar"
end
#git_archive_tar ⇒ Object
# File 'lib/dockly/docker.rb', line 170

def git_archive_tar
  git_archive && File.absolute_path(make_git_archive)
end
#import_base(docker_tar) ⇒ Object
# File 'lib/dockly/docker.rb', line 174

def import_base(docker_tar)
  repo = "#{name}-base"
  tag = "dockly-#{Dockly::VERSION}-#{File.basename(import).split('.').first}"
  info "looking for imported base image with tag: #{tag}"
  image = Docker::Image.all.find { |img| img.info['RepoTags'].include?("#{repo}:#{tag}") }
  if image
    info "found imported base image: #{image.id}"
    image
  else
    info "could not find image with tag #{tag}, importing the docker image from #{docker_tar}"
    image = ::Docker::Image.import(docker_tar, 'repo' => repo, 'tag' => tag)
    info "imported initial docker image: #{image.id}"
    image
  end
end
#make_git_archive ⇒ Object
# File 'lib/dockly/docker.rb', line 145

def make_git_archive
  ensure_present! :git_archive
  info "initializing"

  prefix = git_archive
  prefix += '/' unless prefix.end_with?('/')

  FileUtils.rm_rf(git_archive_dir)
  FileUtils.mkdir_p(git_archive_dir)

  info "archiving #{Dockly::Util::Git.sha}"
  File.open(git_archive_path, 'wb') do |file|
    Dockly::Util::Git.archive(Dockly::Util::Git.sha, prefix, file)
  end

  info "made the git archive for sha #{Dockly::Util::Git.sha}"
  git_archive_path
end
#push_to_registry(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 316

def push_to_registry(image)
  ensure_present! :registry
  info "Exporting #{image.id} to Docker registry at #{registry.server_address}"
  registry.authenticate!
  image = Docker::Image.all(:all => true).find { |img| img.id.start_with?(image.id) || image.id.start_with?(img.id) }
  raise "Could not find image after authentication" if image.nil?
  image.push(registry.to_h, :registry => registry.server_address)
end
#registry_import(img_name = nil, opts = {}) ⇒ Object
# File 'lib/dockly/docker.rb', line 92

def registry_import(img_name = nil, opts = {})
  if img_name
    @registry_import ||= {}
    @registry_import[:name] = img_name
    @registry_import[:tag] = opts[:tag] || 'latest'
  else
    @registry_import
  end
end
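A small usage sketch (image names are illustrative): with an argument it records what generate_build should pull, and with no arguments it returns the recorded settings:

registry_import 'ubuntu'                 # records ubuntu:latest as the base image
registry_import 'ubuntu', tag: '14.04'   # records ubuntu:14.04
registry_import                          # => { :name => 'ubuntu', :tag => '14.04' }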
#repo ⇒ Object
# File 'lib/dockly/docker.rb', line 221

def repo
  @repo ||= case
  when registry.nil?
    name
  when registry.default_server_address?
    "#{registry.username}/#{name}"
  else
    "#{registry.server_address}/#{name}"
  end
end
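For example (names illustrative): with no registry configured and a name of "app", repo is simply "app"; with a Docker Hub registry and username "me" it becomes "me/app"; with a private registry at registry.example.com it becomes "registry.example.com/app".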
#repository(value = nil) ⇒ Object
# File 'lib/dockly/docker.rb', line 366

def repository(value = nil)
  name(value)
end
#run_build_caches(image) ⇒ Object
# File 'lib/dockly/docker.rb', line 121

def run_build_caches(image)
  info "starting build caches"
  (build_cache || []).each do |cache|
    cache.image = image
    image = cache.execute!
  end
  info "finished build caches"
  image
end
#s3_object ⇒ Object
# File 'lib/dockly/docker.rb', line 308

def s3_object
  s3_object_for(Dockly::Util::Git.sha)
end
#s3_object_for(sha) ⇒ Object
# File 'lib/dockly/docker.rb', line 312

def s3_object_for(sha)
  [s3_object_prefix, sha, '/', export_filename].join
end
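A worked example with illustrative values: if s3_object_prefix is "docker/", name is "app", and the sha is "abc123", then:

s3_object_for('abc123')   # => "docker/abc123/app-image.tgz"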
#s3_url ⇒ Object
# File 'lib/dockly/docker.rb', line 232

def s3_url
  "s3://#{s3_bucket}/#{s3_object}"
end
#tar_path ⇒ Object
# File 'lib/dockly/docker.rb', line 131

def tar_path
  File.join(build_dir, export_filename)
end