Class: RightDevelop::S3::RakeTask

Inherits: Rake::TaskLib < Object

Includes: Rake::DSL

Defined in: lib/right_develop/s3/rake_task.rb

Constant Summary

DEFAULT_OPTIONS =
{
  :s3_namespace     => :s3
}

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(options = {}) {|_self| ... } ⇒ RakeTask

Returns a new instance of RakeTask.

Yields:

  • (_self)

Yield Parameters:

  • _self (RightDevelop::S3::RakeTask) - the object that the method was called on

# File 'lib/right_develop/s3/rake_task.rb', line 45

def initialize(options = {})
  # Let client provide options object-style, in our initializer
  options = DEFAULT_OPTIONS.merge(options)
  self.s3_namespace = options[:s3_namespace]

  # Let client provide options DSL-style by calling our writers
  yield(self) if block_given?

  namespace self.s3_namespace do

    desc 'List files in S3 bucket'
    task :list_files, [:bucket, :subdirectory, :recursive, :filters] do |task, args|
      raise ::ArgumentError.new(":bucket is required") unless bucket = args[:bucket]
      list = storage.list_files(
        bucket,
        :subdirectory => args[:subdirectory],
        :recursive    => args[:recursive] != 'false',
        :filters      => args[:filters])
      puts "Files in S3 bucket \"#{bucket}/#{args[:subdirectory]}\":"
      list.sort.each { |path| puts "  #{path}" }
    end

    desc 'Download files from S3 bucket'
    task :download_files, [:bucket, :to_dir_path, :subdirectory, :recursive, :filters] do |task, args|
      raise ::ArgumentError.new(":bucket is required") unless bucket = args[:bucket]
      raise ::ArgumentError.new(":to_dir_path is required") unless to_dir_path = args[:to_dir_path]
      count = storage.download_files(
        bucket,
        to_dir_path,
        :subdirectory => args[:subdirectory],
        :recursive    => args[:recursive] != 'false',
        :filters      => args[:filters])
      puts "Downloaded #{count} file(s)."
    end

    desc 'Upload files to S3 bucket'
    task :upload_files, [:bucket, :from_dir_path, :subdirectory, :recursive, :access, :filters] do |task, args|
      raise ::ArgumentError.new(":bucket is required") unless bucket = args[:bucket]
      raise ::ArgumentError.new(":from_dir_path is required") unless from_dir_path = args[:from_dir_path]
      count = storage.upload_files(
        bucket,
        from_dir_path,
        :subdirectory => args[:subdirectory],
        :recursive => args[:recursive] != 'false',
        :access => args[:access],
        :filters => args[:filters])
      puts "Uploaded #{count} file(s)."
    end

    desc 'Copy files between S3 buckets'
    task :copy_files, [:from_bucket, :from_subdirectory, :to_bucket, :to_subdirectory, :recursive, :access, :filters] do |task, args|
      raise ::ArgumentError.new(":from_bucket is required") unless from_bucket = args[:from_bucket]
      raise ::ArgumentError.new(":to_bucket is required") unless to_bucket = args[:to_bucket]
      verbose = ::Rake.application.options.trace
      recursive = args[:recursive] != 'false'

      # establish from/to credentials before copying.
      from_storage = Interface.new(
        :aws_access_key_id     => ENV['FROM_AWS_ACCESS_KEY_ID'],
        :aws_secret_access_key => ENV['FROM_AWS_SECRET_ACCESS_KEY'],
        :logger                => logger)
      to_storage = Interface.new(
        :aws_access_key_id     => ENV['TO_AWS_ACCESS_KEY_ID'],
        :aws_secret_access_key => ENV['TO_AWS_SECRET_ACCESS_KEY'],
        :logger                => logger)

      # download
      ::Dir.mktmpdir do |temp_dir|
        ::Dir.chdir(temp_dir) do
          download_count = from_storage.download_files(
            from_bucket,
            temp_dir,
            :subdirectory => args[:from_subdirectory],
            :recursive    => recursive,
            :filters      => args[:filters])

          upload_count = to_storage.upload_files(
            to_bucket,
            temp_dir,
            :subdirectory => args[:to_subdirectory],
            :recursive    => recursive,
            :access       => args[:access],
            :filters      => nil)  # already filtered during download

          if upload_count == download_count
            puts "Copied #{upload_count} file(s)."
          else
            fail "Failed to upload all downloaded files (#{upload_count} uploaded != #{download_count} downloaded)."
          end
        end
      end
    end

    desc 'Delete files from S3 bucket'
    task :delete_files, [:bucket, :subdirectory, :recursive, :filters] do |task, args|
      raise ::ArgumentError.new(":bucket is required") unless bucket = args[:bucket]
      count = storage.delete_files(
        bucket,
        :subdirectory => args[:subdirectory],
        :recursive    => args[:recursive] != 'false',
        :filters      => args[:filters])
      puts "Deleted #{count} file(s)."
    end

  end # namespace
end
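
For orientation, a minimal Rakefile sketch (the require path assumes the gem's lib directory is on the load path; the :artifacts namespace and bucket names below are hypothetical examples):

# Rakefile
require 'right_develop/s3/rake_task'

# Registers list_files, download_files, upload_files, copy_files and
# delete_files under the chosen namespace (the default is :s3).
RightDevelop::S3::RakeTask.new do |task|
  task.s3_namespace = :artifacts  # optional override of DEFAULT_OPTIONS
end

# Example invocations:
#   rake artifacts:list_files[my-bucket,reports]
#   rake artifacts:upload_files[my-bucket,./build,releases]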

Instance Attribute Details

#s3_namespace ⇒ Object

Returns the value of attribute s3_namespace.



# File 'lib/right_develop/s3/rake_task.rb', line 43

def s3_namespace
  @s3_namespace
end

Instance Method Details

#logger ⇒ Object

See also: #initialize



# File 'lib/right_develop/s3/rake_task.rb', line 152

def logger
  unless @logger
    verbose = Rake.application.options.trace
    @logger = verbose ? Logger.new(STDOUT) : RightDevelop::Utility::Shell.null_logger
  end
  @logger
end
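
Note that verbosity follows Rake's own --trace flag: with tracing enabled the tasks log S3 activity to STDOUT; otherwise a null logger keeps them quiet.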

#storage ⇒ Object



# File 'lib/right_develop/s3/rake_task.rb', line 160

def storage
  @storage ||= Interface.new(
    :aws_access_key_id     => ENV['AWS_ACCESS_KEY_ID'],
    :aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY'],
    :logger                => logger)
end
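
A usage sketch, assuming the default :s3 namespace; the bucket and directory names below are hypothetical, and credentials are supplied through the environment variables read above:

# Single-bucket tasks read AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY:
#   AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
#     rake s3:download_files[my-bucket,./downloads,reports,true]
#
# copy_files uses separate source and destination credentials taken from
# FROM_AWS_ACCESS_KEY_ID / FROM_AWS_SECRET_ACCESS_KEY and
# TO_AWS_ACCESS_KEY_ID / TO_AWS_SECRET_ACCESS_KEY.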