Module: Blufin::AWS
- Defined in:
- lib/core/aws.rb
Constant Summary
- S3_EXCLUDE =
"--exclude '.DS_Store' --exclude '*/.DS_Store' --exclude '*.icloud' --exclude '*/*.icloud'"
Class Method Summary
- .download_s3_data(bucket_name, bucket_path, file: nil, profile: nil, region: nil, use_cache: true) ⇒ Object
  Pulls a file (or path) from S3 and saves it to the /tmp folder.
- .is_ec2 ⇒ Object
  Checks whether the script is running on an EC2 instance.
- .upload_s3_data(bucket_name, bucket_path, file_or_path, profile: nil, region: nil, is_file: false, dryrun: false) ⇒ Object
  Uploads a file (or path) to S3.
Class Method Details
.download_s3_data(bucket_name, bucket_path, file: nil, profile: nil, region: nil, use_cache: true) ⇒ Object
Pulls a file (or path) from S3 and saves it to the /tmp folder. To avoid making repeated calls to S3, caching is enabled by default.
# File 'lib/core/aws.rb', line 48

def self.download_s3_data(bucket_name, bucket_path, file: nil, profile: nil, region: nil, use_cache: true)
  raise RuntimeError, 'profile cannot be nil.' if profile.nil?
  raise RuntimeError, 'region cannot be nil.' if region.nil?
  bucket_path     = Blufin::Strings::remove_surrounding_slashes(bucket_path).strip
  bucket_path_tmp = bucket_path.length > 0 ? "-#{bucket_path.gsub('/', '-')}" : ''
  bucket_path_s3  = bucket_path.length > 0 ? "/#{bucket_path}" : ''
  unless file.nil?
    bucket_path_tmp = "#{bucket_path_tmp}-#{file}"
    bucket_path_s3  = "#{bucket_path_s3}/#{file}"
  end
  tmp_location = "/tmp/awx-s3-cache-#{bucket_name}#{bucket_path_tmp}"
  # If path/file exists and we're using cached values, simply return.
  if file.nil?
    return tmp_location if Blufin::Files::path_exists(tmp_location) && use_cache && Blufin::Files::get_files_in_dir(tmp_location).any?
  else
    return tmp_location if Blufin::Files::file_exists(tmp_location) && use_cache
  end
  begin
    # Clear out all (possibly stale) files.
    raise RuntimeError unless Blufin::Terminal::execute_proc("Wiping temp-folder: #{Blufin::Terminal::format_directory(tmp_location)}", Proc.new {
      system("rm -rf #{tmp_location}") if file.nil?
      system("rm #{tmp_location}") unless file.nil?
      Blufin::Files::create_directory(tmp_location)
    }, verbose: true)
    raise RuntimeError unless Blufin::Terminal::execute("aws s3 cp s3://#{bucket_name}#{bucket_path_s3} #{tmp_location} --recursive --region #{region}#{App::AWS::get_profile_for_cli}", verbose: true)[0]
    tmp_location
  rescue
    system("rm -rf #{tmp_location}") if file.nil?
    system("rm #{tmp_location}") unless file.nil?
    Blufin::Terminal::error("Unable to download from S3 bucket: #{Blufin::Terminal::format_highlight("s3://#{bucket_name}#{bucket_path_s3}")}", "Either the bucket doesn't exist, you don't have permissions or something else is wrong.", true, false)
  end
end
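The following is a minimal usage sketch, not taken from the library's own documentation: the bucket name, bucket path, file, profile and region are placeholder values, and the require line is an assumption about the gem's entry point.

  require 'blufin-lib' # assumed entry point; adjust to however Blufin is loaded in your project

  local_path = Blufin::AWS.download_s3_data(
    'my-config-bucket',     # bucket_name (placeholder)
    'configs/production',   # bucket_path (placeholder)
    file:      'app.yml',   # omit to pull the whole path recursively
    profile:   'default',   # required; raises RuntimeError if nil
    region:    'us-east-1', # required; raises RuntimeError if nil
    use_cache: true         # default; reuses the /tmp copy if it already exists
  )
  puts local_path # => /tmp/awx-s3-cache-my-config-bucket-configs-production-app.yml

Because caching is on by default, a second call with the same arguments returns the cached /tmp location without shelling out to the AWS CLI again; pass use_cache: false to force a fresh download. The AWS CLI must be installed, since the method runs aws s3 cp under the hood.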
.is_ec2 ⇒ Object
Checks whether the script is running on an EC2 instance. The check is not guaranteed to be 100% reliable, but it is about as accurate as such detection gets.
# File 'lib/core/aws.rb', line 10

def self.is_ec2
  res = `#{Blufin::Base::get_base_path}#{Blufin::Base::OPT_PATH}/shell/ec2-check`.to_s
  res = Blufin::Strings::strip_newline(res)
  res.downcase.strip == 'yes'
end
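A short sketch of how the result might be used; the branching shown here is illustrative only and not part of the library. The method shells out to the gem's bundled shell/ec2-check script and compares its output to 'yes'.

  # Illustrative branch on the EC2 check; the behaviour on either side is a placeholder.
  if Blufin::AWS.is_ec2
    puts 'Running on EC2 -- instance-profile credentials are probably available.'
  else
    puts 'Not on EC2 -- expecting a local AWS profile to be configured.'
  end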
.upload_s3_data(bucket_name, bucket_path, file_or_path, profile: nil, region: nil, is_file: false, dryrun: false) ⇒ Object
Uploads a file (or path) to S3. If given a path, the upload is recursive (recursion is implicit in the aws s3 sync command).
# File 'lib/core/aws.rb', line 18

def self.upload_s3_data(bucket_name, bucket_path, file_or_path, profile: nil, region: nil, is_file: false, dryrun: false)
  raise RuntimeError, 'profile cannot be nil.' if profile.nil?
  raise RuntimeError, 'region cannot be nil.' if region.nil?
  raise RuntimeError, "File or path not found: #{file_or_path}" unless Blufin::Files::file_exists(file_or_path)
  file_or_path   = File.expand_path(file_or_path) # expand_path assumed; the method name was dropped in the extracted source
  bucket_path    = Blufin::Strings::remove_surrounding_slashes(bucket_path).strip
  bucket_path_s3 = bucket_path.length > 0 ? "/#{bucket_path}" : ''
  s3_url         = "s3://#{bucket_name}#{bucket_path_s3}"
  begin
    if is_file
      cmd = "aws s3 cp #{file_or_path} #{s3_url} --region #{region}#{App::AWS::get_profile_for_cli}"
    else
      cmd = "aws s3 sync #{file_or_path} #{s3_url} --delete --region #{region}#{App::AWS::get_profile_for_cli}"
    end
    if dryrun
      cmd = "#{cmd} --dryrun"
      Blufin::Terminal::info('Performing Dry Run:', Blufin::Terminal::format_command(cmd))
      system("#{cmd} #{S3_EXCLUDE} --exclude '*.blank*'")
    else
      raise RuntimeError unless Blufin::Terminal::execute("#{cmd} #{S3_EXCLUDE} --exclude '*.blank*'", verbose: true, text: cmd)[0]
    end
    s3_url
  rescue
    Blufin::Terminal::error("Unable to upload to S3 bucket: #{Blufin::Terminal::format_highlight(s3_url)}", "Either the bucket doesn't exist, you don't have permissions or something else is wrong.", true, false)
  end
end
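A hedged usage sketch with placeholder values (the bucket, local path, profile and region below are not from the library's documentation). The AWS CLI must be on the PATH, since the method shells out to aws s3.

  # Dry run first: --dryrun is appended, so the sync only prints what it would copy or delete.
  s3_url = Blufin::AWS.upload_s3_data(
    'my-config-bucket',    # bucket_name (placeholder)
    'configs/production',  # bucket_path (placeholder)
    '/home/me/configs',    # local directory; synced recursively because is_file is false
    profile: 'default',    # required; raises RuntimeError if nil
    region:  'us-east-1',  # required; raises RuntimeError if nil
    is_file: false,        # true switches from `aws s3 sync` to `aws s3 cp` for a single file
    dryrun:  true          # print the planned changes without touching the bucket
  )
  puts s3_url # => s3://my-config-bucket/configs/production

Note that the real (non-dryrun) sync passes --delete, so objects under the target prefix that no longer exist locally are removed; the dry run is a safe way to review that before committing.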