Module: Aptible::CLI::Helpers::S3LogHelpers
- Includes: DateHelpers
- Defined in: lib/aptible/cli/helpers/s3_log_helpers.rb
Instance Method Summary
- #decrypt_and_translate_s3_file(file, enc_key, region, bucket, path) ⇒ Object
- #encryption_key(filesum, possible_keys) ⇒ Object
- #ensure_aws_creds ⇒ Object
- #find_s3_files_by_attrs(region, bucket, stack, attrs, time_range = nil) ⇒ Object
- #find_s3_files_by_string_match(region, bucket, stack, strings) ⇒ Object
- #info_from_path(file) ⇒ Object
- #s3_client(region) ⇒ Object
- #time_match?(time_range, start_timestamp, end_timestamp) ⇒ Boolean
- #validate_log_search_options(options = {}) ⇒ Object
Methods included from DateHelpers
#utc_date, #utc_datetime, #utc_string
Instance Method Details
#decrypt_and_translate_s3_file(file, enc_key, region, bucket, path) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 106

def decrypt_and_translate_s3_file(file, enc_key, region, bucket, path)
  # AWS warns us about using the legacy encryption schema
  s3 = Kernel.silence_warnings do
    Aws::S3::EncryptionV2::Client.new(
      encryption_key: enc_key, region: region,
      key_wrap_schema: :aes_gcm,
      content_encryption_schema: :aes_gcm_no_padding,
      security_profile: :v2_and_legacy
    )
  end

  # Just write it to a file directly
  location = File.join(path, file.split('/').drop(4).join('/'))
  FileUtils.mkdir_p(File.dirname(location))
  File.open(location, 'wb') do |f|
    CLI.logger.info location
    # Is this memory efficient?
    s3.get_object(bucket: bucket, key: file, response_target: f)
  end
end
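A rough usage sketch, assuming the module is mixed in and a key has already been selected with #encryption_key; the bucket, region, S3 key, and download path below are hypothetical. Because the first four path segments (stack, prefix, schema, shasum) are dropped, the file lands under the remainder of the key:

# Hypothetical values for illustration only.
file    = 'my-stack/logs/v3/abc123/apps-42/service-7/deadbeef-json.log.gz'
enc_key = encryption_key('abc123', base64_keys) # base64_keys: user-supplied --decryption-keys

decrypt_and_translate_s3_file(file, enc_key, 'us-east-1', 'example-log-bucket', '/tmp/aptible-logs')
# => writes /tmp/aptible-logs/apps-42/service-7/deadbeef-json.log.gz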
#encryption_key(filesum, possible_keys) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 195

def encryption_key(filesum, possible_keys)
  # The key can be determined from the sum
  possible_keys.each do |k|
    keysum = Digest::SHA256.hexdigest(Base64.strict_decode64(k))
    next unless keysum == filesum
    return Base64.strict_decode64(k)
  end
  m = "Did not find a matching key for shasum #{filesum}"
  raise Thor::Error, m
end
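The shasum embedded in the S3 path is the SHA-256 hex digest of the raw (base64-decoded) key, so a caller can pass every key it holds and let this helper pick the matching one. A minimal sketch with hypothetical key material:

require 'base64'
require 'digest'
require 'securerandom'

possible_keys = [Base64.strict_encode64(SecureRandom.bytes(32))] # hypothetical key
filesum = Digest::SHA256.hexdigest(Base64.strict_decode64(possible_keys.first))

encryption_key(filesum, possible_keys)  # => the decoded 32-byte key
encryption_key('0' * 64, possible_keys) # => raises Thor::Error, no key matches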
#ensure_aws_creds ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 10

def ensure_aws_creds
  cred_errors = []
  unless ENV['AWS_ACCESS_KEY_ID']
    cred_errors << 'Missing environment variable: AWS_ACCESS_KEY_ID'
  end
  unless ENV['AWS_SECRET_ACCESS_KEY']
    cred_errors << 'Missing environment variable: AWS_SECRET_ACCESS_KEY'
  end
  raise Thor::Error, cred_errors.join(' ') if cred_errors.any?
end
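Credentials are read straight from the environment and never passed as arguments, so they must be exported before any S3 call is made; a minimal sketch with hypothetical values:

# Normally these are exported in the shell before running the CLI.
ENV['AWS_ACCESS_KEY_ID']     ||= 'AKIAEXAMPLE'
ENV['AWS_SECRET_ACCESS_KEY'] ||= 'example-secret'

ensure_aws_creds # raises Thor::Error listing whichever variables are still missing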
#find_s3_files_by_attrs(region, bucket, stack, attrs, time_range = nil) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 143

def find_s3_files_by_attrs(region, bucket, stack, attrs, time_range = nil)
  # This function uses the known path schema to return files matching
  # any provided criteria. EG:
  # * attrs: { :type => 'app', :id => 123 }
  # * attrs: { :container_id => 'deadbeef' }
  begin
    stack_logs = s3_client(region).bucket(bucket)
                                  .objects(prefix: stack)
                                  .map(&:key)
  rescue => error
    raise Thor::Error, error.message
  end

  attrs.each do |k, v|
    stack_logs = stack_logs.select do |f|
      if k == :container_id
        # Match short container IDs
        info_from_path(f)[k].start_with?(v)
      else
        info_from_path(f)[k] == v
      end
    end
  end

  if time_range
    # select only logs within the time range
    stack_logs = stack_logs.select do |f|
      info = info_from_path(f)
      first_log = info[:start_time]
      last_log = info[:end_time]
      if first_log.nil? || last_log.nil?
        m = 'Cannot determine precise timestamps of file: ' \
            "#{f.split('/').drop(4).join('/')}"
        CLI.logger.warn m
        false
      else
        time_match?(time_range, first_log, last_log)
      end
    end
  end

  stack_logs
end
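A usage sketch mirroring the attrs examples in the comment above; the region, bucket, and stack prefix are hypothetical, and the optional time range is a [start, end] pair compared by #time_match?:

# Hypothetical values for illustration only.
files = find_s3_files_by_attrs(
  'us-east-1', 'example-log-bucket', 'my-stack',
  { type: 'app', id: 123 },
  [Time.utc(2023, 1, 1), Time.utc(2023, 1, 2)]
)
# => list of S3 keys whose path attributes and timestamps match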
#find_s3_files_by_string_match(region, bucket, stack, strings) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 127

def find_s3_files_by_string_match(region, bucket, stack, strings)
  # This function just regex matches a provided string anywhere
  # in the s3 path
  begin
    stack_logs = s3_client(region).bucket(bucket)
                                  .objects(prefix: stack)
                                  .map(&:key)
  rescue => error
    raise Thor::Error, error.message
  end
  strings.each do |s|
    stack_logs = stack_logs.select { |f| f =~ /#{s}/ }
  end
  stack_logs
end
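Each string is treated as a regular expression, and every one of them must match somewhere in a key for it to be kept; a sketch with hypothetical values:

# Returns only keys that match both patterns.
files = find_s3_files_by_string_match(
  'us-east-1', 'example-log-bucket', 'my-stack', %w(apps-123 deadbeef)
)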
#info_from_path(file) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 64

def info_from_path(file)
  properties = {}

  properties[:stack], _, properties[:schema],
    properties[:shasum], type_id, *remainder = file.split('/')

  properties[:id] = type_id.split('-').last.to_i
  properties[:type] = type_id.split('-').first

  case properties[:schema]
  when 'v2'
    # Eliminate the extensions
    split_by_dot = remainder.pop.split('.') - %w(log bck gz)
    properties[:container_id] = split_by_dot.first.delete!('-json')
    properties[:uploaded_at] = utc_datetime(split_by_dot.last)
  when 'v3'
    case properties[:type]
    when 'apps'
      properties[:service_id] = remainder.first.split('-').last.to_i
      file_name = remainder.second
    else
      file_name = remainder.first
    end

    # The file name may have a differing number of elements due to
    # docker file log rotation. So we eliminate some useless items
    # and then work from the beginning or end of the remaining to find
    # known elements, ignoring any .1 .2 (or none at all) extension
    # found in the middle of the file name. EG:
    # ['container_id', 'start_time', 'end_time']
    # or
    # ['container_id', '.1', 'start_time', 'end_time']
    split_by_dot = file_name.split('.') - %w(log gz archived)
    properties[:container_id] = split_by_dot.first.delete!('-json')
    properties[:start_time] = utc_datetime(split_by_dot[-2])
    properties[:end_time] = utc_datetime(split_by_dot[-1])
  else
    m = "Cannot determine aptible log naming schema from #{file}"
    raise Thor::Error, m
  end
  properties
end
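The leading path segments are fixed across schemas; only the trailing file name differs between v2 and v3. A sketch of the destructuring step on a hypothetical v3 key (the timestamp formats are handled by DateHelpers and are not shown here):

# Hypothetical key; mirrors the split performed at the top of #info_from_path.
segments = 'my-stack/logs/v3/abc123shasum/apps-42/service-7/rest-of-file-name'.split('/')
stack, _, schema, shasum, type_id, *remainder = segments
# stack     => "my-stack"
# schema    => "v3"
# shasum    => "abc123shasum"   (used by #encryption_key)
# type_id   => "apps-42"        (type "apps", id 42)
# remainder => ["service-7", "rest-of-file-name"]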
#s3_client(region) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 206

def s3_client(region)
  @s3_client ||= Kernel.silence_warnings do
    Aws::S3::Resource.new(region: region)
  end
end
#time_match?(time_range, start_timestamp, end_timestamp) ⇒ Boolean
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 188

def time_match?(time_range, start_timestamp, end_timestamp)
  return false if start_timestamp.nil? || end_timestamp.nil?
  return false if time_range.last < start_timestamp
  return false if time_range.first > end_timestamp
  true
end
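Put differently, a file matches when its [start, end] window overlaps the requested range at all. A small worked sketch of the same check, assuming Time values:

range      = [Time.utc(2023, 1, 1), Time.utc(2023, 1, 2)] # requested window
file_start = Time.utc(2023, 1, 1, 23)                     # file's first log line
file_end   = Time.utc(2023, 1, 2, 1)                      # file's last log line

# Equivalent to time_match?(range, file_start, file_end)
!(range.last < file_start) && !(range.first > file_end) # => true, the windows overlap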
#validate_log_search_options(options = {}) ⇒ Object
# File 'lib/aptible/cli/helpers/s3_log_helpers.rb', line 21

def validate_log_search_options(options = {})
  id_options = [
    options[:app_id],
    options[:database_id],
    options[:endpoint_id],
    options[:container_id]
  ]
  date_options = [options[:start_date], options[:end_date]]

  unless options[:string_matches] || id_options.any?
    m = 'You must specify an option to identify the logs to download,' \
        ' either: --string-matches, --app-id, --database-id,' \
        ' --endpoint-id, or --container-id'
    raise Thor::Error, m
  end

  m = 'You cannot pass --app-id, --database-id, --endpoint-id, or ' \
      '--container-id when using --string-matches.'
  raise Thor::Error, m if options[:string_matches] && id_options.any?

  m = 'You must specify only one of ' \
      '--app-id, --database-id, --endpoint-id or --container-id'
  raise Thor::Error, m if id_options.any? && !id_options.one?

  m = 'The options --start-date/--end-date cannot be used when ' \
      'searching by string'
  raise Thor::Error, m if options[:string_matches] && date_options.any?

  m = 'You must pass both --start-date and --end-date'
  raise Thor::Error, m if date_options.any? && !date_options.all?

  if options[:container_id] && options[:container_id].length < 12
    m = 'You must specify at least the first 12 characters of the ' \
        'container ID'
    raise Thor::Error, m
  end

  if options[:download_location] && !options[:decryption_keys]
    m = 'You must provide decryption keys with the --decryption-keys ' \
        'option in order to download files.'
    raise Thor::Error, m
  end
end
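These checks only look at which options are present, not at their values. A sketch of option hashes that pass and fail (keys mirror the CLI flags):

# Passes: exactly one ID option, and a complete date range.
validate_log_search_options(app_id: 123, start_date: '2023-01-01', end_date: '2023-01-02')

# Raises Thor::Error: --string-matches cannot be combined with an ID option.
validate_log_search_options(string_matches: ['deadbeef'], app_id: 123)

# Raises Thor::Error: downloads require --decryption-keys.
validate_log_search_options(app_id: 123, download_location: '/tmp/logs')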