Class: FakeS3::FileStore

Inherits: Object

Defined in: lib/fakes3/file_store.rb

Constant Summary

SHUCK_METADATA_DIR =
  ".fakes3_metadataFFF"

SUBSECOND_PRECISION =
  S3 clients with overly strict date parsing fail to parse ISO 8601 dates without any sub-second precision (e.g. jets3t v0.7.2), and the examples given in the official AWS S3 documentation specify three (3) decimal places of sub-second precision.

  3
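
For illustration, a minimal sketch using Ruby's standard time library shows the shape of a timestamp formatted at this precision:

require 'time'

# With SUBSECOND_PRECISION = 3, formatted dates carry three decimal places:
Time.utc(2016, 1, 1, 12, 0, 0.5).iso8601(3)
# => "2016-01-01T12:00:00.500Z"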

Instance Method Summary

Constructor Details

#initialize(root) ⇒ FileStore

Returns a new instance of FileStore.



# File 'lib/fakes3/file_store.rb', line 18

def initialize(root)
  @root = root
  @buckets = []
  @bucket_hash = {}
  Dir[File.join(root,"*")].each do |bucket|
    bucket_name = File.basename(bucket)
    bucket_obj = Bucket.new(bucket_name,Time.now,[])
    @buckets << bucket_obj
    @bucket_hash[bucket_name] = bucket_obj
  end
end
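
A minimal usage sketch (the root path is illustrative): the store scans the root once at construction time, so any subdirectory that already exists on disk is surfaced as a bucket.

require 'fakes3'

store = FakeS3::FileStore.new("/tmp/fakes3_root")
store.buckets.map(&:name)   # => names of the subdirectories found under the root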

Instance Method Details

#buckets ⇒ Object



# File 'lib/fakes3/file_store.rb', line 49

def buckets
  @buckets
end

#combine_object_parts(bucket, upload_id, object_name, parts, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 216

def combine_object_parts(bucket, upload_id, object_name, parts, request)
  upload_path   = File.join(@root, bucket.name)
  base_path     = File.join(upload_path, "#{upload_id}_#{object_name}")

  complete_file = ""
  chunk         = ""
  part_paths    = []

  parts.sort_by { |part| part[:number] }.each do |part|
    part_path    = "#{base_path}_part#{part[:number]}"
    content_path = File.join(part_path, SHUCK_METADATA_DIR, 'content')

    File.open(content_path, 'rb') { |f| chunk = f.read }
    etag = Digest::MD5.hexdigest(chunk)

    raise StandardError, "invalid file chunk" unless part[:etag] == etag
    complete_file << chunk
    part_paths    << part_path
  end

  object = do_store_object(bucket, object_name, complete_file, request)

  # clean up parts
  part_paths.each do |path|
    FileUtils.remove_dir(path)
  end

  object
end
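
A hedged sketch of the calling side: each entry in parts pairs a part number with the ETag (MD5 hex digest) the client reported for that part, and the method verifies every stored chunk against its ETag before concatenating. Here store, bucket, upload_id and request are stand-ins from the surrounding multipart-upload flow, and the ETags are placeholders.

parts = [
  { number: 1, etag: "9e107d9d372bb6826bd81d3542a419d6" },
  { number: 2, etag: "e4d909c290d0fb1ca068ffaddf22cbd0" }
]
# Raises "invalid file chunk" if any stored chunk's MD5 differs from its ETag.
object = store.combine_object_parts(bucket, upload_id, "big_file.bin", parts, request)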

#copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 106

def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
  src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR)
  src_metadata_filename = File.join(src_root,"metadata")
  src_metadata = YAML.load(File.open(src_metadata_filename,'rb').read)
  src_content_filename = File.join(src_root,"content")

  dst_filename = File.join(@root,dst_bucket_name,dst_name)
  FileUtils.mkdir_p(dst_filename)

  metadata_dir = File.join(dst_filename,SHUCK_METADATA_DIR)
  FileUtils.mkdir_p(metadata_dir)

  content = File.join(metadata_dir,"content")
  metadata = File.join(metadata_dir,"metadata")

  if src_bucket_name != dst_bucket_name || src_name != dst_name
    File.open(content,'wb') do |f|
      File.open(src_content_filename,'rb') do |input|
        f << input.read
      end
    end

    File.open(metadata,'w') do |f|
      File.open(src_metadata_filename,'r') do |input|
        f << input.read
      end
    end
  end

  metadata_directive = request.header["x-amz-metadata-directive"].first
  if metadata_directive == "REPLACE"
    metadata_struct = create_metadata(content,request)
    File.open(metadata,'w') do |f|
      f << YAML::dump(metadata_struct)
    end
  end

  src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
  dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)

  obj = S3Object.new
  obj.name = dst_name
  obj.md5 = src_metadata[:md5]
  obj.content_type = src_metadata[:content_type]
  obj.size = src_metadata[:size]
  obj.modified_date = src_metadata[:modified_date]

  src_obj = src_bucket.find(src_name)
  dst_bucket.add(obj)
  return obj
end
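
In short: the content and metadata files are copied unless source and destination are identical, and an x-amz-metadata-directive header of REPLACE regenerates the metadata from the destination content. A hedged sketch, where store is an existing FileStore and request is a stand-in for a WEBrick request carrying that header:

obj = store.copy_object("src-bucket", "a.txt", "dst-bucket", "b.txt", request)
obj.name   # => "b.txt"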

#create_bucket(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 61

def create_bucket(bucket)
  FileUtils.mkdir_p(File.join(@root,bucket))
  bucket_obj = Bucket.new(bucket,Time.now,[])
  if !@bucket_hash[bucket]
    @buckets << bucket_obj
    @bucket_hash[bucket] = bucket_obj
  end
  bucket_obj
end
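
Note that the directory is always (re)created with mkdir_p, but the bucket is only registered once, so repeated calls are harmless:

store.create_bucket("my-bucket")
store.create_bucket("my-bucket")                    # safe to repeat
store.buckets.count { |b| b.name == "my-bucket" }   # => 1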

#create_metadata(content, request) ⇒ Object

TODO: abstract getting metadata from the request.



# File 'lib/fakes3/file_store.rb', line 260

def create_metadata(content,request)
  metadata = {}
  metadata[:md5] = Digest::MD5.file(content).hexdigest
  metadata[:content_type] = request.header["content-type"].first
  metadata[:size] = File.size(content)
  metadata[:modified_date] = File.mtime(content).utc.iso8601(SUBSECOND_PRECISION)
  metadata[:amazon_metadata] = {}
  metadata[:custom_metadata] = {}

  # Add custom metadata from the request header
  request.header.each do |key, value|
    match = /^x-amz-([^-]+)-(.*)$/.match(key)
    next unless match
    if match[1].eql?('meta') && (match_key = match[2])
      metadata[:custom_metadata][match_key] = value.join(', ')
      next
    end
    metadata[:amazon_metadata][key.gsub(/^x-amz-/, '')] = value.join(', ')
  end
  return metadata
end
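
As an illustration of the header parsing above, a request carrying hypothetical x-amz-meta-color and x-amz-storage-class headers would yield a hash shaped roughly like this (all values illustrative):

metadata = {
  :md5             => "9e107d9d372bb6826bd81d3542a419d6",
  :content_type    => "text/plain",
  :size            => 42,
  :modified_date   => "2016-01-01T12:00:00.000Z",
  :amazon_metadata => { "storage-class" => "STANDARD" },  # from x-amz-storage-class
  :custom_metadata => { "color" => "blue" }               # from x-amz-meta-color
}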

#delete_bucket(bucket_name) ⇒ Object

Raises:

  • NoSuchBucket (if the bucket does not exist)
  • BucketNotEmpty (if the bucket still contains objects)

# File 'lib/fakes3/file_store.rb', line 71

def delete_bucket(bucket_name)
  bucket = get_bucket(bucket_name)
  raise NoSuchBucket if !bucket
  raise BucketNotEmpty if bucket.objects.count > 0
  FileUtils.rm_r(get_bucket_folder(bucket))
  @bucket_hash.delete(bucket_name)
end
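
Both guards are visible in the sketch below; a caller must delete the bucket's objects before removing the bucket itself (bucket names are illustrative):

store.delete_bucket("no-such-bucket")   # raises NoSuchBucket
store.delete_bucket("full-bucket")      # raises BucketNotEmpty until its objects are deleted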

#delete_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 246

def delete_object(bucket,object_name,request)
  begin
    filename = File.join(@root,bucket.name,object_name)
    FileUtils.rm_rf(filename)
    object = bucket.find(object_name)
    bucket.remove(object)
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end

#do_store_object(bucket, object_name, filedata, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 182

def do_store_object(bucket, object_name, filedata, request)
  begin
    filename = File.join(@root,bucket.name,object_name)
    FileUtils.mkdir_p(filename)

    metadata_dir = File.join(filename,SHUCK_METADATA_DIR)
    FileUtils.mkdir_p(metadata_dir)

    content  = File.join(filename,SHUCK_METADATA_DIR,"content")
    metadata = File.join(filename,SHUCK_METADATA_DIR,"metadata")

    File.open(content,'wb') { |f| f << filedata }

    metadata_struct = create_metadata(content,request)
    File.open(metadata,'w') do |f|
      f << YAML::dump(metadata_struct)
    end

    obj = S3Object.new
    obj.name = object_name
    obj.md5 = metadata_struct[:md5]
    obj.content_type = metadata_struct[:content_type]
    obj.size = metadata_struct[:size]
    obj.modified_date = metadata_struct[:modified_date]

    bucket.add(obj)
    return obj
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end
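
Reading the paths above, the resulting on-disk layout looks like this (root and names illustrative):

<root>/<bucket>/<object_name>/
  .fakes3_metadataFFF/content    (the raw object bytes)
  .fakes3_metadataFFF/metadata   (YAML dump of the create_metadata hash)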

#get_bucket(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 57

def get_bucket(bucket)
  @bucket_hash[bucket]
end

#get_bucket_folder(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 53

def get_bucket_folder(bucket)
  File.join(@root,bucket.name)
end

#get_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 79

def get_object(bucket,object_name, request)
  begin
    real_obj = S3Object.new
    obj_root = File.join(@root,bucket,object_name,SHUCK_METADATA_DIR)
    metadata = YAML.load(File.open(File.join(obj_root,"metadata"),'rb'))
    real_obj.name = object_name
    real_obj.md5 = metadata[:md5]
    real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" }
    #real_obj.io = File.open(File.join(obj_root,"content"),'rb')
    real_obj.io = RateLimitableFile.open(File.join(obj_root,"content"),'rb')
    real_obj.size = metadata.fetch(:size) { 0 }
    real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
    real_obj.modified_date = metadata.fetch(:modified_date) do
      File.mtime(File.join(obj_root,"content")).utc.iso8601(SUBSECOND_PRECISION)
    end
    real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
    return real_obj
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end
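
A hedged read-path sketch. Note that bucket is passed by name (a String) here, unlike most other methods, which take a Bucket object, and that request is not used by the method body:

obj = store.get_object("my-bucket", "a.txt", request)
obj.content_type   # stored value, or "application/octet-stream" if missing
obj.io.read        # content bytes, rate-limited if a limit is configured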

#object_metadata(bucket, object) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 103

def object_metadata(bucket,object)
end

#rate_limit=(rate_limit) ⇒ Object

Pass a rate limit in bytes per second.



# File 'lib/fakes3/file_store.rb', line 31

def rate_limit=(rate_limit)
  if rate_limit.is_a?(String)
    if rate_limit =~ /^(\d+)$/
      RateLimitableFile.rate_limit = rate_limit.to_i
    elsif rate_limit =~ /^(.*)K$/
      RateLimitableFile.rate_limit = $1.to_f * 1000
    elsif rate_limit =~ /^(.*)M$/
      RateLimitableFile.rate_limit = $1.to_f * 1000000
    elsif rate_limit =~ /^(.*)G$/
      RateLimitableFile.rate_limit = $1.to_f * 1000000000
    else
      raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
    end
  else
    RateLimitableFile.rate_limit = nil
  end
end
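
Per the parser above, the accepted String formats are a plain integer or a number suffixed with K, M, or G (decimal multipliers, not binary), and any non-String value clears the limit:

store.rate_limit = "1000"   # 1,000 bytes/second
store.rate_limit = "1.1M"   # 1,100,000 bytes/second
store.rate_limit = nil      # non-String: disables rate limiting
store.rate_limit = "10KB"   # raises "Invalid Rate Limit Format: ..."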

#store_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 158

def store_object(bucket, object_name, request)
  filedata = ""

  # TODO put a tmpfile here first and mv it over at the end
  content_type = request.content_type || ""

  match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
  boundary = match[1] if match
  if boundary
    boundary  = WEBrick::HTTPUtils::dequote(boundary)
    form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)

    if form_data['file'] == nil or form_data['file'] == ""
      raise WEBrick::HTTPStatus::BadRequest
    end

    filedata = form_data['file']
  else
    request.body { |chunk| filedata << chunk }
  end

  do_store_object(bucket, object_name, filedata, request)
end
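
Both upload styles funnel into do_store_object: a multipart/form-data POST has its file field extracted with WEBrick's form parser, while any other request is treated as a raw body read in chunks. A hedged sketch of the raw-body path, where request stands in for a WEBrick request:

bucket = store.get_bucket("my-bucket") || store.create_bucket("my-bucket")
obj = store.store_object(bucket, "a.txt", request)
obj.md5   # MD5 hex digest of the stored content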