Method: IntelligentUtils#run_intelligent_upload_flow
- Defined in:
- lib/filestack/utils/utils.rb
#run_intelligent_upload_flow(jobs, filepath, io, state, storage) ⇒ Array
Runs the intelligent upload flow, from start to finish. Parts are pulled from the job generator in batches and uploaded in parallel threads; a failed chunk is retried with a smaller offset size after an S3 network timeout, or after a backoff delay for S3/backend server errors, until the part succeeds or the retry limit is exhausted, at which point an error is raised.
# File 'lib/filestack/utils/utils.rb', line 205

def run_intelligent_upload_flow(jobs, filepath, io, state, storage)
  bar = ProgressBar.new(jobs.length)
  generator = create_intelligent_generator(jobs)
  working_offset = FilestackConfig::DEFAULT_OFFSET_SIZE
  while generator.alive?
    batch = get_generator_batch(generator)
    # run parts
    Parallel.map(batch, in_threads: 4) do |part|
      state = run_intelligent_uploads(part, filepath, io, state, storage)
      # condition: a chunk has failed but we have not reached the maximum retries
      while bad_state(state)
        # condition: timeout to S3, requiring offset size to be changed
        if state.error_type == 'S3_NETWORK'
          sleep(5)
          state.offset = working_offset = change_offset(working_offset, state)
        # condition: timeout to backend, requiring only backoff
        elsif ['S3_SERVER', 'BACKEND_SERVER'].include? state.error_type
          sleep(state.backoff)
        end
        state.add_retry
        state = run_intelligent_uploads(part, filepath, io, state, storage)
      end
      raise "Upload has failed. Please try again later." unless state.ok
      bar.increment!
    end
  end
end
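
The sketch below shows how the arguments fit together when calling this method directly. It is illustrative only: end users normally go through the client's upload call with intelligent ingestion enabled rather than invoking this helper themselves, and build_part_jobs below is a hypothetical stand-in for the code that splits the file into per-part jobs inside the multipart flow. It also assumes IntelligentState can be constructed with no arguments, as in the gem's utils.

require 'filestack'
include IntelligentUtils

filepath = '/path/to/large_video.mp4'
io       = nil                   # upload from a path rather than an IO object
storage  = 'S3'
state    = IntelligentState.new  # tracks offset, retries and error type across parts

# Hypothetical helper: stands in for whatever produces the per-part job hashes.
jobs = build_part_jobs(filepath)

# Uploads every part, retrying failed chunks until each succeeds or the retry limit is hit.
run_intelligent_upload_flow(jobs, filepath, io, state, storage)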