Module: HPC::TemplateGeneration

Included in:
BATCH, LSF, SLURM
Defined in:
lib/rbbt/hpc/batch.rb

Instance Method Summary collapse

Instance Method Details

#batch_options(job, options) ⇒ Object



146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
# File 'lib/rbbt/hpc/batch.rb', line 146

# Build the complete set of batch-scheduler options for submitting +job+.
#
# Consumes recognized keys from +options+ into a fresh IndiferentHash,
# layers in defaults from Rbbt::Config, the environment ($USER, $HOME,
# SINGULARITY_* variables) and hard-coded fallbacks, derives the
# contain/sync settings, and finally records the standard batch file
# paths (std.out, std.err, job.id, ...) under the batch directory.
#
# @param job the workflow step being submitted (its path is stored as
#   :step_path and it is used to build the exec/rbbt command lines)
# @param options [Hash] submission options; recognized keys are removed
#   from this hash as they are processed
# @return [Hash] IndiferentHash of options ready for the template sections
def batch_options(job, options)
  IndiferentHash.setup(options)

  batch_options = IndiferentHash.setup({})

  # Keys moved verbatim from +options+ into +batch_options+ when present.
  keys = [
    :queue,
    :account,
    :partition,
    :exclusive,
    :highmem,
    :time,
    :nodes,
    :task_cpus,
    :mem,
    :mem_per_cpu,
    :gres,
    :lua_modules,
    :conda,
    :contraints, # NOTE(review): likely misspelling of :constraints — kept as-is since callers may rely on this exact key
    :licenses,
    :batch_dir,
    :batch_name,
    :contain,
    :sync,
    :contain_and_sync,
    :copy_image,
    :drbbt,
    :env_cmd,
    :manifest,
    :user_group,
    :wipe_container,
    :workdir,
    :purge_deps,
    :singularity,
    :singularity_img,
    :singularity_mounts,
    :singularity_opt_dir,
    :singularity_ruby_inline
  ]

  # Misc.process_options deletes the key from +options+ as it reads it.
  keys.each do |key|
    next if options[key].nil?
    batch_options[key] = Misc.process_options options, key
  end

  batch_dir = batch_options[:batch_dir]

  batch_name = File.basename(batch_dir)
  inputs_dir = File.join(batch_dir, 'inputs_dir')

  # Keys whose values may also be supplied through Rbbt::Config.
  keys_from_config = [
    :queue,
    :highmem,
    :exclusive,
    :env_cmd,
    :user_group,
    :singularity_img,
    :singularity_mounts,
    :singularity_opt_dir,
    :singularity_ruby_inline,
    :singularity
  ]

  # NOTE(review): config is consulted only for keys already present in
  # batch_options, and the bare default_value (not key => value) is passed
  # to Misc.add_defaults — confirm this matches Misc.add_defaults' contract.
  keys_from_config.each do |key|
    next unless batch_options.include? key
    default_value = Rbbt::Config.get(key, "batch_#{key}", "batch")
    next if default_value.nil? 
    Misc.add_defaults batch_options, default_value
  end

  # Identity and group-shared GPFS locations derived from the environment.
  user = batch_options[:user] ||= ENV['USER'] || `whoami`.strip
  group = batch_options[:group] ||= File.basename(File.dirname(ENV['HOME']))
  batch_options[:scratch_group_dir] = File.join('/gpfs/scratch/', group)
  batch_options[:projects_group_dir] = File.join('/gpfs/projects/', group)

  # An explicit image implies running under singularity.
  batch_options[:singularity] = true if batch_options[:singularity_img]

  # :contain_and_sync expands into a random contain dir plus sync defaults.
  if batch_options[:contain_and_sync]
    if batch_options[:contain].nil?
      contain_base = Rbbt::Config.get(:contain_base_dir, :batch_contain, :batch, :default => "/scratch/tmp/rbbt-[USER]")
      contain_base = contain_base.sub('[USER]', user)
      random_file = TmpFile.random_name
      batch_options[:contain] = File.join(contain_base, random_file)
    end

    batch_options[:sync] ||= "~/.rbbt/var/jobs" 
    batch_options[:wipe_container] ||= 'post'
  end

  # Non-hardened contained jobs do all their work inside the contain dir.
  if batch_options[:contain] && ! batch_options[:hardened]
    options[:workdir_all] = batch_options[:contain]
  end

  Misc.add_defaults batch_options, 
    :batch_name => batch_name,
    :inputs_dir => inputs_dir, 
    :nodes => 1, 
    :step_path => job.path,
    :task_cpus => 1,
    :time => '2min', 
    :env_cmd => '_JAVA_OPTIONS="-Xms1g -Xmx${MAX_MEMORY}m"',
    :singularity_img => ENV["SINGULARITY_IMG"] || "~/rbbt.singularity.img",
    :singularity_ruby_inline => ENV["SINGULARITY_RUBY_INLINE"] || "~/.singularity_ruby_inline",
    :singularity_opt_dir => ENV["SINGULARITY_OPT_DIR"] || "~/singularity_opt",
    :workdir => Dir.pwd 

  # Command lines used by the template's execute section.
  exec_cmd = exec_cmd(job, batch_options)
  rbbt_cmd = rbbt_job_exec_cmd(job, options)

  Misc.add_defaults batch_options, 
    :exec_cmd => exec_cmd,
    :rbbt_cmd => rbbt_cmd

  batch_dir = batch_options[:batch_dir]

  # Standard per-job file locations inside the batch directory.
  Misc.add_defaults batch_options,
    :fout   => File.join(batch_dir, 'std.out'),
    :ferr   => File.join(batch_dir, 'std.err'),
    :fjob   => File.join(batch_dir, 'job.id'),
    :fdep   => File.join(batch_dir, 'dependencies.list'),
    :fcfdep => File.join(batch_dir, 'canfail_dependencies.list'),
    :fexit  => File.join(batch_dir, 'exit.status'),
    :fsync  => File.join(batch_dir, 'sync.log'),
    :fsexit  => File.join(batch_dir, 'sync.status'),
    :fenv  => File.join(batch_dir, 'env.vars'),
    :fcmd   => File.join(batch_dir, 'command.batch')

  batch_options
end

#batch_system_variables ⇒ Object



315
316
317
318
319
# File 'lib/rbbt/hpc/batch.rb', line 315

# Shell prologue defining MAX_MEMORY: the machine's total memory in MB,
# computed from /proc/meminfo inside the generated batch script.
#
# @return [String] a single bash `let` line terminated by a newline
def batch_system_variables
  line = %(let MAX_MEMORY="$(grep MemTotal /proc/meminfo|grep -o "[[:digit:]]*") / 1024")
  line + "\n"
end

#cleanup_environment(options = {}) ⇒ Object



449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
# File 'lib/rbbt/hpc/batch.rb', line 449

# Shell snippet run after the job to clean up the environment.
#
# When :purge_deps is set, appends a guarded call to `workflow
# forget_deps` that purges recursive dependencies of the finished step.
# When :sync is set, optionally erases the contain directory depending
# on the :wipe_container policy ('force' always erases; 'post'/'both'
# erase only after a successful sync of an initially-empty contain dir).
#
# Fix: the rendered source had escaped interpolations (\#{...}), which
# would emit literal '#{...}' text instead of the option values; real
# interpolation is restored here.
#
# @param options [Hash] batch options (:purge_deps, :sync,
#   :wipe_container, :exec_cmd, :fsync)
# @return [String] bash fragment appended to the batch script
def cleanup_environment(options = {})
  cleanup_environment = ""

  # NOTE(review): '2>1' redirects stderr to a file literally named '1';
  # '2>&1' may have been intended — confirm upstream before changing.
  cleanup_environment +="if [ $exit_status == '0' ]; then\n#{options[:exec_cmd]} workflow forget_deps --purge --recursive_purge \"$step_path\" 2>1 >> '#{options[:fsync]}'\nfi\n" if options[:purge_deps]

  if options[:sync]
    if options[:wipe_container] == 'force'
      cleanup_environment +="batch_erase_contain_dir\n"
    elsif options[:wipe_container] == 'post' || options[:wipe_container] == 'both'
      cleanup_environment +="if [ $sync_es == '0' -a $empty_contain_dir == 'true' ]; then\nbatch_erase_contain_dir\nfi\n"
    end
  end
  cleanup_environment
end

#coda(options) ⇒ Object



474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
# File 'lib/rbbt/hpc/batch.rb', line 474

# Final shell snippet: record the job's exit status and exit.
#
# Writes $exit_status to the :fexit file; when syncing, a failed sync
# status ($sync_es) takes precedence over the job's own status.
#
# Fix: restored real string interpolation (the rendered source showed
# escaped \#{...}, which would write a literal '#{options[:fexit]}').
#
# @param options [Hash] batch options (:fexit, :sync)
# @return [String] bash fragment ending the batch script
def coda(options)
  coda ="echo $exit_status > '#{options[:fexit]}'\n"

  if options[:sync]
    coda +="if [ $sync_es == '0' ]; then\nexit $exit_status\nelse\nexit $sync_es\nfi\n"
  else
    coda +="exit $exit_status\n"
  end

  coda
end

#exec_cmd(job, options = {}) ⇒ Object



40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
# File 'lib/rbbt/hpc/batch.rb', line 40

# Build the command prefix used to run rbbt for this job: optionally an
# `env`-wrapped environment, optionally wrapped again in a
# `singularity exec` invocation with the appropriate bind mounts.
#
# @param job the workflow step (currently unused here; kept for interface)
# @param options [Hash] batch options (:env_cmd, :development, :contain,
#   :singularity, :hardened, singularity_* keys, group/user dirs)
# @return [String] the command prefix, e.g. "env ... rbbt" or
#   "singularity exec ... IMG env ... rbbt"
def exec_cmd(job, options = {})
  env_cmd     = Misc.process_options options, :env_cmd
  development = Misc.process_options options, :development

  # Contained jobs point TMPDIR inside the contain directory.
  if contain = options[:contain]
    contain = File.expand_path(contain)
    env_cmd ||= ""
    env_cmd << " TMPDIR='#{contain}/.rbbt/tmp' "
  end

  if options[:singularity]

    group, user, user_group, scratch_group_dir, projects_group_dir = options.values_at :group, :user, :user_group, :scratch_group_dir, :projects_group_dir

    singularity_img, singularity_opt_dir, singularity_ruby_inline, singularity_mounts = options.values_at :singularity_img, :singularity_opt_dir, :singularity_ruby_inline, :singularity_mounts

    # Base invocation: clean environment (-e) plus the opt and ruby-inline binds.
    singularity_cmd = %(singularity exec -e -B "#{File.expand_path singularity_opt_dir}":/singularity_opt/ -B "#{File.expand_path singularity_ruby_inline}":"/.singularity_ruby_inline":rw ) 

    # Extra user-requested bind mounts, comma-separated.
    if singularity_mounts
      singularity_mounts.split(",").each do |mount|
        singularity_cmd += "-B #{ mount } "
      end
    end

    # Hardened containment: isolated home (-C -H) plus the full set of binds.
    # NOTE(review): the line ending in ':rw' below lacks the trailing '\',
    # so a literal newline ends up inside the command string — confirm the
    # shell tolerates this or whether a continuation was intended.
    if contain && options[:hardened]
      singularity_cmd << %( -C -H "#{contain}" \
-B "/.singularity_ruby_inline":"#{contain}/.singularity_ruby_inline":rw 
-B "#{options[:batch_dir]}" \
-B /scratch/tmp \
      #{ group != user_group ? "-B /gpfs/projects/#{user_group}" : "" } \
-B #{scratch_group_dir} \
-B #{projects_group_dir} \
-B /apps/ \
-B ~/git:"#{contain}/git":ro \
      #{Open.exists?('~/.rbbt/software/opt/')? '-B ~/.rbbt/software/opt/:"/opt/":ro' : '' } \
-B ~/.rbbt:"#{contain}/home/":ro)
    end

    singularity_cmd << " #{singularity_img} "
  end

  if env_cmd
    exec_cmd = %(env #{env_cmd} rbbt)
  else
    exec_cmd = %(rbbt)
  end

  # NOTE(review): no space before '--dev', producing e.g. "rbbt--dev '...'";
  # a leading space looks intended — confirm against callers before fixing.
  exec_cmd << "--dev '#{development}'" if development

  # singularity_cmd is nil here when options[:singularity] was falsy.
  exec_cmd = singularity_cmd  + exec_cmd if singularity_cmd

  exec_cmd
end

#execute(options) ⇒ Object



416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
# File 'lib/rbbt/hpc/batch.rb', line 416

# Shell snippet that runs the job and records batch metadata.
#
# Captures the step path printed by the rbbt command and its exit
# status, then writes batch_job/batch_system/batch_cpus into the step
# info (the first two only when the corresponding BATCH_* environment
# variables are set).
#
# Fix: restored real string interpolation (the rendered source showed
# escaped \#{...}, which would emit literal '#{exec_cmd}' text).
#
# @param options [Hash] batch options (:exec_cmd, :rbbt_cmd, :task_cpus)
# @return [String] bash fragment executing the job
def execute(options)
  exec_cmd, job_cmd, task_cpus = options.values_at :exec_cmd, :rbbt_cmd, :task_cpus

  script="step_path=$(\n#{exec_cmd} #{job_cmd} --printpath\n)\nexit_status=$?\n\n[[ -z $BATCH_JOB_ID ]] || #{exec_cmd} workflow write_info --recursive --force=false --check_pid \"$step_path\" batch_job $BATCH_JOB_ID\n[[ -z $BATCH_SYSTEM ]] || #{exec_cmd} workflow write_info --recursive --force=false --check_pid \"$step_path\" batch_system $BATCH_SYSTEM\n#{exec_cmd} workflow write_info --recursive --force=false --check_pid \"$step_path\" batch_cpus #{task_cpus}\n"

  script
end

#follow_job(batch_dir, tail = true) ⇒ Object



629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
# File 'lib/rbbt/hpc/batch.rb', line 629

# Report progress of the batch job recorded under +batch_dir+.
#
# While the job runs, prints the scheduler status; once an exit.status
# file appears, dumps STDOUT (success) or STDERR (failure) and returns.
# When +tail+ is truthy, follows std.out/std.err live with `tail -f`
# until the job leaves the queue; pass :STDERR to follow only stderr.
#
# @param batch_dir [String] per-job batch directory
# @param tail [Boolean, Symbol] whether/how to follow live output
# @return [nil]
def follow_job(batch_dir, tail = true)
  fjob = File.join(batch_dir, 'job.id')
  fout = File.join(batch_dir, 'std.out')
  ferr = File.join(batch_dir, 'std.err')
  fexit = File.join(batch_dir, 'exit.status')
  fstatus = File.join(batch_dir, 'job.status')

  job = Open.read(fjob).strip if Open.exists?(fjob)

  # Job submitted but not finished: show the scheduler status once.
  if job && ! File.exist?(fexit)
    begin
      status_txt = job_status(job)
      STDERR.puts Log.color(:magenta, "Status [#{job.to_i}]:")
      STDERR.puts status_txt
      # Remember how many lines we printed so they can be cleared later.
      lines = status_txt.split("\n").length
    rescue
      # job_status failed: the job is neither running nor done.
      if ! File.exist?(fexit)
        STDERR.puts Log.color(:magenta, "Job #{job.to_i} not done and not running. STDERR:")
        STDERR.puts Open.read(ferr)
      end
      return
    end
  end

  # Finished job: report outcome and the relevant stream, then stop.
  if File.exist?(fexit)
    exit_status = Open.read(fexit)
    if exit_status.to_i == 0
      STDERR.puts Log.color(:magenta, "Job #{job} done with exit_status 0. STDOUT:")
      STDERR.puts Open.read(fout)
    else
      STDERR.puts Log.color(:magenta, "Job #{job.to_i} done with exit_status #{exit_status}. STDERR:")
      STDERR.puts Open.read(ferr)
    end
    return
  end

  if tail
    Log.severity = 10
    # Poll until std.out exists, refreshing the status display in place.
    while ! File.exist? fout
      if job
        STDERR.puts
        Log.clear_line(STDERR)
        STDERR.write Log.color(:magenta, "Waiting for Output")
        3.times do
          STDERR.write Log.color(:magenta, ".")
          sleep 1
        end
        status_txt = job_status(job)
        lines.times do
          Log.clear_line(STDERR)
        end
        Log.clear_line(STDERR)
        STDERR.puts Log.color(:magenta, "Status [#{job.to_i}]:")
        STDERR.puts status_txt
        lines = status_txt.split("\n").length
      end
    end
    STDERR.puts
    Log.clear_line(STDERR)
    STDERR.puts Log.color(:magenta, "Output:")
    begin
      status_txt = job_status(job)
      Open.write(fstatus, status_txt) unless status_txt.nil? || status_txt.empty?
      # :STDERR suppresses the stdout tail; both streams otherwise.
      out = CMD.cmd("tail -f '#{fout}'", :pipe => true) if File.exist?(fout) and not tail == :STDERR
      err = CMD.cmd("tail -f '#{ferr}'", :pipe => true) if File.exist?(ferr)

      terr = Misc.consume_stream(err, true, STDERR) if err
      tout = Misc.consume_stream(out, true, STDOUT) if out

      sleep 3 while job_queued(job)
    rescue Aborted
    ensure
      # Best-effort teardown of the tail processes and consumer threads.
      begin
        terr.exit if terr
        tout.exit if tout
        err.close if err
        err.join if err
      rescue Exception
      end

      begin
        out.close if out
        out.join if out
      rescue Exception
      end
    end
  end
end

#header(options) ⇒ Object



138
139
140
141
142
143
144
# File 'lib/rbbt/hpc/batch.rb', line 138

# Shebang line opening every generated batch script.
#
# @param options [Hash] unused here; kept for interface symmetry with
#   the other template-section methods
# @return [String] "#!/bin/bash\n"
def header(options)
  shebang = String.new("#!/bin/bash")
  shebang << "\n"
end

#hold_dependencies(job, batch_job) ⇒ Object



618
619
620
621
622
623
624
625
626
627
# File 'lib/rbbt/hpc/batch.rb', line 618

# Record the batch job id and batch system on +job+'s step info, then
# recurse into its waiting dependencies so they are associated with the
# same batch submission. Dependencies already queued under their own
# batch job are left alone.
def hold_dependencies(job, batch_job)
  job.set_info :batch_job, batch_job
  job.set_info :batch_system, self.batch_system
  job.dependencies.each do |dependency|
    next unless dependency.waiting?
    queued_id = dependency.info[:batch_job]
    next if queued_id && job_queued(queued_id)

    hold_dependencies(dependency, batch_job)
  end
end

#job_queued(job) ⇒ Object



718
719
720
# File 'lib/rbbt/hpc/batch.rb', line 718

# True when +job+'s id appears among the whitespace-separated tokens of
# the scheduler's status output, i.e. the job is still queued/running.
def job_queued(job)
  status_tokens = job_status(job).split(/\s+/)
  status_tokens.include? job.to_s
end

#job_template(job, options = {}) ⇒ Object



496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
# File 'lib/rbbt/hpc/batch.rb', line 496

# Assemble the full batch script for +job+.
#
# Computes the batch options, renders each template section (header,
# meta-data, environment preparation, execution, sync, cleanup, coda)
# and joins them with commented, color-coded section markers.
#
# Fixes: restored the 'meta_data' identifier (the rendered source
# showed a bare '= self.(batch_options)', a syntax error) and restored
# real interpolation in the template string.
#
# @param job the workflow step to submit
# @param options [Hash] submission options (see #batch_options)
# @return [String] complete batch script text
def job_template(job, options = {})
  batch_options = batch_options job, options

  header              = self.header(batch_options)

  meta_data           = self.meta_data(batch_options)

  prepare_environment = self.prepare_environment(batch_options)

  execute             = self.execute(batch_options)

  sync_environment    = self.sync_environment(batch_options)

  cleanup_environment = self.cleanup_environment(batch_options)

  coda                = self.coda(batch_options)

  "#{header}\n\n# #{Log.color :green, "0. Meta-data"}\n#{meta_data}\n\n# #{Log.color :green, "1. Prepare environment"}\n#{prepare_environment}\nenv > #{batch_options[:fenv]}\n\n# #{Log.color :green, "2. Execute"}\n#{execute}\n\n# #{Log.color :green, "3. Sync and cleanup environment"}\n#{sync_environment}\n#{cleanup_environment}\n\n# #{Log.color :green, "4. Exit"}\n#{coda}\n"
end

#load_conda(env = nil) ⇒ Object



301
302
303
304
305
306
307
308
309
310
311
312
# File 'lib/rbbt/hpc/batch.rb', line 301

# Shell snippet activating a conda environment inside the batch script.
#
# Ensures the `conda` shell function is available (sourcing conda.sh
# relative to $CONDA_EXE when needed) and then activates +env+.
#
# Fix: restored real interpolation of +env+ (the rendered source showed
# escaped \#{ env }, which would emit a literal '#{ env }').
#
# @param env [String, nil] conda environment name
# @return [String] bash fragment, or "" when env is nil/empty
def load_conda(env = nil)
  return "" if env.nil? || env.empty?

  "if ! type conda | grep function &> /dev/null; then\nif [ ! -z $CONDA_EXE ]; then\nsource \"$(dirname $(dirname $CONDA_EXE))/etc/profile.d/conda.sh\" &> /dev/null\nfi\nfi\nconda activate #{ env }\n"
end

#load_modules(modules = []) ⇒ Object



290
291
292
293
294
295
296
297
298
299
# File 'lib/rbbt/hpc/batch.rb', line 290

# Shell snippet loading environment modules (`module load ...`).
#
# @param modules [Array<String>, String, nil] module names; a String is
#   split on commas (with optional trailing whitespace)
# @return [String] one "module load NAME" line per module, "" for none
def load_modules(modules = [])
  module_list = String === modules ? modules.split(/,\s*/) : modules

  return "" unless module_list

  module_list.map { |mod| "module load #{ mod }\n" }.join
end

#meta_data(options) ⇒ Object



277
278
279
280
281
282
283
284
285
286
287
288
# File 'lib/rbbt/hpc/batch.rb', line 277

# Meta-data comment lines for the top of the batch script.
#
# Renders MANIFEST, DEPENDENCIES, EXEC_CMD, CMD and STEP_PATH comment
# lines from +options+, then drops any line whose value rendered empty
# (those end in ": ").
#
# Fixes: restored the method name (the rendered source showed
# 'def (options)') and real string interpolation.
#
# @param options [Hash] batch options (:manifest, :dependencies,
#   :exec_cmd, :rbbt_cmd, :step_path)
# @return [String] newline-joined "#KEY: value" lines
def meta_data(options)
  meta ="#MANIFEST: #{(options[:manifest] || []) * ", "}\n#DEPENDENCIES: #{(options[:dependencies] || []) * ", "}\n#EXEC_CMD: #{options[:exec_cmd]}\n#CMD: #{options[:rbbt_cmd]}\n#STEP_PATH: #{options[:step_path]}\n"

  meta = meta.split("\n").reject{|line| line =~ /: $/} * "\n"
  meta
end

#prepare_environment(options = {}) ⇒ Object



321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
# File 'lib/rbbt/hpc/batch.rb', line 321

# Shell fragment that prepares the execution environment for the job:
# helper functions for erasing/syncing the contain directory, optional
# singularity setup, module/conda loading and the system-variables
# prologue.
#
# NOTE(review): several strings below contain escaped interpolation
# (\#{...}) as rendered by the documentation tool; in the live source
# these are presumably real interpolations — confirm against
# lib/rbbt/hpc/batch.rb before relying on the literal text here.
#
# @param options [Hash] batch options (:lua_modules, :conda, :contain,
#   :sync, :singularity, :hardened, :wipe_container, fsync/fsexit paths,
#   user/group dirs)
# @return [String] bash fragment emitted near the top of the script
def prepare_environment(options = {})
  modules = options[:lua_modules]
  conda = options[:conda]

  prepare_environment = ""

  functions = ""

  if contain = options[:contain]
    contain = File.expand_path(contain)
    # Helper removing the contain dir, logging into the sync log.
    functions +="function batch_erase_contain_dir(){\nrm -Rfv '\#{contain}' 2>1 >> '\#{options[:fsync]}'\n}\n"

    # Record whether the contain dir started out empty.
    prepare_environment +="if ls -A '\#{contain}' &> /dev/null ; then\nempty_contain_dir=\"false\"\nelse\nempty_contain_dir=\"true\"\nfi\n"

    # NOTE(review): emits 'batch_erase_contain_dir()' with parentheses —
    # in bash that begins a function definition rather than calling the
    # function; confirm whether a plain call was intended.
    prepare_environment +="batch_erase_contain_dir()\n" if options[:wipe_container] == 'force'
  end

  if sync = options[:sync]
    # Source of the rsync depends on how the job is contained.
    source = if options[:singularity]
               File.join(options[:contain], '.rbbt/var/jobs')
             elsif options[:contain]
               File.join(options[:contain], 'var/jobs')
             else
               '~/.rbbt/var/jobs/'
             end

    source = File.expand_path(source)
    sync = File.expand_path(sync)
    # Helper rsyncing results out of the contain dir and rewriting
    # symlinks that still point into the source tree.
    functions +="function batch_sync_contain_dir(){\nmkdir -p \"$(dirname '\#{sync}')\"\nrsync -avztAXHP --copy-unsafe-links \"\#{source}/\" \"\#{sync}/\" 2>1 >> '\#{options[:fsync]}'\nsync_es=\"$?\"\necho $sync_es > '\#{options[:fsexit]}'\nfind '\#{sync}' -type l -ls | awk '$13 ~ /^\#{sync.gsub('/','\\/')}/ { sub(\"\#{source}\", \"\#{sync}\", $13); print $11, $13 }' | while read A B; do rm $A; ln -s $B $A; done\n}\n"
  end

  if options[:singularity]

    group, user, user_group, scratch_group_dir, projects_group_dir = options.values_at :group, :user, :user_group, :scratch_group_dir, :projects_group_dir

    singularity_img, singularity_opt_dir, singularity_ruby_inline = options.values_at :singularity_img, :singularity_opt_dir, :singularity_ruby_inline

    # Make sure singularity is on PATH and the opt dir exists.
    prepare_environment +="# Load singularity modules\ncommand -v singularity &> /dev/null || module load singularity\nmkdir -p \"\#{File.expand_path singularity_opt_dir}\"\n"

    if contain && options[:hardened]

      # Hardened containment: build the directory skeleton, copy the
      # rbbt environment and write the container's search_paths file.
      prepare_environment +="# Prepare container for singularity\nmkdir -p \"\#{contain}\"/.rbbt/etc/\n\nfor dir in .ruby_inline git home; do\nmkdir -p \"\#{contain}\"/$dir\ndone\n\nfor tmpd in persist_locks  produce_locks  R_sockets  sensiblewrite  sensiblewrite_locks  step_info_locks  tsv_open_locks; do\nmkdir -p \"\#{contain}/.rbbt/tmp/$tmpd\"\ndone\n\n# Copy environment\ncp ~/.rbbt/etc/environment \#{contain}/.rbbt/etc/\n\n# Set search_paths\necho \"singularity: /singularity_opt/{PKGDIR}/{TOPLEVEL}/{SUBPATH}\" > \#{contain}/.rbbt/etc/search_paths\necho \"rbbt_user: /home/rbbt/.rbbt/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\necho \"outside_home: \#{contain}/home/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\necho \"group_projects: \#{projects_group_dir}/{PKGDIR}/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\necho \"group_scratch: \#{scratch_group_dir}/{PKGDIR}/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\necho \"user_projects: \#{projects_group_dir}/\#{user}/{PKGDIR}/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\necho \"user_scratch: \#{scratch_group_dir}/\#{user}/{PKGDIR}/{TOPLEVEL}/{SUBPATH}\" >> \#{contain}/.rbbt/etc/search_paths\n"
    end
  end

  batch_system_variables + load_modules(modules) + "\n" + load_conda(conda) + "\n"  + functions + "\n" + prepare_environment
end

#prepare_submision(template, batch_dir, clean_batch_job = false, batch_dependencies = []) ⇒ Object



535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
# File 'lib/rbbt/hpc/batch.rb', line 535

# Write the batch script and dependency lists into +batch_dir+.
#
# Creates the directory, stores +template+ as command.batch, optionally
# removes stale per-job files from a previous run, and records plain and
# can-fail dependency ids in their respective list files.
#
# @param template [String] rendered batch script
# @param batch_dir [String] per-job batch directory
# @param clean_batch_job [Boolean] remove leftover job files first
# @param batch_dependencies [Array<String>, nil] ids, optionally
#   prefixed "canfail:" for dependencies allowed to fail
# @return [String] path to the written command.batch file
def prepare_submision(template, batch_dir, clean_batch_job = false, batch_dependencies = [])
  Open.mkdir batch_dir

  command_file = File.join(batch_dir, 'command.batch')
  deps_file    = File.join(batch_dir, 'dependencies.list')
  canfail_file = File.join(batch_dir, 'canfail_dependencies.list')

  Open.write(command_file, template)

  if clean_batch_job
    stale = %w(std.out std.err job.id job.status dependencies.list canfail_dependencies.list exit.status sync.log inputs_dir)
    stale.each do |filename|
      path = File.join(batch_dir, filename)
      Open.rm_rf path if File.exist? path
    end
  end

  all_deps = batch_dependencies.nil? ? [] : batch_dependencies

  canfail_deps, plain_deps = all_deps.partition{|dep| dep =~ /^canfail:(\d+)/ }
  canfail_deps = canfail_deps.collect{|dep| dep.partition(":").last}

  Open.write(deps_file, plain_deps * "\n") if plain_deps.any?
  Open.write(canfail_file, canfail_deps * "\n") if canfail_deps.any?

  command_file
end

#rbbt_job_exec_cmd(job, options) ⇒ Object



94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
# File 'lib/rbbt/hpc/batch.rb', line 94

# Build the "workflow task ..." command line that runs +job+.
#
# Records the job name, collects overridden-dependency paths, saves the
# job inputs into :inputs_dir (replacing them with --load_inputs), and
# renders the remaining options as CLI flags.
#
# Fix: restored real interpolation in the final command string (the
# rendered source showed escaped \#{workflow} etc., which would emit
# the literal text instead of the values).
#
# @param job the workflow step
# @param options [Hash] submission options; consumed keys are removed
# @return [String] the "workflow task WF TASK FLAGS" command
def rbbt_job_exec_cmd(job, options)

  jobname  = job.clean_name
  workflow = job.original_workflow || job.workflow
  # NOTE(review): this value of +task+ is immediately overwritten below;
  # kept to preserve the original evaluation order.
  task     = job.original_task_name || job.task_name

  Misc.add_defaults options, :jobname => jobname

  task = Symbol === job.overriden ? job.overriden : job.task_name

  #override_deps = job.overriden_deps.collect do |dep| 
  #  name = [dep.workflow.to_s, dep.task_name] * "#"
  #  [name, dep.path] * "="  
  #end.uniq * ","

  if job.overriden?
    #override_deps = job.rec_dependencies.
    #  select{|dep| Symbol === dep.overriden }.
    
    # "WF#task=path" entries for every overridden dependency.
    override_deps = job.overriden_deps.
      collect do |dep| 
      name = [dep.workflow.to_s, dep.task_name] * "#"
      [name, dep.path] * "="  
    end.uniq * ","

    options[:override_deps] = override_deps unless override_deps.empty?
  end

  # Save inputs into inputs_dir and point the CLI at them instead.
  inputs_dir = Misc.process_options options, :inputs_dir
  saved = Step.save_job_inputs(job, inputs_dir) if inputs_dir
  options[:load_inputs] = inputs_dir if saved && saved.any?

  saved.each do |input|
    options.delete input
  end if saved

  cmds = CMD.process_cmd_options options.merge(:add_option_dashes => true)

  "workflow task #{workflow} #{task} #{cmds}\n".strip
end

#run_job(job, options = {}) ⇒ Object



560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
# File 'lib/rbbt/hpc/batch.rb', line 560

# Submit +job+ to the batch system and (optionally) follow it.
#
# Skips submission when the job is already queued or running. Creates a
# per-job batch directory under :batch_base_dir, renders the batch
# template into it, submits via run_template, and holds the job's
# waiting dependencies under the same batch id. When :tail is set,
# follows the job to completion and links the produced step path back
# to the local job path.
#
# @param job the workflow step to submit
# @param options [Hash] submission options (batch_* keys, :tail,
#   :dry_run, :batch_dependencies, ...)
# @return [Array(Object, String), Object, nil] [batch_job, batch_dir],
#   just the batch id, or nil depending on the path taken
def run_job(job, options = {})
  # Scheduler name, e.g. "SLURM", from the including module.
  system = self.to_s.split("::").last

  if (batch_job = job.info[:batch_job]) && job_queued(batch_job)
    Log.info "Job #{job.short_path} already queued in #{batch_job}"
    return batch_job 
  end

  if job.running?
    Log.info "Job #{job.short_path} already running in #{job.info[:pid]}"
    return job.info[:batch_job]
  end

  batch_base_dir, clean_batch_job, remove_batch_dir, procpath, tail, batch_dependencies, dry_run = Misc.process_options options, 
    :batch_base_dir, :clean_batch_job, :remove_batch_dir, :batch_procpath, :tail, :batch_dependencies, :dry_run,
    :batch_base_dir => File.expand_path(File.join('~/rbbt-batch')) 

  workflow = job.original_workflow ||job.workflow
  task_name = job.original_task_name || job.task_name

  # Additional workflows the remote side must load for the dependencies.
  workflows_to_load = job.rec_dependencies.select{|d| Step === d}.collect{|d| d.workflow }.compact.collect(&:to_s) - [workflow.to_s]

  # TmpFile gives a unique batch dir; remove_batch_dir controls cleanup.
  TmpFile.with_file(nil, remove_batch_dir, :tmpdir => batch_base_dir, :prefix => "#{system}_rbbt_job-#{workflow.to_s}-#{task_name}-") do |batch_dir|
    Misc.add_defaults options, 
      :batch_dir => batch_dir, 
      :inputs_dir => File.join(batch_dir, "inputs_dir"),
      :workflows => workflows_to_load.any? ? workflows_to_load.uniq * "," : nil

    options[:procpath_performance] ||= File.join(batch_dir, "procpath##{procpath.gsub(',', '#')}") if procpath

    template = self.job_template(job, options.dup)

    fcmd = prepare_submision(template, options[:batch_dir], clean_batch_job, batch_dependencies)

    batch_job = run_template(batch_dir, dry_run)

    hold_dependencies(job, batch_job) unless dry_run

    return [batch_job, batch_dir] unless tail

    # Follow stderr in a side thread while waiting for completion.
    t_monitor = Thread.new do
      self.follow_job(batch_dir, :STDERR)
    end
    self.wait_for_job(batch_dir)
    t_monitor.raise Aborted
    return unless Open.read(File.join(batch_dir, 'exit.status')).strip == '0'
    # std.out holds the produced step path (see --printpath in #execute).
    path = Open.read(File.join(batch_dir, 'std.out')).strip
    if Open.exists?(path) && job.path != path
      Log.info "Path of BATCH job #{path} is different from original job #{job.path}. Stablishing link."
      Open.ln path, job.path
      Open.ln path + '.info', job.path + '.info'  if Open.exists?(path + '.info')
      Open.ln path + '.files', job.path + '.files' if Open.exists?(path + '.files')
    end

    [batch_job, batch_dir]
  end
end

#sync_environment(options = {}) ⇒ Object



433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
# File 'lib/rbbt/hpc/batch.rb', line 433

# Shell snippet syncing the contain directory after a successful job;
# on failure it propagates the job's exit status into $sync_es instead.
#
# @param options [Hash] batch options (:sync)
# @return [String] bash fragment, or "" when no sync is requested
def sync_environment(options = {})
  return "" unless options[:sync]

  "if [ $exit_status == '0' ]; then\nbatch_sync_contain_dir\nelse\nsync_es=$exit_status\nfi\n"
end

#wait_for_job(batch_dir, time = 1) ⇒ Object



722
723
724
725
726
727
728
729
730
# File 'lib/rbbt/hpc/batch.rb', line 722

# Block until the job under +batch_dir+ has written its exit.status
# file, polling every +time+ seconds.
#
# @param batch_dir [String] per-job batch directory
# @param time [Numeric] polling interval in seconds
def wait_for_job(batch_dir, time = 1)
  fexit = File.join(batch_dir, 'exit.status')
  fjob  = File.join(batch_dir, 'job.id')
  # The job id is read for parity with the original flow; it is not used.
  job = Open.read(fjob) if Open.exists?(fjob)

  sleep time until Open.exists?(fexit)
end