Class: OpenStudio::Analysis::ServerApi

Inherits:
Object
  • Object
show all
Defined in:
lib/openstudio/analysis/server_api.rb

Constant Summary collapse

BATCH_RUN_METHODS =

Defines the set of analysis methods that require batch_run to be queued after them

['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(options = {}) ⇒ ServerApi

Returns a new instance of ServerApi.



46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# File 'lib/openstudio/analysis/server_api.rb', line 46

# Build a new API client for an OpenStudio analysis server.
#
# @param options [Hash] supports :hostname (default http://localhost:8080)
#   and :log_path (default ~/os_server_api.log). When the OS_SERVER_LOG_PATH
#   environment variable is set it overrides the log location.
def initialize(options = {})
  options = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }.merge(options)

  # Environment variable wins over the option for the log destination.
  log_target = if ENV['OS_SERVER_LOG_PATH']
                 ENV['OS_SERVER_LOG_PATH'] + '/os_server_api.log'
               else
                 options[:log_path]
               end
  @logger = ::Logger.new(log_target)

  @hostname = options[:hostname]

  raise 'no host defined for server api class' if @hostname.nil?

  # TODO: add support for the proxy

  # Plain connection used for ordinary JSON requests
  @conn = Faraday.new(url: @hostname) do |faraday|
    faraday.request :url_encoded # form-encode POST params
    faraday.use Faraday::Response::Logger, @logger
    faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
  end

  # Second connection with multipart support, used for file uploads
  @conn_multipart = Faraday.new(url: @hostname) do |faraday|
    faraday.request :multipart
    faraday.request :url_encoded # form-encode POST params
    faraday.use Faraday::Response::Logger, @logger
    faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
  end
end

Instance Attribute Details

#hostnameObject (readonly)

Returns the value of attribute hostname.



41
42
43
# File 'lib/openstudio/analysis/server_api.rb', line 41

# Read-only accessor for the server base URL this client talks to
# (e.g. "http://localhost:8080"), as set in the constructor.
def hostname
  @hostname
end

Instance Method Details

#alive?Boolean

Check if the machine is alive

return [Boolean] True if the machine has an awake value set

Returns:

  • (Boolean)


222
223
224
225
226
227
228
# File 'lib/openstudio/analysis/server_api.rb', line 222

# Check if the machine is alive.
#
# @return [Boolean, nil] true if the machine reports an :awake value,
#   nil when the status endpoint is unreachable
def alive?
  status = machine_status
  return status unless status

  !status[:status][:awake].nil?
end

#analysis_dencity_json(analysis_id) ⇒ Object



415
416
417
418
419
420
421
422
423
424
425
# File 'lib/openstudio/analysis/server_api.rb', line 415

# Return the hash of the DEnCity format for the analysis.
#
# @param analysis_id [String] ID of the analysis
# @return [Hash, nil] symbolized-key hash on HTTP 200, otherwise nil
def analysis_dencity_json(analysis_id)
  response = @conn.get "/analyses/#{analysis_id}/dencity.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true)
end

#data_point_status(analysis_id = nil) ⇒ Object

Get a list of analyses and the data points

Parameters:

  • analysis_id (String) (defaults to: nil)

    An analysis ID



714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
# File 'lib/openstudio/analysis/server_api.rb', line 714

# Get a list of analyses and their data points.
#
# @param analysis_id [String, nil] restrict to a single analysis when given;
#   all analyses are returned otherwise
# @return [Array<Hash>, nil] the :analyses payload, or nil on a non-200 response
def data_point_status(analysis_id = nil)
  path = analysis_id ? "analyses/#{analysis_id}/status.json" : 'analyses/status.json'

  resp = @conn.get path, version: 2
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)[:analyses]
end

#datapoint_dencity(datapoint_id) ⇒ Object



403
404
405
406
407
408
409
410
411
412
413
# File 'lib/openstudio/analysis/server_api.rb', line 403

# Return the DEnCity-formatted JSON for a single datapoint.
#
# @param datapoint_id [String] ID of the datapoint
# @return [Hash, nil] symbolized-key hash on HTTP 200, otherwise nil
def datapoint_dencity(datapoint_id)
  response = @conn.get "/data_points/#{datapoint_id}/dencity.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true)
end

#delete_allObject



111
112
113
114
115
116
117
118
119
120
121
# File 'lib/openstudio/analysis/server_api.rb', line 111

# Delete every project on the server.
#
# @return [Boolean] true only when every individual deletion succeeded
def delete_all
  ids = get_project_ids
  puts "deleting projects with IDs: #{ids}"
  results = ids.map { |id| delete_project(id) }
  results.none? { |r| r == false }
end

#delete_project(id) ⇒ Object



97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/openstudio/analysis/server_api.rb', line 97

# Delete a single project by ID.
#
# @param id [String] project ID to delete
# @return [Boolean] true when the server responded 204 No Content
def delete_project(id)
  response = @conn.delete "/projects/#{id}.json"

  if response.status == 204
    puts "Successfully deleted project #{id}"
    true
  else
    puts "ERROR deleting project #{id}"
    false
  end
end

#download_database(save_directory = '.') ⇒ Object

Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with DEnCity reporting, the size is around 325MB



355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
# File 'lib/openstudio/analysis/server_api.rb', line 355

# Download a MongoDB snapshot. This database can get large; for 13,000
# simulations with DEnCity reporting the size is around 325MB.
#
# @param save_directory [String] directory in which to write the dump
# @return [Array(Boolean, String)] success flag and the saved file path (nil on failure)
def download_database(save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get do |r|
    r.url '/admin/backup_database?full_backup=true'
    r.options.timeout = 3600 # 60 minutes
  end

  if response.status == 200
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    # BUG FIX: interpolate the server-reported filename into the save path
    # (the interpolation had been lost, leaving the filename unused).
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_dataframe(analysis_id, format = 'rdata', save_directory = '.') ⇒ Object



285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
# File 'lib/openstudio/analysis/server_api.rb', line 285

# Download the analysis results dataframe in the requested format.
#
# @param analysis_id [String] ID of the analysis
# @param format [String] 'rdata' (binary) or a text format such as 'csv'
# @param save_directory [String] directory in which to write the file
# @return [Array(Boolean, String)] success flag and the saved file path (nil on failure)
def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get do |r|
    r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
    r.options.timeout = 3600 # 60 minutes
  end
  if response.status == 200
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    # BUG FIX: interpolate the server-reported filename into the save path
    # (the interpolation had been lost, leaving the filename unused).
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    # rdata is a binary payload; everything else is written as text
    if format == 'rdata'
      File.open(file_path_and_name, 'wb') { |f| f << response.body }
    else
      File.open(file_path_and_name, 'w') { |f| f << response.body }
    end
  end

  [downloaded, file_path_and_name]
end

#download_datapoint(datapoint_id, save_directory = '.') ⇒ Object



328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
# File 'lib/openstudio/analysis/server_api.rb', line 328

# Download the results zip of a datapoint, falling back to the legacy
# download endpoint when the result-file endpoint does not respond 200.
#
# @param datapoint_id [String] ID of the datapoint
# @param save_directory [String] directory in which to write the file
# @return [Array(Boolean, String)] success flag and the saved file path (nil on failure)
def download_datapoint(datapoint_id, save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
  if response.status == 200
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    # BUG FIX: interpolate the server-reported filename into the save path
    # (the interpolation had been lost, leaving the filename unused).
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  else
    # Legacy endpoint fallback
    response = @conn.get "/data_points/#{datapoint_id}/download"
    if response.status == 200
      filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
      downloaded = true
      file_path_and_name = "#{save_directory}/#{filename}"
      puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
      File.open(file_path_and_name, 'wb') { |f| f << response.body }
    end
  end

  [downloaded, file_path_and_name]
end

#download_datapoint_dencity_jsons(analysis_id, save_directory = '.') ⇒ Object



434
435
436
437
438
439
440
441
442
443
# File 'lib/openstudio/analysis/server_api.rb', line 434

# Save the DEnCity JSON of every completed datapoint in an analysis to disk.
#
# @param analysis_id [String] ID of the analysis
# @param save_directory [String] directory in which to write the JSON files
def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
  # Walk the list of all datapoints, skipping any that are not finished.
  get_datapoint_status(analysis_id).each do |dp|
    next unless dp[:status] == 'completed'

    dp_h = datapoint_dencity(dp[:_id])
    File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
  end
end

#download_datapoint_jsons(analysis_id, save_directory = '.') ⇒ Object



392
393
394
395
396
397
398
399
400
401
# File 'lib/openstudio/analysis/server_api.rb', line 392

# Save the full JSON of every completed datapoint in an analysis to disk.
#
# @param analysis_id [String] ID of the analysis
# @param save_directory [String] directory in which to write the JSON files
def download_datapoint_jsons(analysis_id, save_directory = '.')
  # Walk the list of all datapoints, skipping any that are not finished.
  get_datapoint_status(analysis_id).each do |dp|
    next unless dp[:status] == 'completed'

    dp_h = get_datapoint(dp[:_id])
    File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
  end
end

#download_datapoint_report(datapoint_id, report_name, save_directory = '.') ⇒ Object



376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
# File 'lib/openstudio/analysis/server_api.rb', line 376

# Download a named report file produced by a datapoint.
#
# @param datapoint_id [String] ID of the datapoint
# @param report_name [String] filename of the report on the server
# @param save_directory [String] directory in which to write the file
# @return [Array(Boolean, String)] success flag and the saved file path (nil on failure)
def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
  if response.status == 200
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    # BUG FIX: interpolate the server-reported filename into the save path
    # (the interpolation had been lost, leaving the filename unused).
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_dencity_json(analysis_id, save_directory = '.') ⇒ Object



427
428
429
430
431
432
# File 'lib/openstudio/analysis/server_api.rb', line 427

# Save the DEnCity JSON for an analysis to disk, if the server returns one.
#
# @param analysis_id [String] ID of the analysis
# @param save_directory [String] directory in which to write the JSON file
def download_dencity_json(analysis_id, save_directory = '.')
  dencity = analysis_dencity_json(analysis_id)
  return unless dencity

  File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dencity) }
end

#download_variables(analysis_id, format = 'rdata', save_directory = '.') ⇒ Object



308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
# File 'lib/openstudio/analysis/server_api.rb', line 308

# Download the analysis variables file in the requested format.
#
# @param analysis_id [String] ID of the analysis
# @param format [String] 'rdata' (binary) or a text format such as 'csv'
# @param save_directory [String] directory in which to write the file
# @return [Array(Boolean, String)] success flag and the saved file path (nil on failure)
def download_variables(analysis_id, format = 'rdata', save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
  if response.status == 200
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    # BUG FIX: interpolate the server-reported filename into the save path
    # (the interpolation had been lost, leaving the filename unused).
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    # rdata is a binary payload; everything else is written as text
    if format == 'rdata'
      File.open(file_path_and_name, 'wb') { |f| f << response.body }
    else
      File.open(file_path_and_name, 'w') { |f| f << response.body }
    end
  end

  [downloaded, file_path_and_name]
end

#get_analyses(project_id) ⇒ Object



150
151
152
153
154
155
156
157
158
159
160
161
162
163
# File 'lib/openstudio/analysis/server_api.rb', line 150

# Collect the IDs of all analyses belonging to a project.
#
# @param project_id [String] ID of the project
# @return [Array<String>] analysis IDs; empty when the request fails or
#   the project has no analyses
def get_analyses(project_id)
  ids = []
  response = @conn.get "/projects/#{project_id}.json"
  if response.status == 200
    project = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
    (project[:analyses] || []).each { |a| ids << a[:_id] }
  end

  ids
end

#get_analyses_detailed(project_id) ⇒ Object



165
166
167
168
169
170
171
172
173
# File 'lib/openstudio/analysis/server_api.rb', line 165

# Return the full analysis records for a project.
#
# @param project_id [String] ID of the project
# @return [Array<Hash>, nil] the :analyses payload, or nil on a non-200 response
def get_analyses_detailed(project_id)
  response = @conn.get "/projects/#{project_id}.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
end

#get_analysis(analysis_id) ⇒ Object

return the entire analysis JSON



176
177
178
179
180
181
182
183
184
# File 'lib/openstudio/analysis/server_api.rb', line 176

# Return the entire analysis JSON.
#
# @param analysis_id [String] ID of the analysis
# @return [Hash, nil] the :analysis payload, or nil on a non-200 response
def get_analysis(analysis_id)
  response = @conn.get "/analyses/#{analysis_id}.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
end

#get_analysis_results(analysis_id) ⇒ Object

return the data point results in JSON format



274
275
276
277
278
279
280
281
282
283
# File 'lib/openstudio/analysis/server_api.rb', line 274

# Return the data point results in JSON format.
#
# @param analysis_id [String] ID of the analysis
# @return [Hash, nil] parsed analysis data, or nil on a non-200 response
def get_analysis_results(analysis_id)
  response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)
end

#get_analysis_status(analysis_id, analysis_type) ⇒ Object

Check the status of the simulation. Format should be: {

analysis: {
  status: "completed",
  analysis_type: "batch_run"
},
  data_points: [
  {
      _id: "bbd57e90-ce59-0131-35de-080027880ca6",
      status: "completed"
  }
]

}



199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
# File 'lib/openstudio/analysis/server_api.rb', line 199

# Check the status of an analysis of the given type.
#
# @param analysis_id [String, nil] ID of the analysis (nil returns nil)
# @param analysis_type [String] expected analysis type; 'batch_run' matches
#   any type
# @return [String, nil] the analysis status, or nil when unavailable
def get_analysis_status(analysis_id, analysis_type)
  status = nil

  # sleep 2  # super cheesy---need to update how this works. Right now there is a good chance to get a
  # race condition when the analysis state changes.
  unless analysis_id.nil?
    resp = @conn.get "analyses/#{analysis_id}/status.json"
    if resp.status == 200
      j = JSON.parse resp.body, symbolize_names: true
      # Accept either a matching analysis_type or a 'batch_run' request;
      # both original branches returned the same status field.
      if j && j[:analysis] && (j[:analysis][:analysis_type] == analysis_type || analysis_type == 'batch_run')
        status = j[:analysis][:status]
      end
    end
  end

  status
end

#get_analysis_status_and_json(analysis_id, analysis_type) ⇒ Object



254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
# File 'lib/openstudio/analysis/server_api.rb', line 254

# Fetch both the status string and the full status JSON of an analysis.
#
# @param analysis_id [String, nil] ID of the analysis (nil yields [nil, nil])
# @param analysis_type [String] status is only extracted when the analysis
#   type matches this value
# @return [Array(String, Hash)] status (or nil) and the parsed JSON (or nil)
def get_analysis_status_and_json(analysis_id, analysis_type)
  status = nil
  parsed = nil

  # sleep 2  # super cheesy---need to update how this works. Right now there is a good chance to get a
  # race condition when the analysis state changes.
  unless analysis_id.nil?
    resp = @conn.get "analyses/#{analysis_id}/status.json"
    if resp.status == 200
      parsed = JSON.parse resp.body, symbolize_names: true
      if parsed && parsed[:analysis] && parsed[:analysis][:analysis_type] == analysis_type
        status = parsed[:analysis][:status]
      end
    end
  end

  [status, parsed]
end

#get_datapoint(data_point_id) ⇒ Object

Return the JSON (Full) of the datapoint



754
755
756
757
758
759
760
761
762
763
# File 'lib/openstudio/analysis/server_api.rb', line 754

# Return the JSON (Full) of the datapoint.
#
# @param data_point_id [String] ID of the datapoint
# @return [Hash, nil] symbolized-key hash on HTTP 200, otherwise nil
def get_datapoint(data_point_id)
  resp = @conn.get "/data_points/#{data_point_id}.json"
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)
end

#get_datapoint_status(analysis_id, filter = nil) ⇒ Object

This is the former version of get data point status. The new version is preferred and allows for checking data points across all analyses.



733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
# File 'lib/openstudio/analysis/server_api.rb', line 733

# Former version of get data point status; the newer data_point_status is
# preferred and can check data points across all analyses.
#
# @param analysis_id [String, nil] ID of the analysis (nil returns nil)
# @param filter [String, nil] optional job filter passed as the 'jobs' param
# @return [Array<Hash>, nil] datapoint records, or nil when unavailable
def get_datapoint_status(analysis_id, filter = nil)
  data_points = nil

  unless analysis_id.nil?
    # Only send the jobs parameter when a non-empty filter was supplied.
    query = filter.nil? || filter == '' ? {} : { jobs: filter }
    resp = @conn.get("analyses/#{analysis_id}/status.json", query)
    if resp.status == 200
      data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
    end
  end

  data_points
end

#get_project_idsObject



92
93
94
95
# File 'lib/openstudio/analysis/server_api.rb', line 92

# Return the UUIDs of every project on the server.
#
# @return [Array<String>] project UUIDs
def get_project_ids
  get_projects.map { |project| project[:uuid] }
end

#get_projectsObject



79
80
81
82
83
84
85
86
87
88
89
90
# File 'lib/openstudio/analysis/server_api.rb', line 79

# Fetch the list of all projects from the server.
#
# @return [Array<Hash>] parsed project records
# @raise [RuntimeError] when the server does not respond 200
def get_projects
  response = @conn.get '/projects.json'
  raise 'did not receive a 200 in get_projects' unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)
end

#kill_all_analysesObject



697
698
699
700
701
702
703
704
705
706
707
708
709
# File 'lib/openstudio/analysis/server_api.rb', line 697

# Issue a stop action against every analysis of every project on the server.
def kill_all_analyses
  project_ids = get_project_ids
  puts "List of projects ids are: #{project_ids}"

  project_ids.each do |project_id|
    ids = get_analyses(project_id)
    puts ids
    ids.each do |id|
      puts "Trying to kill #{id}"
      kill_analysis(id)
    end
  end
end

#kill_analysis(analysis_id) ⇒ Object

Kill the analysis

Parameters:

  • analysis_id (String)

    ID of the analysis to stop



683
684
685
686
687
688
689
690
691
692
693
694
695
# File 'lib/openstudio/analysis/server_api.rb', line 683

# Kill (stop) a running analysis.
#
# @param analysis_id [String] ID of the analysis to stop
def kill_analysis(analysis_id)
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/action.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = { analysis_action: 'stop' }.to_json
  end

  puts "Killed analysis #{analysis_id}" if response.status == 200
end

#machine_statusObject

Retrieve the machine status

return [Hash]



233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
# File 'lib/openstudio/analysis/server_api.rb', line 233

# Retrieve the machine status from the server.
#
# @return [Hash, nil] parsed status payload, or nil when the server is
#   unreachable, times out, or responds with a non-200 status
def machine_status
  resp = @conn.get do |req|
    req.url 'status.json'
    req.options.timeout = 120
    req.options.open_timeout = 120
  end

  return nil unless resp.status == 200

  # Match the original behavior of treating a falsy parse result as nil.
  JSON.parse(resp.body, symbolize_names: true) || nil
rescue Faraday::ConnectionFailed, Net::ReadTimeout
  # Connection problems are reported as a nil status rather than raised.
  nil
end

#new_analysis(project_id, options) ⇒ Object



445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
# File 'lib/openstudio/analysis/server_api.rb', line 445

# Create a new analysis under a project, optionally from a formulation file,
# and optionally upload the analysis zip file and push to DEnCity.
#
# @param project_id [String] ID of the project to attach the analysis to
# @param options [Hash] supports :formulation_file (path to analysis.json),
#   :upload_file (path to analysis.zip), :analysis_name, :reset_uuids, and
#   :push_to_dencity
# @return [String] the ID of the created analysis
# @raise [RuntimeError] when inputs are missing or the server rejects the
#   analysis or the zip upload
def new_analysis(project_id, options)
  defaults = {
    analysis_name: nil,
    reset_uuids: false,
    push_to_dencity: false
  }
  options = defaults.merge(options)

  raise 'No project id passed' if project_id.nil?

  # Load the formulation JSON from disk when a file was provided.
  formulation_json = nil
  if options[:formulation_file]
    raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
    formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
  end

  # read in the analysis id from the analysis.json file
  analysis_id = nil
  if formulation_json
    if options[:reset_uuids]
      # Regenerate every UUID in the formulation (analysis, workflow steps,
      # arguments, and variables) so the server treats it as brand new.
      analysis_id = SecureRandom.uuid
      formulation_json[:analysis][:uuid] = analysis_id

      formulation_json[:analysis][:problem][:workflow].each do |wf|
        wf[:uuid] = SecureRandom.uuid
        if wf[:arguments]
          wf[:arguments].each do |arg|
            arg[:uuid] = SecureRandom.uuid
          end
        end
        if wf[:variables]
          wf[:variables].each do |var|
            var[:uuid] = SecureRandom.uuid
            var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
          end
        end
      end
    else
      analysis_id = formulation_json[:analysis][:uuid]
    end

    # set the analysis name
    formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
  else
    # No formulation file: build a minimal analysis payload from the options.
    formulation_json = {
      analysis: options
    }
    puts formulation_json
    analysis_id = SecureRandom.uuid
    formulation_json[:analysis][:uuid] = analysis_id
  end
  raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?

  # save out this file to compare
  # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }

  response = @conn.post do |req|
    req.url "projects/#{project_id}/analyses.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = formulation_json.to_json
    req.options[:timeout] = 600 # seconds
  end

  if response.status == 201
    puts "asked to create analysis with #{analysis_id}"
    # puts resp.inspect
    # The server may assign its own ID; use the one it returns from here on.
    analysis_id = JSON.parse(response.body)['_id']
    puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
    upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
    puts "new analysis created with ID: #{analysis_id}"
  else
    raise 'Could not create new analysis'
  end

  # check if we need to upload the analysis zip file
  if options[:upload_file]
    raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])

    payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
    response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
      req.options[:timeout] = 1800 # seconds
    end

    if response.status == 201
      puts 'Successfully uploaded ZIP file'
    else
      raise response.inspect
    end
  end

  analysis_id
end

#new_project(options = {}) ⇒ Object



123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
# File 'lib/openstudio/analysis/server_api.rb', line 123

# Create a new project on the server.
#
# @param options [Hash] supports :project_name; defaults to a timestamped name
# @return [String, nil] the new project ID, or nil when creation failed
def new_project(options = {})
  options = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }.merge(options)
  project_id = nil

  # TODO: make this a display name and a machine name
  payload = { project: { name: (options[:project_name]).to_s } }

  response = @conn.post do |req|
    req.url '/projects.json'
    req.headers['Content-Type'] = 'application/json'
    req.body = payload.to_json
  end

  case response.status
  when 201
    # grab the project id from the server's response
    project_id = JSON.parse(response.body)['_id']
    puts "new project created with ID: #{project_id}"
  when 500
    puts '500 Error'
    puts response.inspect
  end

  project_id
end

#queue_single_run(formulation_filename, analysis_zip_filename, analysis_type, run_data_point_filename = 'run_openstudio_workflow_monthly.rb') ⇒ Object



834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
# File 'lib/openstudio/analysis/server_api.rb', line 834

# Create a project and analysis from the given files and queue it for a
# single run without starting batch_run.
#
# @param formulation_filename [String] path to the analysis JSON file
# @param analysis_zip_filename [String] path to the analysis zip file
# @param analysis_type [String] type of analysis to start
# @param run_data_point_filename [String] server-side datapoint runner script
# @return [String] the new analysis ID
def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
                     run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: formulation_filename,
                             upload_file: analysis_zip_filename,
                             reset_uuids: true)

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: false,
                 analysis_type: analysis_type,
                 simulate_data_point_filename: 'simulate_data_point.rb',
                 run_data_point_filename: run_data_point_filename)

  analysis_id
end

#run(formulation_filename, analysis_zip_filename, analysis_type, options = {}) ⇒ Object

Submit the analysis for running via the API

Parameters:

  • formulation_filename (String)

    Name of the analysis.json file

  • analysis_zip_filename (String)

    Name of the analysis.zip file

  • analysis_type (String)

    Type of analysis to run

  • options (Hash) (defaults to: {})

    Hash of options

Options Hash (options):

  • :run_data_point_filename (String)

    Name of ruby file that the server runs – will be deprecated

  • :push_to_dencity (String)

    Whether or not to push to DEnCity

  • :batch_run_method (String)

    Which batch run method to use (batch_run or batch_run_local [no R])



788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
# File 'lib/openstudio/analysis/server_api.rb', line 788

# Submit an analysis for running via the API: creates a project, creates the
# analysis from the formulation/zip files, starts it, and — for staged
# analysis types — also queues the follow-up batch run.
#
# @param formulation_filename [String] path to the analysis JSON file
# @param analysis_zip_filename [String] path to the analysis zip file
# @param analysis_type [String] type of analysis to run
# @param options [Hash] supports :run_data_point_filename (will be
#   deprecated), :push_to_dencity, :batch_run_method (batch_run or
#   batch_run_local), and :without_delay
# @return [String] the new analysis ID
def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
  options = {
    run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
    push_to_dencity: false,
    batch_run_method: 'batch_run',
    without_delay: false
  }.merge(options)

  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: formulation_filename,
                             upload_file: analysis_zip_filename,
                             reset_uuids: true,
                             push_to_dencity: options[:push_to_dencity])

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: options[:without_delay],
                 analysis_type: analysis_type,
                 simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
                 run_data_point_filename: options[:run_data_point_filename])

  # Staged analyses do not trigger batch_run themselves, so queue it here
  # explicitly after the primary analysis action.
  if BATCH_RUN_METHODS.include? analysis_type
    start_analysis(analysis_id,
                   analysis_action: 'start',
                   without_delay: false,
                   analysis_type: options[:batch_run_method],
                   simulate_data_point_filename: 'simulate_data_point.rb',
                   run_data_point_filename: options[:run_data_point_filename])
  end

  analysis_id
end

#run_batch_run_across_analysesObject



858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
# File 'lib/openstudio/analysis/server_api.rb', line 858

# Create an empty project/analysis pair and start a batch run that spans
# all analyses on the server.
#
# @return [String] the new analysis ID
def run_batch_run_across_analyses
  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: nil,
                             upload_file: nil,
                             reset_uuids: true)

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: false,
                 analysis_type: 'batch_run_analyses',
                 simulate_data_point_filename: 'simulate_data_point.rb',
                 run_data_point_filename: 'run_openstudio_workflow_monthly.rb')

  analysis_id
end

#run_file(formulation_filename, analysis_zip_filename) ⇒ Object

Submit a generic analysis. This will use the options that are configured in the JSON file including the analysis type and options. Note that this may not work for all cases were multiple analyses need to run (e.g. single_run, queue_model, lhs)



771
772
773
774
775
776
777
# File 'lib/openstudio/analysis/server_api.rb', line 771

# Submit a generic analysis using the options configured in the JSON file,
# including the analysis type. Note that this may not work for all cases
# where multiple analyses need to run (e.g. single_run, queue_model, lhs).
#
# @param formulation_filename [String] path to the analysis JSON file
# @param analysis_zip_filename [String] path to the analysis zip file
# @return [String] the new analysis ID
def run_file(formulation_filename, analysis_zip_filename)
  # BUG FIX: parse the contents of the file, not the filename string itself
  # (JSON.parse on a path would raise a ParserError for any real filename).
  j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
  analysis_type = j[:analysis][:problem][:analysis_type]

  run(formulation_filename, analysis_zip_filename, analysis_type)
end

#start_analysis(analysis_id, options) ⇒ Object



662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
# File 'lib/openstudio/analysis/server_api.rb', line 662

# Post an action request that starts an analysis.
#
# @param analysis_id [String] ID of the analysis to act on
# @param options [Hash] action options merged over
#   { analysis_action: 'start', without_delay: false }
# @raise [RuntimeError] when the server does not respond 200
def start_analysis(analysis_id, options)
  options = { analysis_action: 'start', without_delay: false }.merge(options)

  puts "Run analysis is configured with #{options.to_json}"
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/action.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = options.to_json
    req.options[:timeout] = 1800 # seconds
  end

  raise 'Could not start the analysis' unless response.status == 200

  puts "Received request to run analysis #{analysis_id}"
end

#upload_datapoint(analysis_id, options) ⇒ Object

Upload a single datapoint

Parameters:

  • analysis_id (String)

    ID of the analysis to attach the datapoint to

  • options (Hash)

    Options

Options Hash (options):

  • :datapoint_file (String)

    Path to datapoint JSON to upload

  • :reset_uuids (Boolean)

    Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.



602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
# File 'lib/openstudio/analysis/server_api.rb', line 602

# Upload a single datapoint to an analysis.
#
# @param analysis_id [String] ID of the analysis to attach the datapoint to
# @param options [Hash] supports :datapoint_file (path to the datapoint JSON,
#   required) and :reset_uuids (regenerate the datapoint's UUID)
# @return [Hash] the server's parsed response for the created datapoint
# @raise [RuntimeError] on missing inputs or a non-201 server response
def upload_datapoint(analysis_id, options)
  defaults = { reset_uuids: false }
  options = defaults.merge(options)

  raise 'No analysis id passed' if analysis_id.nil?
  raise 'No datapoints file passed to new_analysis' unless options[:datapoint_file]
  raise "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])

  # BUG FIX: File.read instead of File.open(...).read so the file handle is
  # not leaked (the open handle was never closed).
  dp_hash = JSON.parse(File.read(options[:datapoint_file]), symbolize_names: true)

  # There are two instances of the analysis ID. There is one in the file,
  # and the other is in the POST url. Ideally remove the version in the
  # file and support only the URL based analysis_id
  dp_hash[:analysis_uuid] = analysis_id

  if options[:reset_uuids]
    dp_hash[:uuid] = SecureRandom.uuid
  end

  # merge in the analysis_id as it has to be what is in the database
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/data_points.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = dp_hash.to_json
  end

  if response.status == 201
    puts "new datapoints created for analysis #{analysis_id}"
    return JSON.parse(response.body, symbolize_names: true)
  else
    raise "could not create new datapoints #{response.body}"
  end
end

#upload_datapoints(analysis_id, options) ⇒ Object

Upload multiple data points to the server.

Parameters:

  • analysis_id (String)

    ID of the analysis to attach the datapoints to



638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
# File 'lib/openstudio/analysis/server_api.rb', line 638

# Upload multiple data points to the server in one batch request.
#
# @param analysis_id [String] ID of the analysis to attach the datapoints to
# @param options [Hash] supports :datapoints_file (path to the batch JSON,
#   required)
# @raise [RuntimeError] on missing inputs or a non-201 server response
def upload_datapoints(analysis_id, options)
  defaults = {}
  options = defaults.merge(options)

  raise 'No analysis id passed' if analysis_id.nil?
  raise 'No datapoints file passed to new_analysis' unless options[:datapoints_file]
  raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])

  # BUG FIX: File.read instead of File.open(...).read so the file handle is
  # not leaked (the open handle was never closed).
  dp_hash = JSON.parse(File.read(options[:datapoints_file]), symbolize_names: true)

  # merge in the analysis_id as it has to be what is in the database
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = dp_hash.to_json
  end

  if response.status == 201
    puts "new datapoints created for analysis #{analysis_id}"
  else
    raise "could not create new datapoints #{response.body}"
  end
end

#upload_to_dencity(analysis_uuid, analysis) ⇒ Object



538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
# File 'lib/openstudio/analysis/server_api.rb', line 538

# Push an analysis to a DEnCity server using the credentials configured in
# ~/.dencity/config.yml.
#
# @param analysis_uuid [String] UUID the analysis is registered under
# @param analysis [Hash] the analysis formulation to convert and upload
# @raise [RuntimeError] on connection, authentication, duplicate-analysis,
#   or upload failures
def upload_to_dencity(analysis_uuid, analysis)
  require 'dencity'
  puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
  conn = Dencity.connect
  raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
  begin
    # BUG FIX: the call target had been truncated ("conn.") — restored as
    # conn.login, matching the rescued authentication failures below.
    # NOTE(review): confirm `login` is the intended Dencity client method.
    r = conn.login
  rescue Faraday::ParsingError => user_id_failure
    raise "Error in user_id field: #{user_id_failure.message}"
  rescue MultiJson::ParseError => authentication_failure
    raise "Error in attempted authentication: #{authentication_failure.message}"
  end
  user_uuid = r.id

  # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
  # Check that the analysis has not yet been registered with the DEnCity instance.
  # TODO This should be simplified with a retrieve_analysis_by_user_defined_id' method in the future
  user_analyses = []
  r = conn.dencity_get 'analyses'
  # NOTE(review): `runner` is not defined in this class — this line (and the
  # one in the rescue below) looks copy-pasted from an OpenStudio measure and
  # would raise NameError if reached; left as-is pending a decision.
  runner.registerError('Unable to retrieve analyses from DEnCity server') unless r['status'] == 200
  r['data'].each do |dencity_analysis|
    user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
  end
  found_analysis_uuid = false
  user_analyses.each do |dencity_analysis_id|
    dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
    if dencity_analysis['user_defined_id'] == analysis_uuid
      found_analysis_uuid = true
      break
    end
  end
  raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
  dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)

  # Write the analysis DEnCity hash to dencity_analysis.json
  f = File.new('dencity_analysis.json', 'wb')
  f.write(JSON.pretty_generate(dencity_hash))
  f.close

  # Upload the processed analysis json.
  upload = conn.load_analysis 'dencity_analysis.json'
  begin
    upload_response = upload.push
  rescue StandardError => e
    runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
  else
    if NoMethodError == upload_response.class
      raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
    end
    if upload_response.status.to_s[0] == '2'
      puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
    else
      puts 'ERROR: Server returned a non-20x status. Response below.'
      puts upload_response
      raise
    end
  end
end