Class: OpenStudio::Analysis::ServerApi

Inherits:
Object
  • Object
show all
Defined in:
lib/openstudio/analysis/server_api.rb

Constant Summary collapse

BATCH_RUN_METHODS =

Defines the set of analysis methods that require batch_run to be queued after them

['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(options = {}) ⇒ ServerApi

Returns a new instance of ServerApi.



16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# File 'lib/openstudio/analysis/server_api.rb', line 16

# Create a new API wrapper pointed at an OpenStudio analysis server.
#
# @param options [Hash] :hostname (defaults to http://localhost:8080) and
#   :log_path (defaults to ~/os_server_api.log). When the OS_SERVER_LOG_PATH
#   environment variable is set it overrides :log_path.
def initialize(options = {})
  options = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }.merge(options)

  log_target = if ENV['OS_SERVER_LOG_PATH']
                 "#{ENV['OS_SERVER_LOG_PATH']}/os_server_api.log"
               else
                 options[:log_path]
               end
  @logger = ::Logger.new(log_target)

  @hostname = options[:hostname]

  raise 'no host defined for server api class' if @hostname.nil?

  # TODO: add support for the proxy

  # Shared Faraday configuration; the multipart connection additionally
  # installs the multipart request middleware, which must be first in line.
  configure = lambda do |faraday, multipart|
    faraday.request :multipart if multipart
    faraday.request :url_encoded # form-encode POST params
    faraday.options.timeout = 300
    faraday.options.open_timeout = 300
    faraday.options.write_timeout = 1800
    faraday.use Faraday::Response::Logger, @logger
    faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
  end

  # basic connection for JSON requests
  @conn = Faraday.new(url: @hostname) { |faraday| configure.call(faraday, false) }

  # connection with multipart capabilities for zip-file uploads
  @conn_multipart = Faraday.new(url: @hostname) { |faraday| configure.call(faraday, true) }
end

Instance Attribute Details

#hostnameObject (readonly)

Returns the value of attribute hostname.



11
12
13
# File 'lib/openstudio/analysis/server_api.rb', line 11

# Hostname (scheme, host, and port) of the OpenStudio server this API
# instance talks to, e.g. "http://localhost:8080". Read-only.
def hostname
  @hostname
end

Instance Method Details

#alive?Boolean

Check if the machine is alive

return [Boolean] True if the machine has an awake value set

Returns:

  • (Boolean)


200
201
202
203
204
205
206
# File 'lib/openstudio/analysis/server_api.rb', line 200

# Check if the machine is alive.
#
# @return [Boolean, nil] true when the server status payload contains an
#   :awake value; nil when the status could not be retrieved
def alive?
  status = machine_status
  return status unless status

  !status[:status][:awake].nil?
end

#analysis_dencity_json(analysis_id) ⇒ Object



396
397
398
399
400
401
402
403
404
405
406
# File 'lib/openstudio/analysis/server_api.rb', line 396

# Fetch the DEnCity-formatted JSON for an analysis.
#
# @param analysis_id [String] analysis UUID
# @return [Hash, nil] parsed DEnCity hash, or nil on a non-200 response
def analysis_dencity_json(analysis_id)
  resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)
end

#data_point_status(analysis_id = nil) ⇒ Object

Get a list of analyses and the data points

Parameters:

  • analysis_id (String) (defaults to: nil)

    An analysis ID



694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
# File 'lib/openstudio/analysis/server_api.rb', line 694

# Get a list of analyses and their data points.
#
# @param analysis_id [String] optional analysis ID; when nil, status for all
#   analyses is returned
# @return [Array<Hash>, nil] parsed :analyses payload, or nil on failure
def data_point_status(analysis_id = nil)
  path = analysis_id ? "analyses/#{analysis_id}/status.json" : 'analyses/status.json'

  resp = @conn.get path, version: 2
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)[:analyses]
end

#datapoint_dencity(datapoint_id) ⇒ Object



384
385
386
387
388
389
390
391
392
393
394
# File 'lib/openstudio/analysis/server_api.rb', line 384

# Return the DEnCity-formatted JSON for a single data point.
#
# @param datapoint_id [String] data point UUID
# @return [Hash, nil] parsed data point hash, or nil on a non-200 response
def datapoint_dencity(datapoint_id)
  resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)
end

#delete_allObject



87
88
89
90
91
92
93
94
95
96
97
# File 'lib/openstudio/analysis/server_api.rb', line 87

# Delete every project on the server.
#
# @return [Boolean] false if any individual project delete returned false
def delete_all
  ids = get_project_ids
  puts "deleting projects with IDs: #{ids}"
  # map (not all?) so that every delete is attempted even after a failure
  ids.map { |id| delete_project(id) }.none? { |result| result == false }
end

#delete_project(id) ⇒ Object



73
74
75
76
77
78
79
80
81
82
83
84
85
# File 'lib/openstudio/analysis/server_api.rb', line 73

# Delete a single project by ID.
#
# @param id [String] project ID
# @return [Boolean] true when the server answered 204 No Content
def delete_project(id)
  response = @conn.delete "/projects/#{id}.json"
  deleted = response.status == 204

  if deleted
    puts "Successfully deleted project #{id}"
  else
    puts "ERROR deleting project #{id}"
  end

  deleted
end

#download_database(save_directory = '.') ⇒ Object

Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with DEnCity reporting, the size is around 325MB



336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
# File 'lib/openstudio/analysis/server_api.rb', line 336

# Download a MongoDB snapshot from the server and save it to disk. This
# database can get large; for 13,000 simulations with DEnCity reporting the
# size is around 325MB.
#
# @param save_directory [String] directory in which to save the dump
# @return [Array(Boolean, String)] success flag and path of the saved file
#   (nil path when the request failed)
def download_database(save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get do |r|
    r.url '/admin/backup_database?full_backup=true'
    r.options.timeout = 3600 # 60 minutes
  end

  if response.status == 200
    # Pull the server-supplied filename out of the Content-Disposition header.
    # The interpolation below was previously garbled to a literal "#(unknown)",
    # which ignored the extracted filename entirely.
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_dataframe(analysis_id, format = 'rdata', save_directory = '.') ⇒ Object



266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
# File 'lib/openstudio/analysis/server_api.rb', line 266

# Download the analysis results dataframe in the requested format.
#
# @param analysis_id [String] analysis UUID
# @param format [String] 'rdata' (binary) or a text format such as 'csv'
# @param save_directory [String] directory in which to save the file
# @return [Array(Boolean, String)] success flag and path of the saved file
def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get do |r|
    r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
    r.options.timeout = 3600 # 60 minutes
  end
  if response.status == 200
    # The interpolations below were previously garbled to a literal
    # "#(unknown)", discarding the filename parsed from Content-Disposition.
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    # R data frames are binary; everything else is written as text
    mode = format == 'rdata' ? 'wb' : 'w'
    File.open(file_path_and_name, mode) { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_datapoint(datapoint_id, save_directory = '.') ⇒ Object



309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
# File 'lib/openstudio/analysis/server_api.rb', line 309

# Download the zipped results for a data point, trying the packaged result
# file first and falling back to the legacy download route.
#
# @param datapoint_id [String] data point UUID
# @param save_directory [String] directory in which to save the zip
# @return [Array(Boolean, String)] success flag and path of the saved file
def download_datapoint(datapoint_id, save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
  # fall back to the older download endpoint when the result file is missing
  response = @conn.get "/data_points/#{datapoint_id}/download" unless response.status == 200

  if response.status == 200
    # The interpolations below were previously garbled to a literal
    # "#(unknown)", discarding the filename parsed from Content-Disposition.
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_datapoint_dencity_jsons(analysis_id, save_directory = '.') ⇒ Object



415
416
417
418
419
420
421
422
423
424
# File 'lib/openstudio/analysis/server_api.rb', line 415

# Save the DEnCity JSON of every completed data point in an analysis to disk
# as data_point_<id>_dencity.json files.
#
# @param analysis_id [String] analysis UUID
# @param save_directory [String] directory in which to save the files
def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
  # fetch the list of all the datapoints, then persist only the completed ones
  get_datapoint_status(analysis_id).each do |dp|
    next unless dp[:status] == 'completed'

    dencity_hash = datapoint_dencity(dp[:_id])
    File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') do |f|
      f << JSON.pretty_generate(dencity_hash)
    end
  end
end

#download_datapoint_jsons(analysis_id, save_directory = '.') ⇒ Object



373
374
375
376
377
378
379
380
381
382
# File 'lib/openstudio/analysis/server_api.rb', line 373

# Save the full JSON of every completed data point in an analysis to disk as
# data_point_<id>.json files.
#
# @param analysis_id [String] analysis UUID
# @param save_directory [String] directory in which to save the files
def download_datapoint_jsons(analysis_id, save_directory = '.')
  # fetch the list of all the datapoints, then persist only the completed ones
  get_datapoint_status(analysis_id).each do |dp|
    next unless dp[:status] == 'completed'

    datapoint_hash = get_datapoint(dp[:_id])
    File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') do |f|
      f << JSON.pretty_generate(datapoint_hash)
    end
  end
end

#download_datapoint_report(datapoint_id, report_name, save_directory = '.') ⇒ Object



357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
# File 'lib/openstudio/analysis/server_api.rb', line 357

# Download a named report file attached to a data point.
#
# @param datapoint_id [String] data point UUID
# @param report_name [String] filename of the report on the server
# @param save_directory [String] directory in which to save the report
# @return [Array(Boolean, String)] success flag and path of the saved file
def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
  if response.status == 200
    # The interpolations below were previously garbled to a literal
    # "#(unknown)", discarding the filename parsed from Content-Disposition.
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    File.open(file_path_and_name, 'wb') { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#download_dencity_json(analysis_id, save_directory = '.') ⇒ Object



408
409
410
411
412
413
# File 'lib/openstudio/analysis/server_api.rb', line 408

# Save the DEnCity JSON for an analysis to
# analysis_<id>_dencity.json in the given directory.
#
# @param analysis_id [String] analysis UUID
# @param save_directory [String] directory in which to save the file
def download_dencity_json(analysis_id, save_directory = '.')
  dencity_hash = analysis_dencity_json(analysis_id)
  return unless dencity_hash

  File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') do |f|
    f << JSON.pretty_generate(dencity_hash)
  end
end

#download_variables(analysis_id, format = 'rdata', save_directory = '.') ⇒ Object



289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
# File 'lib/openstudio/analysis/server_api.rb', line 289

# Download the analysis variables file in the requested format.
#
# @param analysis_id [String] analysis UUID
# @param format [String] 'rdata' (binary) or a text format such as 'csv'
# @param save_directory [String] directory in which to save the file
# @return [Array(Boolean, String)] success flag and path of the saved file
def download_variables(analysis_id, format = 'rdata', save_directory = '.')
  downloaded = false
  file_path_and_name = nil

  response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
  if response.status == 200
    # The interpolations below were previously garbled to a literal
    # "#(unknown)", discarding the filename parsed from Content-Disposition.
    filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
    downloaded = true
    file_path_and_name = "#{save_directory}/#{filename}"
    puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
    # R data files are binary; everything else is written as text
    mode = format == 'rdata' ? 'wb' : 'w'
    File.open(file_path_and_name, mode) { |f| f << response.body }
  end

  [downloaded, file_path_and_name]
end

#get_analyses(project_id) ⇒ Object



130
131
132
133
134
135
136
137
138
139
140
141
# File 'lib/openstudio/analysis/server_api.rb', line 130

# List the IDs of all analyses belonging to a project.
#
# @param project_id [String] project ID
# @return [Array<String>] analysis IDs; empty when the request fails or the
#   project has no analyses
def get_analyses(project_id)
  response = @conn.get "/projects/#{project_id}.json"
  return [] unless response.status == 200

  project = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
  (project[:analyses] || []).map { |analysis| analysis[:_id] }
end

#get_analyses_detailed(project_id) ⇒ Object



143
144
145
146
147
148
149
150
151
# File 'lib/openstudio/analysis/server_api.rb', line 143

# Return the full analysis hashes for a project (not just the IDs).
#
# @param project_id [String] project ID
# @return [Array<Hash>, nil] the project's :analyses payload, or nil on failure
def get_analyses_detailed(project_id)
  response = @conn.get "/projects/#{project_id}.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
end

#get_analysis(analysis_id) ⇒ Object

return the entire analysis JSON



154
155
156
157
158
159
160
161
162
# File 'lib/openstudio/analysis/server_api.rb', line 154

# Return the entire analysis JSON.
#
# @param analysis_id [String] analysis UUID
# @return [Hash, nil] the :analysis payload, or nil on failure
def get_analysis(analysis_id)
  response = @conn.get "/analyses/#{analysis_id}.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
end

#get_analysis_results(analysis_id) ⇒ Object

return the data point results in JSON format



255
256
257
258
259
260
261
262
263
264
# File 'lib/openstudio/analysis/server_api.rb', line 255

# Return the data point results for an analysis in JSON format.
#
# @param analysis_id [String] analysis UUID
# @return [Hash, nil] parsed analysis_data payload, or nil on failure
def get_analysis_results(analysis_id)
  response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
  return nil unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)
end

#get_analysis_status(analysis_id, analysis_type) ⇒ Object

Check the status of the simulation. Format should be: {

analysis: {
  status: "completed",
  analysis_type: "batch_run"
},
  data_points: [
  {
      _id: "bbd57e90-ce59-0131-35de-080027880ca6",
      status: "completed"
  }
]

}



177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
# File 'lib/openstudio/analysis/server_api.rb', line 177

# Check the status of an analysis of a given type.
#
# @param analysis_id [String] analysis UUID (returns nil when nil)
# @param analysis_type [String] expected analysis type; 'batch_run' matches
#   regardless of the type reported by the server
# @return [String, nil] the analysis status, or nil when unavailable
def get_analysis_status(analysis_id, analysis_type)
  return nil if analysis_id.nil?

  # sleep 2  # super cheesy---need to update how this works. Right now there is a good chance to get a
  # race condition when the analysis state changes.
  resp = @conn.get "analyses/#{analysis_id}/status.json"
  return nil unless resp.status == 200

  j = JSON.parse resp.body, symbolize_names: true
  return nil unless j && j[:analysis]

  if j[:analysis][:analysis_type] == analysis_type || analysis_type == 'batch_run'
    j[:analysis][:status]
  end
end

#get_analysis_status_and_json(analysis_id, analysis_type) ⇒ Object



235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
# File 'lib/openstudio/analysis/server_api.rb', line 235

# Check the status of an analysis and also return the raw status JSON.
#
# @param analysis_id [String] analysis UUID (returns [nil, nil] when nil)
# @param analysis_type [String] expected analysis type
# @return [Array(String, Hash)] status (nil when the type does not match) and
#   the parsed status payload (nil when the request failed)
def get_analysis_status_and_json(analysis_id, analysis_type)
  return [nil, nil] if analysis_id.nil?

  # sleep 2  # super cheesy---need to update how this works. Right now there is a good chance to get a
  # race condition when the analysis state changes.
  resp = @conn.get "analyses/#{analysis_id}/status.json"
  return [nil, nil] unless resp.status == 200

  j = JSON.parse resp.body, symbolize_names: true
  status = j[:analysis][:status] if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type

  [status, j]
end

#get_datapoint(data_point_id) ⇒ Object

Return the JSON (Full) of the datapoint



734
735
736
737
738
739
740
741
742
743
# File 'lib/openstudio/analysis/server_api.rb', line 734

# Return the JSON (full) of a data point.
#
# @param data_point_id [String] data point UUID
# @return [Hash, nil] parsed data point, or nil on a non-200 response
def get_datapoint(data_point_id)
  resp = @conn.get "/data_points/#{data_point_id}.json"
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)
end

#get_datapoint_status(analysis_id, filter = nil) ⇒ Object

This is the former version of get data point status. The new version is preferred and allows for checking data points across all analyses.



713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
# File 'lib/openstudio/analysis/server_api.rb', line 713

# Former version of the data point status call. The newer #data_point_status
# is preferred and allows checking data points across all analyses.
#
# @param analysis_id [String] analysis UUID (returns nil when nil)
# @param filter [String, nil] optional jobs filter passed through to the server
# @return [Array<Hash>, nil] data points of the analysis, or nil on failure
def get_datapoint_status(analysis_id, filter = nil)
  return nil if analysis_id.nil?

  # issue the request with or without the jobs filter
  args = ["analyses/#{analysis_id}/status.json"]
  args << { jobs: filter } unless filter.nil? || filter == ''

  resp = @conn.get(*args)
  return nil unless resp.status == 200

  JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
end

#get_project_idsObject



68
69
70
71
# File 'lib/openstudio/analysis/server_api.rb', line 68

# Return the UUIDs of every project on the server.
#
# @return [Array<String>] project UUIDs
def get_project_ids
  get_projects.map { |project| project[:uuid] }
end

#get_projectsObject



55
56
57
58
59
60
61
62
63
64
65
66
# File 'lib/openstudio/analysis/server_api.rb', line 55

# Return the list of projects on the server.
#
# @return [Array<Hash>] parsed projects payload
# @raise [RuntimeError] when the server does not answer 200
def get_projects
  response = @conn.get '/projects.json'
  raise 'did not receive a 200 in get_projects' unless response.status == 200

  JSON.parse(response.body, symbolize_names: true, max_nesting: false)
end

#kill_all_analysesObject



677
678
679
680
681
682
683
684
685
686
687
688
689
# File 'lib/openstudio/analysis/server_api.rb', line 677

# Stop every analysis in every project on the server.
def kill_all_analyses
  ids = get_project_ids
  puts "List of projects ids are: #{ids}"

  ids.each do |pid|
    analyses = get_analyses(pid)
    puts analyses
    analyses.each do |aid|
      puts "Trying to kill #{aid}"
      kill_analysis(aid)
    end
  end
end

#kill_analysis(analysis_id) ⇒ Object

Kill the analysis

Parameters:

  • analysis (String)

    Analysis ID to stop



663
664
665
666
667
668
669
670
671
672
673
674
675
# File 'lib/openstudio/analysis/server_api.rb', line 663

# Ask the server to stop a running analysis.
#
# @param analysis_id [String] analysis UUID to stop
def kill_analysis(analysis_id)
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/action.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = { analysis_action: 'stop' }.to_json
  end

  puts "Killed analysis #{analysis_id}" if response.status == 200
end

#machine_statusObject

Retrieve the machine status

return [Hash]



211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
# File 'lib/openstudio/analysis/server_api.rb', line 211

# Retrieve the machine status from the server.
#
# @return [Hash, nil] parsed status payload; nil on non-200 responses,
#   connection failures, or read timeouts
def machine_status
  resp = @conn.get do |req|
    req.url 'status.json'
    req.options.timeout = 300
    req.options.open_timeout = 300
  end
  puts "machine_status resp.status: #{resp.status}"
  puts resp.inspect

  return nil unless resp.status == 200

  j = JSON.parse resp.body, symbolize_names: true
  j if j
rescue Faraday::ConnectionFailed => e
  puts "machine_Status ConnectionFailed: #{e.message}"
  nil
rescue Net::ReadTimeout => e
  puts "machine_Status ReadTimeout: #{e.message}"
  nil
end

#new_analysis(project_id, options) ⇒ Object



426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
# File 'lib/openstudio/analysis/server_api.rb', line 426

# Create a new analysis under a project, optionally pushing it to DEnCity and
# uploading the supporting analysis zip file.
#
# @param project_id [String] ID of the project to attach the analysis to
# @param options [Hash] :formulation_file (path to the analysis JSON),
#   :upload_file (path to the analysis zip), :analysis_name, :reset_uuids,
#   :push_to_dencity
# @return [String] the ID of the created analysis (as reported by the server)
# @raise [RuntimeError] when required files are missing, no analysis id can be
#   determined, or the server rejects the analysis or the zip upload
def new_analysis(project_id, options)
  defaults = {
    analysis_name: nil,
    reset_uuids: false,
    push_to_dencity: false
  }
  options = defaults.merge(options)

  raise 'No project id passed' if project_id.nil?

  formulation_json = nil
  if options[:formulation_file]
    raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
    formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
  end

  # read in the analysis id from the analysis.json file
  analysis_id = nil
  if formulation_json
    if options[:reset_uuids]
      # Assign fresh UUIDs to the analysis and to every workflow step,
      # argument, and variable so the same formulation can be resubmitted.
      analysis_id = SecureRandom.uuid
      formulation_json[:analysis][:uuid] = analysis_id

      formulation_json[:analysis][:problem][:workflow].each do |wf|
        wf[:uuid] = SecureRandom.uuid
        wf[:arguments]&.each do |arg|
          arg[:uuid] = SecureRandom.uuid
        end
        wf[:variables]&.each do |var|
          var[:uuid] = SecureRandom.uuid
          var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
        end
      end
    else
      analysis_id = formulation_json[:analysis][:uuid]
    end

    # set the analysis name
    formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
  else
    # No formulation file: treat the options hash itself as the analysis
    # definition and generate a fresh id for it.
    formulation_json = {
      analysis: options
    }
    puts formulation_json
    analysis_id = SecureRandom.uuid
    formulation_json[:analysis][:uuid] = analysis_id
  end
  raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?

  # save out this file to compare
  # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }

  response = @conn.post do |req|
    req.url "projects/#{project_id}/analyses.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = formulation_json.to_json
    req.options.timeout = 600 # seconds
    req.options.write_timeout = 1800
  end

  if response.status == 201
    puts "asked to create analysis with #{analysis_id}"
    # puts resp.inspect
    # the server may assign its own id; prefer the one in the response body
    analysis_id = JSON.parse(response.body)['_id']
    puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
    upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
    puts "new analysis created with ID: #{analysis_id}"
  else
    raise 'Could not create new analysis'
  end

  # check if we need to upload the analysis zip file
  if options[:upload_file]
    raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])

    payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
    response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
      req.options.timeout = 1800 # seconds
      req.options.write_timeout = 1800
    end

    if response.status == 201
      puts 'Successfully uploaded ZIP file'
    else
      raise response.inspect
    end
  end

  analysis_id
end

#new_project(options = {}) ⇒ Object



99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/openstudio/analysis/server_api.rb', line 99

# Create a new project on the server.
#
# @param options [Hash] :project_name (defaults to a timestamped name)
# @return [String, nil] the new project ID, or nil when creation failed
def new_project(options = {})
  defaults = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
  options = defaults.merge(options)
  project_id = nil

  # TODO: make this a display name and a machine name
  project_hash = { project: { name: (options[:project_name]).to_s } }
  response = nil
  begin
    response = @conn.post do |req|
      req.url '/projects.json'
      req.headers['Content-Type'] = 'application/json'
      req.body = project_hash.to_json
    end
    puts "response.status: #{response.status}"
    puts response.inspect
  rescue Net::OpenTimeout => e
    puts "new_project OpenTimeout: #{e.message}"
  end
  # When the request timed out, response is still nil; previously the code
  # called response.status unconditionally and raised NoMethodError here.
  if response && response.status == 201
    project_id = JSON.parse(response.body)['_id']

    puts "new project created with ID: #{project_id}"
    # grab the project id
  elsif response && response.status == 500
    puts '500 Error'
    puts response.inspect
  end

  project_id
end

#queue_single_run(formulation_filename, analysis_zip_filename, analysis_type, run_data_point_filename = 'run_openstudio_workflow_monthly.rb') ⇒ Object



814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
# File 'lib/openstudio/analysis/server_api.rb', line 814

# Create a project + analysis from the given files and queue it as a single
# run of the given analysis type.
#
# @param formulation_filename [String] path to the analysis JSON
# @param analysis_zip_filename [String] path to the analysis zip
# @param analysis_type [String] analysis type to start
# @param run_data_point_filename [String] server-side data point runner script
# @return [String] the created analysis ID
def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
                     run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: formulation_filename,
                             upload_file: analysis_zip_filename,
                             reset_uuids: true)

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: false,
                 analysis_type: analysis_type,
                 simulate_data_point_filename: 'simulate_data_point.rb',
                 run_data_point_filename: run_data_point_filename)

  analysis_id
end

#run(formulation_filename, analysis_zip_filename, analysis_type, options = {}) ⇒ Object

Submit the analysis for running via the API

Parameters:

  • formulation_filename (String)

    Name of the analysis.json file

  • analysis_zip_filename (String)

    Name of the analysis.zip file

  • analysis_type (String)

    Type of analysis to run

  • options (Hash) (defaults to: {})

    Hash of options

Options Hash (options):

  • :run_data_point_filename (String)

    Name of ruby file that the server runs – will be deprecated

  • :push_to_dencity (String)

    Whether or not to push to DEnCity

  • :batch_run_method (String)

    Which batch run method to use (batch_run or batch_run_local [no R])



768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
# File 'lib/openstudio/analysis/server_api.rb', line 768

# Submit an analysis for running via the API: creates a project, creates the
# analysis from the formulation/zip files, starts it, and — for staged
# analysis types listed in BATCH_RUN_METHODS — queues the batch run as well.
#
# @param formulation_filename [String] name of the analysis.json file
# @param analysis_zip_filename [String] name of the analysis.zip file
# @param analysis_type [String] type of analysis to run
# @param options [Hash] :run_data_point_filename, :push_to_dencity,
#   :batch_run_method ('batch_run' or 'batch_run_local'), :without_delay
# @return [String] the created analysis ID
def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
  options = {
    run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
    push_to_dencity: false,
    batch_run_method: 'batch_run',
    without_delay: false
  }.merge(options)

  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: formulation_filename,
                             upload_file: analysis_zip_filename,
                             reset_uuids: true,
                             push_to_dencity: options[:push_to_dencity])

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: options[:without_delay],
                 analysis_type: analysis_type,
                 simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
                 run_data_point_filename: options[:run_data_point_filename])

  # Staged analyses (lhs, doe, ...) only create the data points; batch_run
  # must be queued explicitly because there is no way to tell the system to
  # do it implicitly.
  if BATCH_RUN_METHODS.include?(analysis_type)
    start_analysis(analysis_id,
                   analysis_action: 'start',
                   without_delay: false,
                   analysis_type: options[:batch_run_method],
                   simulate_data_point_filename: 'simulate_data_point.rb',
                   run_data_point_filename: options[:run_data_point_filename])
  end

  analysis_id
end

#run_batch_run_across_analysesObject



838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
# File 'lib/openstudio/analysis/server_api.rb', line 838

# Queue a meta-analysis that batch-runs data points across all existing
# analyses on the server (no formulation or zip file is needed).
#
# @return [String] the created analysis ID
def run_batch_run_across_analyses
  project_id = new_project({})

  analysis_id = new_analysis(project_id,
                             formulation_file: nil,
                             upload_file: nil,
                             reset_uuids: true)

  start_analysis(analysis_id,
                 analysis_action: 'start',
                 without_delay: false,
                 analysis_type: 'batch_run_analyses',
                 simulate_data_point_filename: 'simulate_data_point.rb',
                 run_data_point_filename: 'run_openstudio_workflow_monthly.rb')

  analysis_id
end

#run_file(formulation_filename, analysis_zip_filename) ⇒ Object

Submit a generic analysis. This will use the options that are configured in the JSON file including the analysis type and options. Note that this may not work for all cases were multiple analyses need to run (e.g. single_run, queue_model, lhs)



751
752
753
754
755
756
757
# File 'lib/openstudio/analysis/server_api.rb', line 751

# Submit a generic analysis described entirely by its formulation JSON file.
# The analysis type and options are read from the file itself. Note that this
# may not work for all cases where multiple analyses need to run (e.g.
# single_run, queue_model, lhs).
#
# @param formulation_filename [String] path to the analysis JSON file
# @param analysis_zip_filename [String] path to the analysis zip file
# @return [String] the created analysis ID
def run_file(formulation_filename, analysis_zip_filename)
  # Read and parse the JSON file to grab the analysis type. Previously the
  # filename string itself was passed to JSON.parse, which cannot succeed for
  # a file path; File.read matches how new_analysis loads the same file.
  j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
  analysis_type = j[:analysis][:problem][:analysis_type]

  run(formulation_filename, analysis_zip_filename, analysis_type)
end

#start_analysis(analysis_id, options) ⇒ Object



641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
# File 'lib/openstudio/analysis/server_api.rb', line 641

# Post an action request that starts an analysis on the server.
#
# @param analysis_id [String] analysis UUID
# @param options [Hash] merged over { analysis_action: 'start',
#   without_delay: false } and sent as the JSON request body
# @raise [RuntimeError] when the server does not answer 200
def start_analysis(analysis_id, options)
  options = { analysis_action: 'start', without_delay: false }.merge(options)

  puts "Run analysis is configured with #{options.to_json}"
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/action.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = options.to_json
    req.options.timeout = 1800 # seconds
    req.options.write_timeout = 1800
  end

  raise 'Could not start the analysis' unless response.status == 200

  puts "Received request to run analysis #{analysis_id}"
end

#upload_datapoint(analysis_id, options) ⇒ Object

Upload a single datapoint

Parameters:

  • analysis (String)

    Analysis ID to attach datapoint

  • options (Hash)

    Options

Options Hash (options):

  • :datapoint_file (String)

    Path to datapoint JSON to upload

  • :reset_uuids (Boolean)

    Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.



581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
# File 'lib/openstudio/analysis/server_api.rb', line 581

# Upload a single datapoint to an analysis.
#
# @param analysis_id [String] analysis ID to attach the datapoint to
# @param options [Hash] :datapoint_file (path to the datapoint JSON),
#   :reset_uuids (assign a fresh random UUID to the datapoint)
# @return [Hash] the parsed server response for the created datapoint
# @raise [RuntimeError] on missing arguments/files or a non-201 response
def upload_datapoint(analysis_id, options)
  defaults = { reset_uuids: false }
  options = defaults.merge(options)

  raise 'No analysis id passed' if analysis_id.nil?
  raise 'No datapoints file passed to new_analysis' unless options[:datapoint_file]
  raise "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])

  # File.read avoids the file-handle leak of File.open(...).read
  dp_hash = JSON.parse(File.read(options[:datapoint_file]), symbolize_names: true)

  # There are two instances of the analysis ID. There is one in the file,
  # and the other is in the POST url. Ideally remove the version in the
  # file and support only the URL based analysis_id
  dp_hash[:analysis_uuid] = analysis_id

  dp_hash[:uuid] = SecureRandom.uuid if options[:reset_uuids]

  # merge in the analysis_id as it has to be what is in the database
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/data_points.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = dp_hash.to_json
  end

  if response.status == 201
    puts "new datapoints created for analysis #{analysis_id}"
    return JSON.parse(response.body, symbolize_names: true)
  else
    raise "could not create new datapoints #{response.body}"
  end
end

#upload_datapoints(analysis_id, options) ⇒ Object

Upload multiple data points to the server.

Parameters:

  • analysis (String)

    Analysis ID to attach datapoint



617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
# File 'lib/openstudio/analysis/server_api.rb', line 617

# Upload multiple datapoints to an analysis via the batch endpoint.
#
# @param analysis_id [String] analysis ID to attach the datapoints to
# @param options [Hash] :datapoints_file (path to the batch datapoints JSON)
# @raise [RuntimeError] on missing arguments/files or a non-201 response
def upload_datapoints(analysis_id, options)
  defaults = {}
  options = defaults.merge(options)

  raise 'No analysis id passed' if analysis_id.nil?
  raise 'No datapoints file passed to new_analysis' unless options[:datapoints_file]
  raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])

  # File.read avoids the file-handle leak of File.open(...).read
  dp_hash = JSON.parse(File.read(options[:datapoints_file]), symbolize_names: true)

  # merge in the analysis_id as it has to be what is in the database
  response = @conn.post do |req|
    req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
    req.headers['Content-Type'] = 'application/json'
    req.body = dp_hash.to_json
  end

  if response.status == 201
    puts "new datapoints created for analysis #{analysis_id}"
  else
    raise "could not create new datapoints #{response.body}"
  end
end

#upload_to_dencity(analysis_uuid, analysis) ⇒ Object



517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
# File 'lib/openstudio/analysis/server_api.rb', line 517

# Push an analysis (in DEnCity form) to a DEnCity server configured via
# '~/.dencity/config.yml': authenticate, verify the analysis is not already
# registered, write dencity_analysis.json locally, and upload it.
#
# NOTE(review): the line `r = conn.` below is truncated — the method name is
# missing and this will not parse. Restore the intended call (presumably the
# user-retrieval call whose result exposes `.id`) from the upstream source.
# NOTE(review): `runner` is not defined anywhere in this class; the
# `runner.registerError` calls look copied from an OpenStudio measure and
# would raise NameError if reached — confirm the intended error handling.
def upload_to_dencity(analysis_uuid, analysis)
  require 'dencity'
  puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
  conn = Dencity.connect
  raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
  begin
    r = conn.
  rescue Faraday::ParsingError => user_id_failure
    raise "Error in user_id field: #{user_id_failure.message}"
  rescue MultiJson::ParseError => authentication_failure
    raise "Error in attempted authentication: #{authentication_failure.message}"
  end
  user_uuid = r.id

  # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
  # Check that the analysis has not yet been registered with the DEnCity instance.
  # TODO This should be simplified with a retrieve_analysis_by_user_defined_id' method in the future
  user_analyses = []
  r = conn.dencity_get 'analyses'
  runner.registerError('Unable to retrieve analyses from DEnCity server') unless r['status'] == 200
  r['data'].each do |dencity_analysis|
    user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
  end
  found_analysis_uuid = false
  user_analyses.each do |dencity_analysis_id|
    dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
    if dencity_analysis['user_defined_id'] == analysis_uuid
      found_analysis_uuid = true
      break
    end
  end
  raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
  dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)

  # Write the analysis DEnCity hash to dencity_analysis.json
  f = File.new('dencity_analysis.json', 'wb')
  f.write(JSON.pretty_generate(dencity_hash))
  f.close

  # Upload the processed analysis json.
  upload = conn.load_analysis 'dencity_analysis.json'
  begin
    upload_response = upload.push
  rescue StandardError => e
    runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
  else
    if NoMethodError == upload_response.class
      raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
    end
    if upload_response.status.to_s[0] == '2'
      puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
    else
      puts 'ERROR: Server returned a non-20x status. Response below.'
      puts upload_response
      raise
    end
  end
end