Module: OpenStudio::Analysis
- Defined in:
- lib/openstudio/analysis.rb,
lib/openstudio/analysis/version.rb,
lib/openstudio/analysis/workflow.rb,
lib/openstudio/analysis/server_api.rb,
lib/openstudio/analysis/formulation.rb,
lib/openstudio/analysis/support_files.rb,
lib/openstudio/analysis/workflow_step.rb,
lib/openstudio/analysis/server_scripts.rb,
lib/openstudio/analysis/translator/excel.rb,
lib/openstudio/analysis/translator/workflow.rb,
lib/openstudio/analysis/algorithm_attributes.rb,
lib/openstudio/analysis/translator/datapoints.rb
Defined Under Namespace
Modules: Translator Classes: AlgorithmAttributes, Formulation, SeedModel, ServerApi, ServerScripts, SupportFiles, WeatherFile, Workflow, WorkflowStep
Constant Summary collapse
- VERSION =
format of a pre-release suffix should match ^.*-[0-9]+$,
for example: -rc1, -beta6, -customusecase0
'1.3.3'.freeze
- @@measure_paths =
['./measures']
Class Method Summary collapse
-
.aws_instance_options(filename) ⇒ Object
Retrieve aws instance options from a project.
-
.create(display_name) ⇒ Object
Create a new analysis.
-
.from_csv(filename) ⇒ Object
Load a set of batch datapoints from a CSV file.
-
.from_excel(filename) ⇒ Object
Load an analysis from an Excel file.
-
.load(h) ⇒ Object
Load the analysis from a JSON string or a hash.
-
.make_osws(osa_filename, osd_array) ⇒ Object
Process an OSA with a set of OSDs into OSWs.
-
.measure_paths ⇒ Object
List of paths to look for measures when adding them.
- .measure_paths=(new_array) ⇒ Object
-
.to_dencity_analysis(analysis_hash, analysis_uuid) ⇒ Object
Generate a DEnCity-compliant hash for uploading from the analysis hash. TODO: make this work off of the analysis object, not the hash.
Class Method Details
.aws_instance_options(filename) ⇒ Object
Retrieve aws instance options from a project. This will return a hash
74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 |
# File 'lib/openstudio/analysis.rb', line 74

# Retrieve AWS instance options from a project file and return them as a hash.
#
# @param filename [String] path to a .xlsx or .csv project file
# @return [Hash] instance options (server/worker instance types, worker node
#   count, user id, AWS tags, analysis type, cluster name) for .xlsx projects,
#   or the translator's settings hash for .csv projects
# @raise [RuntimeError] if the file extension is neither .xlsx nor .csv
def self.aws_instance_options(filename)
  if File.extname(filename) == '.xlsx'
    excel = OpenStudio::Analysis::Translator::Excel.new(filename)
    excel.process
    options = {
      os_server_version: excel.settings['openstudio_server_version'],
      server_instance_type: excel.settings['server_instance_type'],
      worker_instance_type: excel.settings['worker_instance_type'],
      worker_node_number: excel.settings['worker_nodes'].to_i,
      user_id: excel.settings['user_id'],
      # NOTE(review): the scraped source dropped this accessor name; the Excel
      # translator exposes the tag list as `aws_tags` — confirm against the gem.
      aws_tags: excel.aws_tags,
      analysis_type: excel.analyses.first.analysis_type,
      cluster_name: excel.cluster_name
    }
  elsif File.extname(filename) == '.csv'
    csv = OpenStudio::Analysis::Translator::Datapoints.new(filename)
    csv.process
    options = csv.settings
  else
    raise 'Invalid file extension'
  end
  return options
end
.create(display_name) ⇒ Object
Create a new analysis
40 41 42 |
# File 'lib/openstudio/analysis.rb', line 40

# Create a new, empty analysis formulation.
#
# @param display_name [String] human-readable name for the analysis
# @return [OpenStudio::Analysis::Formulation] a fresh formulation instance
def self.create(display_name)
  OpenStudio::Analysis::Formulation.new(display_name)
end
.from_csv(filename) ⇒ Object
Load a set of batch datapoints from a CSV file. This will create an analysis of type ‘batch_datapoints’, which requires ‘batch_run’
61 62 63 64 65 |
# File 'lib/openstudio/analysis.rb', line 61

# Load a set of batch datapoints from a CSV file. Builds an analysis of type
# 'batch_datapoints' (which requires the 'batch_run' measure).
#
# @param filename [String] path to the CSV file
# @return [Object] the translated analysis
def self.from_csv(filename)
  translator = OpenStudio::Analysis::Translator::Datapoints.new(filename)
  translator.process
  translator.analysis
end
.from_excel(filename) ⇒ Object
Load an analysis from an Excel file. This will create an array of analyses, because one spreadsheet can define more than one analysis
53 54 55 56 57 |
# File 'lib/openstudio/analysis.rb', line 53

# Load analyses from an Excel project file. Returns an array because a single
# spreadsheet can define more than one analysis.
#
# @param filename [String] path to the .xlsx file
# @return [Array] the translated analyses
def self.from_excel(filename)
  translator = OpenStudio::Analysis::Translator::Excel.new(filename)
  translator.process
  translator.analyses
end
.load(h) ⇒ Object
Load the analysis from a JSON string or a hash. A JSON string is parsed with symbolized keys; a hash passed directly must already have symbolized keys
46 47 48 49 |
# File 'lib/openstudio/analysis.rb', line 46

# Load an analysis from a JSON string or a hash. A string is parsed with
# symbolized keys; a hash must already use symbolized keys.
#
# @param h [String, Hash] analysis JSON (string) or pre-parsed hash
# @return [OpenStudio::Analysis::Formulation] the loaded formulation
def self.load(h)
  parsed = h.is_a?(Hash) ? h : MultiJson.load(h, symbolize_keys: true)
  OpenStudio::Analysis::Formulation.from_hash parsed
end
.make_osws(osa_filename, osd_array) ⇒ Object
Process an OSA with a set of OSDs into OSWs
68 69 70 71 |
# File 'lib/openstudio/analysis.rb', line 68

# Process an OSA with a set of OSDs into OSWs.
#
# @param osa_filename [String] path to the OSA (analysis) file
# @param osd_array [Array] datapoint (OSD) objects to translate
# @return [Array] the osd_array, after each datapoint has been processed
def self.make_osws(osa_filename, osd_array)
  translator = OpenStudio::Analysis::Translator::Workflow.new(osa_filename)
  osd_array.each do |datapoint|
    translator.process_datapoints datapoint
  end
end
.measure_paths ⇒ Object
List of paths to look for measures when adding them. This currently only is used when loading an analysis hash file. It looks in the order of the measure_paths. As soon as it finds one, it stops.
45 46 47 |
# File 'lib/openstudio/analysis/formulation.rb', line 45 def self.measure_paths @@measure_paths end |
.measure_paths=(new_array) ⇒ Object
49 50 51 |
# File 'lib/openstudio/analysis/formulation.rb', line 49 def self.measure_paths=(new_array) @@measure_paths = new_array end |
.to_dencity_analysis(analysis_hash, analysis_uuid) ⇒ Object
Generate a DEnCity-compliant hash for uploading from the analysis hash. TODO: make this work off of the analysis object, not the hash.
101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 |
# File 'lib/openstudio/analysis.rb', line 101

# Generate a DEnCity-compliant hash for uploading from the analysis hash.
# TODO: make this work off of the analysis object, not the hash.
#
# @param analysis_hash [Hash] analysis JSON hash with symbolized keys
# @param analysis_uuid [String] UUID recorded as the user-defined id
# @return [Hash] { analysis: provenance, measure_definitions: [...] }
# @raise [RuntimeError] if :problem, :algorithm, or :workflow is missing
def self.to_dencity_analysis(analysis_hash, analysis_uuid)
  a = analysis_hash[:analysis]
  problem = a[:problem]
  # Guard first. The original read a[:problem][:analysis_type] before this
  # check, so a missing :problem crashed with NoMethodError instead of the
  # intended error below.
  raise 'No problem found in the analysis.json' unless problem
  raise 'No algorithm found in the analysis.json.' unless problem[:algorithm]
  raise 'No workflow found in the analysis.json' unless problem[:workflow]

  provenance = {
    user_defined_id: analysis_uuid,
    user_created_date: ::Time.now,
    analysis_types: [problem[:analysis_type]],
    name: a[:name],
    display_name: a[:display_name],
    description: 'Auto-generated DEnCity analysis hash using the OpenStudio Analysis Gem',
    analysis_information: problem[:algorithm]
  }

  # NOTE(review): the scraped source dropped this accumulator's name; any
  # local name is behavior-equivalent.
  measure_definitions = problem[:workflow].map do |wf|
    new_wfi = {
      id: wf[:measure_definition_uuid],
      version_id: wf[:measure_definition_version_uuid]
    }

    # Eventually all of this could be pulled directly from BCL
    new_wfi[:name] = wf[:measure_definition_class_name] if wf[:measure_definition_class_name]
    new_wfi[:display_name] = wf[:measure_definition_display_name] if wf[:measure_definition_display_name]
    new_wfi[:type] = wf[:measure_type] if wf[:measure_type]
    new_wfi[:modeler_description] = wf[:modeler_description] if wf[:modeler_description]
    new_wfi[:description] = wf[:description] if wf[:description]
    new_wfi[:arguments] = []

    wf[:arguments]&.each do |arg|
      wfi_arg = {}
      wfi_arg[:display_name] = arg[:display_name] if arg[:display_name]
      wfi_arg[:display_name_short] = arg[:display_name_short] if arg[:display_name_short]
      wfi_arg[:name] = arg[:name] if arg[:name]
      wfi_arg[:data_type] = arg[:value_type] if arg[:value_type]
      wfi_arg[:default_value] = nil
      wfi_arg[:description] = ''
      wfi_arg[:display_units] = '' # should be haystack compatible unit strings
      wfi_arg[:units] = '' # should be haystack compatible unit strings
      new_wfi[:arguments] << wfi_arg
    end

    # Variables are flattened into the same :arguments list as plain arguments.
    wf[:variables]&.each do |var|
      wfi_var = {}
      wfi_var[:display_name] = var[:argument][:display_name] if var[:argument][:display_name]
      wfi_var[:display_name_short] = var[:argument][:display_name_short] if var[:argument][:display_name_short]
      wfi_var[:name] = var[:argument][:name] if var[:argument][:name]
      wfi_var[:default_value] = nil
      wfi_var[:data_type] = var[:argument][:value_type] if var[:argument][:value_type]
      wfi_var[:description] = ''
      wfi_var[:display_units] = var[:units] if var[:units]
      wfi_var[:units] = '' # should be haystack compatible unit strings
      new_wfi[:arguments] << wfi_var
    end

    new_wfi
  end

  { analysis: provenance, measure_definitions: measure_definitions }
end