Class: Vellum::PromptExecutionMeta
- Inherits: Object
  - Object
  - Vellum::PromptExecutionMeta
- Defined in: lib/vellum_ai/types/prompt_execution_meta.rb
Overview
The subset of the metadata tracked during prompt execution that the request opted into with `expand_meta`.
Constant Summary
- OMIT = Object.new
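OMIT is a sentinel: a fresh Object instance is equal only to itself, so it can mark "keyword not supplied" without colliding with legitimate values such as nil or false. A minimal sketch of the pattern in isolation (the describe helper below is illustrative, not part of the gem):

OMIT = Object.new

# A fresh Object is only equal to itself, so it cleanly distinguishes
# "argument not supplied" from an explicit nil or false.
def describe(value = OMIT) # hypothetical helper, not gem API
  return "no value supplied" if value.equal?(OMIT)
  "value supplied: #{value.inspect}"
end

describe      # => "no value supplied"
describe(nil) # => "value supplied: nil"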
Instance Attribute Summary
- #additional_properties ⇒ OpenStruct readonly
  Additional properties unmapped to the current class definition.
- #cost ⇒ Vellum::Price readonly
- #deployment_release_tag ⇒ String readonly
- #finish_reason ⇒ Vellum::FinishReasonEnum readonly
- #latency ⇒ Integer readonly
- #model_name ⇒ String readonly
- #prompt_version_id ⇒ String readonly
- #usage ⇒ Vellum::MlModelUsage readonly
Class Method Summary
- .from_json(json_object:) ⇒ Vellum::PromptExecutionMeta
- .validate_raw(obj:) ⇒ Void
Instance Method Summary
- #initialize(model_name: OMIT, latency: OMIT, deployment_release_tag: OMIT, prompt_version_id: OMIT, finish_reason: OMIT, usage: OMIT, cost: OMIT, additional_properties: nil) ⇒ Vellum::PromptExecutionMeta constructor
- #to_json ⇒ String
Constructor Details
#initialize(model_name: OMIT, latency: OMIT, deployment_release_tag: OMIT, prompt_version_id: OMIT, finish_reason: OMIT, usage: OMIT, cost: OMIT, additional_properties: nil) ⇒ Vellum::PromptExecutionMeta
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 43

def initialize(model_name: OMIT, latency: OMIT, deployment_release_tag: OMIT, prompt_version_id: OMIT, finish_reason: OMIT, usage: OMIT, cost: OMIT, additional_properties: nil)
  @model_name = model_name if model_name != OMIT
  @latency = latency if latency != OMIT
  @deployment_release_tag = deployment_release_tag if deployment_release_tag != OMIT
  @prompt_version_id = prompt_version_id if prompt_version_id != OMIT
  @finish_reason = finish_reason if finish_reason != OMIT
  @usage = usage if usage != OMIT
  @cost = cost if cost != OMIT
  @additional_properties = additional_properties
  @_field_set = {
    "model_name": model_name,
    "latency": latency,
    "deployment_release_tag": deployment_release_tag,
    "prompt_version_id": prompt_version_id,
    "finish_reason": finish_reason,
    "usage": usage,
    "cost": cost
  }.reject do | _k, v |
    v == OMIT
  end
end
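A hedged usage sketch (it assumes the gem is required as vellum_ai; all field values are made up). Any keyword left at its OMIT default is never assigned, so its reader returns nil and the field is excluded from the serialized field set:

require "vellum_ai" # assumed require name for the gem

meta = Vellum::PromptExecutionMeta.new(
  model_name: "gpt-4o",             # illustrative value
  latency: 1_250,                   # illustrative value
  deployment_release_tag: "LATEST", # illustrative value
  prompt_version_id: "pv-123"       # illustrative value
)

meta.model_name    # => "gpt-4o"
meta.finish_reason # => nil (left at OMIT, so never assigned)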
Instance Attribute Details
#additional_properties ⇒ OpenStruct (readonly)
Returns Additional properties unmapped to the current class definition.
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 27

def additional_properties
  @additional_properties
end
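A hedged sketch of how unmapped keys surface here: .from_json keeps the whole loosely parsed payload as an OpenStruct in additional_properties, so keys the class does not model remain reachable (the extra key below is made up):

require "vellum_ai" # assumed require name for the gem

payload = '{"model_name":"gpt-4o","some_future_field":"hello"}'
meta = Vellum::PromptExecutionMeta.from_json(json_object: payload)

meta.additional_properties.some_future_field # => "hello"
meta.respond_to?(:some_future_field)         # => false (not part of the class definition)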
#cost ⇒ Vellum::Price (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 25

def cost
  @cost
end
#deployment_release_tag ⇒ String (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 17

def deployment_release_tag
  @deployment_release_tag
end
#finish_reason ⇒ Vellum::FinishReasonEnum (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 21

def finish_reason
  @finish_reason
end
#latency ⇒ Integer (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 15

def latency
  @latency
end
#model_name ⇒ String (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 13

def model_name
  @model_name
end
#prompt_version_id ⇒ String (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 19

def prompt_version_id
  @prompt_version_id
end
#usage ⇒ Vellum::MlModelUsage (readonly)
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 23

def usage
  @usage
end
Class Method Details
.from_json(json_object:) ⇒ Vellum::PromptExecutionMeta
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 60

def self.from_json(json_object:)
  struct = JSON.parse(json_object, object_class: OpenStruct)
  parsed_json = JSON.parse(json_object)
  model_name = parsed_json["model_name"]
  latency = parsed_json["latency"]
  deployment_release_tag = parsed_json["deployment_release_tag"]
  prompt_version_id = parsed_json["prompt_version_id"]
  finish_reason = parsed_json["finish_reason"]
  unless parsed_json["usage"].nil?
    usage = parsed_json["usage"].to_json
    usage = Vellum::MlModelUsage.from_json(json_object: usage)
  else
    usage = nil
  end
  unless parsed_json["cost"].nil?
    cost = parsed_json["cost"].to_json
    cost = Vellum::Price.from_json(json_object: cost)
  else
    cost = nil
  end
  new(
    model_name: model_name,
    latency: latency,
    deployment_release_tag: deployment_release_tag,
    prompt_version_id: prompt_version_id,
    finish_reason: finish_reason,
    usage: usage,
    cost: cost,
    additional_properties: struct
  )
end
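A hedged deserialization example; the payload values are made up, and the nested usage and cost objects are omitted so their exact shapes do not matter here:

require "vellum_ai" # assumed require name for the gem
require "json"

payload = {
  "model_name" => "gpt-4o",
  "latency" => 1_250,
  "deployment_release_tag" => "LATEST",
  "prompt_version_id" => "pv-123"
}.to_json

meta = Vellum::PromptExecutionMeta.from_json(json_object: payload)
meta.latency # => 1250
meta.usage   # => nil ("usage" was absent from the payload)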
.validate_raw(obj:) ⇒ Void
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 103

def self.validate_raw(obj:)
  obj.model_name&.is_a?(String) != false || raise("Passed value for field obj.model_name is not the expected type, validation failed.")
  obj.latency&.is_a?(Integer) != false || raise("Passed value for field obj.latency is not the expected type, validation failed.")
  obj.deployment_release_tag&.is_a?(String) != false || raise("Passed value for field obj.deployment_release_tag is not the expected type, validation failed.")
  obj.prompt_version_id&.is_a?(String) != false || raise("Passed value for field obj.prompt_version_id is not the expected type, validation failed.")
  obj.finish_reason&.is_a?(Vellum::FinishReasonEnum) != false || raise("Passed value for field obj.finish_reason is not the expected type, validation failed.")
  obj.usage.nil? || Vellum::MlModelUsage.validate_raw(obj: obj.usage)
  obj.cost.nil? || Vellum::Price.validate_raw(obj: obj.cost)
end
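A hedged sketch of .validate_raw; it is typically handed a loosely parsed object (for example the OpenStruct produced by JSON.parse with object_class: OpenStruct) and raises if a present field has the wrong type. The values below are illustrative:

require "vellum_ai" # assumed require name for the gem
require "ostruct"

ok  = OpenStruct.new(model_name: "gpt-4o", latency: 1_250)       # illustrative values
bad = OpenStruct.new(model_name: "gpt-4o", latency: "very slow") # latency is not an Integer

Vellum::PromptExecutionMeta.validate_raw(obj: ok)  # passes silently
Vellum::PromptExecutionMeta.validate_raw(obj: bad)
# raises "Passed value for field obj.latency is not the expected type, validation failed."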
Instance Method Details
#to_json ⇒ String
# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 94

def to_json
  @_field_set&.to_json
end
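A hedged round-trip sketch; because @_field_set only keeps fields that were explicitly provided, omitted fields are absent from the output, while an explicit nil serializes as null (values are illustrative and key order is approximate):

require "vellum_ai" # assumed require name for the gem

meta = Vellum::PromptExecutionMeta.new(model_name: "gpt-4o", usage: nil)
meta.to_json
# => "{\"model_name\":\"gpt-4o\",\"usage\":null}"

round_tripped = Vellum::PromptExecutionMeta.from_json(json_object: meta.to_json)
round_tripped.model_name # => "gpt-4o"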