Class: Vellum::PromptExecutionMeta

Inherits:
Object
Defined in:
lib/vellum_ai/types/prompt_execution_meta.rb

Overview

The subset of the metadata tracked by Vellum during prompt execution that the request opted into with `expand_meta`.

Constant Summary

OMIT =
Object.new

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(model_name: OMIT, latency: OMIT, deployment_release_tag: OMIT, prompt_version_id: OMIT, finish_reason: OMIT, usage: OMIT, cost: OMIT, additional_properties: nil) ⇒ Vellum::PromptExecutionMeta

Parameters:

  • model_name (String) (defaults to: OMIT)
  • latency (Integer) (defaults to: OMIT)
  • deployment_release_tag (String) (defaults to: OMIT)
  • prompt_version_id (String) (defaults to: OMIT)
  • finish_reason (Vellum::FinishReasonEnum) (defaults to: OMIT)
  • usage (Vellum::MlModelUsage) (defaults to: OMIT)
  • cost (Vellum::Price) (defaults to: OMIT)
  • additional_properties (OpenStruct) (defaults to: nil)

    Additional properties unmapped to the current class definition



# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 43

def initialize(model_name: OMIT, latency: OMIT, deployment_release_tag: OMIT, prompt_version_id: OMIT, finish_reason: OMIT, usage: OMIT, cost: OMIT, additional_properties: nil)
  @model_name = model_name if model_name != OMIT
  @latency = latency if latency != OMIT
  @deployment_release_tag = deployment_release_tag if deployment_release_tag != OMIT
  @prompt_version_id = prompt_version_id if prompt_version_id != OMIT
  @finish_reason = finish_reason if finish_reason != OMIT
  @usage = usage if usage != OMIT
  @cost = cost if cost != OMIT
  @additional_properties = additional_properties
  @_field_set = { "model_name": model_name, "latency": latency, "deployment_release_tag": deployment_release_tag, "prompt_version_id": prompt_version_id, "finish_reason": finish_reason, "usage": usage, "cost": cost }.reject do |_k, v|
    v == OMIT
  end
end
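
A minimal construction sketch (the require path and all field values below are assumptions for illustration, not taken from a real execution). Fields left at OMIT are never assigned and are dropped from the serialized field set:

  require "vellum_ai" # assumed top-level require for the gem

  meta = Vellum::PromptExecutionMeta.new(
    model_name: "gpt-4o",                # illustrative model name
    latency: 870,                        # illustrative latency value
    deployment_release_tag: "production" # illustrative release tag
  )

  meta.model_name        # => "gpt-4o"
  meta.prompt_version_id # => nil (omitted, so the instance variable is never set)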

Instance Attribute Details

#additional_properties ⇒ OpenStruct (readonly)

Returns Additional properties unmapped to the current class definition.

Returns:

  • (OpenStruct)

    Additional properties unmapped to the current class definition



# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 27

def additional_properties
  @additional_properties
end

#cost ⇒ Vellum::Price (readonly)

Returns:

  • (Vellum::Price)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 25

def cost
  @cost
end

#deployment_release_tag ⇒ String (readonly)

Returns:

  • (String)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 17

def deployment_release_tag
  @deployment_release_tag
end

#finish_reason ⇒ Vellum::FinishReasonEnum (readonly)



# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 21

def finish_reason
  @finish_reason
end

#latency ⇒ Integer (readonly)

Returns:

  • (Integer)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 15

def latency
  @latency
end

#model_name ⇒ String (readonly)

Returns:

  • (String)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 13

def model_name
  @model_name
end

#prompt_version_id ⇒ String (readonly)

Returns:

  • (String)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 19

def prompt_version_id
  @prompt_version_id
end

#usage ⇒ Vellum::MlModelUsage (readonly)



# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 23

def usage
  @usage
end

Class Method Details

.from_json(json_object:) ⇒ Vellum::PromptExecutionMeta

Parameters:

  • json_object (String)

Returns:

  • (Vellum::PromptExecutionMeta)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 60

def self.from_json(json_object:)
  struct = JSON.parse(json_object, object_class: OpenStruct)
  parsed_json = JSON.parse(json_object)
  model_name = parsed_json["model_name"]
  latency = parsed_json["latency"]
  deployment_release_tag = parsed_json["deployment_release_tag"]
  prompt_version_id = parsed_json["prompt_version_id"]
  finish_reason = parsed_json["finish_reason"]
  unless parsed_json["usage"].nil?
    usage = parsed_json["usage"].to_json
    usage = Vellum::MlModelUsage.from_json(json_object: usage)
  else
    usage = nil
  end
  unless parsed_json["cost"].nil?
    cost = parsed_json["cost"].to_json
    cost = Vellum::Price.from_json(json_object: cost)
  else
    cost = nil
  end
  new(
    model_name: model_name,
    latency: latency,
    deployment_release_tag: deployment_release_tag,
    prompt_version_id: prompt_version_id,
    finish_reason: finish_reason,
    usage: usage,
    cost: cost,
    additional_properties: struct
  )
end
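
A usage sketch for deserialization, assuming a JSON payload that carries only top-level scalar fields (all values are illustrative). The full parsed OpenStruct is also retained on additional_properties:

  raw = '{"model_name": "gpt-4o", "latency": 870, "deployment_release_tag": "production"}'

  meta = Vellum::PromptExecutionMeta.from_json(json_object: raw)
  meta.model_name                       # => "gpt-4o"
  meta.latency                          # => 870
  meta.additional_properties.model_name # => "gpt-4o" (the whole parsed OpenStruct)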

.validate_raw(obj:) ⇒ Void

Parameters:

  • obj (Object)

Returns:

  • (Void)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 103

def self.validate_raw(obj:)
  obj.model_name&.is_a?(String) != false || raise("Passed value for field obj.model_name is not the expected type, validation failed.")
  obj.latency&.is_a?(Integer) != false || raise("Passed value for field obj.latency is not the expected type, validation failed.")
  obj.deployment_release_tag&.is_a?(String) != false || raise("Passed value for field obj.deployment_release_tag is not the expected type, validation failed.")
  obj.prompt_version_id&.is_a?(String) != false || raise("Passed value for field obj.prompt_version_id is not the expected type, validation failed.")
  obj.finish_reason&.is_a?(Vellum::FinishReasonEnum) != false || raise("Passed value for field obj.finish_reason is not the expected type, validation failed.")
  obj.usage.nil? || Vellum::MlModelUsage.validate_raw(obj: obj.usage)
  obj.cost.nil? || Vellum::Price.validate_raw(obj: obj.cost)
end
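
A sketch of raw validation against a parsed payload (the data is illustrative). Fields that are nil are skipped by the safe-navigation checks, while a value of the wrong type raises:

  require "json"
  require "ostruct"

  obj = JSON.parse('{"model_name": "gpt-4o", "latency": 870}', object_class: OpenStruct)
  Vellum::PromptExecutionMeta.validate_raw(obj: obj) # passes; unset fields are ignored

  bad = OpenStruct.new(latency: "870") # latency should be an Integer
  Vellum::PromptExecutionMeta.validate_raw(obj: bad)
  # raises "Passed value for field obj.latency is not the expected type, validation failed."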

Instance Method Details

#to_json ⇒ String

Returns:

  • (String)


# File 'lib/vellum_ai/types/prompt_execution_meta.rb', line 94

def to_json
  @_field_set&.to_json
end
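
A serialization sketch (illustrative values). Only fields that were explicitly passed to the constructor end up in the emitted JSON:

  meta = Vellum::PromptExecutionMeta.new(model_name: "gpt-4o", latency: 870)
  meta.to_json # => '{"model_name":"gpt-4o","latency":870}'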