Module: DSPy::LM::UsageFactory
- Extended by:
- T::Sig
- Defined in:
- lib/dspy/lm/usage.rb
Overview
Factory for creating appropriate usage objects
Class Method Summary collapse
- .convert_to_hash(value) ⇒ Object
- .create(provider, usage_data) ⇒ Object
- .create_anthropic_usage(data) ⇒ Object
- .create_generic_usage(data) ⇒ Object
- .create_openai_usage(data) ⇒ Object
Class Method Details
.convert_to_hash(value) ⇒ Object
106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 |
# File 'lib/dspy/lm/usage.rb', line 106

# Coerces an arbitrary usage-details value into a Hash with symbol keys
# and Integer values.
#
# Returns nil when the value is nil, cannot be converted, or conversion
# raises; returns the value unchanged when it is already a symbol-keyed Hash.
def self.convert_to_hash(value)
  return nil if value.nil?
  return value if value.is_a?(Hash) && value.keys.all? { |k| k.is_a?(Symbol) }

  # Convert object to hash if it responds to to_h (structs, API response
  # objects, test doubles).
  if value.respond_to?(:to_h)
    hash = value.to_h
    # Ensure symbol keys and integer values
    hash.transform_keys(&:to_sym).transform_values(&:to_i)
  else
    nil
  end
rescue
  # Best-effort conversion: any failure yields nil rather than raising.
  nil
end
.create(provider, usage_data) ⇒ Object
53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
# File 'lib/dspy/lm/usage.rb', line 53

# Builds a provider-appropriate usage struct from raw usage data.
#
# Accepts an existing Usage (returned as-is), a Hash, or any object
# responding to #to_h. Returns nil for nil or non-convertible input.
def self.create(provider, usage_data)
  return nil if usage_data.nil?

  # If already a Usage struct, return as-is
  return usage_data if usage_data.is_a?(Usage)

  # Handle test doubles (and API response objects) by converting to hash
  usage_data = usage_data.to_h if usage_data.respond_to?(:to_h)

  # Anything that still is not a Hash cannot be converted to a struct
  return nil unless usage_data.is_a?(Hash)

  # Normalize keys to symbols before dispatching on provider
  normalized = usage_data.transform_keys(&:to_sym)

  case provider.to_s.downcase
  when 'openai'
    create_openai_usage(normalized)
  when 'anthropic'
    create_anthropic_usage(normalized)
  else
    create_generic_usage(normalized)
  end
end
.create_anthropic_usage(data) ⇒ Object
123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 |
# File 'lib/dspy/lm/usage.rb', line 123

# Builds a Usage struct from Anthropic-style usage data
# (input_tokens / output_tokens keys). Returns nil on failure.
def self.create_anthropic_usage(data)
  # Anthropic uses input_tokens/output_tokens
  input_tokens = data[:input_tokens] || 0
  output_tokens = data[:output_tokens] || 0
  # Anthropic does not always report a total; derive it when absent.
  total_tokens = data[:total_tokens] || (input_tokens + output_tokens)

  Usage.new(
    input_tokens: input_tokens,
    output_tokens: output_tokens,
    total_tokens: total_tokens
  )
rescue => e
  # Best-effort construction: log at debug level and return nil.
  # (Fixes doc-extraction artifact "#{e.}" — the message call was lost.)
  DSPy.logger.debug("Failed to create Anthropic usage: #{e.message}")
  nil
end
.create_generic_usage(data) ⇒ Object
140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 |
# File 'lib/dspy/lm/usage.rb', line 140

# Generic fallback: builds a Usage struct accepting either naming
# convention (input_tokens/prompt_tokens, output_tokens/completion_tokens).
# Returns nil on failure.
def self.create_generic_usage(data)
  input_tokens = data[:input_tokens] || data[:prompt_tokens] || 0
  output_tokens = data[:output_tokens] || data[:completion_tokens] || 0
  # Derive the total when the provider did not supply one.
  total_tokens = data[:total_tokens] || (input_tokens + output_tokens)

  Usage.new(
    input_tokens: input_tokens,
    output_tokens: output_tokens,
    total_tokens: total_tokens
  )
rescue => e
  # Best-effort construction: log at debug level and return nil.
  # (Fixes doc-extraction artifact "#{e.}" — the message call was lost.)
  DSPy.logger.debug("Failed to create generic usage: #{e.message}")
  nil
end
.create_openai_usage(data) ⇒ Object
83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 |
# File 'lib/dspy/lm/usage.rb', line 83

# Builds an OpenAIUsage struct from OpenAI-style usage data
# (prompt_tokens / completion_tokens keys, optional *_tokens_details).
# Returns nil on failure.
def self.create_openai_usage(data)
  # OpenAI uses prompt_tokens/completion_tokens
  input_tokens = data[:prompt_tokens] || data[:input_tokens] || 0
  output_tokens = data[:completion_tokens] || data[:output_tokens] || 0
  total_tokens = data[:total_tokens] || (input_tokens + output_tokens)

  # Detail objects may arrive as hashes or API response objects; normalize
  # both to symbol-keyed hashes (or nil) via convert_to_hash.
  prompt_details = convert_to_hash(data[:prompt_tokens_details])
  completion_details = convert_to_hash(data[:completion_tokens_details])

  OpenAIUsage.new(
    input_tokens: input_tokens,
    output_tokens: output_tokens,
    total_tokens: total_tokens,
    prompt_tokens_details: prompt_details,
    completion_tokens_details: completion_details
  )
rescue => e
  # Best-effort construction: log at debug level and return nil.
  # (Fixes doc-extraction artifact "#{e.}" — the message call was lost.)
  DSPy.logger.debug("Failed to create OpenAI usage: #{e.message}")
  nil
end