Class: DSPy::LM::GeminiAdapter
- Defined in:
- lib/dspy/lm/adapters/gemini_adapter.rb
Instance Attribute Summary
Attributes inherited from Adapter
Instance Method Summary collapse
- #chat(messages:, signature: nil, **extra_params, &block) ⇒ Object
-
#initialize(model:, api_key:) ⇒ GeminiAdapter
constructor
A new instance of GeminiAdapter.
Constructor Details
#initialize(model:, api_key:) ⇒ GeminiAdapter
Returns a new instance of GeminiAdapter.
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 |
# File 'lib/dspy/lm/adapters/gemini_adapter.rb', line 10

# Builds a Gemini-backed adapter for the given model.
#
# @param model [String] Gemini model identifier passed through to the client
# @param api_key [String] Google generative-language API key; validated before use
# @return [GeminiAdapter] a new instance of GeminiAdapter
def initialize(model:, api_key:)
  super
  validate_api_key!(api_key, 'gemini')

  # Client is configured for the generative-language API with SSE enabled,
  # since #chat always consumes the streaming endpoint.
  @client = Gemini.new(
    credentials: {
      service: 'generative-language-api',
      api_key: api_key
    },
    options: {
      model: model,
      server_sent_events: true
    }
  )
end
Instance Method Details
#chat(messages:, signature: nil, **extra_params, &block) ⇒ Object
26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 |
# File 'lib/dspy/lm/adapters/gemini_adapter.rb', line 26

# Sends a chat request to Gemini and returns a normalized Response.
# Always consumes the streaming endpoint; when a block is given, each raw
# chunk is yielded to it as it arrives.
#
# NOTE(review): several local/helper names below were lost in the extracted
# copy of this page and have been reconstructed — verify them against
# lib/dspy/lm/adapters/gemini_adapter.rb before relying on this listing.
#
# @param messages [Array<Hash>] DSPy-format messages
# @param signature [Object, nil] optional signature (currently unused here)
# @param extra_params [Hash] merged verbatim into the request parameters
# @yield [chunk] each parsed streaming chunk, when a block is supplied
# @return [Response] aggregated content, usage and typed metadata
# @raise [AdapterError] when a streaming chunk cannot be parsed as JSON
def chat(messages:, signature: nil, **extra_params, &block)
  normalized_messages = normalize_messages(messages)

  # Validate vision support if images are present
  if contains_images?(normalized_messages)
    VisionModels.validate_vision_support!('gemini', model)
    # Convert messages to Gemini format with proper image handling
    normalized_messages = format_multimodal_messages(normalized_messages)
  end

  # Convert DSPy message format to Gemini format
  contents = convert_messages(normalized_messages)

  request_params = {
    contents: contents
  }.merge(extra_params)

  begin
    # Always use streaming
    content = ""
    final_response_data = nil

    @client.stream_generate_content(request_params) do |chunk|
      # Handle case where chunk might be a string (from SSE VCR)
      if chunk.is_a?(String)
        begin
          chunk = JSON.parse(chunk)
        rescue JSON::ParserError => e
          raise AdapterError, "Failed to parse Gemini streaming response: #{e.message}"
        end
      end

      # Extract content from chunks
      if chunk.dig('candidates', 0, 'content', 'parts')
        chunk_text = extract_text_from_parts(chunk.dig('candidates', 0, 'content', 'parts'))
        content += chunk_text

        # Call block only if provided (for real streaming)
        block.call(chunk) if block_given?
      end

      # Store final response data (usage, metadata) from last chunk
      if chunk['usageMetadata'] || chunk.dig('candidates', 0, 'finishReason')
        final_response_data = chunk
      end
    end

    # Extract usage information from final chunk
    usage_data = final_response_data&.dig('usageMetadata')
    usage_struct = usage_data ? UsageFactory.create('gemini', usage_data) : nil

    # Create metadata from final chunk
    metadata_attrs = {
      provider: 'gemini',
      model: model,
      finish_reason: final_response_data&.dig('candidates', 0, 'finishReason'),
      safety_ratings: final_response_data&.dig('candidates', 0, 'safetyRatings'),
      streaming: block_given?
    }

    # Create typed metadata
    metadata = ResponseMetadataFactory.create('gemini', metadata_attrs)

    Response.new(
      content: content,
      usage: usage_struct,
      metadata: metadata
    )
  rescue => e
    handle_gemini_error(e)
  end
end