Class: ActiveAgent::GenerationProvider::OpenAIProvider

Inherits:
Base
  • Object
Defined in:
lib/active_agent/generation_provider/open_ai_provider.rb

Instance Attribute Summary

Attributes inherited from Base

#client, #config, #prompt, #response

Instance Method Summary

Constructor Details

#initialize(config) ⇒ OpenAIProvider



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 11

def initialize(config)
  super
  @api_key = config["api_key"]
  @model_name = config["model"] || "gpt-4o-mini"
  @client = OpenAI::Client.new(access_token: @api_key, log_errors: true)
end
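
For illustration, the provider could be constructed directly from a config hash; the string keys mirror those read in the constructor, and the environment variable name is only an example:

provider = ActiveAgent::GenerationProvider::OpenAIProvider.new(
  "api_key" => ENV["OPENAI_API_KEY"],
  "model" => "gpt-4o-mini"
)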

Instance Method Details

#chat_prompt(parameters: prompt_parameters) ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 28

def chat_prompt(parameters: prompt_parameters)
  parameters[:stream] = provider_stream if prompt.options[:stream] || config["stream"]
  chat_response(@client.chat(parameters: parameters))
end
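
#chat_prompt is normally reached through #generate, which sets @prompt before the parameters are built. A minimal sketch of a direct call, assuming @prompt is already set and that prompt_parameters produces a payload shaped like the one below:

parameters = {
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "Hello" }]
}
response = provider.chat_prompt(parameters: parameters)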

#embed(prompt) ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 33

def embed(prompt)
  @prompt = prompt

  embeddings_prompt(parameters: embeddings_parameters)
rescue => e
  raise GenerationProviderError, e.message
end
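
A usage sketch, assuming a prompt object whose message responds to #content; the returned Response wraps the embedding vector as its message content (see #embeddings_response below):

response = provider.embed(prompt)
vector = response.message.content # the embedding as an array of floats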

#embeddings_parameters(input: prompt.message.content, model: "text-embedding-ada-002") ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 41

def embeddings_parameters(input: prompt.message.content, model: "text-embedding-ada-002")
  {
    model: model,
    input: input
  }
end
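
For example, passing an explicit input overrides the default taken from prompt.message.content:

provider.embeddings_parameters(input: "Hello world")
# => { model: "text-embedding-ada-002", input: "Hello world" }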

#embeddings_prompt(parameters:) ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 54

def embeddings_prompt(parameters:)
  embeddings_response(@client.embeddings(parameters: parameters))
end

#embeddings_response(response) ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 48

def embeddings_response(response)
  message = Message.new(content: response.dig("data", 0, "embedding"), role: "assistant")

  @response = ActiveAgent::GenerationProvider::Response.new(prompt: prompt, message: message, raw_response: response)
end
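
A sketch of the raw response shape being unpacked here; the numbers are purely illustrative:

raw = { "data" => [{ "embedding" => [0.01, -0.02, 0.03] }] }
raw.dig("data", 0, "embedding") # => [0.01, -0.02, 0.03]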

#generate(prompt) ⇒ Object



# File 'lib/active_agent/generation_provider/open_ai_provider.rb', line 18

def generate(prompt)
  @prompt = prompt

  # parameters[:instructions] = prompt.instructions.content if prompt.instructions.present?

  chat_prompt(parameters: prompt_parameters)
rescue => e
  raise GenerationProviderError, e.message
end
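
A usage sketch of the overall flow; the prompt object is assumed to come from an agent's rendered prompt, and the error constant is assumed to resolve under ActiveAgent::GenerationProvider:

begin
  response = provider.generate(prompt)
  response.message.content # the assistant reply
rescue ActiveAgent::GenerationProvider::GenerationProviderError => e
  # handle or log the wrapped provider error
  warn e.message
end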