Class: LLM::Clients::OpenAI

Inherits: Object
Includes:
HTTParty
Defined in:
lib/llm/clients/open_ai.rb,
lib/llm/clients/open_ai/response.rb

Defined Under Namespace

Classes: Response

Instance Method Summary collapse

Constructor Details

#initialize(llm:) ⇒ OpenAI

Returns a new instance of OpenAI.



12
13
14
15
# File 'lib/llm/clients/open_ai.rb', line 12

# Builds a client bound to one model description.
#
# @param llm [Object] the model record whose +canonical_name+ is sent to the API
def initialize(llm:)
  # Pull the shared logger from global config so all requests log uniformly.
  @logger = LLM.config.logger
  @llm = llm
end

Instance Method Details

#chat(messages, options = {}) ⇒ Object



17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
# File 'lib/llm/clients/open_ai.rb', line 17

# Sends a chat-completion request for +messages+ against the bound model.
#
# Translates our generic option names onto OpenAI's parameter names,
# drops any options the caller left unset, and either streams the reply
# (when +options[:stream]+ is truthy) or performs a single POST and
# normalizes the payload.
#
# @param messages [Array<Hash>] conversation messages in OpenAI's role/content shape
# @param options [Hash] generation options; recognized keys include
#   :temperature, :response_format, :max_output_tokens, :top_p,
#   :stop_sequences, :presence_penalty, :frequency_penalty, :tools,
#   :tool_choice, :stream, :on_message, :on_complete
# @return [Object] a normalized response (see Response#to_normalized_response),
#   or whatever chat_streaming returns in streaming mode
def chat(messages, options = {})
  # Generic option keys mapped onto OpenAI's wire-level parameter names.
  # NOTE(review): :response_format is assumed to be an object exposing
  # #response_format — confirm against callers.
  passthrough = {
    temperature: options[:temperature],
    response_format: options[:response_format]&.response_format,
    max_tokens: options[:max_output_tokens],
    top_p: options[:top_p],
    stop: options[:stop_sequences],
    presence_penalty: options[:presence_penalty],
    frequency_penalty: options[:frequency_penalty],
    tools: options[:tools],
    tool_choice: options[:tool_choice]
  }

  # compact strips every nil entry so unset options never reach the API.
  request_body = { model: @llm.canonical_name, messages: messages }
                 .merge(passthrough)
                 .compact

  if options[:stream]
    return chat_streaming(request_body, options[:on_message], options[:on_complete])
  end

  raw = post_url("/chat/completions", body: request_body.to_json)
  Response.new(raw).to_normalized_response
end