Module: AI

Included in:
IntelliAgent
Defined in:
lib/intelli_agent/ai.rb

Overview

In the future, this may become a bus to more than one AI provider

Constant Summary collapse

BASIC_MODEL =
ENV.fetch('OPENAI_BASIC_MODEL')
ADVANCED_MODEL =
ENV.fetch('OPENAI_ADVANCED_MODEL')

Class Method Summary collapse

Class Method Details

.chat(messages, model: AI::BASIC_MODEL, response_format: nil) ⇒ Object



45
46
47
48
49
50
51
# File 'lib/intelli_agent/ai.rb', line 45

# Sends a multi-message chat completion request and returns the reply text.
#
# @param messages [Array<Hash>] conversation history ({ role:, content: } hashes)
# @param model [String] model identifier (defaults to AI::BASIC_MODEL)
# @param response_format [Symbol, nil] pass :json to request a JSON-object response
# @return [String] the stripped content of the first choice
def self.chat(messages, model: AI::BASIC_MODEL, response_format: nil)
  request = { model: model, messages: messages }
  # Ask the API for a JSON object only when the caller explicitly opts in.
  request[:response_format] = { type: 'json_object' } if response_format == :json

  answer = OpenAI::Client.new.chat(parameters: request)
  answer.dig('choices', 0, 'message', 'content').strip
end

.embed(input, model: 'text-embedding-3-large') ⇒ Object



6
7
8
9
# File 'lib/intelli_agent/ai.rb', line 6

# Computes an embedding vector for the given input text.
#
# @param input [String] the text to embed
# @param model [String] embedding model identifier
# @return [Array<Float>] the embedding vector of the first data entry
def self.embed(input, model: 'text-embedding-3-large')
  client = OpenAI::Client.new
  result = client.embeddings(parameters: { input: input, model: model })
  result.dig('data', 0, 'embedding')
end

.modelsObject



53
54
55
# File 'lib/intelli_agent/ai.rb', line 53

# Lists the models available to the configured OpenAI account.
#
# @return [Object] the raw model list response from the API
def self.models
  client = OpenAI::Client.new
  client.models.list
end

.single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil) ⇒ Object



32
33
34
35
36
37
38
39
40
41
42
43
# File 'lib/intelli_agent/ai.rb', line 32

# Runs a one-shot chat with a system instruction plus a single user message.
#
# @param system [String] the system prompt
# @param user [String] the user message
# @param model [String] model identifier (defaults to AI::BASIC_MODEL)
# @param response_format [Symbol, nil] pass :json to request a JSON-object response
# @return [String] the stripped content of the first choice
def self.single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
  conversation = [
    { role: 'system', content: system },
    { role: 'user', content: user }
  ]

  request = { model: model, messages: conversation }
  # Only request a JSON object when the caller explicitly asks for it.
  request[:response_format] = { type: 'json_object' } if response_format == :json

  answer = OpenAI::Client.new.chat(parameters: request)
  answer.dig('choices', 0, 'message', 'content').strip
end

.single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil) ⇒ Object



11
12
13
14
15
16
17
18
# File 'lib/intelli_agent/ai.rb', line 11

# Sends a single user prompt (no system message) and returns the reply text.
#
# @param prompt [String] the user prompt
# @param model [String] model identifier (defaults to AI::BASIC_MODEL)
# @param response_format [Symbol, nil] pass :json to request a JSON-object response
# @return [String] the stripped content of the first choice
def self.single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
  request = {
    model: model,
    messages: [{ role: 'user', content: prompt }]
  }
  request[:response_format] = { type: 'json_object' } if response_format == :json

  answer = OpenAI::Client.new.chat(parameters: request)
  answer.dig('choices', 0, 'message', 'content').strip
end

.vision(prompt:, image_url:, response_format: nil) ⇒ Object



20
21
22
23
24
25
26
27
28
29
30
# File 'lib/intelli_agent/ai.rb', line 20

# Asks a vision-capable model a question about an image.
#
# Generalized to accept a +model:+ keyword (defaulting to the previous
# hard-coded AI::ADVANCED_MODEL) for consistency with the other chat helpers.
#
# @param prompt [String] the text question about the image
# @param image_url [String] URL of the image to analyze
# @param model [String] model identifier (defaults to AI::ADVANCED_MODEL)
# @param response_format [Symbol, nil] pass :json to request a JSON-object response
# @return [String] the stripped content of the first choice
def self.vision(prompt:, image_url:, model: AI::ADVANCED_MODEL, response_format: nil)
  # A single user message whose content mixes text and an image reference.
  content = [{ type: :text, text: prompt },
             { type: :image_url, image_url: { url: image_url } }]

  parameters = { model:, messages: [{ role: :user, content: content }] }
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)

  response = OpenAI::Client.new.chat(parameters:)

  response.dig('choices', 0, 'message', 'content').strip
end