Class: SmartPrompt::Conversation

Inherits: Object
Includes:
APIHandler
Defined in:
lib/smart_prompt/conversation.rb

Constant Summary

Constants included from APIHandler

APIHandler::MAX_RETRIES, APIHandler::RETRY_OPTIONS

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(engine, tools = nil) ⇒ Conversation



11
12
13
14
15
16
17
18
19
20
21
22
23
# File 'lib/smart_prompt/conversation.rb', line 11

# Builds a conversation bound to +engine+, caching the engine's adapters,
# LLM registry, and templates, and starting with an empty message list.
# +tools+ is an optional tool list forwarded on each LLM request.
def initialize(engine, tools = nil)
  SmartPrompt.logger.info "Create Conversation"
  @engine = engine
  @tools = tools
  @adapters = engine.adapters
  @llms = engine.llms
  @templates = engine.templates
  @current_adapter = engine.current_adapter
  @current_llm_name = nil
  @messages = []
  @last_response = nil
  @temperature = 0.7 # default sampling temperature until #temperature is called
end

Instance Attribute Details

#config_fileObject (readonly)

Returns the value of attribute config_file.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the configuration file path.
# NOTE(review): @config_file is never assigned in the code visible here
# (not set in #initialize) — confirm it is populated elsewhere or remove.
def config_file
  @config_file
end

#last_call_idObject (readonly)

Returns the value of attribute last_call_id.



9
10
11
# File 'lib/smart_prompt/conversation.rb', line 9

# Reader for the id of the most recent BetterPrompt model-call record,
# set by #send_msg / #send_msg_by_stream when better_prompt_db is enabled.
def last_call_id
  @last_call_id
end

#last_responseObject (readonly)

Returns the value of attribute last_response.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the most recent LLM response (set by #send_msg,
# #send_msg_once and #embeddings; nil before the first request).
def last_response
  @last_response
end

#messagesObject (readonly)

Returns the value of attribute messages.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the pending message list that will be sent on the next
# request; reset to a single system message after each send.
def messages
  @messages
end

Instance Method Details

#add_message(msg, with_history = false) ⇒ Object



47
48
49
50
51
52
# File 'lib/smart_prompt/conversation.rb', line 47

# Appends +msg+ to the working message list, and additionally mirrors it
# into the engine-wide history when +with_history+ is true.
def add_message(msg, with_history = false)
  history_messages << msg if with_history
  @messages << msg
end

#embeddings(length) ⇒ Object



155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
# File 'lib/smart_prompt/conversation.rb', line 155

# Computes an embedding (retrying per RETRY_OPTIONS) for the text of the
# most recent user message, resets the message list back to the system
# prompt, and returns the vector truncated/padded to +length+ entries.
def embeddings(length)
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    # The last user message wins; empty string when none is present.
    text = @messages.reduce("") { |acc, msg| msg[:role] == "user" ? msg[:content] : acc }
    @last_response = @current_llm.embeddings(text, @model_name)
    @messages = [{ role: "system", content: @sys_msg }]
    normalize(@last_response, length)
  end
end

#history_messagesObject



43
44
45
# File 'lib/smart_prompt/conversation.rb', line 43

# Delegates to the engine's shared cross-conversation history buffer.
def history_messages
  @engine.history_messages
end

#model(model_name) ⇒ Object



32
33
34
35
36
37
# File 'lib/smart_prompt/conversation.rb', line 32

# Records the model name to use for subsequent requests; also registers
# the (llm, model) pair with BetterPrompt when its DB is enabled.
def model(model_name)
  @model_name = model_name
  BetterPrompt.add_model(@current_llm_name, @model_name) if @engine.config["better_prompt_db"]
end

#normalize(x, length) ⇒ Object



145
146
147
148
149
150
151
152
153
# File 'lib/smart_prompt/conversation.rb', line 145

# Coerces the vector +x+ to exactly +length+ entries: longer vectors are
# truncated and scaled to unit L2 norm; shorter (or equal-length) vectors
# are zero-padded in place via Array#concat.
#
# BUG FIX: the padding branch computed `[0] * (x.length - length)`, which
# is negative whenever padding is actually needed, so Array#* raised
# ArgumentError ("negative argument"). The count must be length - x.length.
#
# NOTE(review): the padded branch is NOT L2-normalized, unlike the
# truncation branch — confirm whether that asymmetry is intentional.
def normalize(x, length)
  if x.length > length
    v = Numo::NArray.cast(x[0..length - 1])
    norm = Math.sqrt((v * v).sum)
    return (v / norm).to_a
  else
    # Pads with zeros up to +length+ (no-op when lengths are equal).
    return x.concat([0] * (length - x.length))
  end
end

#prompt(template_name, params = {}, with_history: false) ⇒ Object



54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# File 'lib/smart_prompt/conversation.rb', line 54

# Queues a user message. A Symbol is treated as a template name: the
# template is rendered with +params+ and its output becomes the message;
# anything else is sent verbatim as the message content. Logs the prompt
# to BetterPrompt when its DB is enabled. Returns self for chaining.
def prompt(template_name, params = {}, with_history: false)
  if template_name.is_a?(Symbol)
    template_name = template_name.to_s
    SmartPrompt.logger.info "Use template #{template_name}"
    raise "Template #{template_name} not found" unless @templates.key?(template_name)
    rendered = @templates[template_name].render(params)
    add_message({ role: "user", content: rendered }, with_history)
    BetterPrompt.add_prompt(template_name, "user", rendered) if @engine.config["better_prompt_db"]
  else
    add_message({ role: "user", content: template_name }, with_history)
    BetterPrompt.add_prompt("NULL", "user", template_name) if @engine.config["better_prompt_db"]
  end
  self
end

#send_msg(params = {}) ⇒ Object



91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
# File 'lib/smart_prompt/conversation.rb', line 91

# Sends the queued messages (or the engine-wide history when
# params[:with_history] is truthy) to the current LLM, retrying per
# RETRY_OPTIONS. Records the call/response in BetterPrompt when enabled,
# resets the message list to the system prompt, and returns the response
# text. On final failure returns an error string instead of raising.
def send_msg(params = {})
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    msgs = params[:with_history] ? history_messages : @messages
    if @engine.config["better_prompt_db"]
      @last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, msgs, false, @temperature, 0, 0.0, 0, @tools)
    end
    @last_response = @current_llm.send_request(msgs, @model_name, @temperature, @tools, nil)
    # Some adapters stream internally and return ""; fall back to their buffer.
    @last_response = @current_llm.last_response if @last_response == ""
    if @engine.config["better_prompt_db"]
      BetterPrompt.add_response(@last_call_id, @last_response, false)
    end
    @messages = [{ role: "system", content: @sys_msg }]
    @last_response
  end
rescue => e
  return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
end

#send_msg_by_stream(params = {}, &proc) ⇒ Object



120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
# File 'lib/smart_prompt/conversation.rb', line 120

# Streaming variant of #send_msg: forwards each chunk to +proc+ instead
# of returning the full response. Records the call (and the engine's
# accumulated stream_response) in BetterPrompt when enabled, then resets
# the message list to the system prompt. Returns an error string on
# final failure instead of raising.
def send_msg_by_stream(params = {}, &proc)
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    msgs = params[:with_history] ? history_messages : @messages
    if @engine.config["better_prompt_db"]
      @last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, msgs, true, @temperature, 0, 0.0, 0, @tools)
    end
    @current_llm.send_request(msgs, @model_name, @temperature, @tools, proc)
    if @engine.config["better_prompt_db"]
      BetterPrompt.add_response(@last_call_id, @engine.stream_response, true)
    end
    @messages = [{ role: "system", content: @sys_msg }]
  end
rescue => e
  return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
end

#send_msg_onceObject



83
84
85
86
87
88
89
# File 'lib/smart_prompt/conversation.rb', line 83

# Single-shot send with no retries, no tools and no BetterPrompt logging:
# sends the queued messages, resets the list to the system prompt, and
# returns the response.
def send_msg_once
  raise "No LLM selected" if @current_llm.nil?
  @last_response = @current_llm.send_request(@messages, @model_name, @temperature)
  @messages = [{ role: "system", content: @sys_msg }]
  @last_response
end

#sys_msg(message, params) ⇒ Object



74
75
76
77
78
79
80
81
# File 'lib/smart_prompt/conversation.rb', line 74

# Sets the system prompt: remembers it for post-send resets, queues it as
# a system message (optionally into history via params[:with_history]),
# and logs it to BetterPrompt when enabled. Returns self for chaining.
def sys_msg(message, params)
  @sys_msg = message
  add_message({ role: "system", content: message }, params[:with_history])
  BetterPrompt.add_prompt("NULL", "system", message) if @engine.config["better_prompt_db"]
  self
end

#temperature(temperature) ⇒ Object



39
40
41
# File 'lib/smart_prompt/conversation.rb', line 39

# DSL-style setter for the sampling temperature used by later requests
# (overrides the 0.7 default from #initialize).
def temperature(temperature)
  @temperature = temperature
end

#use(llm_name) ⇒ Object



25
26
27
28
29
30
# File 'lib/smart_prompt/conversation.rb', line 25

# Selects the LLM registered under +llm_name+ for subsequent requests.
# Raises RuntimeError when no such LLM is configured. Returns self for
# chaining.
#
# BUG FIX: the error message interpolated the undefined local
# +adapter_name+, so a missing LLM raised NameError instead of the
# intended configuration error; interpolate +llm_name+ instead.
def use(llm_name)
  raise "Adapter #{llm_name} not configured" unless @llms.key?(llm_name)
  @current_llm = @llms[llm_name]
  @current_llm_name = llm_name
  self
end