Method: Prompt.stream_prompt

Defined in:
lib/prompt.rb

.stream_prompt(input, conversation = '', temp = load_temperature()) ⇒ Object

Streams the chat completion, printing each chunk to stdout as it arrives and returning a hash containing the original input and the accumulated response (see the usage sketch after the source listing).

# File 'lib/prompt.rb', line 8

def self.stream_prompt(input, conversation = '', temp = load_temperature())
  # Fall back to a default temperature when load_temperature returns nil.
  temp = 0.7 if temp.nil?
  if conversation.length == 0
    conversation += input
  else
    conversation += "\n My question: #{input}"
  end
  response = ''
  unless client.nil?
    client.chat(
      parameters: {
        model: "gpt-3.5-turbo",
        messages: [{ role: "user", content: conversation}],
        temperature: temp,
        stream: proc do |chunk, _bytesize|
          # Append each streamed delta to the response and echo it as it arrives.
          content = chunk.dig("choices", 0, "delta", "content")
          unless content.nil?
            response += content
            print content
          end
        end
      }
    )
    context = {
      "input" => input,
      "response" => response,
    }

    return context
  end
end
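
A minimal usage sketch, assuming client resolves to a configured OpenAI::Client from the ruby-openai gem (with a valid API key) and that load_temperature returns a Float or nil. If the client is nil, the method returns nil, so the result is guarded before reuse; the conversation format passed on the second call is illustrative only.

context = Prompt.stream_prompt("Explain streaming responses in one sentence.")
unless context.nil?
  puts  # newline after the streamed output
  # Thread the first exchange back in as conversation history (hypothetical format).
  history = "#{context['input']}\n#{context['response']}"
  Prompt.stream_prompt("Now shorten that to five words.", history)
end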