Class: GPT::Responses
- Inherits: Object
  - Object
  - GPT::Responses
- Defined in: lib/gpt/responses.rb
Instance Method Summary
- #cancel(response_id) ⇒ Object
- #create(payload) ⇒ Object
- #delete(response_id) ⇒ Object
- #get(response_id, include: nil, include_obfuscation: nil, starting_after: nil, stream: nil) ⇒ Object
- #initialize(client) ⇒ Responses (constructor): A new instance of Responses.
- #input_items(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil) ⇒ Object
- #stream(payload) ⇒ Object
- #stream_text(payload) ⇒ Object
Constructor Details
#initialize(client) ⇒ Responses
Returns a new instance of Responses.
# File 'lib/gpt/responses.rb', line 3

def initialize(client)
  @client = client
end
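A minimal wiring sketch. The GPT::Client constructor below is hypothetical and not documented on this page; whatever object you pass only needs to implement the json_get, json_post, json_delete, and sse_stream helpers used by the methods below.

require 'gpt'  # require path assumed from lib/gpt/

# Hypothetical client construction; substitute however the gem builds its client.
client = GPT::Client.new(api_key: ENV['OPENAI_API_KEY'])
responses = GPT::Responses.new(client)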
Instance Method Details
#cancel(response_id) ⇒ Object
# File 'lib/gpt/responses.rb', line 28

def cancel(response_id)
  @client.json_post("/v1/responses/#{response_id}/cancel")
end
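Issues a POST to /v1/responses/{response_id}/cancel. A usage sketch, assuming res is a Hash previously returned by #create below and that its identifier lives under the usual 'id' key:

# Cancel an in-flight response by its id.
responses.cancel(res['id'])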
#create(payload) ⇒ Object
7 8 9 10 11 |
# File 'lib/gpt/responses.rb', line 7 def create(payload) res = @client.json_post('/v1/responses', body: payload) res.extend(GPT::ResponseExtender) if res.is_a?(Hash) res end |
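POSTs the payload to /v1/responses and, when the result is a Hash, extends it with GPT::ResponseExtender. A usage sketch; the payload keys follow the OpenAI Responses API and the model name is illustrative:

payload = {
  'model' => 'gpt-4o-mini',                  # example model, substitute your own
  'input' => 'Say hello in one sentence.'
}
res = responses.create(payload)
puts res['id'] if res.is_a?(Hash)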
#delete(response_id) ⇒ Object
24 25 26 |
# File 'lib/gpt/responses.rb', line 24 def delete(response_id) @client.json_delete("/v1/responses/#{response_id}") end |
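A usage sketch, reusing the res Hash from the #create example; the return value is whatever the client's json_delete helper yields:

# Remove a stored response.
responses.delete(res['id'])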
#get(response_id, include: nil, include_obfuscation: nil, starting_after: nil, stream: nil) ⇒ Object
13 14 15 16 17 18 19 20 21 22 |
# File 'lib/gpt/responses.rb', line 13 def get(response_id, include: nil, include_obfuscation: nil, starting_after: nil, stream: nil) query = {} query['include[]'] = include if include query['include_obfuscation'] = include_obfuscation unless include_obfuscation.nil? query['starting_after'] = starting_after if starting_after query['stream'] = stream unless stream.nil? res = @client.json_get("/v1/responses/#{response_id}", query: query) res.extend(GPT::ResponseExtender) if res.is_a?(Hash) res end |
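Fetches a single response. Note that include is sent as the include[] query parameter, and the boolean options are only added when they are not nil. A sketch; the include value is illustrative, pass whatever the API accepts:

fetched = responses.get(res['id'], include: ['file_search_call.results'], include_obfuscation: false)
puts fetched['status'] if fetched.is_a?(Hash)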
#input_items(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil) ⇒ Object
32 33 34 35 36 37 38 39 40 |
# File 'lib/gpt/responses.rb', line 32 def input_items(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil) query = {} query['after'] = after if after query['before'] = before if before query['include[]'] = include if include query['limit'] = limit if limit query['order'] = order if order @client.json_get("/v1/responses/#{response_id}/input_items", query: query) end |
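Lists the input items of a response, with cursor-style pagination options. A sketch; the 'data' key is assumed to follow the usual list-object shape returned by the API:

page = responses.input_items(res['id'], limit: 20, order: 'desc')
Array(page['data']).each { |item| puts item['type'] } if page.is_a?(Hash)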
#stream(payload) ⇒ Object
42 43 44 45 46 47 48 |
# File 'lib/gpt/responses.rb', line 42 def stream(payload) payload = payload.dup payload['stream'] = true @client.sse_stream('/v1/responses', body: payload) do |chunk| yield chunk if block_given? end end |
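Duplicates the payload, forces the string key 'stream' to true (so string-keyed payloads match what the method sets), and yields each raw SSE chunk from the client's sse_stream helper. A usage sketch, reusing the payload from the #create example:

responses.stream(payload) do |chunk|
  # chunk is a raw SSE text fragment, not parsed JSON
  print chunk
end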
#stream_text(payload) ⇒ Object
# File 'lib/gpt/responses.rb', line 50

def stream_text(payload)
  buffer = ''.dup
  stream(payload) do |chunk|
    buffer << chunk
    parts = buffer.split("\n\n", -1)
    buffer = parts.pop || ''.dup
    parts.each do |raw_event|
      lines = raw_event.split("\n")
      event_name = nil
      data_lines = []
      lines.each do |line|
        if line.start_with?('event:')
          event_name = line.sub('event:', '').strip
        elsif line.start_with?('data:')
          data_lines << line.sub('data:', '').strip
        end
      end
      next if data_lines.empty?
      data = data_lines.join("\n")
      next if data == '[DONE]'
      begin
        json = Oj.load(data)
      rescue Oj::ParseError
        next
      end
      case event_name
      when 'response.output_text.delta'
        delta = json['delta']
        yield delta if delta && !delta.empty?
      when 'response.delta'
        delta = json.dig('delta', 'content')
        if delta.is_a?(Array)
          text_piece = delta.find { |c| c['type'] == 'output_text' || c['type'] == 'text' }
          yield(text_piece['text']) if text_piece && text_piece['text'] && !text_piece['text'].empty?
        end
      else
        # ignore other events
      end
    end
  end
  true
end
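Builds on #stream: it buffers chunks, splits the buffer into SSE events on blank lines, parses each data payload with Oj, and yields only the text deltas (from response.output_text.delta events, or text pieces inside response.delta), returning true when the stream ends. A usage sketch:

responses.stream_text(payload) do |text|
  print text  # incremental output text only, already extracted from the SSE events
end
puts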