23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
|
# File 'lib/langgraph_rb/chat_ruby_openai.rb', line 23
# Performs a chat completion against the OpenAI API, optionally exposing tools.
#
# @param messages [Array<Hash>] conversation messages (normalized before sending)
# @param tools [Array, nil] optional tool definitions; falls back to @bound_tools
# @return [Hash, String] { tool_calls: [...] } when the model requests tool
#   invocations, otherwise the assistant's text content
# @raise [ArgumentError] if messages is not an Array
def call(messages, tools: nil)
  raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)

  # Expand each tool into one or more OpenAI tool schemas; objects that don't
  # respond to #to_openai_tool_schema are assumed to be pre-built schema hashes.
  tool_definitions = (tools || @bound_tools)
  tool_schemas = Array(tool_definitions).flat_map do |tool|
    if tool.respond_to?(:to_openai_tool_schema)
      Array(tool.to_openai_tool_schema)
    else
      [tool]
    end
  end

  request_payload = {
    model: @model,
    messages: normalize_messages(messages)
  }
  # FIX: `present?` is ActiveSupport-only; use a plain-Ruby nil check so this
  # class works without Rails loaded.
  request_payload[:temperature] = @temperature unless @temperature.nil?
  unless tool_schemas.empty?
    request_payload[:tools] = tool_schemas
    request_payload[:tool_choice] = 'auto'
  end

  notify_llm_request({
    name: 'OpenAI::ChatCompletion',
    model: @model,
    model_parameters: { temperature: @temperature },
    input: request_payload
  })

  response = @client.chat(parameters: request_payload)
  # FIX: previously `message = (response)` / `usage = (response)` assigned the
  # whole response hash. Extract the assistant message and usage sections from
  # the ruby-openai response (string-keyed: choices[0].message, usage).
  message = response.dig("choices", 0, "message") || {}
  tool_calls = message["tool_calls"]
  text_content = message["content"]
  raw_usage = response["usage"] || {}
  usage = {
    prompt_tokens: raw_usage["prompt_tokens"],
    completion_tokens: raw_usage["completion_tokens"],
    total_tokens: raw_usage["total_tokens"]
  }

  notify_llm_response({
    output: tool_calls ? { tool_calls: tool_calls } : text_content,
    prompt_tokens: usage[:prompt_tokens],
    completion_tokens: usage[:completion_tokens],
    total_tokens: usage[:total_tokens]
  })

  if tool_calls && !tool_calls.empty?
    # Normalize the provider-specific tool-call shape into this library's
    # { id:, name:, arguments: } format.
    normalized_calls = tool_calls.map do |tc|
      {
        id: tc["id"],
        name: tc["function"]["name"],
        arguments: parse_tool_arguments(tc["function"]["arguments"])
      }
    end
    { tool_calls: normalized_calls }
  else
    text_content
  end
rescue => e
  # FIX: only HTTP-client errors (e.g. Faraday::Error) expose #response;
  # calling it on an arbitrary StandardError raised NoMethodError and masked
  # the original failure. Guard before dereferencing.
  detail = e.respond_to?(:response) ? e.response&.dig(:body) : nil
  notify_llm_error({ error: detail || e.message })
  raise
end
|