Class: LLMs::Executors::AnthropicExecutor

Inherits:
BaseExecutor show all
Defined in:
lib/llms/executors/anthropic_executor.rb

Constant Summary

Constants inherited from BaseExecutor

BaseExecutor::DEFAULT_TEMPERATURE

Instance Attribute Summary

Attributes inherited from BaseExecutor

#available_tools, #base_url, #client, #last_error, #last_received_message, #last_received_message_id, #last_sent_message, #last_usage_data, #max_completion_tokens, #max_thinking_tokens, #max_tokens, #model_name, #system_prompt, #temperature, #thinking_effort, #thinking_mode

Instance Method Summary collapse

Methods inherited from BaseExecutor

#execute_prompt, #initialize

Constructor Details

This class inherits a constructor from LLMs::Executors::BaseExecutor

Instance Method Details

#execute_conversation(conversation, &block) ⇒ Object



11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
# File 'lib/llms/executors/anthropic_executor.rb', line 11

# Executes the conversation against the Anthropic API.
#
# When a block is given, the conversation is streamed and each text delta
# (and, for now, each thinking delta) is yielded to the block as it
# arrives. Without a block, the conversation is sent as a single blocking
# request via #send_conversation.
#
# @param conversation [Object] the conversation to execute
# @yield [String] each streamed text/thinking fragment, when streaming
# @return [Object, nil] the last received message, or nil on error
def execute_conversation(conversation)
  # NOTE: the original signature captured &block but only ever used
  # yield; the explicit capture allocated a Proc for nothing. Callers
  # passing a block are unaffected by its removal.
  if block_given?
    stream_conversation(conversation) do |handler|
      handler.on(:text_delta) do |event|
        yield event.text
      end
      ## TODO configure whether to yield thinking deltas
      handler.on(:thinking_delta) do |event|
        yield event.thinking
      end
    end
  else
    send_conversation(conversation)
  end
end

#send_conversation(conversation) ⇒ Object



56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
# File 'lib/llms/executors/anthropic_executor.rb', line 56

# Sends the conversation to the Anthropic API as a single, non-streaming
# request and records the result on the executor.
#
# On an HTTP 400 the response body is stored in @last_error and nil is
# returned; likewise when the parsed response carries an 'error' payload.
# On success, @last_received_message_id, @last_received_message and
# @last_usage_data are populated.
#
# @param conversation [Object] the conversation to send
# @return [Object, nil] the received message, or nil on error
def send_conversation(conversation)
  init_new_request(conversation)

  # Use a monotonic clock for the duration: Time.now is wall-clock and
  # can jump (NTP, DST, manual changes), corrupting the measured
  # execution time passed to calculate_usage.
  start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  begin
    http_response = client_request
  rescue Faraday::BadRequestError => e
    @last_error = e.response[:body]
    return nil
  end
  execution_time = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time

  if http_response['error']
    @last_error = http_response['error']
    return nil
  end

  @last_received_message_id = LLMs::Adapters::AnthropicMessageAdapter.find_message_id(http_response)
  @last_received_message = LLMs::Adapters::AnthropicMessageAdapter.message_from_api_format(http_response)
  @last_usage_data = calculate_usage(http_response, execution_time)

  @last_received_message
end

#stream_conversation(conversation) {|emitter| ... } ⇒ Object

Yields:

  • (emitter)


27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
# File 'lib/llms/executors/anthropic_executor.rb', line 27

# Streams the conversation to the Anthropic API, yielding an event
# emitter so the caller can subscribe to streaming events (e.g.
# :text_delta, :thinking_delta) before the request starts.
#
# Error handling mirrors #send_conversation: an HTTP 400 body or an
# API-level 'error' payload is stored in @last_error and nil returned.
# The parsed stream response is preferred over the raw HTTP response
# when both are available.
#
# @param conversation [Object] the conversation to stream
# @yield [emitter] the Stream::EventEmitter to attach handlers to
# @return [Object, nil] the received message, or nil on error
def stream_conversation(conversation)
  init_new_request(conversation)

  emitter = Stream::EventEmitter.new
  yield emitter if block_given?

  # Monotonic clock: immune to wall-clock adjustments, so the
  # execution time recorded in usage data is reliable.
  start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  begin
    http_response, stream_parsed_response = stream_client_request(emitter)
  rescue Faraday::BadRequestError => e
    @last_error = e.response[:body]
    return nil
  end
  execution_time = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time

  api_response = stream_parsed_response || http_response

  if api_response['error']
    @last_error = api_response['error']
    return nil
  end

  @last_received_message_id = LLMs::Adapters::AnthropicMessageAdapter.find_message_id(api_response)
  @last_received_message = LLMs::Adapters::AnthropicMessageAdapter.message_from_api_format(api_response)
  @last_usage_data = calculate_usage(api_response, execution_time)

  @last_received_message
end