Class: LLM::Anthropic

Inherits:
Provider show all
Includes:
Format
Defined in:
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb,
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/files.rb,
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format.rb,
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/models.rb,
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/error_handler.rb,
lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/stream_parser.rb

Overview

The Anthropic class implements a provider for [Anthropic](https://www.anthropic.com).

Examples:

#!/usr/bin/env ruby
require "llm"

llm = LLM.anthropic(key: ENV["KEY"])
bot = LLM::Bot.new(llm)
bot.chat ["Tell me about this photo", File.open("/images/dog.jpg", "rb")]
bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }

Defined Under Namespace

Modules: Format, Response Classes: ErrorHandler, Files, Models, StreamParser

Constant Summary collapse

HOST =
"api.anthropic.com"

Instance Method Summary collapse

Methods included from Format

#format

Methods inherited from Provider

#audio, #chat, clients, #embed, #images, #inspect, #moderations, #respond, #responses, #schema, #server_tool, #vector_stores, #with

Constructor Details

#initialize ⇒ Anthropic

Returns a new instance of Anthropic.

Parameters:

  • key (String, nil)

    The secret key for authentication



30
31
32
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 30

def initialize(**)
  super(host: HOST, **)
end

Instance Method Details

#assistant_role ⇒ String

Returns the role of the assistant in the conversation. Usually “assistant” or “model”

Returns:

  • (String)

    Returns the role of the assistant in the conversation. Usually “assistant” or “model”



78
79
80
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 78

def assistant_role
  "assistant"
end

#complete(prompt, params = {}) ⇒ LLM::Response

Provides an interface to the chat completions API

Examples:

llm = LLM.openai(key: ENV["KEY"])
messages = [{role: "system", content: "Your task is to answer all of my questions"}]
res = llm.complete("5 + 2 ?", messages:)
print "[#{res.choices[0].role}]", res.choices[0].content, "\n"

Parameters:

  • prompt (String)

    The input prompt to be completed

  • params (Hash) (defaults to: {})

    The parameters to maintain throughout the conversation. Any parameter the provider supports can be included and not only those listed here.

Returns:

Raises:

See Also:



44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 44

def complete(prompt, params = {})
  params = {role: :user, model: default_model, max_tokens: 1024}.merge!(params)
  tools  = resolve_tools(params.delete(:tools))
  params = [params, format_tools(tools)].inject({}, &:merge!).compact
  role, stream = params.delete(:role), params.delete(:stream)
  params[:stream] = true if stream.respond_to?(:<<) || stream == true
  req = Net::HTTP::Post.new("/v1/messages", headers)
  messages = [*(params.delete(:messages) || []), Message.new(role, prompt)]
  body = JSON.dump({messages: [format(messages)].flatten}.merge!(params))
  set_body_stream(req, StringIO.new(body))
  res = execute(request: req, stream:)
  LLM::Response.new(res)
    .extend(LLM::Anthropic::Response::Completion)
    .extend(Module.new { define_method(:__tools__) { tools } })
end

#default_model ⇒ String

Returns the default model for chat completions

Returns:

  • (String)

See Also:



86
87
88
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 86

def default_model
  "claude-sonnet-4-20250514"
end

#files ⇒ LLM::Anthropic::Files

Provides an interface to Anthropic’s files API

Returns:

See Also:



72
73
74
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 72

def files
  LLM::Anthropic::Files.new(self)
end

#models ⇒ LLM::Anthropic::Models

Provides an interface to Anthropic’s models API



64
65
66
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 64

def models
  LLM::Anthropic::Models.new(self)
end

#server_tools ⇒ Hash{Symbol => LLM::ServerTool}

Note:

This method includes certain tools that require configuration through a set of options that are easier to set through the LLM::Provider#server_tool method.

Returns:

See Also:



97
98
99
100
101
102
103
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 97

def server_tools
  {
    bash: server_tool(:bash, type: "bash_20250124"),
    web_search: server_tool(:web_search, type: "web_search_20250305", max_uses: 5),
    text_editor: server_tool(:str_replace_based_edit_tool, type: "text_editor_20250728", max_characters: 10_000)
  }
end

#web_search(query:) ⇒ LLM::Response

A convenience method for performing a web search using the Anthropic web search tool.

Examples:

llm = LLM.anthropic(key: ENV["KEY"])
res = llm.web_search(query: "summarize today's news")
res.search_results.each { |item| print item.title, ": ", item.url, "\n" }

Parameters:

  • query (String)

    The search query.

Returns:



114
115
116
117
# File 'lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb', line 114

def web_search(query:)
  complete(query, tools: [server_tools[:web_search]])
    .extend(LLM::Anthropic::Response::WebSearch)
end