Class: Anthropic::Resources::Messages
Inherits: Object
Defined in:
  lib/anthropic/resources/messages.rb
  lib/anthropic/resources/messages/batches.rb
Defined Under Namespace
Classes: Batches
Instance Attribute Summary
Instance Method Summary
- #count_tokens(messages:, model:, system_: nil, thinking: nil, tool_choice: nil, tools: nil, request_options: {}) ⇒ Anthropic::Models::MessageTokensCount
  Some parameter documentation has been truncated; see Models::MessageCountTokensParams for more details.
- #create(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Models::Message
  See #stream for the streaming counterpart.
- #initialize(client:) ⇒ Messages (constructor, private)
  A new instance of Messages.
- #stream(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
  See #create for the non-streaming counterpart.
- #stream_raw(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::RawMessageStartEvent, Anthropic::Models::RawMessageDeltaEvent, Anthropic::Models::RawMessageStopEvent, Anthropic::Models::RawContentBlockStartEvent, Anthropic::Models::RawContentBlockDeltaEvent, Anthropic::Models::RawContentBlockStopEvent>
  See #create for the non-streaming counterpart.
Constructor Details
#initialize(client:) ⇒ Messages
This method is part of a private API. You should avoid using this method if possible, as it may be removed or changed in the future.
Returns a new instance of Messages.
```ruby
# File 'lib/anthropic/resources/messages.rb', line 269

def initialize(client:)
  @client = client
  @batches = Anthropic::Resources::Messages::Batches.new(client: client)
end
```
Instance Attribute Details
#batches ⇒ Anthropic::Resources::Messages::Batches (readonly)
```ruby
# File 'lib/anthropic/resources/messages.rb', line 7

def batches
  @batches
end
```
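For orientation, a minimal sketch of reaching the Batches sub-resource through this reader; the client construction shown is an illustrative assumption, not part of this page:

```ruby
# Illustrative sketch: the read-only #batches attribute hands back the
# Message Batches resource.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"]) # assumed setup
client.messages.batches # => Anthropic::Resources::Messages::Batches
```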
Instance Method Details
#count_tokens(messages:, model:, system_: nil, thinking: nil, tool_choice: nil, tools: nil, request_options: {}) ⇒ Anthropic::Models::MessageTokensCount
Some parameter documentation has been truncated; see Models::MessageCountTokensParams for more details.
Count the number of tokens in a Message.
The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
Learn more about token counting in our [user guide](/en/docs/build-with-claude/token-counting)
```ruby
# File 'lib/anthropic/resources/messages.rb', line 255

def count_tokens(params)
  parsed, options = Anthropic::MessageCountTokensParams.dump_request(params)
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens",
    body: parsed,
    model: Anthropic::MessageTokensCount,
    options: options
  )
end
```
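A minimal usage sketch, counting tokens for a prospective request without creating a Message; the client setup and model name below are illustrative assumptions:

```ruby
# Sketch only: count tokens for a request before sending it.
# The model name and client setup are illustrative assumptions.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

count = client.messages.count_tokens(
  model: "claude-sonnet-4-5", # example model name
  messages: [{role: "user", content: "Hello, Claude"}]
)

puts count.input_tokens # number of input tokens the request would consume
```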
#create(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Models::Message
See #stream for the streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](/en/docs/initial-setup)
```ruby
# File 'lib/anthropic/resources/messages.rb', line 55

def create(params)
  parsed, options = Anthropic::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end

  tool_models = get_structured_output_models(parsed)
  unwrap =
    if tool_models.any?
      ->(raw) { parse_structured_outputs!(raw, tool_models) }
    end

  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    model = parsed[:model].to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end

  @client.request(
    method: :post,
    path: "v1/messages",
    body: parsed,
    model: Anthropic::Message,
    unwrap: unwrap,
    options: options
  )
end
```
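A minimal usage sketch for the non-streaming path; the client setup and model name are illustrative assumptions:

```ruby
# Sketch only: a single-turn request via #create.
# Client setup and model name are illustrative assumptions.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

message = client.messages.create(
  model: "claude-sonnet-4-5", # example model name
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

# The returned Anthropic::Models::Message carries a list of content blocks;
# text blocks expose #text.
message.content.each do |block|
  puts block.text if block.respond_to?(:text)
end
```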
#stream(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for the non-streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation with streaming.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](/en/docs/initial-setup)
```ruby
# File 'lib/anthropic/resources/messages.rb', line 136

def stream(params)
  parsed, options = Anthropic::Models::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)

  tool_models = get_structured_output_models(parsed)

  raw_stream = @client.request(
    method: :post,
    path: "v1/messages",
    headers: {"accept" => "text/event-stream"},
    body: parsed,
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Models::RawMessageStreamEvent,
    options: options
  )

  Anthropic::Streaming::MessageStream.new(
    raw_stream: raw_stream,
    tool_models: tool_models
  )
end
```
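A minimal usage sketch, assuming the returned Anthropic::Streaming::MessageStream exposes a #text enumerator for incremental text; the client setup and model name are illustrative:

```ruby
# Sketch only: stream text deltas with the high-level helper.
# Assumes MessageStream#text yields text fragments as they arrive.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

stream = client.messages.stream(
  model: "claude-sonnet-4-5", # example model name
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

stream.text.each { |text| print(text) }
```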
#stream_raw(max_tokens:, messages:, model:, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::RawMessageStartEvent, Anthropic::Models::RawMessageDeltaEvent, Anthropic::Models::RawMessageStopEvent, Anthropic::Models::RawContentBlockStartEvent, Anthropic::Models::RawContentBlockDeltaEvent, Anthropic::Models::RawContentBlockStopEvent>
See #create for the non-streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](/en/docs/initial-setup)
```ruby
# File 'lib/anthropic/resources/messages.rb', line 207

def stream_raw(params)
  parsed, options = Anthropic::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  @client.request(
    method: :post,
    path: "v1/messages",
    headers: {"accept" => "text/event-stream"},
    body: parsed,
    stream: Anthropic::Internal::Stream,
    model: Anthropic::RawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end
```
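A minimal usage sketch for the raw event stream; it assumes the returned Anthropic::Internal::Stream is enumerable and yields the typed RawMessageStreamEvent variants listed in the signature (client setup and model name are illustrative):

```ruby
# Sketch only: consume raw server-sent events from #stream_raw.
# Client setup and model name are illustrative assumptions.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

raw = client.messages.stream_raw(
  model: "claude-sonnet-4-5", # example model name
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

raw.each do |event|
  case event
  when Anthropic::Models::RawContentBlockDeltaEvent
    # Text deltas carry a #text field; other delta types are skipped here.
    print(event.delta.text) if event.delta.respond_to?(:text)
  end
end
```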