Class: Anthropic::Resources::Beta::Messages
- Inherits:
-
Object
- Object
- Anthropic::Resources::Beta::Messages
- Defined in:
- lib/anthropic/resources/beta/messages.rb,
lib/anthropic/resources/beta/messages/batches.rb
Defined Under Namespace
Classes: Batches
Instance Attribute Summary collapse
Instance Method Summary collapse
-
#count_tokens(messages: , model: , context_management: nil, mcp_servers: nil, output_format: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount
Some parameter documentation has been truncated, see Models::Beta::MessageCountTokensParams for more details.
-
#create(max_tokens: , messages: , model: , container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage
See #stream_raw for streaming counterpart.
-
#initialize(client:) ⇒ Messages
constructor
private
A new instance of Messages.
-
#stream(max_tokens: , messages: , model: , container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for non-streaming counterpart.
-
#stream_raw(max_tokens: , messages: , model: , container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>
See #create for non-streaming counterpart.
- #tool_runner(params) ⇒ Anthropic::Helpers::Tools::Runner
Constructor Details
#initialize(client:) ⇒ Messages
This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.
Returns a new instance of Messages.
319 320 321 322 |
# File 'lib/anthropic/resources/beta/messages.rb', line 319

# @api private
#
# Returns a new instance of Messages and wires up the nested Batches
# sub-resource against the same client.
#
# @param client [Anthropic::Client] the configured API client
def initialize(client:)
  @client = client
  @batches = Anthropic::Resources::Beta::Messages::Batches.new(client: client)
end
Instance Attribute Details
#batches ⇒ Anthropic::Resources::Beta::Messages::Batches (readonly)
8 9 10 |
# File 'lib/anthropic/resources/beta/messages.rb', line 8

# Read-only accessor for the nested message-batches sub-resource.
#
# @return [Anthropic::Resources::Beta::Messages::Batches]
def batches
  @batches
end
Instance Method Details
#count_tokens(messages: , model: , context_management: nil, mcp_servers: nil, output_format: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount
Some parameter documentation has been truncated, see Models::Beta::MessageCountTokensParams for more details.
Count the number of tokens in a Message.
The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
Learn more about token counting in our user guide
303 304 305 306 307 308 309 310 311 312 313 314 |
# File 'lib/anthropic/resources/beta/messages.rb', line 303

# Count the number of tokens in a Message, including tools, images, and
# documents, without creating it.
#
# @param params [Hash] request parameters (see Models::Beta::MessageCountTokensParams)
# @return [Anthropic::Models::Beta::BetaMessageTokensCount]
def count_tokens(params)
  parsed, options = Anthropic::Beta::MessageCountTokensParams.dump_request(params)
  # `betas` travels as the `anthropic-beta` request header, not in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessageTokensCount,
    options: options
  )
end
#create(max_tokens: , messages: , model: , container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage
See #stream_raw for streaming counterpart.
Some parameter documentation has been truncated, see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 |
# File 'lib/anthropic/resources/beta/messages.rb', line 74

# Send a structured list of input messages and have the model generate the
# next message in the conversation (non-streaming; see #stream_raw for the
# streaming counterpart).
#
# @param params [Hash] request parameters (see Models::Beta::MessageCreateParams)
# @return [Anthropic::Models::Beta::BetaMessage]
# @raise [ArgumentError] if `stream: true` is passed to this non-streaming method
def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end
  # Strip structured-output tool schemas out of the request and remember the
  # models so the response's tool inputs can be parsed back into them.
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  unwrap = ->(raw) { Anthropic::Helpers::Messages.parse_input_schemas!(raw, tools: tools, models: models) }
  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    # No per-request overrides: derive a timeout from the requested
    # max_tokens and the model's non-streaming token ceiling.
    model = parsed[:model]&.to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end
  # `betas` travels as the `anthropic-beta` request header, not in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    unwrap: unwrap,
    options: options
  )
end
#stream(max_tokens: , messages: , model: , container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for non-streaming counterpart.
Some parameter documentation has been truncated, see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 |
# File 'lib/anthropic/resources/beta/messages.rb', line 166

# Send a structured list of input messages and stream the model's reply as a
# high-level MessageStream (see #create for the non-streaming counterpart).
#
# @param params [Hash] request parameters (see Models::Beta::MessageCreateParams)
# @return [Anthropic::Streaming::MessageStream]
# @raise [ArgumentError] if `stream: false` is passed to this streaming method
def stream(params)
  parsed, options = Anthropic::Models::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  # Remember structured-output tool schemas so streamed tool inputs can be
  # parsed back into their models.
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  # `betas` travels as the `anthropic-beta` request header, not in the body.
  header_params = {betas: "anthropic-beta"}
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: {
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    }.transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
  Anthropic::Streaming::MessageStream.new(raw_stream: raw_stream, tools: tools, models: models)
end
#stream_raw(max_tokens: , messages: , model: , container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>
See #create for non-streaming counterpart.
Some parameter documentation has been truncated, see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 |
# File 'lib/anthropic/resources/beta/messages.rb', line 243

# Send a structured list of input messages and stream the raw server-sent
# events (see #create for the non-streaming counterpart).
#
# @param params [Hash] request parameters (see Models::Beta::MessageCreateParams)
# @return [Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStreamEvent>]
# @raise [ArgumentError] if `stream: false` is passed to this streaming method
def stream_raw(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  # `betas` travels as the `anthropic-beta` request header, not in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: {
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    }.transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end