Class: Anthropic::Resources::Beta::Messages
Inherits: Object

Defined in:
  lib/anthropic/resources/beta/messages.rb,
  lib/anthropic/resources/beta/messages/batches.rb
Defined Under Namespace
Classes: Batches
Instance Attribute Summary

Instance Method Summary
- #count_tokens(messages:, model:, context_management: nil, mcp_servers: nil, output_config: nil, output_format: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount
  Some parameter documentation has been truncated; see Models::Beta::MessageCountTokensParams for more details.
- #create(max_tokens:, messages:, model:, container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage
  See #stream_raw for streaming counterpart.
- #stream(max_tokens:, messages:, model:, container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
  See #create for non-streaming counterpart.
- #stream_raw(max_tokens:, messages:, model:, container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>
  See #create for non-streaming counterpart.
- #tool_runner(params) ⇒ Anthropic::Helpers::Tools::Runner
Instance Attribute Details
#batches ⇒ Anthropic::Resources::Beta::Messages::Batches (readonly)
# File 'lib/anthropic/resources/beta/messages.rb', line 8

def batches
  @batches
end
Instance Method Details
#count_tokens(messages:, model:, context_management: nil, mcp_servers: nil, output_config: nil, output_format: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount
Some parameter documentation has been truncated; see Models::Beta::MessageCountTokensParams for more details.
Count the number of tokens in a Message.
The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
Learn more about token counting in our user guide
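Example (a minimal usage sketch; the model name and message content are illustrative and the client is assumed to read ANTHROPIC_API_KEY from the environment):

require "anthropic"

anthropic = Anthropic::Client.new

# Count tokens for a prompt without creating a message.
token_count = anthropic.beta.messages.count_tokens(
  model: "claude-sonnet-4-5",
  messages: [{role: "user", content: "Hello, Claude"}]
)

puts token_count.input_tokens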
# File 'lib/anthropic/resources/beta/messages.rb', line 322

def count_tokens(params)
  parsed, options = Anthropic::Beta::MessageCountTokensParams.dump_request(params)
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessageTokensCount,
    options: options
  )
end
#create(max_tokens:, messages:, model:, container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage
See #stream_raw for streaming counterpart.
Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
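Example (a minimal usage sketch; the model name, max_tokens value, and message content are illustrative and the client is assumed to read ANTHROPIC_API_KEY from the environment):

require "anthropic"

anthropic = Anthropic::Client.new

# Non-streaming request; the response is a complete BetaMessage.
message = anthropic.beta.messages.create(
  model: "claude-sonnet-4-5",
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

# message.content is an array of content blocks (text, tool use, etc.).
puts message.content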
# File 'lib/anthropic/resources/beta/messages.rb', line 79

def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )
  unwrap = ->(raw) { Anthropic::Helpers::Messages.parse_input_schemas!(raw, tools:, models:) }
  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    model = parsed[:model]&.to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    unwrap: unwrap,
    options: options
  )
end
#stream(max_tokens:, messages:, model:, container: nil, context_management: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for non-streaming counterpart.
Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
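Example (a sketch of the streaming helper; it assumes the returned MessageStream exposes a text enumerator as in the SDK's streaming helpers, and the model name and prompt are illustrative):

require "anthropic"

anthropic = Anthropic::Client.new

stream = anthropic.beta.messages.stream(
  model: "claude-sonnet-4-5",
  max_tokens: 1024,
  messages: [{role: "user", content: "Tell me a short story"}]
)

# Print text deltas as they arrive.
stream.text.each do |text|
  print(text)
end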
# File 'lib/anthropic/resources/beta/messages.rb', line 177

def stream(params)
  parsed, options = Anthropic::Models::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )
  header_params = {betas: "anthropic-beta"}
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
  Anthropic::Streaming::MessageStream.new(raw_stream:, tools:, models:)
end
#stream_raw(max_tokens:, messages:, model:, container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>
See #create for non-streaming counterpart.
Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our user guide
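Example (a sketch of consuming the raw server-sent event stream; the model name and prompt are illustrative):

require "anthropic"

anthropic = Anthropic::Client.new

stream = anthropic.beta.messages.stream_raw(
  model: "claude-sonnet-4-5",
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

# Each yielded event is one of the BetaRaw*Event models listed in the return type.
stream.each do |event|
  puts event.type
end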
# File 'lib/anthropic/resources/beta/messages.rb', line 258

def stream_raw(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end
#tool_runner(params) ⇒ Anthropic::Helpers::Tools::Runner
# File 'lib/anthropic/resources/beta/messages.rb', line 13

def tool_runner(params)
  params = params.to_h
  max_iterations = params.delete(:max_iterations)
  compaction_control = params.delete(:compaction_control)
  Anthropic::Helpers::Tools::Runner.new(@client, params:, max_iterations:, compaction_control:)
end