Class: Anthropic::Resources::Beta::Messages

Inherits:
Object
Defined in:
lib/anthropic/resources/beta/messages.rb,
lib/anthropic/resources/beta/messages/batches.rb

Defined Under Namespace

Classes: Batches

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(client:) ⇒ Messages

This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.

Returns a new instance of Messages.

Parameters:



# File 'lib/anthropic/resources/beta/messages.rb', line 287

def initialize(client:)
  @client = client
  @batches = Anthropic::Resources::Beta::Messages::Batches.new(client: client)
end

Instance Attribute Details

#batches ⇒ Anthropic::Resources::Beta::Messages::Batches (readonly)



# File 'lib/anthropic/resources/beta/messages.rb', line 8

def batches
  @batches
end
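
The batches sub-resource is reached through this reader. A one-line usage sketch follows; the client construction is an assumption for illustration and not part of the listing above.

# Assumes `client` is an Anthropic::Client; returns the Batches sub-resource.
batches = client.beta.messages.batches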

Instance Method Details

#count_tokens(messages: , model: , mcp_servers: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount

Some parameter documentation has been truncated; see Models::Beta::MessageCountTokensParams for more details.

Count the number of tokens in a Message.

The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.

Learn more about token counting in our [user guide](/en/docs/build-with-claude/token-counting)

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 271

def count_tokens(params)
  parsed, options = Anthropic::Beta::MessageCountTokensParams.dump_request(params)
  # Map the :betas request parameter onto the `anthropic-beta` header;
  # it is removed from the request body below.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessageTokensCount,
    options: options
  )
end
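
A minimal usage sketch follows. The client construction, API key handling, and model name are illustrative assumptions, not part of the listing above; only the count_tokens call itself is documented here.

require "anthropic"

# Illustrative client setup; adjust the API key source to your environment.
client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

# Count tokens for a single-turn prompt without creating a message.
count = client.beta.messages.count_tokens(
  model: "claude-sonnet-4-20250514",  # assumed model name for illustration
  messages: [{role: "user", content: "Hello, Claude"}]
)

puts count.input_tokens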

#create(max_tokens: , messages: , model: , container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage

See #stream_raw for the streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](/en/docs/initial-setup)

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 62

def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end

  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    model = parsed[:model].to_sym
    max_tokens = parsed[:max_tokens].to_i
    # When the caller has not overridden the client's default timeout or passed
    # request options, derive a per-request timeout from max_tokens.
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    # Otherwise fall back to a 600-second timeout merged with the caller's options.
    options = {timeout: 600, **options}
  end

  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    options: options
  )
end
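
A usage sketch for the non-streaming path. The client setup, model name, and max_tokens value are illustrative assumptions carried over from the count_tokens sketch above.

require "anthropic"

client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

# Single-turn, non-streaming request; model name and max_tokens are illustrative.
message = client.beta.messages.create(
  model: "claude-sonnet-4-20250514",
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

# The returned BetaMessage carries an array of content blocks; this assumes
# the first block is a text block.
puts message.content.first.text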

#stream(max_tokens: , messages: , model: , container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream

See #create for the non-streaming counterpart and #stream_raw for the lower-level event stream.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](/en/docs/initial-setup)

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 144

def stream(params)
  parsed, options = Anthropic::Models::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: {"accept" => "text/event-stream"},
    body: parsed,
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: options
  )
  Anthropic::Streaming::MessageStream.new(raw_stream: raw_stream)
end
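
A usage sketch for the streaming helper. The iteration below assumes the returned MessageStream is enumerable over streaming events; that assumption, along with the client setup and model name, is illustrative rather than part of the listing.

require "anthropic"

client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

stream = client.beta.messages.stream(
  model: "claude-sonnet-4-20250514",  # assumed model name for illustration
  max_tokens: 1024,
  messages: [{role: "user", content: "Tell me a short story"}]
)

# Assumes MessageStream yields streaming events when iterated.
stream.each do |event|
  p event
end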

#stream_raw(max_tokens: , messages: , model: , container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>

See #create for the non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](/en/docs/initial-setup)

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 215

def stream_raw(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: {
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    }.transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end
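
A usage sketch for the raw server-sent event stream. The delta handling assumes text deltas arrive on BetaRawContentBlockDeltaEvent instances (one of the event types named in the return signature); the client setup and model name are illustrative assumptions.

require "anthropic"

client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

raw = client.beta.messages.stream_raw(
  model: "claude-sonnet-4-20250514",  # assumed model name for illustration
  max_tokens: 1024,
  messages: [{role: "user", content: "Hello, Claude"}]
)

# The returned stream is enumerable; print text deltas as they arrive.
raw.each do |event|
  case event
  when Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent
    print event.delta.text if event.delta.respond_to?(:text)
  end
end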