Class: RuboCop::Cop::Prompt::MissingStop

Inherits: Base < Object

Defined in: lib/rubocop/cop/prompt/missing_stop.rb

Overview

Checks for missing stop tokens or max_tokens in OpenAI::Client.chat calls.

This cop identifies OpenAI::Client.chat method calls and ensures they include either a stop: or a max_tokens: parameter, preventing runaway generation and keeping output length predictable.

Examples:

# bad
OpenAI::Client.new.chat(
  parameters: {
    model: "gpt-4",
    messages: [{ role: "user", content: "Hello" }]
  }
)

# bad
client.chat(
  parameters: {
    model: "gpt-4",
    messages: messages
  }
)

# good
OpenAI::Client.new.chat(
  parameters: {
    model: "gpt-4",
    messages: [{ role: "user", content: "Hello" }],
    max_tokens: 100
  }
)

# good
client.chat(
  parameters: {
    model: "gpt-4",
    messages: messages,
    stop: ["END", "\n"]
  }
)

# good
client.chat(
  parameters: {
    model: "gpt-4",
    messages: messages,
    max_tokens: 1000,
    stop: ["END"]
  }
)
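
If a particular call genuinely cannot bound its output, the offense can be silenced with RuboCop's standard inline disable comment. The cop name Prompt/MissingStop used below is inferred from the class name via RuboCop's usual Department/CopName convention; check the gem's configuration for the exact name.

# acknowledged exception
client.chat( # rubocop:disable Prompt/MissingStop
  parameters: {
    model: "gpt-4",
    messages: messages
  }
)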

Constant Summary

MSG = "OpenAI::Client.chat call should include 'stop:' or 'max_tokens:' parameter to prevent runaway generation"

Instance Method Summary

Instance Method Details

#on_send(node) ⇒ Object



# File 'lib/rubocop/cop/prompt/missing_stop.rb', line 61

def on_send(node)
  # Only inspect calls that look like OpenAI::Client#chat invocations.
  return unless openai_chat_call?(node)

  # Pull out the hash passed as parameters: to the chat call.
  parameters_hash = extract_parameters_hash(node)
  return unless parameters_hash

  # The call is acceptable if it already bounds generation.
  return if has_stop_or_max_tokens?(parameters_hash)

  add_offense(node)
end
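
The predicates called from #on_send (openai_chat_call?, extract_parameters_hash, has_stop_or_max_tokens?) are private helpers that are not rendered on this page. A minimal sketch of how such helpers could be written with RuboCop's NodePattern DSL is shown below; these patterns are illustrative assumptions, not the gem's actual matchers, and would live inside the cop class (which inherits the NodePattern macros from Base).

# Illustrative only: plausible matchers for the helpers used in #on_send.

# Matches any `<receiver>.chat(parameters: { ... })` call.
def_node_matcher :openai_chat_call?, <<~PATTERN
  (send _ :chat (hash <(pair (sym :parameters) (hash ...)) ...>))
PATTERN

# Captures and returns the hash node passed as parameters:.
def_node_matcher :extract_parameters_hash, <<~PATTERN
  (send _ :chat (hash <(pair (sym :parameters) $(hash ...)) ...>))
PATTERN

# True if the parameters hash contains a stop: or max_tokens: key.
def_node_matcher :has_stop_or_max_tokens?, <<~PATTERN
  (hash <(pair (sym {:stop :max_tokens}) _) ...>)
PATTERN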