Class: Durable::Llm::CLI

Inherits:
Thor
  • Object
show all
Defined in:
lib/durable/llm/cli.rb

Overview

Command-line interface for the Durable LLM gem.

Provides Thor-based CLI commands for interacting with LLM providers.

Constant Summary collapse

CONVERSATIONS_DIR =
File.expand_path('~/.durable_llm/conversations')
LAST_CONVERSATION_FILE =
File.join(CONVERSATIONS_DIR, 'last_conversation.txt')

Class Method Summary collapse

Instance Method Summary collapse

Class Method Details

.exit_on_failure? ⇒ Boolean

Returns:

  • (Boolean)


20
21
22
# File 'lib/durable/llm/cli.rb', line 20

# Tell Thor to exit the process with a non-zero status when a command fails,
# so shell scripts and CI can detect CLI errors.
def self.exit_on_failure?
  true
end

Instance Method Details

#chat ⇒ void

This method returns an undefined value.

Start an interactive chat session with the model

Parameters:

  • options (Hash)

    a customizable set of options

Raises:

  • (RuntimeError)

    If no provider is found for the specified model



140
# File 'lib/durable/llm/cli.rb', line 140

desc 'chat', 'Start an interactive chat'

#conversations ⇒ void

This method returns an undefined value.

List all saved conversations



251
# File 'lib/durable/llm/cli.rb', line 251

desc 'conversations', 'List saved conversations'

#delete_conversation(id) ⇒ Object



294
295
296
297
298
299
300
301
302
303
304
305
306
307
# File 'lib/durable/llm/cli.rb', line 294

# Delete a saved conversation by its id.
#
# Removes the conversation file when it exists and, if that conversation
# was also the most recently used one, clears the last-conversation
# pointer file as well. Reports the outcome on the terminal.
#
# @param id [String] identifier of the conversation to delete
# @return [void]
def delete_conversation(id)
  ui = HighLine.new
  # conversation_file_path is a sibling helper not shown here — assumed to
  # map an id to a path under CONVERSATIONS_DIR.
  file = conversation_file_path(id)

  unless File.exist?(file)
    ui.say("Conversation #{id} not found")
    return
  end

  File.delete(file)
  ui.say("Deleted conversation #{id}")

  # Clear the "last conversation" pointer only when it referenced the
  # conversation we just removed.
  return unless last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)

  File.delete(LAST_CONVERSATION_FILE)
end

#models ⇒ void

This method returns an undefined value.

List all available models from all providers

Parameters:

  • options (Hash)

    a customizable set of options



224
# File 'lib/durable/llm/cli.rb', line 224

desc 'models', 'List available models'

#prompt(*prompt) ⇒ Object



75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/durable/llm/cli.rb', line 75

# Send a one-shot prompt to the selected model and print the reply.
#
# Resolves the provider from the model name, optionally resumes a saved
# conversation (via --conversation or --continue), sends the accumulated
# message history, streams or prints the reply, and persists the updated
# conversation afterwards.
#
# @param prompt [Array<String>] words of the prompt, joined with spaces
# @return [void]
# @raise [RuntimeError] if no provider is found for the specified model
def prompt(*prompt)
  model = options[:model] || 'gpt-3.5-turbo'
  klass = Durable::Llm::Providers.model_id_to_provider(model)

  raise "no provider found for model '#{model}'" if klass.nil?

  # Provider symbol is the last constant segment, lowercased (e.g. :openai).
  provider_sym = klass.name.split('::').last.downcase.to_sym
  client = Durable::Llm::Client.new(provider_sym)

  # Explicit --conversation wins; otherwise --continue resumes the last one.
  convo_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
  convo = convo_id ? load_conversation(convo_id) : nil

  # NOTE(review): resumed messages come from load_conversation (presumably
  # string-keyed JSON) while new entries use symbol keys — confirm the
  # provider client accepts the mix.
  history = convo ? convo['messages'].dup : []
  history << { role: 'system', content: options[:system] } if options[:system] && !convo
  history << { role: 'user', content: prompt.join(' ') }

  request = { model: model, messages: history }
  request.merge!(options[:option]) if options[:option]

  begin
    if options[:no_stream] || !client.stream?
      reply = client.completion(**request).choices.first.to_s
      puts reply
      history << { role: 'assistant', content: reply }
    else
      # +'' yields an unfrozen buffer so << works under frozen_string_literal.
      streamed = +''
      client.stream(**request) do |chunk|
        print chunk
        streamed << chunk
        $stdout.flush
      end
      history << { role: 'assistant', content: streamed }
    end

    # Persist the updated conversation, preserving the original creation time.
    save_conversation(
      'id' => convo_id,
      'model' => model,
      'messages' => history,
      'created_at' => convo ? convo['created_at'] : Time.now.iso8601
    )
  rescue Durable::Llm::Error => e
    warn "API Error: #{e.message}"
    exit 1
  rescue StandardError => e
    warn "Unexpected error: #{e.message}"
    exit 1
  end
end