Class: Durable::Llm::CLI

Inherits:
Thor
  • Object
show all
Defined in:
lib/durable/llm/cli.rb

Constant Summary collapse

CONVERSATIONS_DIR =
File.expand_path('~/.durable_llm/conversations')
LAST_CONVERSATION_FILE =
File.join(CONVERSATIONS_DIR, 'last_conversation.txt')

Class Method Summary collapse

Instance Method Summary collapse

Class Method Details

.exit_on_failure? ⇒ Boolean

Returns:

  • (Boolean)


17
18
19
# File 'lib/durable/llm/cli.rb', line 17

# Tells Thor to exit the process with a non-zero status when a command
# fails, so shell scripts invoking this CLI can detect errors.
#
# @return [Boolean] always true
def self.exit_on_failure?
  true
end

Instance Method Details

#chat ⇒ void

This method returns an undefined value.

Start an interactive chat session with the model

Parameters:

  • options (Hash)

    a customizable set of options

Raises:

  • (RuntimeError)

    If no provider is found for the specified model



137
# File 'lib/durable/llm/cli.rb', line 137

desc 'chat', 'Start an interactive chat'

#conversation_file_path(id) ⇒ Object



24
25
26
# File 'lib/durable/llm/cli.rb', line 24

# Builds the on-disk path where a conversation is stored as JSON.
#
# @param id [String] conversation identifier
# @return [String] path under CONVERSATIONS_DIR ending in "<id>.json"
def conversation_file_path(id)
  filename = "#{id}.json"
  File.join(CONVERSATIONS_DIR, filename)
end

#conversations ⇒ void

This method returns an undefined value.

List all saved conversations



248
# File 'lib/durable/llm/cli.rb', line 248

desc 'conversations', 'List saved conversations'

#delete_conversation(id) ⇒ Object



291
292
293
294
295
296
297
298
299
300
301
302
303
304
# File 'lib/durable/llm/cli.rb', line 291

# Deletes a saved conversation file by id, reporting the outcome to
# the user via HighLine. Also clears the last-conversation pointer
# when it referenced the deleted conversation.
#
# @param id [String] conversation identifier
# @return [void]
def delete_conversation(id)
  ui = HighLine.new
  target = conversation_file_path(id)

  unless File.exist?(target)
    ui.say("Conversation #{id} not found")
    return
  end

  File.delete(target)
  ui.say("Deleted conversation #{id}")

  # Remove from last conversation if it was the last one
  return unless last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)

  File.delete(LAST_CONVERSATION_FILE)
end

#last_conversation_id ⇒ Object



45
46
47
48
49
# File 'lib/durable/llm/cli.rb', line 45

# Reads the id of the most recently used conversation from disk.
#
# @return [String, nil] the stripped id, or nil when none has been recorded
def last_conversation_id
  File.exist?(LAST_CONVERSATION_FILE) ? File.read(LAST_CONVERSATION_FILE).strip : nil
end

#models ⇒ void

This method returns an undefined value.

List all available models from all providers

Parameters:

  • options (Hash)

    a customizable set of options



221
# File 'lib/durable/llm/cli.rb', line 221

desc 'models', 'List available models'

#prompt(*prompt) ⇒ Object



72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
# File 'lib/durable/llm/cli.rb', line 72

# Send a one-shot prompt to the model and print the response.
#
# Resolves the provider from the model name, optionally continues a
# saved conversation, streams the reply unless disabled, and persists
# the updated message history.
#
# @param prompt [Array<String>] prompt words (joined with a single space)
# @return [void]
# @raise [RuntimeError] if no provider is found for the specified model
def prompt(*prompt)
  # `options` is the Thor options hash populated from command-line flags.
  model = options[:model] || 'gpt-3.5-turbo'
  provider_class = Durable::Llm::Providers.model_id_to_provider(model)

  raise "no provider found for model '#{model}'" if provider_class.nil?

  # Derive the client's provider symbol from the provider class name,
  # e.g. Durable::Llm::Providers::Openai -> :openai.
  provider_name = provider_class.name.split('::').last.downcase.to_sym
  client = Durable::Llm::Client.new(provider_name)

  # --conversation wins; --continue falls back to the most recently
  # used conversation id (nil when none is recorded).
  conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
  conversation = conversation_id ? load_conversation(conversation_id) : nil

  # A system message is only injected for brand-new conversations;
  # resumed ones already carry their history in 'messages'.
  messages = conversation ? conversation['messages'].dup : []
  messages << { role: 'system', content: options[:system] } if options[:system] && !conversation
  messages << { role: 'user', content: prompt.join(' ') }

  params = {
    model: model,
    messages: messages
  }
  # Merge extra provider-specific request parameters.
  # NOTE(review): assumes options[:option] is a Hash — confirm the Thor option type.
  params.merge!(options[:option]) if options[:option]

  begin
    if options[:no_stream] || !client.stream?
      # Non-streaming path: print the first completion choice in full.
      response = client.completion(**params)
      assistant_message = response.choices.first.to_s
      puts assistant_message
      messages << { role: 'assistant', content: assistant_message }
    else
      # Streaming path: echo chunks as they arrive, flushing stdout so
      # output appears incrementally, and accumulate the full reply.
      assistant_content = ''
      client.stream(**params) do |chunk|
        print chunk
        assistant_content += chunk
        $stdout.flush
      end
      messages << { role: 'assistant', content: assistant_content }
    end

    # Save conversation
    # Keeps the original creation timestamp when continuing; id may be
    # nil for a new conversation — presumably save_conversation assigns
    # one (helper not visible here).
    conversation_data = {
      'id' => conversation_id,
      'model' => model,
      'messages' => messages,
      'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
    }
    save_conversation(conversation_data)
  rescue Durable::Llm::Error => e
    # Known API-level failures: report and exit non-zero.
    warn "API Error: #{e.message}"
    exit 1
  rescue StandardError => e
    warn "Unexpected error: #{e.message}"
    exit 1
  end
end