Class: Durable::Llm::CLI
- Inherits: Thor
- Defined in: lib/durable/llm/cli.rb
Overview
Command-line interface for Durable LLM gem.
Provides Thor-based CLI commands for interacting with LLM providers.
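As a Thor subclass, the CLI is presumably dispatched through Thor's standard .start entry point. A minimal executable wrapper (the require path and wiring are assumptions about how the gem packages this class) would be:

  #!/usr/bin/env ruby
  # Hypothetical executable wrapper around the Thor CLI class.
  require 'durable/llm'

  Durable::Llm::CLI.start(ARGV)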
Constant Summary

- CONVERSATIONS_DIR = File.expand_path('~/.durable_llm/conversations')
- LAST_CONVERSATION_FILE = File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
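For orientation, these constants resolve to paths like the following (assuming a Unix home directory of /home/user):

  File.expand_path('~/.durable_llm/conversations')
  # => "/home/user/.durable_llm/conversations"
  File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
  # => "/home/user/.durable_llm/conversations/last_conversation.txt"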
Class Method Summary

- .exit_on_failure? ⇒ Boolean
  Tells Thor to exit with a nonzero status on command failure.

Instance Method Summary
- #chat ⇒ void
  Start an interactive chat session with the model.
- #conversations ⇒ void
  List all saved conversations.
- #delete_conversation(id) ⇒ Object
  Delete a saved conversation by ID.
- #models ⇒ void
  List all available models from all providers.
- #prompt(*prompt) ⇒ Object
  Send a prompt to a model and print the response.
Class Method Details
.exit_on_failure? ⇒ Boolean
Returning true tells Thor to exit with a nonzero status when a command fails, rather than only printing the error.

  # File 'lib/durable/llm/cli.rb', line 20

  def self.exit_on_failure?
    true
  end
Instance Method Details
#chat ⇒ void
This method returns an undefined value.
Start an interactive chat session with the model.

  # File 'lib/durable/llm/cli.rb', line 140

  desc 'chat', 'Start an interactive chat'
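Only the Thor desc declaration is surfaced here. Purely as an illustrative sketch (not the gem's actual implementation), an interactive loop built from the same pieces this class uses elsewhere (HighLine for terminal I/O, the client's completion call as seen in #prompt) might look like:

  require 'highline'

  # Hypothetical chat loop; Client#completion mirrors its real usage
  # in #prompt, everything else here is illustrative.
  def chat_loop(client, model)
    cli = HighLine.new
    messages = []
    loop do
      input = cli.ask('you> ')
      break if input.strip == 'exit'

      messages << { role: 'user', content: input }
      response = client.completion(model: model, messages: messages)
      reply = response.choices.first.to_s
      cli.say(reply)
      messages << { role: 'assistant', content: reply }
    end
  end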
#conversations ⇒ void
This method returns an undefined value.
List all saved conversations.

  # File 'lib/durable/llm/cli.rb', line 251

  desc 'conversations', 'List saved conversations'
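Only the desc line survives here as well. Assuming conversations are stored one file per ID under CONVERSATIONS_DIR (which #delete_conversation's use of conversation_file_path suggests), a listing could be as simple as:

  # Illustrative sketch; the .json extension is an assumption.
  Dir.glob(File.join(CONVERSATIONS_DIR, '*.json')).each do |path|
    puts File.basename(path, '.json')
  end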
#delete_conversation(id) ⇒ Object
  # File 'lib/durable/llm/cli.rb', line 294

  def delete_conversation(id)
    cli = HighLine.new
    path = conversation_file_path(id)
    if File.exist?(path)
      File.delete(path)
      cli.say("Deleted conversation #{id}")
      # Remove from last conversation if it was the last one
      File.delete(LAST_CONVERSATION_FILE) if last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)
    else
      cli.say("Conversation #{id} not found")
    end
  end
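The private helpers conversation_file_path and last_conversation_id are not shown in these docs. A plausible sketch, assuming one JSON file per conversation and a bare ID stored in LAST_CONVERSATION_FILE (both assumptions, not confirmed by the source):

  # Hypothetical reconstructions of the private helpers used above.
  def conversation_file_path(id)
    File.join(CONVERSATIONS_DIR, "#{id}.json") # extension is an assumption
  end

  def last_conversation_id
    return nil unless File.exist?(LAST_CONVERSATION_FILE)

    File.read(LAST_CONVERSATION_FILE).strip
  end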
#models ⇒ void
This method returns an undefined value.
List all available models from all providers.

  # File 'lib/durable/llm/cli.rb', line 224

  desc 'models', 'List available models'
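The enumeration API behind this command is not shown. Sketching the idea with hypothetical providers/models accessors on Durable::Llm::Providers (both names are assumptions, not confirmed by these docs):

  # Illustrative only; both accessors below are assumed.
  Durable::Llm::Providers.providers.each do |provider_class|
    provider_class.models.each { |model_id| puts model_id }
  end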
#prompt(*prompt) ⇒ Object
  # File 'lib/durable/llm/cli.rb', line 75

  def prompt(*prompt)
    model = options[:model] || 'gpt-3.5-turbo'
    provider_class = Durable::Llm::Providers.model_id_to_provider(model)

    raise "no provider found for model '#{model}'" if provider_class.nil?

    provider_name = provider_class.name.split('::').last.downcase.to_sym
    client = Durable::Llm::Client.new(provider_name)

    conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
    conversation = conversation_id ? load_conversation(conversation_id) : nil

    messages = conversation ? conversation['messages'].dup : []
    messages << { role: 'system', content: options[:system] } if options[:system] && !conversation
    messages << { role: 'user', content: prompt.join(' ') }

    params = { model: model, messages: messages }
    params.merge!(options[:option]) if options[:option]

    begin
      if options[:no_stream] || !client.stream?
        response = client.completion(**params)
        message = response.choices.first.to_s
        puts message
        messages << { role: 'assistant', content: message }
      else
        assistant_content = ''
        client.stream(**params) do |chunk|
          print chunk
          assistant_content += chunk
          $stdout.flush
        end
        messages << { role: 'assistant', content: assistant_content }
      end

      # Save conversation
      conversation_data = {
        'id' => conversation_id,
        'model' => model,
        'messages' => messages,
        'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
      }
      save_conversation(conversation_data)
    rescue Durable::Llm::Error => e
      warn "API Error: #{e.message}"
      exit 1
    rescue StandardError => e
      warn "Unexpected error: #{e.message}"
      exit 1
    end
  end
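For a programmatic invocation, Thor commands are dispatched through .start. Assuming the flag names match the options referenced above (--model, --system, --continue are assumptions drawn from the options hash keys), a call equivalent to running the gem's executable with `prompt --model gpt-4 "Hello"` would be:

  require 'durable/llm' # require path is an assumption

  Durable::Llm::CLI.start(['prompt', '--model', 'gpt-4', 'Hello'])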