Class: Durable::Llm::CLI
- Inherits: Thor
- Ancestors: Object → Thor → Durable::Llm::CLI
- Defined in:
- lib/durable/llm/cli.rb
Constant Summary collapse
- CONVERSATIONS_DIR =
File.expand_path('~/.durable_llm/conversations')
- LAST_CONVERSATION_FILE =
File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
Class Method Summary collapse
Instance Method Summary collapse
-
#chat ⇒ void
Start an interactive chat session with the model.
- #conversation_file_path(id) ⇒ Object
-
#conversations ⇒ void
List all saved conversations.
- #delete_conversation(id) ⇒ Object
- #last_conversation_id ⇒ Object
-
#models ⇒ void
List all available models from all providers.
- #prompt(*prompt) ⇒ Object
Class Method Details
.exit_on_failure? ⇒ Boolean
17 18 19 |
# File 'lib/durable/llm/cli.rb', line 17 def self.exit_on_failure? true end |
Instance Method Details
#chat ⇒ void
This method returns an undefined value.
Start an interactive chat session with the model
137 |
# File 'lib/durable/llm/cli.rb', line 137 desc 'chat', 'Start an interactive chat' |
#conversation_file_path(id) ⇒ Object
24 25 26 |
# File 'lib/durable/llm/cli.rb', line 24 def conversation_file_path(id) File.join(CONVERSATIONS_DIR, "#{id}.json") end |
#conversations ⇒ void
This method returns an undefined value.
List all saved conversations
248 |
# File 'lib/durable/llm/cli.rb', line 248 desc 'conversations', 'List saved conversations' |
#delete_conversation(id) ⇒ Object
291 292 293 294 295 296 297 298 299 300 301 302 303 304 |
# File 'lib/durable/llm/cli.rb', line 291 def delete_conversation(id) cli = HighLine.new path = conversation_file_path(id) if File.exist?(path) File.delete(path) cli.say("Deleted conversation #{id}") # Remove from last conversation if it was the last one File.delete(LAST_CONVERSATION_FILE) if last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE) else cli.say("Conversation #{id} not found") end end |
#last_conversation_id ⇒ Object
45 46 47 48 49 |
# File 'lib/durable/llm/cli.rb', line 45 def last_conversation_id return nil unless File.exist?(LAST_CONVERSATION_FILE) File.read(LAST_CONVERSATION_FILE).strip end |
#models ⇒ void
This method returns an undefined value.
List all available models from all providers
221 |
# File 'lib/durable/llm/cli.rb', line 221 desc 'models', 'List available models' |
#prompt(*prompt) ⇒ Object
72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 |
# File 'lib/durable/llm/cli.rb', line 72 def prompt(*prompt) model = [:model] || 'gpt-3.5-turbo' provider_class = Durable::Llm::Providers.model_id_to_provider(model) raise "no provider found for model '#{model}'" if provider_class.nil? provider_name = provider_class.name.split('::').last.downcase.to_sym client = Durable::Llm::Client.new(provider_name) conversation_id = [:conversation] || ([:continue] ? last_conversation_id : nil) conversation = conversation_id ? load_conversation(conversation_id) : nil = conversation ? conversation['messages'].dup : [] << { role: 'system', content: [:system] } if [:system] && !conversation << { role: 'user', content: prompt.join(' ') } params = { model: model, messages: } params.merge!([:option]) if [:option] begin if [:no_stream] || !client.stream? response = client.completion(**params) = response.choices.first.to_s puts << { role: 'assistant', content: } else assistant_content = '' client.stream(**params) do |chunk| print chunk assistant_content += chunk $stdout.flush end << { role: 'assistant', content: assistant_content } end # Save conversation conversation_data = { 'id' => conversation_id, 'model' => model, 'messages' => , 'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601 } save_conversation(conversation_data) rescue Durable::Llm::Error => e warn "API Error: #{e.}" exit 1 rescue StandardError => e warn "Unexpected error: #{e.}" exit 1 end end |