Class: Async::Ollama::Client

Inherits: REST::Resource
Defined in:
lib/async/ollama/client.rb

Overview

Represents a connection to the Ollama service, providing methods to generate completions, chat, and list models.
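
For example, a minimal sketch of opening a client and generating a completion. This assumes the .open entry point inherited from REST::Resource (defaulting to ENDPOINT), and uses "llama3.2" as a placeholder model name; the Generate resource wraps the response body it read as #value:

require "async/ollama"

Sync do
  # Connect to the default ENDPOINT (http://localhost:11434).
  client = Async::Ollama::Client.open

  generator = client.generate("Why is the sky blue?", model: "llama3.2")
  puts generator.value
ensure
  client&.close
end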

Constant Summary

ENDPOINT =
  # The default endpoint to connect to.
  Async::HTTP::Endpoint.parse("http://localhost:11434")

Instance Method Summary

#chat(messages, **options, &block) ⇒ Chat
Sends a chat request with the given messages to Ollama.

#generate(prompt, **options, &block) ⇒ Generate
Generates a response from the given prompt using Ollama.

#models ⇒ Models
Retrieves the list of available models from Ollama.

#pull(model) ⇒ Pull
Pulls the given model into the local Ollama instance.

Instance Method Details

#chat(messages, **options, &block) ⇒ Chat

Sends a chat request with the given messages to Ollama.



# File 'lib/async/ollama/client.rb', line 43

def chat(messages, **options, &block)
  options[:model] ||= MODEL
  options[:messages] = messages
  
  Chat.post(self.with(path: "/api/chat"), options) do |resource, response|
    if block_given?
      yield response
    end
    
    Chat.new(resource, value: response.read, metadata: response.headers)
  end
end
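
For example, given a client opened as in the overview, a hedged sketch of a chat round trip. The messages array follows Ollama's /api/chat convention of role/content hashes; "llama3.2" is a placeholder model name, and #value is assumed to hold the response body read above:

messages = [
  {role: "system", content: "You are a concise assistant."},
  {role: "user", content: "What is the capital of France?"}
]

chat = client.chat(messages, model: "llama3.2")
# value holds the /api/chat response body.
puts chat.value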

#generate(prompt, **options, &block) ⇒ Generate

Generates a response from the given prompt using Ollama.



# File 'lib/async/ollama/client.rb', line 26

def generate(prompt, **options, &block)
  options[:prompt] = prompt
  options[:model] ||= MODEL
  
  Generate.post(self.with(path: "/api/generate"), options) do |resource, response|
    if block_given?
      yield response
    end
    
    Generate.new(resource, value: response.read, metadata: response.headers)
  end
end
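
Note that the optional block is invoked with the raw HTTP response before its body is read into the wrapper, so it is best suited to inspecting status or headers rather than consuming the body. A sketch, with "llama3.2" as a placeholder model name:

generator = client.generate("Why is the sky blue?", model: "llama3.2") do |response|
  # Raw response, yielded before the body is read:
  puts response.status
end

puts generator.value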

#models ⇒ Models

Retrieves the list of available models from Ollama.



# File 'lib/async/ollama/client.rb', line 58

def models
  Models.get(self.with(path: "/api/tags"))
end
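
A sketch of enumerating the locally available models. Ollama's /api/tags endpoint returns a payload of the form {"models": [{"name": ...}, ...]}; whether the wrapped #value uses string or symbol keys depends on how the Models representation parses the body, so the symbol keys here are an assumption:

models = client.models
models.value[:models].each do |model|
  puts model[:name] # e.g. "llama3.2:latest"
end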

#pull(model) ⇒ Pull

Pulls the given model into the local Ollama instance.

# File 'lib/async/ollama/client.rb', line 62

def pull(model)
  Pull.post(self.with(path: "/api/pull"), model: model) do |resource, response|
    if block_given?
      yield response
    end
    
    Pull.new(resource, value: response.read, metadata: response.headers)
  end
end
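
A sketch of pulling a model by name ("llama3.2" is a placeholder). Ollama's /api/pull endpoint streams status updates while the download progresses; as with #chat and #generate, the optional block receives the raw response before its body is read:

pull = client.pull("llama3.2") do |response|
  # Raw response, yielded before the body is read:
  puts response.status
end

puts pull.value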