Class: Smollama::Client

Inherits:
Object
Defined in:
lib/smollama/client.rb

Class Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(model: nil) ⇒ Client

Initialize with optional overrides



# File 'lib/smollama/client.rb', line 23

def initialize(model: nil)
  @model = model || self.class.default_model
  raise "Model not specified" unless @model

  @connection = Excon.new(
    "#{self.class.base_url}/api/chat",
    persistent: true,
    headers: {
      'Content-Type' => 'application/json'
    }
  )
end
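
A minimal usage sketch (the require path, server address, and model names are assumptions based on the file layout, and the class attributes are assumed to have writers, e.g. via attr_accessor):

require "smollama"

Smollama::Client.server_ip     = "127.0.0.1"     # hypothetical address
Smollama::Client.default_model = "llama3"        # hypothetical model name

client  = Smollama::Client.new                   # uses the class-level default_model
mistral = Smollama::Client.new(model: "mistral") # per-instance override (hypothetical model)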

Class Attribute Details

.default_model ⇒ Object

Returns the value of attribute default_model.



# File 'lib/smollama/client.rb', line 9

def default_model
  @default_model
end

.server_ip ⇒ Object

Returns the value of attribute server_ip.



# File 'lib/smollama/client.rb', line 9

def server_ip
  @server_ip
end

.server_port ⇒ Object

Returns the value of attribute server_port.



# File 'lib/smollama/client.rb', line 9

def server_port
  @server_port
end

Class Method Details

.base_url ⇒ Object



# File 'lib/smollama/client.rb', line 15

def base_url
  raise "Server IP not configured" unless server_ip
  port = server_port || 11434
  "http://#{server_ip}:#{port}"
end
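
For example, with server_ip set to "10.0.0.5" and server_port left unset, base_url returns "http://10.0.0.5:11434" (the address is illustrative; 11434 is Ollama's default port).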

.configure {|_self| ... } ⇒ Object

Yields:

  • (_self)

Yield Parameters:

  • _self (Smollama::Client)



# File 'lib/smollama/client.rb', line 11

def configure
  yield self if block_given?
end
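
A block-style configuration sketch (values are placeholders; this assumes the class attributes above have writers, e.g. via attr_accessor):

Smollama::Client.configure do |config|
  config.server_ip     = "127.0.0.1"   # hypothetical address
  config.server_port   = 11434         # Ollama's default port
  config.default_model = "llama3"      # hypothetical model name
end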

Instance Method Details

#ask(prompt, **options) ⇒ Object

Convenience method for a single-message chat



# File 'lib/smollama/client.rb', line 60

def ask(prompt, **options)
  chat(prompt, **options)
end
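
A single-prompt sketch (the prompt is illustrative; the shape of the return value depends on the private send_request helper, which is not documented on this page):

client = Smollama::Client.new   # assumes the class has been configured
answer = client.ask("Summarize Hamlet in one sentence.")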

#chat(message, temperature: nil, top_p: nil, max_tokens: nil, stream: false, images: nil) ⇒ Object

Main chat method with configurable parameters



# File 'lib/smollama/client.rb', line 37

def chat(message, temperature: nil, top_p: nil, max_tokens: nil, stream: false, images: nil)
  messages = build_messages(message, images: images)

  payload = {
    model: @model,
    messages: messages,
    stream: stream
  }

  # Add optional parameters if provided
  payload[:options] = {} if temperature || top_p || max_tokens
  payload[:options][:temperature] = temperature if temperature
  payload[:options][:top_p] = top_p if top_p
  payload[:options][:num_predict] = max_tokens if max_tokens

  if stream
    stream_response(payload) { |chunk| yield chunk if block_given? }
  else
    send_request(payload)
  end
end
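
A sketch of both modes (parameter values are illustrative; in streaming mode the block receives whatever chunks the private stream_response helper yields, which is not documented here):

client = Smollama::Client.new

# Non-streaming call with sampling options
reply = client.chat("Name three prime numbers.", temperature: 0.2, top_p: 0.9, max_tokens: 64)

# Streaming call: chunks are passed to the block as they arrive
client.chat("Write a haiku about the sea.", stream: true) do |chunk|
  print chunk
end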

#chat_with_history(messages, **options) ⇒ Object

Chat with conversation history



# File 'lib/smollama/client.rb', line 65

def chat_with_history(messages, **options)
  raise "Messages must be an array" unless messages.is_a?(Array)

  payload = {
    model: @model,
    messages: messages,
    stream: options[:stream] || false
  }

  # Add optional parameters
  payload[:options] = {}
  payload[:options][:temperature] = options[:temperature] if options[:temperature]
  payload[:options][:top_p] = options[:top_p] if options[:top_p]
  payload[:options][:num_predict] = options[:max_tokens] if options[:max_tokens]

  if payload[:stream]
    stream_response(payload) { |chunk| yield chunk if block_given? }
  else
    send_request(payload)
  end
end
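
A multi-turn sketch. The role/content message shape follows Ollama's /api/chat convention; the exact keys the server expects are an assumption here, not something this method enforces:

history = [
  { role: "system",    content: "You are a terse assistant." },
  { role: "user",      content: "What is the capital of France?" },
  { role: "assistant", content: "Paris." },
  { role: "user",      content: "And of Italy?" }
]

client = Smollama::Client.new
client.chat_with_history(history, temperature: 0.1)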

#list_models ⇒ Object

Get available models



# File 'lib/smollama/client.rb', line 88

def list_models
  response = Excon.get(
    "#{self.class.base_url}/api/tags",
    headers: { 'Content-Type' => 'application/json' }
  )

  JSON.parse(response.body)
rescue Excon::Error => e
  { error: "Failed to list models: #{e.message}" }
end
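
A sketch of reading the result. Ollama's /api/tags endpoint returns a "models" array of name entries; those key names are an assumption about the server response, not something guaranteed by this client:

client = Smollama::Client.new
result = client.list_models

if result["models"]
  result["models"].each { |m| puts m["name"] }
else
  warn result[:error] || "Unexpected response from server"
end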

#ping ⇒ Object

Check if server is reachable



# File 'lib/smollama/client.rb', line 100

def ping
  response = Excon.get("#{self.class.base_url}/")
  response.status == 200
rescue Excon::Error
  false
end
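
A small guard sketch that checks reachability before chatting (the prompt and fallback message are illustrative):

client = Smollama::Client.new

if client.ping
  puts client.ask("Hello!")
else
  warn "Ollama server not reachable at #{Smollama::Client.base_url}"
end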