Class: Soka::LLMs::Base

Inherits:
Object
show all
Defined in:
lib/soka/llms/base.rb

Overview

Base class for LLM providers

Direct Known Subclasses

Anthropic, Gemini, OpenAI

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(model: nil, api_key: nil, **options) ⇒ Base

Returns a new instance of Base.



17
18
19
20
21
22
# File 'lib/soka/llms/base.rb', line 17

# Builds a provider instance, resolving configuration from arguments,
# provider defaults, and the environment.
#
# @param model [String, nil] model identifier; falls back to the
#   provider's default_model when nil (defined in the subclass)
# @param api_key [String, nil] API key; falls back to api_key_from_env
#   when nil — presumably an ENV lookup, defined outside this excerpt
# @param options [Hash] extra options merged over default_options
#   (argument values win over defaults)
# @raise NOTE(review): validate_configuration! is defined elsewhere;
#   presumably raises when model/api_key are missing — confirm in source
def initialize(model: nil, api_key: nil, **options)
  @model = model || default_model
  @api_key = api_key || api_key_from_env
  @options = default_options.merge(options)
  validate_configuration!
end

Instance Attribute Details

#api_key ⇒ Object (readonly)

Returns the value of attribute api_key.



15
16
17
# File 'lib/soka/llms/base.rb', line 15

# @return [Object] the API key resolved at construction time (readonly)
def api_key
  @api_key
end

#model ⇒ Object (readonly)

Returns the value of attribute model.



15
16
17
# File 'lib/soka/llms/base.rb', line 15

# @return [Object] the model identifier resolved at construction time (readonly)
def model
  @model
end

#options ⇒ Object (readonly)

Returns the value of attribute options.



15
16
17
# File 'lib/soka/llms/base.rb', line 15

# @return [Object] merged provider options (defaults overridden by
#   constructor arguments) — readonly
def options
  @options
end

Instance Method Details

#chat(messages, **params) ⇒ Object

Raises:

  • (NotImplementedError)


24
25
26
# File 'lib/soka/llms/base.rb', line 24

# Abstract chat entry point. Every provider subclass must override this
# with a concrete implementation that sends +messages+ to its API.
#
# @param messages [Object] conversation payload forwarded by the caller
# @param params [Hash] provider-specific request parameters (unused here)
# @raise [NotImplementedError] always, on the base class
def chat(messages, **params)
  error_message = "#{self.class} must implement #chat method"
  raise NotImplementedError, error_message
end

#streaming_chat(messages, **params) ⇒ Object

Raises:

  • (NotImplementedError)


28
29
30
# File 'lib/soka/llms/base.rb', line 28

# Abstract streaming entry point. Providers that can stream override
# this (and #supports_streaming?); the base class refuses.
#
# @param messages [Object] conversation payload forwarded by the caller
# @param params [Hash] provider-specific request parameters (unused here)
# @raise [NotImplementedError] always, on the base class
def streaming_chat(messages, **params, &)
  error_message = "#{self.class} does not support streaming"
  raise NotImplementedError, error_message
end

#supports_streaming? ⇒ Boolean

Returns:

  • (Boolean)


32
33
34
# File 'lib/soka/llms/base.rb', line 32

# Whether this provider implements #streaming_chat.
#
# @return [Boolean] false on the base class; streaming-capable
#   subclasses override this to return true
def supports_streaming? = false