Class: LastLLM::Providers::Ollama

Inherits:
LastLLM::Provider
Defined in:
lib/last_llm/providers/ollama.rb

Overview

Ollama provider implementation

Constant Summary

API Configuration

BASE_ENDPOINT =
  'http://172.17.0.1:11434'
DEFAULT_MODEL =
  'llama3.2:latest'

LLM Default Parameters

DEFAULT_TEMPERATURE =
  0.7
DEFAULT_TOP_P =
  0.7
DEFAULT_MAX_TOKENS =
  24_576
DEFAULT_TEMPERATURE_OBJECT =
  0.2

Response Configuration

SUCCESS_STATUS =
  200

Error Status Codes

SERVER_ERROR_STATUS =
  500
BAD_REQUEST_STATUS =
  400

Instance Attribute Summary

Attributes inherited from LastLLM::Provider

#config, #name

Class Method Summary

Instance Method Summary

Methods inherited from LastLLM::Provider

#parse_response

Constructor Details

#initialize(config) ⇒ Ollama

Returns a new instance of Ollama.



# File 'lib/last_llm/providers/ollama.rb', line 26

def initialize(config)
  super(Constants::OLLAMA, config)
  @conn = connection(config[:base_url] || BASE_ENDPOINT)
  logger.debug("#{@name}: Initialized Ollama provider with endpoint: #{config[:base_url] || BASE_ENDPOINT}")
end
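
A minimal usage sketch. The require path and the exact shape of the config hash are assumptions; only the :base_url key is read directly by the constructor shown above.

require 'last_llm'  # assumed require path for the gem

# Point the provider at a local Ollama server instead of the Docker-bridge
# default stored in BASE_ENDPOINT.
config = { base_url: 'http://localhost:11434' }
provider = LastLLM::Providers::Ollama.new(config)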

Class Method Details

.execute_tool(tool, response) ⇒ Hash?

Execute a tool from an Ollama response

Parameters:

  • tool (LastLLM::Tool)

    The tool to execute

  • response (Hash)

    The Ollama response containing tool call information

Returns:

  • (Hash, nil)

    The result of the function call or nil if the tool wasn’t called



# File 'lib/last_llm/providers/ollama.rb', line 78

def self.execute_tool(tool, response)
  # Ollama doesn't have native function calling, so we need to parse from the content
  # This is a simplified implementation that would need to be enhanced for production
  content = response.dig(:message, :content)
  return nil unless content&.include?(tool.name)

  # Simple regex to extract JSON from the content
  # This is a basic implementation and might need enhancement
  if content =~ /#{tool.name}\s*\(([^)]+)\)/i
    args_str = ::Regexp.last_match(1)
    begin
      args = JSON.parse("{#{args_str}}", symbolize_names: true)
      return tool.call(args)
    rescue JSON::ParserError
      return nil
    end
  end

  nil
end
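
A call-site sketch. The LastLLM::Tool constructor arguments are hypothetical (its signature is not documented on this page); the response hash only needs the message/content keys this method digs into, and the tool call in the content is written in the tool_name(...) form the regex above recognizes.

# Hypothetical tool definition for illustration only.
weather_tool = LastLLM::Tool.new(
  name: 'get_weather',
  description: 'Look up the current weather for a city',
  parameters: {
    type: 'object',
    properties: { city: { type: 'string' } },
    required: ['city']
  }
)

# A response shaped the way execute_tool expects.
response = {
  message: {
    content: 'I will call get_weather("city": "Berlin") to answer that.'
  }
}

# The captured '"city": "Berlin"' parses to { city: 'Berlin' } and is passed
# to weather_tool.call; nil comes back if the name or arguments cannot be parsed.
result = LastLLM::Providers::Ollama.execute_tool(weather_tool, response)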

.format_tool(tool) ⇒ Hash

Format a tool for Ollama function calling

Parameters:

  • tool (LastLLM::Tool)

    The tool to format

Returns:

  • (Hash)

    The tool in Ollama format



# File 'lib/last_llm/providers/ollama.rb', line 66

def self.format_tool(tool)
  {
    name: tool.name,
    description: tool.description,
    parameters: tool.parameters
  }
end
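
An illustrative call showing the hash this method produces. The LastLLM::Tool constructor signature is an assumption; only the name/description/parameters readers are visible on this page.

# Hypothetical tool definition for illustration only.
search_tool = LastLLM::Tool.new(
  name: 'search_docs',
  description: 'Search the documentation index',
  parameters: { type: 'object', properties: { query: { type: 'string' } } }
)

LastLLM::Providers::Ollama.format_tool(search_tool)
# => { name: 'search_docs',
#      description: 'Search the documentation index',
#      parameters: { type: 'object', properties: { query: { type: 'string' } } } }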

Instance Method Details

#generate_object(prompt, schema, options = {}) ⇒ Object



# File 'lib/last_llm/providers/ollama.rb', line 44

def generate_object(prompt, schema, options = {})
  model = get_model(options, DEFAULT_MODEL)
  logger.info("#{@name}: Generating object with model: #{model}")
  logger.debug("#{@name}: Object prompt: #{format_prompt_for_logging(prompt)}")

  system_prompt = 'You are a helpful assistant that responds with valid JSON.'
  formatted_prompt = LastLLM::StructuredOutput.format_prompt(prompt, schema)

  options = options.dup
  options[:system_prompt] = system_prompt
  options[:temperature] ||= DEFAULT_TEMPERATURE_OBJECT

  make_request(formatted_prompt, options) do |result|
    content = result.dig(:choices, 0, :message, :content)
    logger.debug("#{@name}: Raw JSON response: #{content}")
    parse_json_response(content)
  end
end
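
A sketch of generating structured output with a provider instance from the constructor example. The JSON-Schema-style shape of the schema hash is an assumption; the format expected by LastLLM::StructuredOutput.format_prompt is not documented on this page.

# Assumed JSON-Schema-style hash describing the desired object.
schema = {
  type: 'object',
  properties: {
    name: { type: 'string' },
    age:  { type: 'integer' }
  },
  required: %w[name age]
}

# Temperature falls back to DEFAULT_TEMPERATURE_OBJECT (0.2) when not given.
person = provider.generate_object('Extract the person from: "Ada, aged 36".', schema)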

#generate_text(prompt, options = {}) ⇒ Object



# File 'lib/last_llm/providers/ollama.rb', line 32

def generate_text(prompt, options = {})
  model = get_model(options, DEFAULT_MODEL)
  logger.info("#{@name}: Generating text with model: #{model}")
  logger.debug("#{@name}: Text prompt: #{format_prompt_for_logging(prompt)}")

  make_request(prompt, options) do |result|
    response = result.dig(:choices, 0, :message, :content).to_s
    logger.debug("#{@name}: Generated response of #{response.length} characters")
    response
  end
end
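
A hedged call-site sketch, reusing the provider instance from the constructor example. The :model and :temperature option keys are assumptions (get_model and make_request are not documented on this page); both fall back to the defaults listed in the constant summary when omitted.

text = provider.generate_text(
  'Summarize the plot of Hamlet in one sentence.',
  model: 'llama3.2:latest',   # assumed key; defaults to DEFAULT_MODEL
  temperature: 0.7            # assumed key; defaults to DEFAULT_TEMPERATURE
)
puts text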