Class: Ragdoll::ImageDescriptionService

Inherits: Object
Defined in:
app/services/ragdoll/image_description_service.rb

Defined Under Namespace

Classes: DescriptionError

Constant Summary

DEFAULT_OPTIONS =
{
  model: "gemma3",
  provider: :ollama,
  assume_model_exists: true, # Bypass registry check
  temperature: 0.4,
  prompt: "Describe the image in detail."
}.freeze
DEFAULT_FALLBACK_OPTIONS =
{
  model: "smollm2",
  provider: :ollama,
  assume_model_exists: true, # Bypass LLM registry check
  temperature: 0.6
}.freeze
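
The :temperature and :prompt keys are consumed by the constructor itself; the remaining keys are forwarded to RubyLLM.chat. Because both constants are frozen, custom settings should be built as new hashes rather than by mutating the defaults. A minimal sketch (the "llava" model name is illustrative, not part of the defaults):

custom_primary = Ragdoll::ImageDescriptionService::DEFAULT_OPTIONS.merge(
  model: "llava",                                    # any locally available vision model
  prompt: "Describe the image in a single sentence." # shorter prompt for captions
)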

Instance Method Summary

Constructor Details

#initialize(primary: DEFAULT_OPTIONS, fallback: DEFAULT_FALLBACK_OPTIONS) ⇒ ImageDescriptionService

Returns a new instance of ImageDescriptionService.



# File 'app/services/ragdoll/image_description_service.rb', line 28

def initialize(primary: DEFAULT_OPTIONS, fallback: DEFAULT_FALLBACK_OPTIONS)
  puts "🚀 ImageDescriptionService: Initializing with primary: #{primary.inspect}"
  puts "🚀 ImageDescriptionService: Initializing with fallback: #{fallback.inspect}"

  # Configure RubyLLM using the same pattern as the working example
  configure_ruby_llm_globally

  # Duplicate hashes to avoid modifying frozen constants
  primary_opts = primary.dup
  fallback_opts = fallback.dup

  primary_temp    = primary_opts.delete(:temperature) || DEFAULT_OPTIONS[:temperature]
  @primary_prompt = primary_opts.delete(:prompt) || DEFAULT_OPTIONS[:prompt]
  fallback_temp   = fallback_opts.delete(:temperature) || DEFAULT_FALLBACK_OPTIONS[:temperature]

  puts "🤖 ImageDescriptionService: Attempting to create primary model..."
  begin
    @primary = RubyLLM.chat(**primary_opts).with_temperature(primary_temp)
    puts "✅ ImageDescriptionService: Primary model created successfully: #{@primary.class}"
  rescue StandardError => e
    puts "❌ ImageDescriptionService: Primary model creation failed: #{e.message}"
    @primary = nil
  end

  puts "🔄 ImageDescriptionService: Attempting to create fallback model..."
  begin
    @fallback = RubyLLM.chat(**fallback_opts).with_temperature(fallback_temp)
    puts "✅ ImageDescriptionService: Fallback model created successfully: #{@fallback.class}"
  rescue StandardError => e
    puts "❌ ImageDescriptionService: Fallback model creation failed: #{e.message}"
    @fallback = nil
  end

  return unless @primary.nil? && @fallback.nil?

  puts "⚠️  ImageDescriptionService: WARNING - No models available! Service will return placeholders only."
end
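
A usage sketch, assuming a local Ollama server with the configured models pulled; either keyword argument may be omitted to keep its default:

# Default primary ("gemma3") and fallback ("smollm2") models
service = Ragdoll::ImageDescriptionService.new

# Or pass custom option hashes, e.g. the merged custom_primary shown above
service = Ragdoll::ImageDescriptionService.new(
  primary:  custom_primary,
  fallback: Ragdoll::ImageDescriptionService::DEFAULT_FALLBACK_OPTIONS
)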

Instance Method Details

#generate_description(path) ⇒ Object

Generate a description for a local image file.

Parameters:

  path (String) - absolute path to the image file

Returns:

  (String) - the generated description, an empty string if the path is missing or is not an image, or a placeholder of the form "[Image file: <basename>]" when the image cannot be read or no model produces a description



# File 'app/services/ragdoll/image_description_service.rb', line 68

def generate_description(path)
  puts "🔍 ImageDescriptionService: Starting description generation for #{path}"
  start_time = Time.now

  @image_path = path
  return "" unless @image_path && File.exist?(@image_path) && image_file?

  # Attempt to read image and prepare data; on failure return placeholder
  data = nil
  begin
    puts "📸 ImageDescriptionService: Reading image with Magick..."
    @image = Magick::Image.read(@image_path).first
    data = prepare_image_data
    puts "✅ ImageDescriptionService: Image data prepared (#{data.length} chars base64)"
  rescue StandardError => e
    puts "❌ ImageDescriptionService: Failed to read image: #{e.message}"
    return "[Image file: #{File.basename(@image_path)}]"
  end
  return "" unless data

  # Attempt vision model call if client available
  if @primary
    puts "🤖 ImageDescriptionService: Attempting primary model (#{@primary.inspect})"
    begin
      @primary.add_message(
        role: "user",
        content: [
          { type: "text", text: @primary_prompt },
          { type: "image_url", image_url: { url: "data:#{@image.mime_type};base64,#{data}" } }
        ]
      )
      puts "📤 ImageDescriptionService: Calling primary model complete()..."
      response = @primary.complete
      puts "📥 ImageDescriptionService: Primary model response received: #{response.inspect}"
      desc = extract_description(response)
      if desc && !desc.empty?
        elapsed = Time.now - start_time
        puts "✅ ImageDescriptionService: Primary model success! Description: '#{desc[0..100]}...' (#{elapsed.round(2)}s)"
        return desc
      end
    rescue StandardError => e
      puts "❌ ImageDescriptionService: Primary model failed: #{e.message}"
    end
  else
    puts "⚠️  ImageDescriptionService: No primary model available"
  end

  # Attempt fallback if available
  if @fallback
    puts "🔄 ImageDescriptionService: Attempting fallback model (#{@fallback.inspect})"
    begin
      fallback_response = @fallback.ask(fallback_prompt).content
      elapsed = Time.now - start_time
      puts "✅ ImageDescriptionService: Fallback model success! Description: '#{fallback_response[0..100]}...' (#{elapsed.round(2)}s)"
      return fallback_response
    rescue StandardError => e
      puts "❌ ImageDescriptionService: Fallback model failed: #{e.message}"
    end
  else
    puts "⚠️  ImageDescriptionService: No fallback model available"
  end

  # Default placeholder when LLM unavailable
  elapsed = Time.now - start_time
  puts "🔚 ImageDescriptionService: Returning placeholder after #{elapsed.round(2)}s"
  "[Image file: #{File.basename(@image_path)}]"
end
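
An end-to-end sketch, assuming the Ragdoll gem is loaded (the image path is illustrative). The method returns an empty string for a missing or non-image path, and a "[Image file: <basename>]" placeholder when the image cannot be read or no model responds:

service = Ragdoll::ImageDescriptionService.new
description = service.generate_description("/path/to/photo.jpg")

if description.start_with?("[Image file:")
  puts "No vision model responded; a placeholder was returned."
else
  puts description
end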