Class: AIFaker::LLMAdapters::RubyLLM

Inherits:
Object
Defined in:
lib/AIFaker/llm_adapters/ruby_llm.rb

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(provider:, ui: nil, **options) ⇒ RubyLLM

Returns a new instance of RubyLLM.



13
14
15
16
17
18
19
20
21
22
23
24
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 13

# Builds an adapter for the given LLM +provider+.
#
# @param provider [String, Symbol] provider identifier passed through to ruby_llm
# @param ui [Object, nil] optional UI object (stored here; presumably used by
#   the progress-indicator helpers — confirm against chat_with_indicator)
# @param options [Hash] any extra provider options, kept for later use
def initialize(provider:, ui: nil, **options)
  @provider = provider
  @ui = ui
  @options = options

  # Nothing is connected or shared until the share_*!/connect! calls happen.
  @connected = false
  @schema = nil
  @models = nil
  @associations = nil
  @plan = nil

  # Per-model, per-column record of unique values already handed out.
  # Block form (not Hash.new(default)) so every key gets its own fresh
  # nested Hash/Array rather than one shared default object.
  @generated_uniques = Hash.new { |h, k| h[k] = Hash.new { |hh, kk| hh[kk] = [] } }
  @failure_feedback = {}
end

Class Method Details

.available? ⇒ Boolean

Returns:

  • (Boolean)


6
7
8
9
10
11
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 6

# True when the optional ruby_llm dependency can be loaded.
#
# @return [Boolean]
def self.available?
  begin
    require "ruby_llm"
  rescue LoadError
    return false
  end
  true
end

Instance Method Details

#apply_fix!(_fix) ⇒ Object



83
84
85
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 83

# Intentionally a no-op in v0: the suggested fix is guidance only, and the
# caller simply retries with freshly generated attributes.
#
# @param _fix [Object] ignored
# @return [nil]
def apply_fix!(_fix)
  nil
end

#connect! ⇒ Object



30
31
32
33
34
35
36
37
38
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 30

# Connects the adapter: verifies the ruby_llm gem is loadable, then
# configures it for the selected provider.
#
# Sets @connected to true on success.
#
# @raise [ConnectionError] when the gem is missing or configuration fails
def connect!
  raise ConnectionError, "Missing dependency: add `gem \"ruby_llm\"` to your Gemfile." unless self.class.available?

  @provider_sym = @provider.to_sym
  configure_ruby_llm!
  @connected = true
rescue ConnectionError
  # Re-raise our own errors untouched; without this clause the StandardError
  # handler below would catch the missing-dependency ConnectionError and
  # double-wrap it into a less specific "Failed to connect" message.
  raise
rescue StandardError => e
  raise ConnectionError, "Failed to connect via ruby_llm (provider=#{@provider}): #{e.class}: #{e.message}"
end

#connected? ⇒ Boolean

Returns:

  • (Boolean)


26
27
28
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 26

# Whether connect! has completed successfully.
#
# @return [Boolean]
def connected?
  @connected
end

#generate_attributes(model) ⇒ Object



53
54
55
56
57
58
59
60
61
62
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 53

# Generates an attribute hash for +model+ by prompting the LLM, parsing the
# JSON reply, and coercing values to the model's columns. Unique values that
# were handed out are remembered so later prompts can avoid repeating them.
#
# @param model [Object] model metadata object responding to #name
# @return [Hash] coerced attributes ready for record creation
# @raise [Error] wrapping any failure in the generate pipeline
def generate_attributes(model)
  raw_reply = chat_with_indicator(build_prompt_for(model), label: "Searching #{model.name}")
  attributes = coerce_attributes(model, parse_json_hash(raw_reply))
  remember_generated_unique_values(model, attributes)
  attributes
rescue StandardError => e
  raise Error, "LLM attribute generation failed for #{model.name}: #{e.class}: #{e.message}"
end

#register_failure(model, error:, attempted_attributes:) ⇒ Object



71
72
73
74
75
76
77
78
79
80
81
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 71

# Records the most recent creation failure for +model+ so the next prompt
# can steer the LLM away from the same mistake (including already-taken
# unique values). Best-effort: any error while recording is swallowed so
# feedback bookkeeping can never break the retry loop.
#
# @param model [Object] model metadata object responding to #name
# @param error [StandardError] the failure raised during record creation
# @param attempted_attributes [Hash] the attributes that failed
# @return [nil] when recording itself raises
def register_failure(model, error:, attempted_attributes:)
  taken = uniqueness_taken_values(model, error, attempted_attributes)
  @failure_feedback[model.name] = {
    message: "#{error.class}: #{error.message}",
    details: validation_error_details(error),
    attempted_attributes: attempted_attributes,
    dont_suggest: taken
  }
rescue StandardError
  nil
end

#share_models!(models, associations) ⇒ Object



44
45
46
47
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 44

# Caches the introspected model list and association map so later prompt
# building can reference them.
#
# @param models [Object] model metadata collection
# @param associations [Object] association metadata
def share_models!(models, associations)
  @models       = models
  @associations = associations
end

#share_schema!(schema) ⇒ Object



40
41
42
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 40

# Stores the introspected database schema for later prompt building.
#
# @param schema [Object] schema metadata
def share_schema!(schema) = (@schema = schema)

#share_seed_plan!(plan) ⇒ Object



49
50
51
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 49

# Stores the seed plan for later prompt building.
#
# @param plan [Object] seed plan metadata
def share_seed_plan!(plan) = (@plan = plan)

#suggest_fix(model, error:, last_attributes:) ⇒ Object



64
65
66
67
68
69
# File 'lib/AIFaker/llm_adapters/ruby_llm.rb', line 64

# Asks the LLM how to fix a failed record creation for +model+. Returns the
# raw response text; best-effort, so any failure in the request yields nil.
#
# @param model [Object] model metadata object responding to #name
# @param error [StandardError] the failure to get guidance about
# @param last_attributes [Hash] the attributes that triggered the failure
# @return [String, nil]
def suggest_fix(model, error:, last_attributes:)
  fix_prompt = build_fix_prompt_for(model, error: error, last_attributes: last_attributes)
  chat_with_indicator(fix_prompt, label: "Searching fix for #{model.name}")
rescue StandardError
  nil
end