Class: Ragdoll::Core::Configuration
- Inherits: Object
- Defined in: lib/ragdoll/core/configuration.rb
Defined Under Namespace
Classes: ConfigurationFileNotFoundError, ConfigurationLoadUnknownError, ConfigurationSaveError
Constant Summary
- DEFAULT =
{
  # Base directory for all Ragdoll files - single source of truth
  base_directory: File.join(Dir.home, ".config", "ragdoll"),

  # Configuration file path derived from base directory
  config_filepath: File.join(Dir.home, ".config", "ragdoll", "config.yml"),

  # Model configurations organized by purpose with inheritance support
  models: {
    text_generation: {
      default: -> { Model.new(ENV.fetch("RAGDOLL_DEFAULT_TEXT_MODEL", "openai/gpt-4o")) },
      summary: -> { Model.new(ENV.fetch("RAGDOLL_SUMMARY_MODEL", "openai/gpt-4o")) },
      keywords: -> { Model.new(ENV.fetch("RAGDOLL_KEYWORDS_MODEL", "openai/gpt-4o")) }
    },
    embedding: {
      provider: :openai,
      text: -> { Model.new(ENV.fetch("RAGDOLL_TEXT_EMBEDDING_MODEL", "openai/text-embedding-3-small")) },
      image: -> { Model.new(ENV.fetch("RAGDOLL_IMAGE_EMBEDDING_MODEL", "openai/clip-vit-base-patch32")) },
      audio: -> { Model.new(ENV.fetch("RAGDOLL_AUDIO_EMBEDDING_MODEL", "openai/whisper-1")) },
      max_dimensions: 3072,
      cache_embeddings: true
    }
  },

  # Processing configuration by content type
  processing: {
    text: {
      chunking: { max_tokens: 1000, overlap: 200 }
    },
    default: {
      chunking: { max_tokens: 4096, overlap: 128 }
    },
    search: {
      similarity_threshold: 0.7,
      max_results: 10,
      analytics: {
        enable: true,
        usage_tracking_enabled: true,
        ranking_enabled: true,
        recency_weight: 0.3,
        frequency_weight: 0.7,
        similarity_weight: 1.0
      }
    }
  },

  # LLM provider configurations (renamed from ruby_llm_config)
  llm_providers: {
    default_provider: :openai,
    openai: {
      api_key: -> { ENV.fetch("OPENAI_API_KEY", nil) },
      organization: -> { ENV.fetch("OPENAI_ORGANIZATION", nil) },
      project: -> { ENV.fetch("OPENAI_PROJECT", nil) }
    },
    anthropic: {
      api_key: -> { ENV.fetch("ANTHROPIC_API_KEY", nil) }
    },
    google: {
      api_key: -> { ENV.fetch("GOOGLE_API_KEY", nil) },
      project_id: -> { ENV.fetch("GOOGLE_PROJECT_ID", nil) }
    },
    azure: {
      api_key: -> { ENV.fetch("AZURE_OPENAI_API_KEY", nil) },
      endpoint: -> { ENV.fetch("AZURE_OPENAI_ENDPOINT", nil) },
      api_version: -> { ENV.fetch("AZURE_OPENAI_API_VERSION", "2024-02-01") }
    },
    ollama: {
      endpoint: -> { ENV.fetch("OLLAMA_ENDPOINT", "http://localhost:11434") }
    },
    huggingface: {
      api_key: -> { ENV.fetch("HUGGINGFACE_API_KEY", nil) }
    },
    openrouter: {
      api_key: -> { ENV.fetch("OPENROUTER_API_KEY", nil) }
    }
  },

  # Summarization configuration
  summarization: {
    enable: true,
    max_length: 300,
    min_content_length: 300
  },

  # Database configuration with standardized ENV variable name
  database: {
    adapter: "postgresql",
    database: "ragdoll_development",
    username: "ragdoll",
    password: -> { ENV.fetch("RAGDOLL_DATABASE_PASSWORD", nil) },
    host: "localhost",
    port: 5432,
    auto_migrate: true,
    logger: nil
  },

  # Logging configuration with corrected key names and path derivation
  logging: {
    level: :warn, # Fixed: was log_level, now matches usage
    directory: File.join(Dir.home, ".config", "ragdoll", "logs"),
    filepath: File.join(Dir.home, ".config", "ragdoll", "logs", "ragdoll.log")
  },

  # Prompt templates for customizable text generation
  prompt_templates: {
    rag_enhancement: <<~TEMPLATE
      You are an AI assistant. Use the following context to help answer the user's question.
      If the context doesn't contain relevant information, say so.

      Context:
      {{context}}

      Question: {{prompt}}

      Answer:
    TEMPLATE
  }
}.freeze
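The lambda-wrapped entries above read their environment variables when a Configuration is instantiated, so defaults can be switched via ENV. A minimal sketch, assuming the keys shown above; the model name and shell value are purely illustrative:

ENV["RAGDOLL_DEFAULT_TEXT_MODEL"] = "anthropic/claude-3-5-sonnet" # hypothetical value

config = Ragdoll::Core::Configuration.new
config.models[:text_generation][:default] # => a Model built from the value above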
Class Method Summary
- .load(path: nil) ⇒ Object
Instance Method Summary
- #embedding_model(content_type = :text) ⇒ Object
  Resolve the embedding model for a content type.
- #initialize(config = {}) ⇒ Configuration (constructor)
  A new instance of Configuration.
- #method_missing(method_name, *args, &block) ⇒ Object
  Enable method delegation to the internal OpenStruct.
- #parse_provider_model(provider_model_string) ⇒ Object
  Parse a "provider/model" string into its components; when no provider is given, RubyLLM determines it from the model name.
- #prompt_template(template_name = :rag_enhancement) ⇒ Object
  Get a prompt template.
- #provider_credentials(provider = nil) ⇒ Object
  Get the credentials for a given provider.
- #resolve_model(task_type) ⇒ Object
  Resolve a model with inheritance support, falling back to the default when no task-specific model is configured.
- #respond_to_missing?(method_name, include_private = false) ⇒ Boolean
- #save(path: nil) ⇒ Object
Constructor Details
#initialize(config = {}) ⇒ Configuration
Returns a new instance of Configuration.
# File 'lib/ragdoll/core/configuration.rb', line 141

def initialize(config = {})
  merged_config = deep_merge(self.class::DEFAULT, config)
  resolved_config = resolve_procs(merged_config, [])
  @config = OpenStruct.new(resolved_config)
end
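A usage sketch, assuming deep_merge leaves untouched default keys in place; the override values are illustrative only:

# Overrides are deep-merged into DEFAULT, then any lambdas are resolved.
config = Ragdoll::Core::Configuration.new(
  database: { database: "ragdoll_production" }, # hypothetical database name
  logging:  { level: :info }
)

config.database[:database] # => "ragdoll_production"
config.logging[:level]     # => :info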
Dynamic Method Handling
This class handles dynamic methods through the method_missing method
#method_missing(method_name, *args, &block) ⇒ Object
Enable method delegation to the internal OpenStruct
# File 'lib/ragdoll/core/configuration.rb', line 222

def method_missing(method_name, *args, &block)
  @config.send(method_name, *args, &block)
end
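Because unknown methods are forwarded to the internal OpenStruct, top-level configuration sections read like plain accessors. A short sketch using the default keys:

config = Ragdoll::Core::Configuration.new

config.base_directory             # => File.join(Dir.home, ".config", "ragdoll")
config.summarization[:max_length] # => 300
config.logging = { level: :debug } # assignment is forwarded too (replaces the whole logging section)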
Class Method Details
.load(path: nil) ⇒ Object
# File 'lib/ragdoll/core/configuration.rb', line 147

def self.load(path: nil)
  path ||= DEFAULT[:config_filepath]

  raise ConfigurationFileNotFoundError, "Configuration file not found: #{path}" unless File.exist?(path)

  new(YAML.safe_load_file(path) || {})
rescue Errno::ENOENT
  raise ConfigurationFileNotFoundError, "Configuration file not found: #{path}"
rescue StandardError => e
  raise ConfigurationLoadUnknownError, "Failed to load configuration from #{path}: #{e.message}"
end
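An illustrative call with a hypothetical path; a missing file raises ConfigurationFileNotFoundError, and any other failure raises ConfigurationLoadUnknownError:

# Omit `path:` to fall back to DEFAULT[:config_filepath] (~/.config/ragdoll/config.yml).
config = Ragdoll::Core::Configuration.load(path: "/etc/ragdoll/config.yml") # hypothetical path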
Instance Method Details
#embedding_model(content_type = :text) ⇒ Object
Resolve embedding model for content type
# File 'lib/ragdoll/core/configuration.rb', line 212

def embedding_model(content_type = :text)
  @config.models[:embedding][content_type] || @config.models[:embedding][:text]
end
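A usage sketch, assuming the default embedding models; content types without an entry fall back to the :text model:

config = Ragdoll::Core::Configuration.new

config.embedding_model         # => the :text embedding model
config.embedding_model(:image) # => the :image embedding model
config.embedding_model(:video) # => falls back to :text (no :video key in the defaults)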
#parse_provider_model(provider_model_string) ⇒ Object
Parse a provider/model string into its components.
Format: "provider/model" -> { provider: :provider, model: "model" }
Format: "model" -> { provider: nil, model: "model" } (RubyLLM determines the provider)
# File 'lib/ragdoll/core/configuration.rb', line 180

def parse_provider_model(provider_model_string)
  return { provider: nil, model: nil } if provider_model_string.nil? || provider_model_string.empty?

  parts = provider_model_string.split("/", 2)

  if parts.length == 2
    { provider: parts[0].to_sym, model: parts[1] }
  else
    # If no slash, let RubyLLM determine provider from model name
    { provider: nil, model: provider_model_string }
  end
end
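Illustrative calls covering both accepted formats (the model names are examples only):

config = Ragdoll::Core::Configuration.new

config.parse_provider_model("openai/gpt-4o") # => { provider: :openai, model: "gpt-4o" }
config.parse_provider_model("gpt-4o")        # => { provider: nil, model: "gpt-4o" }
config.parse_provider_model(nil)             # => { provider: nil, model: nil }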
#prompt_template(template_name = :rag_enhancement) ⇒ Object
Get prompt template
# File 'lib/ragdoll/core/configuration.rb', line 217

def prompt_template(template_name = :rag_enhancement)
  @config.prompt_templates[template_name]
end
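A short sketch, assuming the default :rag_enhancement template; the placeholder substitution shown here is illustrative and not part of this class:

config = Ragdoll::Core::Configuration.new

template = config.prompt_template # :rag_enhancement by default
prompt   = template.gsub("{{context}}", "retrieved context goes here") # illustrative values
prompt   = prompt.gsub("{{prompt}}", "What does Ragdoll do?")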
#provider_credentials(provider = nil) ⇒ Object
Get provider credentials for a given provider
# File 'lib/ragdoll/core/configuration.rb', line 206

def provider_credentials(provider = nil)
  provider ||= @config.llm_providers[:default_provider]
  @config.llm_providers[provider] || {}
end
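A usage sketch against the default llm_providers section; the Ollama result assumes OLLAMA_ENDPOINT is unset:

config = Ragdoll::Core::Configuration.new

config.provider_credentials           # => the :openai credentials (the default_provider)
config.provider_credentials(:ollama)  # => { endpoint: "http://localhost:11434" }
config.provider_credentials(:unknown) # => {} for providers with no configuration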
#resolve_model(task_type) ⇒ Object
Resolve a model with inheritance support. Returns the configured model for a given task, falling back to the default when no task-specific model is set.
# File 'lib/ragdoll/core/configuration.rb', line 194

def resolve_model(task_type)
  case task_type
  when :embedding
    @config.models[:embedding]
  when :text, :summary, :keywords, :default
    @config.models[:text_generation][task_type] || @config.models[:text_generation][:default]
  else
    @config.models[:text_generation][:default]
  end
end
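An illustrative sketch of the fallback behavior, using the default models:

config = Ragdoll::Core::Configuration.new

config.resolve_model(:summary)   # => the :summary text-generation model
config.resolve_model(:unknown)   # => falls back to models[:text_generation][:default]
config.resolve_model(:embedding) # => the whole models[:embedding] hash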
#respond_to_missing?(method_name, include_private = false) ⇒ Boolean
# File 'lib/ragdoll/core/configuration.rb', line 226

def respond_to_missing?(method_name, include_private = false)
  @config.respond_to?(method_name, include_private) || super
end
#save(path: nil) ⇒ Object
# File 'lib/ragdoll/core/configuration.rb', line 159

def save(path: nil)
  if path.nil?
    path = @config.config_filepath
  else
    save_filepath = @config.config_filepath
    @config.config_filepath = path
  end

  FileUtils.mkdir_p(File.dirname(path))

  File.write(path, @config.to_yaml)
rescue StandardError => e
  @config.config_filepath = save_filepath unless save_filepath.nil?
  raise ConfigurationSaveError, "Failed to save configuration to #{path}: #{e.message}"
end
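A usage sketch with a hypothetical path; passing a path also updates config_filepath, and a failed write restores the previous value before raising ConfigurationSaveError:

config = Ragdoll::Core::Configuration.new

config.save                                  # writes YAML to config.config_filepath
config.save(path: "/tmp/ragdoll/config.yml") # hypothetical path; parent directories are created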