Class: DSPy::Teleprompt::MIPROv2
- Inherits: Teleprompter (Object → Teleprompter → DSPy::Teleprompt::MIPROv2)
- Extended by: T::Sig
- Includes: Dry::Configurable
- Defined in: lib/dspy/teleprompt/mipro_v2.rb
Overview
MIPROv2: Multi-prompt Instruction Proposal with Retrieval Optimization. State-of-the-art prompt optimization that combines bootstrap sampling, instruction generation, and Bayesian optimization.
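For orientation, a minimal usage sketch. The program, metric, and example sets are assumed to be built elsewhere with DSPy; the two-argument metric shape shown here is an assumption, not a documented contract.

optimizer = DSPy::Teleprompt::MIPROv2.new(
  metric: ->(example, prediction) { example == prediction } # assumed metric shape, illustrative only
)
result = optimizer.compile(my_program, trainset: train_examples, valset: val_examples)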
Defined Under Namespace
Modules: AutoMode
Classes: MIPROv2Result
Constant Summary
- EvaluatedCandidate =
Simple data structure for evaluated candidate configurations (immutable)
Data.define(
  :instruction,
  :few_shot_examples,
  :type,
  :metadata,
  :config_id
) do
  extend T::Sig

  # Generate a config ID based on content
  sig { params(instruction: String, few_shot_examples: T::Array[T.untyped], type: CandidateType, metadata: T::Hash[Symbol, T.untyped]).returns(EvaluatedCandidate) }
  def self.create(instruction:, few_shot_examples: [], type: CandidateType::Baseline, metadata: {})
    content = "#{instruction}_#{few_shot_examples.size}_#{type.serialize}_#{metadata.hash}"
    config_id = Digest::SHA256.hexdigest(content)[0, 12]

    new(
      instruction: instruction.freeze,
      few_shot_examples: few_shot_examples.freeze,
      type: type,
      metadata: metadata.freeze,
      config_id: config_id
    )
  end

  sig { returns(T::Hash[Symbol, T.untyped]) }
  def to_h
    {
      instruction: instruction,
      few_shot_examples: few_shot_examples.size,
      type: type.serialize,
      metadata: metadata,
      config_id: config_id
    }
  end
end
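A small sketch of building a candidate via the create helper; the instruction text and metadata are illustrative. type defaults to CandidateType::Baseline and few_shot_examples to an empty array.

candidate = DSPy::Teleprompt::MIPROv2::EvaluatedCandidate.create(
  instruction: "Answer the question concisely and cite the relevant passage.",
  metadata: { source: :proposer } # illustrative metadata
)

candidate.config_id # 12-character id derived from a SHA256 digest of the candidate's content
candidate.to_h      # summary hash; few_shot_examples is reported as a count, not the examples themselves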
Instance Attribute Summary
- #mipro_config ⇒ Object (readonly)
  Returns the value of attribute mipro_config.
- #proposer ⇒ Object (readonly)
  Returns the value of attribute proposer.
Class Method Summary
- .apply_auto_defaults(config, preset) ⇒ Object
- .configure(&block) ⇒ Object
  Class-level configuration method - sets defaults for new instances.
- .default_config_block ⇒ Object
  Get the default configuration block.
Instance Method Summary
- #compile(program, trainset:, valset: nil) ⇒ Object
- #initialize(metric: nil, **kwargs) ⇒ MIPROv2 (constructor)
  Override dry-configurable’s initialize to add our parameter validation.
Constructor Details
#initialize(metric: nil, **kwargs) ⇒ MIPROv2
Override dry-configurable’s initialize to add our parameter validation
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 329

def initialize(metric: nil, **kwargs)
  # Reject old config parameter pattern
  if kwargs.key?(:config)
    raise ArgumentError, "config parameter is no longer supported. Use .configure blocks instead."
  end

  # Let dry-configurable handle its initialization
  super(**kwargs)

  # Apply class-level configuration if it exists
  if self.class.default_config_block
    configure(&self.class.default_config_block)
  end

  @metric = metric

  # Initialize proposer with a basic config for now (will be updated later)
  @proposer = DSPy::Propose::GroundedProposer.new(config: DSPy::Propose::GroundedProposer::Config.new)

  @optimization_trace = []
  @evaluated_candidates = []
  @trial_history = {}
end
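The legacy config: keyword is rejected outright, so migrating callers should move to .configure blocks. A brief sketch; the metric lambda is illustrative.

DSPy::Teleprompt::MIPROv2.new(config: legacy_config)
# => ArgumentError: config parameter is no longer supported. Use .configure blocks instead.

optimizer = DSPy::Teleprompt::MIPROv2.new(metric: ->(example, prediction) { example == prediction })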
Instance Attribute Details
#mipro_config ⇒ Object (readonly)
Returns the value of attribute mipro_config.
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 323

def mipro_config
  @mipro_config
end
#proposer ⇒ Object (readonly)
Returns the value of attribute proposer.
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 326

def proposer
  @proposer
end
Class Method Details
.apply_auto_defaults(config, preset) ⇒ Object
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 206

def apply_auto_defaults(config, preset)
  settings = AUTO_PRESET_SETTINGS.fetch(preset) { {} }

  config.auto_preset = preset
  config.num_trials = settings[:candidate_budget] if settings[:candidate_budget]
  config.num_instruction_candidates = settings[:instruction_candidates] if settings[:instruction_candidates]
  config.bootstrap_sets = settings[:bootstrap_sets] if settings[:bootstrap_sets]
  config.max_bootstrapped_examples = settings[:max_bootstrapped_examples] if settings.key?(:max_bootstrapped_examples)
  config.max_labeled_examples = settings[:max_labeled_examples] if settings.key?(:max_labeled_examples)
  config.optimization_strategy = settings[:optimization_strategy] if settings[:optimization_strategy]
  config.early_stopping_patience = settings[:early_stopping_patience] if settings[:early_stopping_patience]
  config.minibatch_size = settings[:minibatch_size] if settings.key?(:minibatch_size)
  config.valset_target_size = settings[:valset_target_size] if settings[:valset_target_size]
end
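Only the keys a preset actually defines are copied onto the config; anything the preset omits keeps its current value, and an unknown preset falls back to an empty hash so only auto_preset is assigned. A hedged illustration follows; the :light preset name and its values are assumptions, not actual AUTO_PRESET_SETTINGS entries.

# If AUTO_PRESET_SETTINGS[:light] were { candidate_budget: 6, instruction_candidates: 3 },
# then apply_auto_defaults(config, :light) would set:
#   config.auto_preset                = :light
#   config.num_trials                 = 6
#   config.num_instruction_candidates = 3
# and leave every other setting on the config untouched.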
.configure(&block) ⇒ Object
Class-level configuration method - sets defaults for new instances
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 190

def self.configure(&block)
  if block_given?
    # Store configuration in a class variable for new instances
    @default_config_block = block
  end
end
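A sketch of setting class-level defaults. The attribute names are taken from the settings applied in .apply_auto_defaults above; the values are illustrative.

DSPy::Teleprompt::MIPROv2.configure do |config|
  config.num_trials = 12                # illustrative value
  config.num_instruction_candidates = 5 # illustrative value
end

# The stored block is replayed inside #initialize, so every new instance
# starts from these defaults:
optimizer = DSPy::Teleprompt::MIPROv2.new(metric: my_metric)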
.default_config_block ⇒ Object
Get the default configuration block
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 198

def self.default_config_block
  @default_config_block
end
Instance Method Details
#compile(program, trainset:, valset: nil) ⇒ Object
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 360

def compile(program, trainset:, valset: nil)
  validate_inputs(program, trainset, valset)

  instrument_step('miprov2_compile', {
    trainset_size: trainset.size,
    valset_size: valset&.size || 0,
    num_trials: config.num_trials,
    optimization_strategy: optimization_strategy_name,
    mode: infer_auto_mode
  }) do
    # Convert examples to typed format
    typed_trainset = ensure_typed_examples(trainset)
    typed_valset = valset ? ensure_typed_examples(valset) : nil

    if auto_preset_active?
      typed_trainset, typed_valset = prepare_datasets_for_auto(typed_trainset, typed_valset)
      typed_valset = apply_auto_preset!(program, typed_valset)
    else
      typed_valset = limit_validation_set(typed_valset, config.valset_target_size)
    end

    # Use validation set if available, otherwise use part of training set
    evaluation_set = typed_valset || typed_trainset.take([typed_trainset.size / 3, 10].max)

    # Phase 1: Bootstrap few-shot examples
    emit_event('phase_start', { phase: 1, name: 'bootstrap' })
    demo_candidates = phase_1_bootstrap(program, typed_trainset)
    emit_event('phase_complete', {
      phase: 1,
      num_predictors: demo_candidates.keys.size,
      demo_sets_per_predictor: demo_candidates[0]&.size || 0
    })

    # Phase 2: Generate instruction candidates
    emit_event('phase_start', { phase: 2, name: 'instruction_proposal' })
    proposal_result = phase_2_propose_instructions(program, typed_trainset, demo_candidates)
    emit_event('phase_complete', {
      phase: 2,
      num_candidates: proposal_result.num_candidates,
      best_instruction_preview: proposal_result.best_instruction[0, 50]
    })

    # Phase 3: Bayesian optimization
    emit_event('phase_start', { phase: 3, name: 'optimization' })
    optimization_result = phase_3_optimize(
      program,
      evaluation_set,
      proposal_result,
      demo_candidates
    )
    emit_event('phase_complete', {
      phase: 3,
      best_score: optimization_result[:best_score],
      trials_completed: optimization_result[:trials_completed]
    })

    # Build final result
    final_result = build_miprov2_result(
      optimization_result,
      demo_candidates,
      proposal_result
    )

    @trial_history = optimization_result[:trial_logs] || {}

    save_results(final_result)

    final_result
  end
end
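A hedged sketch of a compile call; my_metric, my_program, and train_examples stand in for objects built elsewhere with DSPy.

optimizer = DSPy::Teleprompt::MIPROv2.new(metric: my_metric)
result = optimizer.compile(my_program, trainset: train_examples) # no valset supplied
# With no validation set, evaluation falls back to a slice of the training set:
#   train_examples.take([train_examples.size / 3, 10].max)
# The return value is the MIPROv2Result assembled by build_miprov2_result.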