Class: DSPy::Teleprompt::MIPROv2

Inherits:
Teleprompter show all
Extended by:
T::Sig
Includes:
Dry::Configurable
Defined in:
lib/dspy/teleprompt/mipro_v2.rb

Overview

MIPROv2: Multi-prompt Instruction Proposal with Retrieval Optimization. A state-of-the-art prompt optimizer combining bootstrap sampling, instruction generation, and Bayesian optimization.

Defined Under Namespace

Modules: AutoMode — Classes: MIPROv2Result

Constant Summary collapse

EvaluatedCandidate =

Simple data structure for evaluated candidate configurations (immutable)

Data.define(
  :instruction,
  :few_shot_examples,
  :type,
  :metadata,
  :config_id
) do
  extend T::Sig

  # Build a candidate with a deterministic config ID derived from its content.
  # The ID is the first 12 hex chars of a SHA-256 over instruction, example
  # count, candidate type, and metadata hash, so identical configurations
  # always map to the same ID.
  sig { params(instruction: String, few_shot_examples: T::Array[T.untyped], type: CandidateType, metadata: T::Hash[Symbol, T.untyped]).returns(EvaluatedCandidate) }
  def self.create(instruction:, few_shot_examples: [], type: CandidateType::Baseline, metadata: {})
    content = "#{instruction}_#{few_shot_examples.size}_#{type.serialize}_#{metadata.hash}"
    config_id = Digest::SHA256.hexdigest(content)[0, 12]

    new(
      instruction: instruction.freeze,
      few_shot_examples: few_shot_examples.freeze,
      type: type,
      # FIX: original had a garbled `metadata: .freeze` (syntax error);
      # freeze the supplied metadata hash just like the other members.
      metadata: metadata.freeze,
      config_id: config_id
    )
  end

  # Serializable summary of the candidate. Note: few_shot_examples is
  # reported as a count, not the examples themselves.
  sig { returns(T::Hash[Symbol, T.untyped]) }
  def to_h
    {
      instruction: instruction,
      few_shot_examples: few_shot_examples.size,
      type: type.serialize,
      # FIX: original had a garbled `metadata: ,` (missing value).
      metadata: metadata,
      config_id: config_id
    }
  end
end

Instance Attribute Summary collapse

Attributes inherited from Teleprompter

#config, #evaluator, #metric

Class Method Summary collapse

Instance Method Summary collapse

Methods inherited from Teleprompter

#create_evaluator, #ensure_typed_examples, #evaluate_program, #save_results, #validate_inputs

Constructor Details

#initialize(metric: nil, **kwargs) ⇒ MIPROv2

Override dry-configurable’s initialize to add our parameter validation



248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 248

# Override dry-configurable's initialize to validate parameters before
# delegating, then set up optimizer state.
#
# @param metric [Object, nil] evaluation metric used during optimization
# @param kwargs [Hash] forwarded to dry-configurable's initializer
# @raise [ArgumentError] if the legacy :config parameter is passed
def initialize(metric: nil, **kwargs)
  # The old `config:` keyword is explicitly rejected in favor of .configure blocks.
  raise ArgumentError, "config parameter is no longer supported. Use .configure blocks instead." if kwargs.key?(:config)

  # dry-configurable performs its own setup first.
  super(**kwargs)

  # Apply any defaults registered via the class-level .configure block.
  defaults = self.class.default_config_block
  configure(&defaults) if defaults

  @metric = metric

  # Basic proposer config for now; refined later in the pipeline.
  @proposer = DSPy::Propose::GroundedProposer.new(config: DSPy::Propose::GroundedProposer::Config.new)
  @optimization_trace = []
  @evaluated_candidates = []
end

Instance Attribute Details

#mipro_configObject (readonly)

Returns the value of attribute mipro_config.



242
243
244
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 242

# Read-only accessor for the MIPRO-specific configuration object.
def mipro_config
  @mipro_config
end

#proposerObject (readonly)

Returns the value of attribute proposer.



245
246
247
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 245

# Read-only accessor for the grounded instruction proposer
# (a DSPy::Propose::GroundedProposer built in #initialize).
def proposer
  @proposer
end

Class Method Details

.configure(&block) ⇒ Object

Class-level configuration method - sets defaults for new instances



129
130
131
132
133
134
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 129

# Class-level configuration: remember the given block so every new
# instance applies it during #initialize. Calling without a block is a
# no-op (the previously stored defaults are kept).
def self.configure(&block)
  @default_config_block = block if block_given?
end

.default_config_blockObject

Get the default configuration block



137
138
139
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 137

# Returns the configuration block stored by .configure, or nil if none
# has been registered. New instances apply this block in #initialize.
def self.default_config_block
  @default_config_block
end

Instance Method Details

#compile(program, trainset:, valset: nil) ⇒ Object



278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
# File 'lib/dspy/teleprompt/mipro_v2.rb', line 278

# Run the full three-phase MIPROv2 optimization over +program+:
#   1. bootstrap few-shot examples from the training set,
#   2. propose instruction candidates grounded in those examples,
#   3. search candidate configurations via Bayesian optimization.
#
# @param program [Object] the DSPy program to optimize
# @param trainset [Array] training examples (converted to typed examples)
# @param valset [Array, nil] optional validation examples; when absent, a
#   slice of the training set is used for evaluation
# @return the MIPROv2Result built from the optimization outcome
def compile(program, trainset:, valset: nil)
  validate_inputs(program, trainset, valset)

  instrument_step('miprov2_compile', {
    trainset_size: trainset.size,
    valset_size: valset&.size || 0,
    num_trials: config.num_trials,
    optimization_strategy: config.optimization_strategy,
    mode: infer_auto_mode
  }) do
    # Convert examples to typed format
    typed_trainset = ensure_typed_examples(trainset)
    typed_valset = valset ? ensure_typed_examples(valset) : nil

    # Use validation set if available, otherwise hold out a third of the
    # training set (at least 10 examples) for evaluation.
    evaluation_set = typed_valset || typed_trainset.take([typed_trainset.size / 3, 10].max)

    # Phase 1: Bootstrap few-shot examples
    emit_event('phase_start', { phase: 1, name: 'bootstrap' })
    bootstrap_result = phase_1_bootstrap(program, typed_trainset)
    emit_event('phase_complete', { 
      phase: 1, 
      success_rate: bootstrap_result.statistics[:success_rate],
      candidate_sets: bootstrap_result.candidate_sets.size
    })

    # Phase 2: Generate instruction candidates
    emit_event('phase_start', { phase: 2, name: 'instruction_proposal' })
    proposal_result = phase_2_propose_instructions(program, typed_trainset, bootstrap_result)
    emit_event('phase_complete', { 
      phase: 2, 
      num_candidates: proposal_result.num_candidates,
      # Only the first 50 chars of the best instruction are logged.
      best_instruction_preview: proposal_result.best_instruction[0, 50]
    })

    # Phase 3: Bayesian optimization
    emit_event('phase_start', { phase: 3, name: 'optimization' })
    optimization_result = phase_3_optimize(
      program,
      evaluation_set,
      proposal_result,
      bootstrap_result
    )
    emit_event('phase_complete', { 
      phase: 3, 
      best_score: optimization_result[:best_score],
      trials_completed: optimization_result[:trials_completed]
    })

    # Build final result, persist it, and return it (block's last value).
    final_result = build_miprov2_result(
      optimization_result,
      bootstrap_result,
      proposal_result
    )

    save_results(final_result)
    final_result
  end
end