Class: MachineLearningWorkbench::Optimizer::NaturalEvolutionStrategies::BDNES

Inherits:
Base < Object
Defined in:
lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb

Overview

Block-Diagonal Natural Evolution Strategies (BD-NES). Maintains one XNES optimizer per block of a block-diagonal covariance matrix; complete individuals are reconstructed by concatenating the blocks' samples before being passed to the objective function.

Constant Summary

MAX_RSEED =

Upper bound for block random seeds, chosen so they lie in the same range as `Random.new_seed`.

10**Random.new_seed.size

Instance Attribute Summary

Attributes inherited from Base

#best, #eye, #last_fits, #ndims, #obj_fn, #opt_type, #parallel_fit, #rescale_lrate, #rescale_popsize, #rng

Instance Method Summary

Methods inherited from Base

#cmaes_lrate, #cmaes_popsize, #cmaes_utilities, #interface_methods, #lrate, #move_inds, #sorted_inds, #standard_normal_sample, #standard_normal_samples, #utils

Constructor Details

#initialize(ndims_lst, obj_fn, opt_type, parallel_fit: false, rseed: nil, parallel_update: false, **init_opts) ⇒ BDNES

Initializes a list of XNES optimizers, one for each block. See class `Base` for a description of the remaining arguments.

Parameters:

  • ndims_lst (Array<Integer>)

    list of sizes for each block in the block-diagonal matrix. Note: entire (reconstructed) individuals are passed to the `obj_fn`, regardless of the block division described here.

  • init_opts (Hash)

    the remaining options, passed directly to each XNES block



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 19

def initialize ndims_lst, obj_fn, opt_type, parallel_fit: false, rseed: nil, parallel_update: false, **init_opts
  # mu_init: 0, sigma_init: 1
  # init_opts = {rseed: rseed, mu_init: mu_init, sigma_init: sigma_init}
  # TODO: accept list of `mu_init`s and `sigma_init`s
  @ndims_lst, @obj_fn, @opt_type, @parallel_fit = ndims_lst, obj_fn, opt_type, parallel_fit
  block_fit = -> (*args) { raise "Should never be called" }
  # the BD-NES seed ensures deterministic reproducibility,
  # but each block needs a distinct seed derived from it
  # puts "BD-NES rseed: #{rseed}"  # debug print, currently disabled
  @rng = Random.new rseed || Random.new_seed
  @blocks = ndims_lst.map do |ndims|
    b_rseed = rng.rand MAX_RSEED
    XNES.new ndims, block_fit, opt_type, rseed: b_rseed, **init_opts
  end
  # Need `popsize` to be the same for all blocks, to make complete individuals
  @popsize = blocks.map(&:popsize).max
  blocks.each { |xnes| xnes.instance_variable_set :@popsize, popsize }

  @best = [(opt_type==:max ? -1 : 1) * Float::INFINITY, nil]
  @last_fits = []
  @parallel_update = parallel_update
  require 'parallel' if parallel_update
end
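
A minimal usage sketch (assuming the gem's top-level require; the toy objective and block sizes are illustrative, not part of the library). Per the parameter docs above, the objective receives entire reconstructed individuals:

require 'machine_learning_workbench'

NES = MachineLearningWorkbench::Optimizer::NaturalEvolutionStrategies

# Toy objective: negative sum of squares of the full individual;
# maximizing it drives the search toward the zero vector.
obj = -> (ind) { -ind.to_a.sum { |v| v * v } }

# Two blocks of sizes 3 and 2 => complete individuals of dimension 5
opt = NES::BDNES.new [3, 2], obj, :max, rseed: 1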

Instance Attribute Details

#blocks ⇒ Object (readonly)

Returns the value of attribute blocks.



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 9

def blocks
  @blocks
end

#ndims_lst ⇒ Object (readonly)

Returns the value of attribute ndims_lst.



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 9

def ndims_lst
  @ndims_lst
end

#parallel_update ⇒ Object (readonly)

Returns the value of attribute parallel_update.



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 9

def parallel_update
  @parallel_update
end

#popsize ⇒ Object (readonly)

Returns the value of attribute popsize.



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 9

def popsize
  @popsize
end

Instance Method Details

#convergence ⇒ Object



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 124

def convergence
  blocks.map(&:convergence).reduce(:+)
end

#load(data) ⇒ Object



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 132

def load data
  fit = -> (*args) { raise "Should never be called" }
  @blocks = data.map do |block_data|
    ndims = block_data.first.size
    XNES.new(ndims, fit, opt_type).tap do |nes|
      nes.load block_data
    end
  end
end

#mu ⇒ Object



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 116

def mu
  blocks.map(&:mu).reduce { |mem, var| mem.concatenate var, axis: 1 }
end
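
The block means are joined along axis 1; assuming XNES stores means as row vectors (an inference from the `axis: 1` concatenation above), the result for the constructor sketch would be:

# Continuing the constructor sketch: blocks of widths 3 and 2
opt.mu.shape  # => [1, 5], the two block means joined side by side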

#save ⇒ Object



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 128

def save
  blocks.map(&:save)
end
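
#save and #load round-trip the per-block state; a hypothetical persistence sketch (Marshal is just one serialization choice, not mandated by the library):

# Continuing the constructor sketch above
data = opt.save                                     # one dump per block
File.binwrite 'bdnes_state.bin', Marshal.dump(data)

restored = NES::BDNES.new [3, 2], obj, :max
restored.load Marshal.load(File.binread('bdnes_state.bin'))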

#sigma ⇒ Object

Raises:

  • (NotImplementedError)


# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 120

def sigma
  raise NotImplementedError, "need to write a concatenation like for mu here"
end

#sorted_inds_lst ⇒ Object



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 43

def sorted_inds_lst
  # Build samples and inds from the list of blocks
  samples_lst, inds_lst = blocks.map do |xnes|
    samples = xnes.standard_normal_samples
    inds = xnes.move_inds(samples)
    [samples.to_a, inds]
  end.transpose

  # Join the individuals for evaluation
  full_inds = inds_lst.reduce { |mem, var| mem.concatenate var, axis: 1 }
  # Need to fix sample dimensions for sorting
  # - current dims: nblocks x ninds x [block sizes]
  # - for sorting: ninds x nblocks x [block sizes]
  full_samples = samples_lst.transpose

  # Evaluate fitness of complete individuals
  fits = parallel_fit ? obj_fn.call(full_inds) : full_inds.map(&obj_fn)
  # Quick cure for NaN fitnesses: replace them with the worst possible fitness
  fits = fits.map { |x| x.nan? ? (opt_type==:max ? -1 : 1) * Float::INFINITY : x }
  @last_fits = fits # allows checking for stagnation

  # Sort inds based on fit and opt_type, save best
  # sorted = [fits, full_inds, full_samples].transpose.sort_by(&:first)
  # sorted.reverse! if opt_type==:min
  # this_best = sorted.last.take(2)
  # opt_cmp_fn = opt_type==:min ? :< : :>
  # @best = this_best if this_best.first.send(opt_cmp_fn, best.first)
  # sorted_samples = sorted.map(&:last)

  # BUG IN NARRAY SORT!! ruby-numo/numo-narray#97
  # sort_idxs = fits.sort_index
  sort_idxs = fits.size.times.sort_by { |i| fits[i] }.to_na

  sort_idxs = sort_idxs.reverse if opt_type == :min
  this_best = [fits[sort_idxs[-1]], full_inds[sort_idxs[-1], true]]
  opt_cmp_fn = opt_type==:min ? :< : :>
  @best = this_best if this_best.first.send(opt_cmp_fn, best.first)
  sorted_samples = full_samples.values_at(*sort_idxs)

  # Need to bring back sample dimensions for each block
  # - current dims: ninds x nblocks x [block sizes]
  # - target blocks list: nblocks x ninds x [block sizes]
  block_samples = sorted_samples.transpose

  # then back to NArray for usage in training
  block_samples.map(&:to_na)
end
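
The two transpose calls are the crux of this method: samples start grouped per block (nblocks x ninds), must be regrouped per individual for fitness sorting, then regrouped per block for training. A plain-array sketch of that shuffle:

# 2 blocks x 3 individuals, labeled "b<block>_i<individual>"
samples_lst = [%w[b1_i1 b1_i2 b1_i3],
               %w[b2_i1 b2_i2 b2_i3]]
by_individual = samples_lst.transpose
# => [["b1_i1", "b2_i1"], ["b1_i2", "b2_i2"], ["b1_i3", "b2_i3"]]
# ...rows would be reordered by fitness here, then regrouped per block:
by_individual.transpose == samples_lst  # => true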

#train(picks: sorted_inds_lst) ⇒ Object

TODO: refactor to DRY



# File 'lib/machine_learning_workbench/optimizer/natural_evolution_strategies/bdnes.rb', line 98

def train picks: sorted_inds_lst
  if parallel_update
    # Parallel.each(blocks.zip(picks)) do |xnes, s_inds|
    #   xnes.train picks: s_inds
    # end
    # Actually it's not this simple.
    # Forks do not act on the parent, so I need to send back updated mu and sigma
    # Luckily we have `NES#save` and `NES#load` at the ready
    # Next: need to implement `#marshal_dump` and `#marshal_load` in `Base`
    # Actually using `Cumo` rather than `Parallel` may avoid marshaling altogether
    raise NotImplementedError, "Should dump and load each instance"
  else
    blocks.zip(picks).each do |xnes, s_inds|
      xnes.train picks: s_inds
    end
  end
end
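
A typical driver loop over the pieces above (a sketch; the stopping threshold is an arbitrary illustration, and #convergence sums the per-block convergence measures):

# Continuing the constructor sketch above
100.times do
  opt.train                        # samples, evaluates, updates every block
  break if opt.convergence < 1e-6  # hypothetical stopping criterion
end
best_fit, best_ind = opt.best      # running best, tracked in #sorted_inds_lst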