Class: MyMathGem::Optimization::Optimizer

Inherits:
Object
Defined in:
lib/my_math_gem/optimization.rb

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(f:, grad: nil, x_init:, learning_rate: 0.01, max_iter: 1000, tol: 1e-6, method: :gd, momentum: 0.9, beta1: 0.9, beta2: 0.999, epsilon: 1e-8, lr_decay: nil, decay_rate: 0.99, verbose: false, early_stopping: true) ⇒ Optimizer

Returns a new instance of Optimizer.

# File 'lib/my_math_gem/optimization.rb', line 6

def initialize(f:, grad: nil, x_init:, learning_rate: 0.01, max_iter: 1000, tol: 1e-6,
               method: :gd, momentum: 0.9, beta1: 0.9, beta2: 0.999, epsilon: 1e-8,
               lr_decay: nil, decay_rate: 0.99, verbose: false, early_stopping: true)
  @f = f
  @grad = grad
  @x = x_init.dup
  @learning_rate = learning_rate.to_f
  @max_iter = max_iter
  @tol = tol
  @method = method
  @momentum = momentum
  @beta1 = beta1
  @beta2 = beta2
  @epsilon = epsilon
  @lr_decay = lr_decay  # :linear, :exponential, or nil
  @decay_rate = decay_rate
  @verbose = verbose
  @early_stopping = early_stopping

  @v = Array.new(@x.size, 0.0)  # velocity (momentum method)
  @m = Array.new(@x.size, 0.0)  # first-moment estimate (Adam)
  @s = Array.new(@x.size, 0.0)  # second-moment estimate (Adam)
  @t = 0                        # timestep (Adam)

  @grad ||= ->(x) { numerical_gradient(@f, x) }

  @history = []
end
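
A constructor call might look like the sketch below. The require path and the MyMathGem::Optimization::Optimizer constant are assumed from the file path and class name shown above; the objective and gradient lambdas are illustrative only. If grad: is omitted, the optimizer falls back to a numerical gradient of f, as set at the end of the constructor.

# Hypothetical usage: minimize f(x) = (x0 - 3)^2 + (x1 + 1)^2 with an
# analytic gradient and the Adam update rule.
require 'my_math_gem/optimization'  # assumed from the file path above

f    = ->(x) { (x[0] - 3.0)**2 + (x[1] + 1.0)**2 }
grad = ->(x) { [2.0 * (x[0] - 3.0), 2.0 * (x[1] + 1.0)] }

optimizer = MyMathGem::Optimization::Optimizer.new(
  f: f,
  grad: grad,
  x_init: [0.0, 0.0],
  learning_rate: 0.05,
  max_iter: 500,
  method: :adam
)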

Instance Attribute Details

#fx ⇒ Object (readonly)

Returns the value of attribute fx.

# File 'lib/my_math_gem/optimization.rb', line 4

def fx
  @fx
end

#history ⇒ Object (readonly)

Returns the value of attribute history.

# File 'lib/my_math_gem/optimization.rb', line 4

def history
  @history
end

#x ⇒ Object (readonly)

Returns the value of attribute x.

# File 'lib/my_math_gem/optimization.rb', line 4

def x
  @x
end
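
After #optimize (documented below) has run, each entry in history is a Hash with :iteration, :x, and :fx keys, so the read-only attributes can be used to trace convergence. Continuing the hypothetical example from the constructor section:

optimizer.history.each do |entry|
  puts format('iter %4d  f(x) = %.8f', entry[:iteration], entry[:fx])
end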

Instance Method Details

#optimize ⇒ Object

# File 'lib/my_math_gem/optimization.rb', line 35

def optimize
  prev_fx = nil

  @max_iter.times do |i|
    @t += 1
    g = @grad.call(@x)
    raise ArgumentError, "Gradien harus array dengan panjang sama seperti x" unless valid_gradient?(g)

    case @method
    when :gd
      step = scalar_multiply(g, @learning_rate)
      @x = vector_subtract(@x, step)
    when :momentum
      @v = vector_add(scalar_multiply(@v, @momentum), scalar_multiply(g, 1 - @momentum))
      step = scalar_multiply(@v, @learning_rate)
      @x = vector_subtract(@x, step)
    when :adam
      update_adam(g)
    else
      raise ArgumentError, "Metode optimasi tidak dikenal: #{@method}"
    end

    @fx = @f.call(@x)
    @history << { iteration: i + 1, x: @x.dup, fx: @fx }

    if @verbose
      puts "Iterasi #{i+1}: f(x) = #{@fx}, ||grad|| = #{vector_norm(g)}"
    end

    if prev_fx && @early_stopping
      change = (prev_fx - @fx).abs
      break if change < @tol
    end
    prev_fx = @fx

    apply_lr_decay(i)
  end

  @x
end
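
Continuing the same hypothetical example, a full run returns the final point and updates the read-only attributes:

x_min = optimizer.optimize

puts "argmin     : #{x_min.inspect}"          # expected to be close to [3.0, -1.0]
puts "f(argmin)  : #{optimizer.fx}"           # expected to be close to 0.0
puts "iterations : #{optimizer.history.size}" # at most max_iter (early stopping when |f change| < tol)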