Class: DNN::Optimizers::Adam

Inherits:
Optimizer show all
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary collapse

Attributes inherited from Optimizer

#learning_rate

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(learning_rate = 0.001, beta1: 0.9, beta2: 0.999) ⇒ Adam

Returns a new instance of Adam.



163
164
165
166
167
168
169
170
# File 'lib/dnn/core/optimizers.rb', line 163

# Builds a new Adam optimizer.
#
# @param learning_rate [Float] base step size (default 0.001).
# @param beta1 [Float] decay rate for the first-moment moving average.
# @param beta2 [Float] decay rate for the second-moment moving average.
def initialize(learning_rate = 0.001, beta1: 0.9, beta2: 0.999)
  super(learning_rate)
  @beta1, @beta2 = beta1, beta2
  @iter = 0
  # Per-parameter moment caches, keyed by the parameter object itself.
  @m, @v = {}, {}
end

Instance Attribute Details

#beta1 ⇒ Object

Returns the value of attribute beta1.



156
157
158
# File 'lib/dnn/core/optimizers.rb', line 156

# Reader for the first-moment decay rate.
#
# @return [Object] current value of +@beta1+.
def beta1
  instance_variable_get(:@beta1)
end

#beta2 ⇒ Object

Returns the value of attribute beta2.



157
158
159
# File 'lib/dnn/core/optimizers.rb', line 157

# Reader for the second-moment decay rate.
#
# @return [Object] current value of +@beta2+.
def beta2
  instance_variable_get(:@beta2)
end

Class Method Details

.load_hash(hash) ⇒ Object



159
160
161
# File 'lib/dnn/core/optimizers.rb', line 159

# Reconstructs an Adam optimizer from its serialized hash form
# (the counterpart of #to_hash).
#
# @param hash [Hash] serialized state with :learning_rate, :beta1, :beta2.
# @return [Adam] a freshly built optimizer.
def self.load_hash(hash)
  lr, b1, b2 = hash.values_at(:learning_rate, :beta1, :beta2)
  new(lr, beta1: b1, beta2: b2)
end

Instance Method Details

#to_hash ⇒ Object



184
185
186
# File 'lib/dnn/core/optimizers.rb', line 184

# Serializes this optimizer's configuration, merging the beta settings
# into whatever the superclass produces.
#
# @return [Object] result of the superclass serialization.
def to_hash
  extra = { beta1: @beta1, beta2: @beta2 }
  # Passed positionally (not as keywords) to match the superclass contract.
  super(extra)
end

#update(params) ⇒ Object



172
173
174
175
176
177
178
179
180
181
182
# File 'lib/dnn/core/optimizers.rb', line 172

# Applies one Adam update step to every LearningParam in +params+.
#
# Maintains per-parameter exponential moving averages of the gradient
# (+@m+, first moment) and the squared gradient (+@v+, second moment),
# and scales the base learning rate by the standard Adam bias-correction
# factor for the current iteration.
#
# @param params [Hash] parameter-name => parameter map; entries that are
#   not LearningParam instances are ignored.
def update(params)
  @iter += 1
  # Bias-corrected step size for this iteration.
  lr = @learning_rate * Math.sqrt(1 - @beta2**@iter) / (1 - @beta1**@iter)
  params.select { |_key, param| param.is_a?(LearningParam) }.each_value do |param|
    @m[param] ||= 0
    @v[param] ||= 0
    @m[param] += (1 - @beta1) * (param.grad - @m[param])
    @v[param] += (1 - @beta2) * (param.grad**2 - @v[param])
    # NOTE(review): epsilon (1e-7) is added inside the sqrt rather than
    # outside as in the original Adam paper — preserved as-is to keep
    # numerics identical.
    param.data -= lr * @m[param] / Xumo::NMath.sqrt(@v[param] + 1e-7)
  end
end