Class: DNN::Optimizers::Adam

Inherits:
Optimizer show all
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary collapse

Attributes inherited from Optimizer

#learning_rate

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(learning_rate = 0.001, beta1: 0.9, beta2: 0.999) ⇒ Adam



166
167
168
169
170
171
172
173
# File 'lib/dnn/core/optimizers.rb', line 166

# Build an Adam optimizer.
#
# learning_rate - base step size (default 0.001, as in the Adam paper).
# beta1         - exponential decay rate for the first moment estimates.
# beta2         - exponential decay rate for the second moment estimates.
def initialize(learning_rate = 0.001, beta1: 0.9, beta2: 0.999)
  super(learning_rate)
  # Per-layer running moment estimates, lazily populated in #update.
  @iter = 0
  @m = {}
  @v = {}
  @beta1, @beta2 = beta1, beta2
end

Instance Attribute Details

#beta1 ⇒ Object

Returns the value of attribute beta1.



159
160
161
# File 'lib/dnn/core/optimizers.rb', line 159

# Exponential decay rate for the first moment estimates.
def beta1; @beta1; end

#beta2 ⇒ Object

Returns the value of attribute beta2.



160
161
162
# File 'lib/dnn/core/optimizers.rb', line 160

# Exponential decay rate for the second moment estimates.
def beta2; @beta2; end

Class Method Details

.load_hash(hash) ⇒ Object



162
163
164
# File 'lib/dnn/core/optimizers.rb', line 162

# Rebuild an Adam optimizer from a serialized settings hash
# (the counterpart of #to_hash).
def self.load_hash(hash)
  # Inside a singleton method, self is the class, so a bare +new+ suffices.
  new(hash[:learning_rate], beta1: hash[:beta1], beta2: hash[:beta2])
end

Instance Method Details

#to_hash ⇒ Object



189
190
191
# File 'lib/dnn/core/optimizers.rb', line 189

# Serialize the optimizer settings, merging Adam's decay rates
# into the hash produced by the superclass.
def to_hash
  adam_settings = { beta1: @beta1, beta2: @beta2 }
  super(adam_settings)
end

#update(layer) ⇒ Object



175
176
177
178
179
180
181
182
183
184
185
186
187
# File 'lib/dnn/core/optimizers.rb', line 175

def update(layer)
  @iter += 1
  @m[layer] ||= {}
  @v[layer] ||= {}
  lr = @learning_rate * Math.sqrt(1 - @beta2**@iter) / (1 - @beta1**@iter) 
  layer.params.each_key do |key|
    @m[layer][key] ||= 0
    @v[layer][key] ||= 0
    @m[layer][key] += (1 - @beta1) * (layer.grads[key] - @m[layer][key])
    @v[layer][key] += (1 - @beta2) * (layer.grads[key]**2 - @v[layer][key])
    layer.params[key] -= lr * @m[layer][key] / Xumo::NMath.sqrt(@v[layer][key] + 1e-7)
  end
end