Class: DNN::Optimizers::AdaGrad

Inherits: Optimizer
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Optimizer

#learning_rate

Class Method Summary

Instance Method Summary

Methods inherited from Optimizer

#to_hash

Constructor Details

#initialize(learning_rate = 0.01) ⇒ AdaGrad

Returns a new instance of AdaGrad.



77
78
79
80
# File 'lib/dnn/core/optimizers.rb', line 77

# Builds a new AdaGrad optimizer.
#
# @param learning_rate [Float] step size; forwarded to the Optimizer superclass.
def initialize(learning_rate = 0.01)
  # Bare `super` forwards learning_rate to Optimizer#initialize unchanged.
  super
  # Per-layer accumulators for squared gradients, filled lazily in #update.
  @g = {}
end

Class Method Details

.load_hash(hash) ⇒ Object



82
83
84
# File 'lib/dnn/core/optimizers.rb', line 82

# Reconstructs an AdaGrad optimizer from a serialized hash.
#
# @param hash [Hash] serialized form (reads the :learning_rate key).
# @return [AdaGrad] a freshly constructed optimizer.
def self.load_hash(hash)
  new(hash[:learning_rate])
end

Instance Method Details

#update(layer) ⇒ Object



86
87
88
89
90
91
92
93
# File 'lib/dnn/core/optimizers.rb', line 86

# Applies one AdaGrad step to every parameter of the given layer.
#
# For each parameter key, accumulates the squared gradient into @g and
# scales the step by the inverse square root of that running sum
# (1e-7 guards against division by zero).
#
# @param layer [Object] object exposing #params and #grads hashes
#   keyed identically — NOTE(review): assumed from usage here; confirm
#   against the layer classes.
def update(layer)
  accum = (@g[layer] ||= {})
  layer.params.each_key do |key|
    grad = layer.grads[key]
    accum[key] = (accum[key] || 0) + grad**2
    step = (@learning_rate / Xumo::NMath.sqrt(accum[key] + 1e-7)) * grad
    layer.params[key] -= step
  end
end