Class: DNN::Optimizers::AdaGrad

Inherits:
Optimizer
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Optimizer

#learning_rate

Class Method Summary

.load_hash(hash) ⇒ Object

Instance Method Summary

#update(params) ⇒ Object

Methods inherited from Optimizer

#to_hash

Constructor Details

#initialize(learning_rate = 0.01) ⇒ AdaGrad

Returns a new instance of AdaGrad.



# File 'lib/dnn/core/optimizers.rb', line 78

def initialize(learning_rate = 0.01)
  super(learning_rate)
  @g = {} # Per-parameter accumulators of squared gradients.
end
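
A minimal usage sketch, assuming the gem is loaded with require "dnn"; the learning_rate reader comes from the inherited attribute listed above, and attaching the optimizer to a model is not shown here.

require "dnn"

# Construct AdaGrad with the default and with a custom learning rate.
default_opt = DNN::Optimizers::AdaGrad.new
custom_opt  = DNN::Optimizers::AdaGrad.new(0.001)
custom_opt.learning_rate # => 0.001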

Class Method Details

.load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 83

def self.load_hash(hash)
  self.new(hash[:learning_rate])
end
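
A sketch of round-tripping the optimizer through its hash form, assuming the hash returned by the inherited #to_hash includes the :learning_rate key that .load_hash reads.

opt = DNN::Optimizers::AdaGrad.new(0.01)
hash = opt.to_hash
restored = DNN::Optimizers::AdaGrad.load_hash(hash)
restored.learning_rate # => 0.01

Only the learning rate is carried over; the per-parameter accumulators in @g start empty in the restored instance, as in any freshly constructed optimizer.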

Instance Method Details

#update(params) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 87

def update(params)
  # Update only the parameters that actually have a gradient.
  params.select { |key, param| param.grad }.each_value do |param|
    @g[param] ||= 0
    # Accumulate the squared gradient, then shrink the step by its square root.
    @g[param] += param.grad**2
    param.data -= (@learning_rate / NMath.sqrt(@g[param] + 1e-7)) * param.grad
  end
end
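
This is the AdaGrad rule: each parameter's squared gradients are accumulated in @g, and the step is scaled by the inverse square root of that accumulator (the 1e-7 term guards against division by zero), so parameters with a history of large gradients take smaller steps. Below is a standalone sketch of one such step, using Numo::NArray and a plain Struct as stand-ins for the library's numeric backend and parameter objects (both stand-ins are assumptions for illustration).

require "numo/narray"

# Hypothetical stand-in for the library's parameter object.
Param = Struct.new(:data, :grad)

param = Param.new(Numo::SFloat[1.0, 2.0], Numo::SFloat[0.5, -0.5])
learning_rate = 0.01
g = 0

# One AdaGrad step: accumulate the squared gradient, then scale the update.
g += param.grad**2
param.data -= (learning_rate / Numo::NMath.sqrt(g + 1e-7)) * param.grad

Because the accumulator only grows, the effective learning rate for each element decays monotonically over training, which is AdaGrad's characteristic behavior.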