Class: DNN::Optimizers::AdaGrad

Inherits:
Object
Defined in:
lib/dnn/core/optimizers.rb

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(learning_rate = 0.01) ⇒ AdaGrad

Returns a new instance of AdaGrad.



# File 'lib/dnn/core/optimizers.rb', line 54

def initialize(learning_rate = 0.01)
  super(learning_rate)
  # Accumulated squared gradients, keyed by layer and parameter name.
  @g = {}
end
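
A minimal construction sketch; only the constructor documented above is assumed, and attaching the optimizer to a model is outside this section:

# Default learning rate (0.01) and an explicit one.
optimizer = DNN::Optimizers::AdaGrad.new
custom    = DNN::Optimizers::AdaGrad.new(0.001)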

Class Method Details

.load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 59

def self.load_hash(hash)
  self.new(hash[:learning_rate])
end
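
As a usage sketch, assuming the serialized hash carries the :learning_rate key shown above, an optimizer can be restored from it:

# Hypothetical hash, e.g. read back from a saved model file.
hash = { learning_rate: 0.001 }
optimizer = DNN::Optimizers::AdaGrad.load_hash(hash)
optimizer.class # => DNN::Optimizers::AdaGrad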

Instance Method Details

#update(layer) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 63

def update(layer)
  @g[layer] ||= {}
  layer.params.each_key do |key|
    # Accumulate the squared gradient for this parameter.
    @g[layer][key] ||= 0
    @g[layer][key] += layer.grads[key]**2
    # Scale the step by the inverse square root of the accumulated
    # squared gradients; 1e-7 guards against division by zero.
    layer.params[key] -= (@learning_rate / NMath.sqrt(@g[layer][key] + 1e-7)) * layer.grads[key]
  end
end
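
For reference, this method applies the standard AdaGrad rule implemented by the code above: for each parameter theta with gradient grad, the squared gradients are accumulated and the step is scaled by their inverse square root, where eta is @learning_rate:

g_t = g_{t-1} + (\nabla\theta_t)^2
\theta_{t+1} = \theta_t - \frac{\eta}{\sqrt{g_t + 10^{-7}}} \, \nabla\theta_t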