Class: DNN::Optimizers::AdaDelta

Inherits:
Optimizer
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Optimizer

#learning_rate

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(rho: 0.95) ⇒ AdaDelta

Returns a new instance of AdaDelta.

# File 'lib/dnn/core/optimizers.rb', line 131

def initialize(rho: 0.95)
  super(nil) # AdaDelta needs no global learning rate
  @rho = rho
  @h = {}    # running average of squared gradients, per parameter
  @s = {}    # running average of squared updates, per parameter
end
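
A minimal construction sketch (assuming the gem is loaded with require "dnn"; the attribute read is for illustration):

require "dnn"

# The default decay rate is 0.95; pass :rho to override it.
optimizer = DNN::Optimizers::AdaDelta.new(rho: 0.9)
optimizer.rho # => 0.9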

Instance Attribute Details

#rho ⇒ Object

Returns the value of attribute rho.

# File 'lib/dnn/core/optimizers.rb', line 125

def rho
  @rho
end

Class Method Details

.load_hash(hash) ⇒ Object

# File 'lib/dnn/core/optimizers.rb', line 127

def self.load_hash(hash)
  self.new(rho: hash[:rho])
end
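
A deserialization sketch: .load_hash only reads the :rho key, so a bare hash suffices.

restored = DNN::Optimizers::AdaDelta.load_hash({ rho: 0.9 })
restored.rho # => 0.9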

Instance Method Details

#to_hash ⇒ Object

# File 'lib/dnn/core/optimizers.rb', line 149

def to_hash
  super({rho: @rho})
end
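
Together with .load_hash, this lets an optimizer round-trip through its hash form. A sketch (the base keys merged in by Optimizer#to_hash are an assumption here; only :rho comes from this class, and .load_hash ignores the rest):

optimizer = DNN::Optimizers::AdaDelta.new(rho: 0.95)
hash = optimizer.to_hash # contains at least { rho: 0.95 }
copy = DNN::Optimizers::AdaDelta.load_hash(hash)
copy.rho # => 0.95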

#update(params) ⇒ Object

# File 'lib/dnn/core/optimizers.rb', line 138

def update(params)
  params.select { |key, param| param.grad }.each_value do |param|
    @h[param] ||= Xumo::SFloat.zeros(*param.data.shape)
    @s[param] ||= Xumo::SFloat.zeros(*param.data.shape)
    # Decay the running average of squared gradients.
    @h[param] = @rho * @h[param] + (1 - @rho) * param.grad**2
    # Scale the gradient by the RMS of past updates over the RMS of
    # past gradients; the 1e-6 term guards against division by zero.
    v = (NMath.sqrt(@s[param] + 1e-6) / NMath.sqrt(@h[param] + 1e-6)) * param.grad
    # Decay the running average of squared updates, then apply the step.
    @s[param] = @rho * @s[param] + (1 - @rho) * v**2
    param.data -= v
  end
end
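
In equation form, with decay rate ρ and ε = 1e-6, each parameter θ with gradient g follows the standard AdaDelta rule (here @h stores h and @s stores s):

\begin{aligned}
h_t &= \rho\, h_{t-1} + (1 - \rho)\, g_t^2 \\
v_t &= \frac{\sqrt{s_{t-1} + \epsilon}}{\sqrt{h_t + \epsilon}}\, g_t \\
s_t &= \rho\, s_{t-1} + (1 - \rho)\, v_t^2 \\
\theta_t &= \theta_{t-1} - v_t
\end{aligned}

Because the step size comes from the ratio of the two running averages, no global learning rate is required, which is why the constructor passes nil to super.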