Class: DNN::Optimizers::Adam

Inherits:
Optimizer show all
Includes:
Numo
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary collapse

Attributes inherited from Optimizer

#learning_rate

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(learning_rate = 0.001, beta1 = 0.9, beta2 = 0.999) ⇒ Adam

Returns a new instance of Adam.



108
109
110
111
112
113
114
115
# File 'lib/dnn/core/optimizers.rb', line 108

# Builds an Adam optimizer.
#
# @param learning_rate [Float] base step size (alpha in the Adam paper).
# @param beta1 [Float] exponential decay rate for the first-moment estimate.
# @param beta2 [Float] exponential decay rate for the second-moment estimate.
def initialize(learning_rate = 0.001, beta1 = 0.9, beta2 = 0.999)
  super(learning_rate)
  @beta1, @beta2 = beta1, beta2
  # Step counter plus empty per-layer caches for the first/second moments.
  @iter, @m, @v = 0, {}, {}
end

Instance Attribute Details

#beta1 ⇒ Object

Returns the value of attribute beta1.



105
106
107
# File 'lib/dnn/core/optimizers.rb', line 105

# Reader for the first-moment decay rate hyperparameter.
#
# @return [Object] the current value of beta1.
def beta1; @beta1; end

#beta2 ⇒ Object

Returns the value of attribute beta2.



106
107
108
# File 'lib/dnn/core/optimizers.rb', line 106

# Reader for the second-moment decay rate hyperparameter.
#
# @return [Object] the current value of beta2.
def beta2; @beta2; end

Class Method Details

.load_hash(hash) ⇒ Object



117
118
119
# File 'lib/dnn/core/optimizers.rb', line 117

# Reconstructs an Adam optimizer from its serialized form (see #to_hash).
#
# @param hash [Hash] expects :learning_rate, :beta1 and :beta2 keys;
#   absent keys yield nil, exactly as with direct indexing.
def self.load_hash(hash)
  self.new(*hash.values_at(:learning_rate, :beta1, :beta2))
end

Instance Method Details

#to_hash ⇒ Object



135
136
137
# File 'lib/dnn/core/optimizers.rb', line 135

# Serializes the Adam-specific hyperparameters, merged with whatever the
# parent Optimizer#to_hash contributes.
def to_hash
  # Braces kept so the argument is passed as a positional Hash.
  adam_fields = { beta1: @beta1, beta2: @beta2 }
  super(adam_fields)
end

#update(layer) ⇒ Object



121
122
123
124
125
126
127
128
129
130
131
132
133
# File 'lib/dnn/core/optimizers.rb', line 121

# Performs one Adam update step on every parameter of +layer+.
#
# @param layer [Object] a layer exposing +params+ and +grads+ hashes keyed
#   by parameter name (presumably Numo::NArray values — verify with callers).
def update(layer)
  # Global step counter; drives the bias correction below.
  @iter += 1
  # Lazily create the per-layer first/second moment caches.
  @m[layer] ||= {}
  @v[layer] ||= {}
  # Bias-corrected step size: alpha * sqrt(1 - beta2^t) / (1 - beta1^t).
  lr = @learning_rate * Math.sqrt(1 - @beta2**@iter) / (1 - @beta1**@iter) 
  layer.params.each_key do |key|
    # Integer 0 seeds; they promote to array values on the first += below.
    @m[layer][key] ||= 0
    @v[layer][key] ||= 0
    # Incremental EMA form of m = beta1*m + (1-beta1)*grad (likewise for v
    # with the squared gradient).
    @m[layer][key] += (1 - @beta1) * (layer.grads[key] - @m[layer][key])
    @v[layer][key] += (1 - @beta2) * (layer.grads[key]**2 - @v[layer][key])
    # NOTE(review): epsilon sits INSIDE the sqrt here; the canonical Adam
    # update adds it outside (sqrt(v) + eps). Both guard the division, but
    # the results differ slightly — confirm this placement is intentional.
    layer.params[key] -= lr * @m[layer][key] / NMath.sqrt(@v[layer][key] + 1e-7)
  end
end