Class: DNN::Optimizers::AdaGrad

Inherits:
Optimizer
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Optimizer

#clip_norm, #status

Instance Method Summary

Methods inherited from Optimizer

from_hash, #update, #update_layers

Constructor Details

#initialize(lr: 0.01, eps: 1e-7, clip_norm: nil) ⇒ AdaGrad

Returns a new instance of AdaGrad.

Parameters:

  • lr (Float) (defaults to: 0.01)

    Learning rate.

  • eps (Float) (defaults to: 1e-7)

    Small value added to the denominator to avoid division by zero.

  • clip_norm (Float | NilClass) (defaults to: nil)

    Gradient clip norm, inherited from Optimizer; nil disables gradient clipping.



# File 'lib/dnn/core/optimizers.rb', line 120

def initialize(lr: 0.01, eps: 1e-7, clip_norm: nil)
  super(clip_norm: clip_norm)
  @lr = lr
  @eps = eps
  @g = {}
  @status = { g: @g }
end
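
The hash @g stores per-parameter optimizer state and is exposed through #status under the key :g; in the standard AdaGrad rule this is the running sum of squared gradients that scales down the learning rate. A minimal usage sketch, assuming the usual ruby-dnn Sequential / Model#setup(optimizer, loss) workflow (the layer stack below is illustrative only):

require "dnn"

# Illustrative model; any DNN::Models::Model subclass is set up the same way.
model = DNN::Models::Sequential.new
model << DNN::Layers::InputLayer.new(784)
model << DNN::Layers::Dense.new(10)

# AdaGrad with the default eps and optional gradient norm clipping enabled.
optimizer = DNN::Optimizers::AdaGrad.new(lr: 0.01, eps: 1e-7, clip_norm: 1.0)
model.setup(optimizer, DNN::Losses::SoftmaxCrossEntropy.new)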

Instance Attribute Details

#eps ⇒ Object

Returns the value of attribute eps.



# File 'lib/dnn/core/optimizers.rb', line 116

def eps
  @eps
end

#lr ⇒ Object

Returns the value of attribute lr.



# File 'lib/dnn/core/optimizers.rb', line 115

def lr
  @lr
end

Instance Method Details

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 140

def load_hash(hash)
  initialize(lr: hash[:lr], eps: hash[:eps], clip_norm: hash[:clip_norm])
end

#to_hash ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 136

def to_hash
  super(lr: @lr, eps: @eps)
end
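
A sketch of the serialization round trip these two methods support, assuming the inherited Optimizer.from_hash dispatches on the class name recorded by the base to_hash and then calls #load_hash (the exact hash layout produced by the base class is an assumption):

opt = DNN::Optimizers::AdaGrad.new(lr: 0.05)
hash = opt.to_hash
# hash contains at least lr: 0.05 and eps: 1.0e-07, plus whatever the base
# Optimizer#to_hash adds (presumably the class name and clip_norm).

restored = DNN::Optimizers::Optimizer.from_hash(hash)
restored.lr  # => 0.05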