Class: DNN::Optimizers::AdaBound

Inherits:
Adam show all
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary collapse

Attributes inherited from Adam

#alpha, #amsgrad, #beta1, #beta2, #eps

Attributes inherited from Optimizer

#clip_norm, #status

Instance Method Summary collapse

Methods inherited from Optimizer

#dump, from_hash, load, #update

Constructor Details

#initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil) ⇒ AdaBound

Returns a new instance of AdaBound.

Parameters:

  • final_lr (Float) (defaults to: 0.1)

    Final learning rate.

  • gamma (Float) (defaults to: 0.001)

    Controls the range between the lower and upper learning-rate bounds.



327
328
329
330
331
# File 'lib/dnn/core/optimizers.rb', line 327

# Builds an AdaBound optimizer: the Adam-family settings are forwarded to the
# parent constructor unchanged, while the AdaBound-specific ones are kept here.
# @param final_lr [Float] Final learning rate (defaults to 0.1).
# @param gamma [Float] Lower and upper range value (defaults to 0.001).
def initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil)
  # Everything Adam understands goes up the inheritance chain as-is.
  super(alpha: alpha, beta1: beta1, beta2: beta2,
        eps: eps, amsgrad: amsgrad, clip_norm: clip_norm)
  # AdaBound-only state.
  @final_lr, @gamma = final_lr, gamma
end

Instance Attribute Details

#final_lr ⇒ Object

Returns the value of attribute final_lr.



322
323
324
# File 'lib/dnn/core/optimizers.rb', line 322

# Reader for the final learning rate toward which the bounds converge.
# @return [Object] the value of attribute final_lr.
def final_lr
  @final_lr
end

#gamma ⇒ Object

Returns the value of attribute gamma.



323
324
325
# File 'lib/dnn/core/optimizers.rb', line 323

# Reader for the bound-range value set at construction time.
# @return [Object] the value of attribute gamma.
def gamma
  @gamma
end

Instance Method Details

#load_hash(hash) ⇒ Object



367
368
369
370
# File 'lib/dnn/core/optimizers.rb', line 367

# Restores the optimizer state from a serialized hash by re-running the
# constructor with the deserialized settings. Missing keys come through as
# nil, exactly as in the original direct keyword-argument form.
def load_hash(hash)
  settings = {
    alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
    final_lr: hash[:final_lr], gamma: hash[:gamma], eps: hash[:eps],
    amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm]
  }
  initialize(**settings)
end

#to_hash ⇒ Object



333
334
335
336
337
338
# File 'lib/dnn/core/optimizers.rb', line 333

# Serializes the optimizer configuration.
# @return [Hash] every hyperparameter keyed by name, plus the class name
#   under :class so a deserializer can reconstruct the right optimizer type.
def to_hash
  {
    class: self.class.name, alpha: @alpha, beta1: @beta1, beta2: @beta2,
    # Read @amsgrad directly, consistent with the other instance variables
    # (the original went through the inherited attr reader instead).
    final_lr: @final_lr, gamma: @gamma, eps: @eps, amsgrad: @amsgrad, clip_norm: @clip_norm
  }
end