Class: DNN::Optimizers::AdaBound

Inherits:
Adam
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Adam

#alpha, #amsgrad, #beta1, #beta2, #eps

Attributes inherited from Optimizer

#clip_norm, #status

Instance Method Summary

Methods inherited from Optimizer

from_hash, #update, #update_layers

Constructor Details

#initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil) ⇒ AdaBound

Returns a new instance of AdaBound.

Parameters:

  • final_lr (Float) (defaults to: 0.1)

    Final learning rate.

  • gamma (Float) (defaults to: 0.001)

    Controls how quickly the lower and upper bounds on the learning rate converge to final_lr.



# File 'lib/dnn/core/optimizers.rb', line 314

def initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil)
  super(alpha: alpha, beta1: beta1, beta2: beta2, eps: eps, amsgrad: amsgrad, clip_norm: clip_norm)
  @final_lr = final_lr
  @gamma = gamma
end
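
A minimal usage sketch, assuming the gem is loaded with require "dnn"; the model-setup call in the trailing comment is a hypothetical taken from the wider ruby-dnn API, not from this page:

require "dnn"

# Construct AdaBound with an explicit bound schedule; omitted keywords
# fall back to the defaults shown in the signature above.
optimizer = DNN::Optimizers::AdaBound.new(alpha: 0.001, final_lr: 0.1, gamma: 0.001)

# Assumed usage (hypothetical model object): hand the optimizer to a
# model before training, e.g. model.setup(optimizer, loss_function).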

Instance Attribute Details

#final_lr ⇒ Object

Returns the value of attribute final_lr.



# File 'lib/dnn/core/optimizers.rb', line 309

def final_lr
  @final_lr
end

#gamma ⇒ Object

Returns the value of attribute gamma.



# File 'lib/dnn/core/optimizers.rb', line 310

def gamma
  @gamma
end
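
For intuition on how final_lr and gamma interact: the AdaBound paper (Luo et al., 2019) clips Adam's per-step learning rate between a lower and an upper bound that both converge toward final_lr as the step count grows, with gamma setting the convergence speed. A hypothetical sketch of that schedule (the library's internal formula may differ):

final_lr = 0.1
gamma = 0.001

[1, 1_000, 100_000].each do |t|
  lower = final_lr * (1.0 - 1.0 / (gamma * t + 1.0)) # rises toward final_lr
  upper = final_lr * (1.0 + 1.0 / (gamma * t))       # falls toward final_lr
  puts format("step %-7d lower=%.6f upper=%.6f", t, lower, upper)
end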

Instance Method Details

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 354

def load_hash(hash)
  initialize(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
             final_lr: hash[:final_lr], gamma: hash[:gamma], eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
end

#to_hash ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 320

def to_hash
  {
    class: self.class.name, alpha: @alpha, beta1: @beta1, beta2: @beta2,
    final_lr: @final_lr, gamma: @gamma, eps: @eps, amsgrad: @amsgrad, clip_norm: @clip_norm
  }
end
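
Together with #load_hash, this allows the optimizer's configuration to be round-tripped. A sketch, assuming Optimizer.from_hash (listed in the inherited methods above) reconstructs an instance from the serialized hash:

opt = DNN::Optimizers::AdaBound.new(final_lr: 0.05, gamma: 0.002)
hash = opt.to_hash
# hash[:class] # => "DNN::Optimizers::AdaBound"

restored = DNN::Optimizers::Optimizer.from_hash(hash)
restored.final_lr # => 0.05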