Class: DNN::Optimizers::AdaBound

Inherits:
Adam
Defined in:
lib/dnn/core/optimizers.rb

Instance Attribute Summary

Attributes inherited from Adam

#alpha, #amsgrad, #beta1, #beta2, #eps

Attributes inherited from Optimizer

#clip_norm

Instance Method Summary

Methods inherited from Optimizer

from_hash, #update, #update_layers

Constructor Details

#initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil) ⇒ AdaBound

Returns a new instance of AdaBound.

Parameters:

  • final_lr (Float) (defaults to: 0.1)

    Final learning rate that both step-size bounds converge to.

  • gamma (Float) (defaults to: 0.001)

    Convergence speed of the lower and upper step-size bounds.



# File 'lib/dnn/core/optimizers.rb', line 308

def initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil)
  super(alpha: alpha, beta1: beta1, beta2: beta2, eps: eps, amsgrad: amsgrad, clip_norm: clip_norm)
  @final_lr = final_lr
  @gamma = gamma
end
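
A minimal usage sketch. Model#setup and Model#train follow ruby-dnn's model API as commonly used, and model, x_train, and y_train are placeholders, not part of this class:

require "dnn"

# AdaBound behaves like Adam early in training, then clips each update
# into a band that tightens toward final_lr, ending up SGD-like.
opt = DNN::Optimizers::AdaBound.new(alpha: 0.001, final_lr: 0.1, gamma: 0.001)
model.setup(opt, DNN::Losses::SoftmaxCrossEntropy.new)
model.train(x_train, y_train, 10, batch_size: 128)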

Instance Attribute Details

#final_lr ⇒ Object

Returns the value of attribute final_lr.



# File 'lib/dnn/core/optimizers.rb', line 303

def final_lr
  @final_lr
end

#gamma ⇒ Object

Returns the value of attribute gamma.



# File 'lib/dnn/core/optimizers.rb', line 304

def gamma
  @gamma
end
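
Together, final_lr and gamma define the step-size bounds from the AdaBound paper (Luo et al., 2019). The sketch below illustrates that schedule for a training step t; it is an illustration, not the library's internal code:

# Bound schedule from the AdaBound paper, for illustration only.
# t is the 1-based training step.
def adabound_bounds(final_lr, gamma, t)
  lower = final_lr * (1.0 - 1.0 / (gamma * t + 1.0))
  upper = final_lr * (1.0 + 1.0 / (gamma * t))
  [lower, upper]
end

adabound_bounds(0.1, 0.001, 1)       # => [9.99e-05, 100.1] (loose: Adam-like)
adabound_bounds(0.1, 0.001, 100_000) # => [0.0990..., 0.101] (tight: SGD-like)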

Instance Method Details

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 348

def load_hash(hash)
  initialize(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
             final_lr: hash[:final_lr], gamma: hash[:gamma], eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
end

#to_hashObject



# File 'lib/dnn/core/optimizers.rb', line 314

def to_hash
  {
    class: self.class.name, alpha: @alpha, beta1: @beta1, beta2: @beta2,
    final_lr: @final_lr, gamma: @gamma, eps: @eps, amsgrad: @amsgrad, clip_norm: @clip_norm
  }
end
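
#to_hash and #load_hash let the optimizer round-trip through a plain Hash. A sketch, assuming Optimizer.from_hash (listed above) instantiates the class named by hash[:class] and restores its state via #load_hash:

opt = DNN::Optimizers::AdaBound.new(final_lr: 0.05, gamma: 0.001)
hash = opt.to_hash
hash[:class] # => "DNN::Optimizers::AdaBound"

# Rebuild an equivalent optimizer from the serialized hash.
restored = DNN::Optimizers::Optimizer.from_hash(hash)
restored.final_lr # => 0.05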