Class: DNN::Optimizers::Optimizer

Inherits:
Object
Defined in:
lib/dnn/core/optimizers.rb

Overview

Superclass of all optimizer classes.

Direct Known Subclasses

AdaDelta, AdaGrad, Adam, RMSProp, RMSPropGraves, SGD

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(clip_norm: nil) ⇒ Optimizer

Returns a new instance of Optimizer.

Parameters:

  • clip_norm (Float | NilClass) (defaults to: nil)

    Gradient clip norm.



# File 'lib/dnn/core/optimizers.rb', line 19

def initialize(clip_norm: nil)
  @clip_norm = clip_norm
end
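
For example, a hedged sketch of passing clip_norm through a concrete subclass (SGD is listed above as a direct subclass; its lr: keyword is an assumption about SGD's own constructor, not part of this page):

require "dnn"

# Gradients whose overall norm exceeds 1.0 are scaled down before each update.
optimizer = DNN::Optimizers::SGD.new(lr: 0.01, clip_norm: 1.0) # lr: is an assumed SGD option
optimizer.clip_norm # => 1.0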

Instance Attribute Details

#clip_norm ⇒ Object

Returns the value of attribute clip_norm.



# File 'lib/dnn/core/optimizers.rb', line 7

def clip_norm
  @clip_norm
end

#status ⇒ Object (readonly)

Returns the value of attribute status.



# File 'lib/dnn/core/optimizers.rb', line 6

def status
  @status
end

Class Method Details

.from_hash(hash) ⇒ Object

Raises:

  • (DNNError)

# File 'lib/dnn/core/optimizers.rb', line 9

def self.from_hash(hash)
  return nil unless hash
  # Resolve the optimizer class by name, allocate it without running
  # initialize, then restore its state through #load_hash.
  optimizer_class = DNN.const_get(hash[:class])
  optimizer = optimizer_class.allocate
  raise DNNError, "#{optimizer.class} is not an instance of #{self} class." unless optimizer.is_a?(self)
  optimizer.load_hash(hash)
  optimizer
end
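
A hedged sketch of rebuilding an optimizer with .from_hash from a hash produced by #to_hash (documented below); SGD and its lr: keyword are assumptions taken from the subclass list above:

require "dnn"

original = DNN::Optimizers::SGD.new(lr: 0.01, clip_norm: 1.0) # lr: assumed
restored = DNN::Optimizers::Optimizer.from_hash(original.to_hash)
restored.class     # => DNN::Optimizers::SGD
restored.clip_norm # => 1.0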

Instance Method Details

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 59

def load_hash(hash)
  initialize(clip_norm: hash[:clip_norm])
end
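
Because load_hash simply re-runs initialize on an already allocated object, a subclass overrides it to pull its own keys out of the hash. A hypothetical sketch (MyOptimizer, its lr option, and its update rule are illustrations only; update_params is the private hook invoked by #update below, and whether the base class declares it abstract is not shown on this page):

require "dnn"

class MyOptimizer < DNN::Optimizers::Optimizer
  def initialize(lr: 0.01, clip_norm: nil)
    super(clip_norm: clip_norm)
    @lr = lr
  end

  def load_hash(hash)
    initialize(lr: hash[:lr], clip_norm: hash[:clip_norm])
  end

  def to_hash
    super({ lr: @lr })
  end

  private def update_params(params)
    # Plain gradient-descent step for illustration.
    params.each { |param| param.data -= @lr * param.grad }
  end
end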

#to_hash(merge_hash = nil) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 39

def to_hash(merge_hash = nil)
  hash = { class: self.class.name, clip_norm: @clip_norm }
  hash.merge!(merge_hash) if merge_hash
  hash
end
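
On a bare base-class instance the serialized form contains only the class name and clip norm; subclasses merge their own fields in through merge_hash, as in the hypothetical MyOptimizer sketch above:

require "dnn"

DNN::Optimizers::Optimizer.new(clip_norm: 2.0).to_hash
# => { class: "DNN::Optimizers::Optimizer", clip_norm: 2.0 }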

#update(params) ⇒ Object



# File 'lib/dnn/core/optimizers.rb', line 23

def update(params)
  # Clip gradients to the configured norm, apply the subclass's update rule,
  # then reset every gradient to zero for the next batch.
  clip_grads(params) if @clip_norm
  update_params(params)
  params.each do |param|
    param.grad = Xumo::SFloat[0]
  end
end
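
A minimal sketch of what #update does to each param, using a plain Struct as a stand-in for the library's param objects (an assumption; real params come from trainable layers) and SGD with an assumed lr: option:

require "dnn"
require "numo/narray"

Param = Struct.new(:data, :grad)

param = Param.new(Numo::SFloat[1, 2, 3], Numo::SFloat[0.5, 0.5, 0.5])
optimizer = DNN::Optimizers::SGD.new(lr: 0.1) # lr: is an assumed SGD option

optimizer.update([param])
param.data # stepped by the subclass's rule (for plain SGD, roughly [0.95, 1.95, 2.95])
param.grad # reset to a zero array, ready for the next batch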

#update_layers(layers) ⇒ Object

Updates layers that have params.



# File 'lib/dnn/core/optimizers.rb', line 32

def update_layers(layers)
  # Collect the params of all trainable layers that currently hold a gradient,
  # then update them in a single batch.
  target_params = layers.select { |layer| layer.is_a?(Layers::TrainableLayer) && layer.trainable }
                        .map { |layer| layer.get_params.values }.flatten.compact
                        .select(&:grad)
  update(target_params)
end
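
In normal training the optimizer is not driven by hand: it is handed to a model, which calls #update_layers on its trainable layers after each backward pass. A hedged sketch using the wider ruby-dnn API (Sequential, InputLayer, Dense, MeanSquaredError, and the setup/train signatures are assumptions drawn from the library's README-level API, not defined on this page):

require "dnn"
require "numo/narray"

x_train = Numo::SFloat.new(32, 10).rand
y_train = Numo::SFloat.new(32, 1).rand

model = DNN::Models::Sequential.new
model << DNN::Layers::InputLayer.new(10)
model << DNN::Layers::Dense.new(1)

# The model stores the optimizer and invokes optimizer.update_layers(...) for us.
model.setup(DNN::Optimizers::SGD.new(clip_norm: 1.0), DNN::Losses::MeanSquaredError.new)
model.train(x_train, y_train, 5, batch_size: 32)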