Class: DNN::Optimizers::Optimizer
- Inherits: Object
- Defined in: lib/dnn/core/optimizers.rb
Overview
Superclass of all optimizer classes.
Instance Attribute Summary

- #clip_norm ⇒ Object
  Returns the value of attribute clip_norm.
Class Method Summary

- .from_hash(hash) ⇒ Object
Instance Method Summary

- #initialize(clip_norm: nil) ⇒ Optimizer (constructor)
  A new instance of Optimizer.
- #load_hash(hash) ⇒ Object
- #to_hash(merge_hash = nil) ⇒ Object
- #update(params) ⇒ Object
- #update_layers(layers) ⇒ Object
  Updates layers that have params.
Constructor Details
#initialize(clip_norm: nil) ⇒ Optimizer
Returns a new instance of Optimizer.
# File 'lib/dnn/core/optimizers.rb', line 18

def initialize(clip_norm: nil)
  @clip_norm = clip_norm
end
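Since Optimizer is a superclass, clip_norm is normally passed through a concrete subclass's constructor. A brief sketch; the SGD call shown is an assumption, so check the subclass docs for its exact signature:

require "dnn"

# clip_norm enables gradient norm clipping before each update.
# Hypothetical subclass construction; SGD's full argument list is an assumption.
opt = DNN::Optimizers::SGD.new(clip_norm: 5.0)
opt.clip_norm # => 5.0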
Instance Attribute Details
#clip_norm ⇒ Object
Returns the value of attribute clip_norm.
# File 'lib/dnn/core/optimizers.rb', line 6

def clip_norm
  @clip_norm
end
Class Method Details
.from_hash(hash) ⇒ Object
# File 'lib/dnn/core/optimizers.rb', line 8

def self.from_hash(hash)
  return nil unless hash
  optimizer_class = DNN.const_get(hash[:class])
  optimizer = optimizer_class.allocate
  raise DNNError, "#{optimizer.class} is not an instance of #{self} class." unless optimizer.is_a?(self)
  optimizer.load_hash(hash)
  optimizer
end
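Note that from_hash allocates the class named by hash[:class] without calling initialize, restores its state through #load_hash, and raises DNNError if the result is not an instance of the receiver. A hedged round-trip sketch; the SGD subclass and its constructor arguments are assumptions:

# Serialize an optimizer and rebuild it from the hash.
opt = DNN::Optimizers::SGD.new(clip_norm: 1.0) # hypothetical subclass call
hash = opt.to_hash
restored = DNN::Optimizers::Optimizer.from_hash(hash)
restored.class     # => DNN::Optimizers::SGD
restored.clip_norm # => 1.0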
Instance Method Details
#load_hash(hash) ⇒ Object
# File 'lib/dnn/core/optimizers.rb', line 58

def load_hash(hash)
  initialize(clip_norm: hash[:clip_norm])
end
#to_hash(merge_hash = nil) ⇒ Object
# File 'lib/dnn/core/optimizers.rb', line 38

def to_hash(merge_hash = nil)
  hash = { class: self.class.name, clip_norm: @clip_norm }
  hash.merge!(merge_hash) if merge_hash
  hash
end
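At this level to_hash records only the class name and clip_norm; the merge_hash parameter exists so that subclasses can append their own fields. A sketch of the expected pattern, with @lr and the :lr key as assumed subclass details:

# Hypothetical subclass serialization; @lr and :lr are assumptions.
def to_hash
  super(lr: @lr)
end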
#update(params) ⇒ Object
# File 'lib/dnn/core/optimizers.rb', line 22

def update(params)
  clip_grads(params) if @clip_norm
  update_params(params)
  params.each do |param|
    param.grad = Xumo::SFloat[0]
  end
end
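As the source shows, #update clips gradients when clip_norm is set, delegates the actual parameter step to update_params (which subclasses implement), and finally resets every gradient to zero. A minimal concrete subclass as a sketch, not the library's own SGD; the param.data accessor is an assumption:

class PlainSGD < DNN::Optimizers::Optimizer
  def initialize(lr = 0.01, clip_norm: nil)
    super(clip_norm: clip_norm)
    @lr = lr
  end

  private

  # Called by #update after any gradient clipping.
  def update_params(params)
    params.each do |param|
      param.data -= @lr * param.grad # param.data is an assumed accessor
    end
  end
end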
#update_layers(layers) ⇒ Object
Updates layers that have params.
# File 'lib/dnn/core/optimizers.rb', line 31

def update_layers(layers)
  target_params = layers.select { |layer| layer.is_a?(Layers::TrainableLayer) && layer.trainable }
                        .map { |layer| layer.get_params.values }.flatten.compact
                        .select(&:grad)
  update(target_params)
end
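In a custom training step, update_layers is typically called once per batch after backpropagation has populated each parameter's grad; it filters for trainable layers whose params actually hold gradients, then hands them to #update. A hedged fragment; model.layers and the surrounding training flow are assumptions about the wider API:

# Hypothetical training-step fragment.
# ... forward pass, loss computation, and backward pass fill in param.grad ...
optimizer.update_layers(model.layers) # clip, apply, and zero the gradients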