Class: DNN::Layers::RNN
- Inherits: Connection
  - Object
  - Layer
  - TrainableLayer
  - Connection
  - DNN::Layers::RNN
- Includes: LayerNode
- Defined in: lib/dnn/core/layers/rnn_layers.rb
Overview
Superclass of all RNN classes.
Instance Attribute Summary
- #hidden ⇒ Object (readonly)
  Returns the value of attribute hidden.
- #num_units ⇒ Object (readonly)
  Returns the value of attribute num_units.
- #recurrent_weight ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight.
- #recurrent_weight_initializer ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight_initializer.
- #recurrent_weight_regularizer ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight_regularizer.
- #return_sequences ⇒ Object (readonly)
  Returns the value of attribute return_sequences.
- #stateful ⇒ Object (readonly)
  Returns the value of attribute stateful.
Attributes inherited from Connection
#bias, #bias_initializer, #bias_regularizer, #weight, #weight_initializer, #weight_regularizer
Attributes inherited from TrainableLayer
Attributes inherited from Layer
Instance Method Summary
- #backward_node(dh2s) ⇒ Object
- #build(input_shape) ⇒ Object
- #compute_output_shape ⇒ Object
- #forward_node(xs) ⇒ Object
- #get_params ⇒ Object
- #initialize(num_units, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ RNN (constructor)
  A new instance of RNN.
- #load_hash(hash) ⇒ Object
- #regularizers ⇒ Object
- #reset_state ⇒ Object
  Reset the state of the RNN.
- #to_hash(merge_hash = nil) ⇒ Object
Methods included from LayerNode
Methods inherited from Connection
Methods inherited from TrainableLayer
Methods inherited from Layer
#<<, #built?, #call, call, #clean, #forward, from_hash
Constructor Details
#initialize(num_units, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ RNN
Returns a new instance of RNN.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 21

def initialize(num_units,
               stateful: false,
               return_sequences: true,
               weight_initializer: Initializers::RandomNormal.new,
               recurrent_weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               recurrent_weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true)
  super(weight_initializer: weight_initializer, bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer, bias_regularizer: bias_regularizer,
        use_bias: use_bias)
  @num_units = num_units
  @stateful = stateful
  @return_sequences = return_sequences
  @hidden_layers = []
  @hidden = Param.new
  @recurrent_weight = Param.new(nil, Xumo::SFloat[0])
  @recurrent_weight_initializer = recurrent_weight_initializer
  @recurrent_weight_regularizer = recurrent_weight_regularizer
end
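For illustration, a minimal usage sketch. It assumes the gem is loaded with require "dnn" and that a concrete subclass such as LSTM forwards these keyword arguments to RNN#initialize; RNN itself is an abstract superclass and is not normally instantiated directly.

  require "dnn"

  # Hypothetical example: a 64-unit recurrent layer that keeps its hidden state
  # between calls and emits only the last time step of each sequence.
  rnn = DNN::Layers::LSTM.new(
    64,                                  # num_units
    stateful: true,                      # carry the hidden state across calls
    return_sequences: false,             # output only the last time step
    recurrent_weight_initializer: DNN::Initializers::RandomNormal.new,
    bias_initializer: DNN::Initializers::Zeros.new
  )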
Instance Attribute Details
#hidden ⇒ Object (readonly)
Returns the value of attribute hidden.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 10

def hidden
  @hidden
end
#num_units ⇒ Object (readonly)
Returns the value of attribute num_units.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 8

def num_units
  @num_units
end
#recurrent_weight ⇒ Object (readonly)
Returns the value of attribute recurrent_weight.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 9

def recurrent_weight
  @recurrent_weight
end
#recurrent_weight_initializer ⇒ Object (readonly)
Returns the value of attribute recurrent_weight_initializer.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 13

def recurrent_weight_initializer
  @recurrent_weight_initializer
end
#recurrent_weight_regularizer ⇒ Object (readonly)
Returns the value of attribute recurrent_weight_regularizer.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 14

def recurrent_weight_regularizer
  @recurrent_weight_regularizer
end
#return_sequences ⇒ Object (readonly)
Returns the value of attribute return_sequences.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 12

def return_sequences
  @return_sequences
end
#stateful ⇒ Object (readonly)
Returns the value of attribute stateful.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 11

def stateful
  @stateful
end
Instance Method Details
#backward_node(dh2s) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 69

def backward_node(dh2s)
  unless @return_sequences
    dh = dh2s
    dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
    dh2s[true, -1, false] = dh
  end
  dxs = Xumo::SFloat.zeros(@xs_shape)
  dh = 0
  (dh2s.shape[1] - 1).downto(0) do |t|
    dh2 = dh2s[true, t, false]
    dx, dh = @hidden_layers[t].backward(dh2 + dh)
    dxs[true, t, false] = dx
  end
  dxs
end
#build(input_shape) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 43

def build(input_shape)
  unless input_shape.length == 2
    raise DNNShapeError, "Input shape is #{input_shape}. But input shape must be 2 dimensional."
  end
  super
end
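A hedged sketch of the shape check above. build is normally invoked by the model rather than by user code, LSTM is assumed here as a concrete subclass, and the shapes are illustrative.

  require "dnn"

  layer = DNN::Layers::LSTM.new(64)   # assumed concrete subclass of RNN
  layer.build([28, 100])              # ok: [time_length, input_dim]
  # layer.build([100])                # would raise DNNShapeError: shape must be 2-dimensional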
#compute_output_shape ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 85

def compute_output_shape
  @time_length = @input_shape[0]
  @return_sequences ? [@time_length, @num_units] : [@num_units]
end
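In other words, the time dimension is kept only when return_sequences is true. A hedged sketch with illustrative shapes (LSTM is assumed as a concrete subclass; build is normally called by the model):

  require "dnn"

  seq = DNN::Layers::LSTM.new(64, return_sequences: true)   # assumed concrete subclass
  seq.build([28, 100])
  p seq.compute_output_shape    # => [28, 64]  one output per time step

  last = DNN::Layers::LSTM.new(64, return_sequences: false)
  last.build([28, 100])
  p last.compute_output_shape   # => [64]      only the last time step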
#forward_node(xs) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 54

def forward_node(xs)
  create_hidden_layer
  @xs_shape = xs.shape
  hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_units)
  h = @stateful && @hidden.data ? @hidden.data : Xumo::SFloat.zeros(xs.shape[0], @num_units)
  xs.shape[1].times do |t|
    x = xs[true, t, false]
    @hidden_layers[t].trainable = @trainable
    h = @hidden_layers[t].forward(x, h)
    hs[true, t, false] = h
  end
  @hidden.data = h
  @return_sequences ? hs : h
end
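The shapes used above, summarized as a sketch (the dimension names are illustrative, not library API):

  # xs : Xumo::SFloat of shape [batch_size, time_length, input_dim]
  # hs : accumulated hidden states, shape [batch_size, time_length, num_units]
  # h  : hidden state of a single time step, shape [batch_size, num_units]
  #
  # With stateful: true the final h is stored in @hidden.data and reused as the
  # initial state of the next call; otherwise the initial state is all zeros.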
#get_params ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 115

def get_params
  { weight: @weight, recurrent_weight: @recurrent_weight, bias: @bias, hidden: @hidden }
end
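A hedged sketch of inspecting the returned parameters. LSTM is assumed as a concrete subclass, and Param#data is assumed to expose the underlying Xumo array, as it does elsewhere on this page.

  require "dnn"

  layer = DNN::Layers::LSTM.new(64)   # assumed concrete subclass of RNN
  layer.build([28, 100])

  # Print each parameter returned by #get_params with its current shape.
  layer.get_params.each do |name, param|
    shape = param.data ? param.data.shape.inspect : "(no data yet)"
    puts "#{name}: #{shape}"
  end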
#load_hash(hash) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 102

def load_hash(hash)
  initialize(hash[:num_units],
             stateful: hash[:stateful],
             return_sequences: hash[:return_sequences],
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias])
end
#regularizers ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 124

def regularizers
  regularizers = []
  regularizers << @weight_regularizer if @weight_regularizer
  regularizers << @recurrent_weight_regularizer if @recurrent_weight_regularizer
  regularizers << @bias_regularizer if @bias_regularizer
  regularizers
end
#reset_state ⇒ Object
Reset the state of the RNN.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 120

def reset_state
  @hidden.data = @hidden.data.fill(0) if @hidden.data
end
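With stateful: true the final hidden state of one call seeds the next call, so reset_state is typically used when a new, unrelated sequence begins. A minimal sketch (LSTM is assumed as a concrete subclass; the training loop is elided):

  require "dnn"

  rnn = DNN::Layers::LSTM.new(64, stateful: true)   # assumed concrete subclass
  # ... forward passes over consecutive chunks of one long sequence ...
  rnn.reset_state                                   # zero @hidden.data before the next sequence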
#to_hash(merge_hash = nil) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 90

def to_hash(merge_hash = nil)
  hash = {
    num_units: @num_units,
    stateful: @stateful,
    return_sequences: @return_sequences,
    recurrent_weight_initializer: @recurrent_weight_initializer.to_hash,
    recurrent_weight_regularizer: @recurrent_weight_regularizer&.to_hash,
  }
  hash.merge!(merge_hash) if merge_hash
  super(hash)
end
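Together with #load_hash and the inherited from_hash, this supports a simple configuration round trip. A hedged sketch, assuming LSTM as a concrete subclass and that Layer.from_hash rebuilds a layer from such a hash (only the configuration is serialized here, not the learned weights):

  require "dnn"

  rnn = DNN::Layers::LSTM.new(64, stateful: true, return_sequences: false)  # assumed subclass
  saved = rnn.to_hash                            # plain Ruby hash with the layer configuration
  restored = DNN::Layers::LSTM.from_hash(saved)  # from_hash is inherited from Layer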