Class: DNN::Layers::RNN
- Inherits: Connection
  - Object
  - Layer
  - TrainableLayer
  - Connection
  - DNN::Layers::RNN
- Includes:
- LayerNode
- Defined in:
- lib/dnn/core/layers/rnn_layers.rb
Overview
Superclass of all RNN classes.
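RNN is not used directly; concrete recurrent layers inherit from it. A minimal usage sketch, assuming the concrete LSTM subclass and the Sequential model API that ruby-dnn provides elsewhere (none of these are documented on this page):

require "dnn"

# Hypothetical model; LSTM, InputLayer and Dense are assumed to be defined
# elsewhere in the library.
model = DNN::Models::Sequential.new
model << DNN::Layers::InputLayer.new([28, 64])               # [time_length, feature_size]
model << DNN::Layers::LSTM.new(128, return_sequences: false) # keep only the last hidden state
model << DNN::Layers::Dense.new(10)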
Instance Attribute Summary
- #hidden ⇒ Object (readonly)
  Returns the value of attribute hidden.
- #num_units ⇒ Object (readonly)
  Returns the value of attribute num_units.
- #recurrent_weight ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight.
- #recurrent_weight_initializer ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight_initializer.
- #recurrent_weight_regularizer ⇒ Object (readonly)
  Returns the value of attribute recurrent_weight_regularizer.
- #return_sequences ⇒ Object (readonly)
  Returns the value of attribute return_sequences.
- #stateful ⇒ Object (readonly)
  Returns the value of attribute stateful.
Attributes inherited from Connection
#bias, #bias_initializer, #bias_regularizer, #weight, #weight_initializer, #weight_regularizer
Attributes inherited from TrainableLayer
Attributes inherited from Layer
Instance Method Summary
- #backward_node(dh2s) ⇒ Object
- #build(input_shape) ⇒ Object
- #compute_output_shape ⇒ Object
- #forward_node(xs) ⇒ Object
- #get_params ⇒ Object
- #initialize(num_units, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ RNN (constructor)
  A new instance of RNN.
- #load_hash(hash) ⇒ Object
- #regularizers ⇒ Object
- #reset_state ⇒ Object
  Resets the state of the RNN.
- #to_hash(merge_hash = nil) ⇒ Object
Methods included from LayerNode
Methods inherited from Connection
Methods inherited from TrainableLayer
Methods inherited from Layer
#<<, #built?, #call, call, #clean, #forward, from_hash
Constructor Details
#initialize(num_units, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ RNN
Returns a new instance of RNN.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 32

def initialize(num_units,
               stateful: false,
               return_sequences: true,
               weight_initializer: Initializers::RandomNormal.new,
               recurrent_weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               recurrent_weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true)
  super(weight_initializer: weight_initializer,
        bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer,
        bias_regularizer: bias_regularizer,
        use_bias: use_bias)
  @num_units = num_units
  @stateful = stateful
  @return_sequences = return_sequences
  @hidden_layers = []
  @hidden = Param.new
  @recurrent_weight = Param.new(nil, Xumo::SFloat[0])
  @recurrent_weight_initializer = recurrent_weight_initializer
  @recurrent_weight_regularizer = recurrent_weight_regularizer
end
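Because RNN is only the common base class, initialize is normally reached through a concrete subclass. A minimal sketch, assuming a SimpleRNN subclass that forwards these keyword arguments unchanged (SimpleRNN is not documented on this page):

# Hypothetical construction; only keyword arguments from the signature above are used.
rnn = DNN::Layers::SimpleRNN.new(
  64,                                                      # num_units
  stateful: true,                                          # carry #hidden between forward calls
  return_sequences: true,                                  # return the whole [time_length, num_units] sequence
  weight_initializer: DNN::Initializers::RandomNormal.new,
  recurrent_weight_initializer: DNN::Initializers::RandomNormal.new,
  bias_initializer: DNN::Initializers::Zeros.new
)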
Instance Attribute Details
#hidden ⇒ Object (readonly)
Returns the value of attribute hidden.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 21

def hidden
  @hidden
end
#num_units ⇒ Object (readonly)
Returns the value of attribute num_units.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 19

def num_units
  @num_units
end
#recurrent_weight ⇒ Object (readonly)
Returns the value of attribute recurrent_weight.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 20

def recurrent_weight
  @recurrent_weight
end
#recurrent_weight_initializer ⇒ Object (readonly)
Returns the value of attribute recurrent_weight_initializer.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 24

def recurrent_weight_initializer
  @recurrent_weight_initializer
end
#recurrent_weight_regularizer ⇒ Object (readonly)
Returns the value of attribute recurrent_weight_regularizer.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 25

def recurrent_weight_regularizer
  @recurrent_weight_regularizer
end
#return_sequences ⇒ Object (readonly)
Returns the value of attribute return_sequences.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 23

def return_sequences
  @return_sequences
end
#stateful ⇒ Object (readonly)
Returns the value of attribute stateful.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 22

def stateful
  @stateful
end
Instance Method Details
#backward_node(dh2s) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 80

def backward_node(dh2s)
  unless @return_sequences
    dh = dh2s
    dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
    dh2s[true, -1, false] = dh
  end
  dxs = Xumo::SFloat.zeros(@xs_shape)
  dh = 0
  (dh2s.shape[1] - 1).downto(0) do |t|
    dh2 = dh2s[true, t, false]
    dx, dh = @hidden_layers[t].backward(dh2 + dh)
    dxs[true, t, false] = dx
  end
  dxs
end
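When return_sequences is false, the incoming gradient covers only the last time step, so it is first expanded to a full [batch_size, time_length, num_units] array with zeros everywhere but the final step; the loop then walks the time axis backwards, feeding each cell the sum of its per-step gradient and the hidden-state gradient from the following step. A shape-only sketch of that expansion, with hypothetical sizes and plain Numo standing in for the Xumo alias:

require "numo/narray"

batch, time_length, num_units = 8, 10, 64
dh   = Numo::SFloat.zeros(batch, num_units)              # gradient w.r.t. the final hidden state only
dh2s = Numo::SFloat.zeros(batch, time_length, num_units) # zero gradient for all earlier steps
dh2s[true, -1, false] = dh                               # the last step receives dh, as in the code above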
#build(input_shape) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 54

def build(input_shape)
  unless input_shape.length == 2
    raise DNNShapeError, "Input shape is #{input_shape}. But input shape must be 2 dimensional."
  end
  super
end
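build accepts the per-sample shape [time_length, feature_size]; the batch dimension is not included. A sketch, assuming a SimpleRNN subclass:

rnn = DNN::Layers::SimpleRNN.new(64)   # assumed concrete subclass
rnn.build([10, 32])                    # ok: 10 time steps of 32 features each
# rnn.build([32])                      # raises DNNShapeError: input shape must be 2 dimensional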
#compute_output_shape ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 96

def compute_output_shape
  @time_length = @input_shape[0]
  @return_sequences ? [@time_length, @num_units] : [@num_units]
end
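The output shape therefore depends on return_sequences: [time_length, num_units] when the full sequence is returned, [num_units] when only the last hidden state is. A sketch with an assumed SimpleRNN subclass and hypothetical shapes:

seq = DNN::Layers::SimpleRNN.new(64, return_sequences: true)
seq.build([10, 32])
seq.compute_output_shape    # => [10, 64]

last = DNN::Layers::SimpleRNN.new(64, return_sequences: false)
last.build([10, 32])
last.compute_output_shape   # => [64]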
#forward_node(xs) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 65

def forward_node(xs)
  create_hidden_layer
  @xs_shape = xs.shape
  hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_units)
  h = @stateful && @hidden.data ? @hidden.data : Xumo::SFloat.zeros(xs.shape[0], @num_units)
  xs.shape[1].times do |t|
    x = xs[true, t, false]
    @hidden_layers[t].trainable = @trainable
    h = @hidden_layers[t].forward(x, h)
    hs[true, t, false] = h
  end
  @hidden.data = h
  @return_sequences ? hs : h
end
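forward_node expects a batched input of shape [batch_size, time_length, feature_size], unrolls one cell per time step, and either returns the stacked hidden states or only the last one. A shape-level sketch, assuming a SimpleRNN subclass and that build has fixed the time length from the input shape; plain Numo stands in for the Xumo alias:

xs = Numo::SFloat.new(8, 10, 32).rand       # hypothetical batch: 8 samples, 10 steps, 32 features

rnn = DNN::Layers::SimpleRNN.new(64, return_sequences: true)
rnn.build([10, 32])
hs = rnn.forward_node(xs)                   # shape [8, 10, 64]; with return_sequences: false it is [8, 64]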
#get_params ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 126

def get_params
  { weight: @weight, recurrent_weight: @recurrent_weight, bias: @bias, hidden: @hidden }
end
#load_hash(hash) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 113

def load_hash(hash)
  initialize(hash[:num_units],
             stateful: hash[:stateful],
             return_sequences: hash[:return_sequences],
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias])
end
#regularizers ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 135

def regularizers
  regularizers = []
  regularizers << @weight_regularizer if @weight_regularizer
  regularizers << @recurrent_weight_regularizer if @recurrent_weight_regularizer
  regularizers << @bias_regularizer if @bias_regularizer
  regularizers
end
#reset_state ⇒ Object
Resets the state of the RNN.
# File 'lib/dnn/core/layers/rnn_layers.rb', line 131

def reset_state
  @hidden.data = @hidden.data.fill(0) if @hidden.data
end
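With stateful: true the last hidden state is kept in #hidden between calls, so reset_state is the way to start an unrelated sequence from a zero state. A sketch with an assumed SimpleRNN subclass and hypothetical data:

rnn = DNN::Layers::SimpleRNN.new(64, stateful: true)
rnn.build([10, 32])

chunk1 = Numo::SFloat.new(8, 10, 32).rand
chunk2 = Numo::SFloat.new(8, 10, 32).rand

rnn.forward_node(chunk1)   # #hidden now holds the last hidden state
rnn.forward_node(chunk2)   # continues from that state because stateful: true
rnn.reset_state            # zero the carried state before an unrelated sequence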
#to_hash(merge_hash = nil) ⇒ Object
# File 'lib/dnn/core/layers/rnn_layers.rb', line 101

def to_hash(merge_hash = nil)
  hash = {
    num_units: @num_units,
    stateful: @stateful,
    return_sequences: @return_sequences,
    recurrent_weight_initializer: @recurrent_weight_initializer.to_hash,
    recurrent_weight_regularizer: @recurrent_weight_regularizer&.to_hash,
  }
  hash.merge!(merge_hash) if merge_hash
  super(hash)
end
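to_hash adds the RNN-specific keys on top of Connection's serialization, and load_hash rebuilds the layer by re-running initialize with those keys. A sketch of the hash contents, assuming a SimpleRNN subclass:

rnn  = DNN::Layers::SimpleRNN.new(64, stateful: true, return_sequences: false)
hash = rnn.to_hash
hash[:num_units]          # => 64
hash[:stateful]           # => true
hash[:return_sequences]   # => false
# Feeding this hash back through load_hash (via the inherited from_hash listed above)
# reconstructs an equivalent layer.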