Class: DNN::Layers::LSTM
- Inherits:
-
RNN
- Object
- Layer
- HasParamLayer
- RNN
- DNN::Layers::LSTM
- Defined in:
- lib/dnn/core/rnn_layers.rb
Instance Attribute Summary collapse
-
#cell ⇒ Object
Returns the value of attribute cell — the cell state carried between forward passes.
Attributes inherited from RNN
#h, #num_nodes, #stateful, #weight_decay
Attributes inherited from HasParamLayer
Class Method Summary collapse
Instance Method Summary collapse
- #backward(dh2s) ⇒ Object
- #forward(xs) ⇒ Object
-
#initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) ⇒ LSTM
constructor
A new instance of LSTM.
Methods inherited from RNN
Methods inherited from HasParamLayer
Methods inherited from Layer
#build, #built?, #prev_layer, #shape, #to_hash
Constructor Details
#initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) ⇒ LSTM
Returns a new instance of LSTM.
233 234 235 236 237 238 239 240 241 |
# File 'lib/dnn/core/rnn_layers.rb', line 233 def initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) super @cell = nil end |
Instance Attribute Details
#cell ⇒ Object
Returns the value of attribute cell — the cell state carried between forward passes.
222 223 224 |
# File 'lib/dnn/core/rnn_layers.rb', line 222 def cell @cell end |
Class Method Details
.load_hash(hash) ⇒ Object
224 225 226 227 228 229 230 231 |
# File 'lib/dnn/core/rnn_layers.rb', line 224 def self.load_hash(hash) self.new(hash[:num_nodes], stateful: hash[:stateful], return_sequences: hash[:return_sequences], weight_initializer: Util.load_hash(hash[:weight_initializer]), bias_initializer: Util.load_hash(hash[:bias_initializer]), weight_decay: hash[:weight_decay]) end |
Instance Method Details
#backward(dh2s) ⇒ Object
264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 |
# File 'lib/dnn/core/rnn_layers.rb', line 264 def backward(dh2s) @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape) @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape) @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape) unless @return_sequences dh = dh2s dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1]) dh2s[true, -1, false] = dh end dxs = Xumo::SFloat.zeros(@xs_shape) dh = 0 dcell = 0 (0...dh2s.shape[1]).to_a.reverse.each do |t| dh2 = dh2s[true, t, false] dx, dh, dcell = @layers[t].backward(dh2 + dh, dcell) dxs[true, t, false] = dx end dxs end |
#forward(xs) ⇒ Object
243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 |
# File 'lib/dnn/core/rnn_layers.rb', line 243 def forward(xs) @xs_shape = xs.shape hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes) h = nil cell = nil if @stateful h = @h if @h cell = @cell if @cell end h ||= Xumo::SFloat.zeros(xs.shape[0], @num_nodes) cell ||= Xumo::SFloat.zeros(xs.shape[0], @num_nodes) xs.shape[1].times do |t| x = xs[true, t, false] h, cell = @layers[t].forward(x, h, cell) hs[true, t, false] = h end @h = h @cell = cell @return_sequences ? hs : h end |