Class: DNN::Layers::LSTM
- Inherits: RNN
  (ancestry: Object > Layer > HasParamLayer > Connection > RNN > DNN::Layers::LSTM)
- Defined in: lib/dnn/core/rnn_layers.rb
Instance Attribute Summary
Attributes inherited from RNN
#bias, #num_nodes, #stateful, #weight, #weight2
Attributes inherited from Connection
#bias_initializer, #l1_lambda, #l2_lambda, #weight_initializer
Attributes inherited from HasParamLayer
Attributes inherited from Layer
Class Method Summary
- .load_hash(hash) ⇒ Object
Instance Method Summary
- #backward(dh2s) ⇒ Object
- #forward(xs) ⇒ Object
- #initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, l1_lambda: 0, l2_lambda: 0) ⇒ LSTM (constructor)
  A new instance of LSTM.
- #reset_state ⇒ Object
Methods inherited from RNN
#d_lasso, #d_ridge, #lasso, #output_shape, #ridge, #shape, #to_hash
Methods inherited from Connection
#d_lasso, #d_ridge, #lasso, #ridge, #to_hash
Methods inherited from HasParamLayer
Methods inherited from Layer
#build, #built?, #output_shape, #to_hash
Constructor Details
#initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, l1_lambda: 0, l2_lambda: 0) ⇒ LSTM
Returns a new instance of LSTM.
# File 'lib/dnn/core/rnn_layers.rb', line 273

def initialize(num_nodes,
               stateful: false,
               return_sequences: true,
               weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               l1_lambda: 0,
               l2_lambda: 0)
  super
  @cell = @params[:c] = Param.new
end
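The snippet below is a minimal construction sketch using only the keyword arguments documented above; the unit count and regularization value are illustrative choices, not taken from the library's examples.

# Build an LSTM layer with 64 hidden units that keeps its state across
# batches and returns only the last hidden state of each sequence.
lstm = DNN::Layers::LSTM.new(64,
                             stateful: true,
                             return_sequences: false,
                             l2_lambda: 0.01)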
Class Method Details
.load_hash(hash) ⇒ Object
# File 'lib/dnn/core/rnn_layers.rb', line 262

def self.load_hash(hash)
  lstm = self.new(hash[:num_nodes],
                  stateful: hash[:stateful],
                  return_sequences: hash[:return_sequences],
                  weight_initializer: Utils.load_hash(hash[:weight_initializer]),
                  bias_initializer: Utils.load_hash(hash[:bias_initializer]),
                  l1_lambda: hash[:l1_lambda],
                  l2_lambda: hash[:l2_lambda])
  lstm
end
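A possible round-trip sketch, assuming lstm is the layer built in the constructor example and assuming that #to_hash (inherited from RNN) produces a hash whose symbol keys match the ones read by .load_hash above:

# Serialize the layer's configuration and rebuild an equivalent
# (untrained) LSTM layer from the resulting hash.
hash     = lstm.to_hash
restored = DNN::Layers::LSTM.load_hash(hash)
restored.num_nodes   # => 64
restored.stateful    # => true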
Instance Method Details
#backward(dh2s) ⇒ Object
# File 'lib/dnn/core/rnn_layers.rb', line 305

def backward(dh2s)
  @weight.grad = Xumo::SFloat.zeros(*@weight.data.shape)
  @weight2.grad = Xumo::SFloat.zeros(*@weight2.data.shape)
  @bias.grad = Xumo::SFloat.zeros(*@bias.data.shape)
  unless @return_sequences
    dh = dh2s
    dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
    dh2s[true, -1, false] = dh
  end
  dxs = Xumo::SFloat.zeros(@xs_shape)
  dh = 0
  dc = 0
  (0...dh2s.shape[1]).to_a.reverse.each do |t|
    dh2 = dh2s[true, t, false]
    dx, dh, dc = @layers[t].backward(dh2 + dh, dc)
    dxs[true, t, false] = dx
  end
  dxs
end
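As the listing shows, backward performs backpropagation through time: it zeroes the accumulated gradients, expands dh2s to the full time axis when return_sequences is false, then walks the unrolled time steps in reverse, passing each step's hidden and cell gradients to the preceding step. A usage sketch, assuming a layer that has already been built by a model and an input batch xs of shape [batch_size, time_length, input_dim]:

# Propagate a gradient of ones back through the unrolled time steps.
hs  = lstm.forward(xs)                             # hidden outputs
dxs = lstm.backward(Xumo::SFloat.ones(*hs.shape))  # gradient w.r.t. xs
dxs.shape                                          # => same as xs.shape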
#forward(xs) ⇒ Object
# File 'lib/dnn/core/rnn_layers.rb', line 284

def forward(xs)
  @xs_shape = xs.shape
  hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
  h = nil
  c = nil
  if @stateful
    h = @hidden.data if @hidden.data
    c = @cell.data if @cell.data
  end
  h ||= Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
  c ||= Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
  xs.shape[1].times do |t|
    x = xs[true, t, false]
    h, c = @layers[t].forward(x, h, c)
    hs[true, t, false] = h
  end
  @hidden.data = h
  @cell.data = c
  @return_sequences ? hs : h
end
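Forward unrolls the input over its time axis, feeding each step's hidden and cell state into the next; with stateful: true the final states are kept for the next call instead of being reset to zero. A sketch of that behaviour, assuming a built stateful layer and SFloat batches xs1 and xs2 of shape [batch_size, time_length, input_dim]:

# Consecutive calls on a stateful layer treat the two batches as one
# continuous sequence: the second call starts from the hidden and cell
# states left behind by the first.
out1 = lstm.forward(xs1)   # starts from zero h and c
out2 = lstm.forward(xs2)   # starts from the h and c produced by xs1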
#reset_state ⇒ Object
# File 'lib/dnn/core/rnn_layers.rb', line 325

def reset_state
  super()
  @cell.data = @cell.data.fill(0) if @cell.data
end
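A short sketch of when reset_state is useful, assuming the stateful layer from the examples above; new_xs stands for any fresh input batch unrelated to the previous ones:

# Clear the carried-over hidden and cell states before feeding an
# unrelated sequence to a stateful LSTM.
lstm.reset_state
out = lstm.forward(new_xs)   # starts again from zero h and c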