Class: DNN::Layers::SimpleRNN

Inherits:
RNN
Defined in:
lib/dnn/core/layers/rnn_layers.rb

Instance Attribute Summary

Attributes inherited from RNN

#hidden, #num_units, #recurrent_weight, #recurrent_weight_initializer, #recurrent_weight_regularizer, #return_sequences, #stateful

Attributes inherited from Connection

#bias, #bias_initializer, #bias_regularizer, #weight, #weight_initializer, #weight_regularizer

Attributes inherited from TrainableLayer

#trainable

Attributes inherited from Layer

#input_shape, #output_shape

Instance Method Summary

Methods inherited from RNN

#backward_node, #compute_output_shape, #forward_node, #get_params, #regularizers, #reset_state

Methods included from LayerNode

#backward_node, #forward, #forward_node

Methods inherited from Connection

#get_params, #regularizers, #use_bias

Methods inherited from TrainableLayer

#clean, #get_params

Methods inherited from Layer

#<<, #built?, #call, call, #clean, #compute_output_shape, #forward, from_hash

Constructor Details

#initialize(num_units, stateful: false, return_sequences: true, activation: Layers::Tanh.new, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ SimpleRNN

Returns a new instance of SimpleRNN.

Parameters:

  • activation (DNN::Layers::Layer) (defaults to: Layers::Tanh.new)

    Activation function to use in a recurrent network.



# File 'lib/dnn/core/layers/rnn_layers.rb', line 181

def initialize(num_units,
               stateful: false,
               return_sequences: true,
               activation: Layers::Tanh.new,
               weight_initializer: Initializers::RandomNormal.new,
               recurrent_weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               recurrent_weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true)
  super(num_units,
        stateful: stateful,
        return_sequences: return_sequences,
        weight_initializer: weight_initializer,
        recurrent_weight_initializer: recurrent_weight_initializer,
        bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer,
        recurrent_weight_regularizer: recurrent_weight_regularizer,
        bias_regularizer: bias_regularizer,
        use_bias: use_bias)
  @activation = activation
end
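
A minimal construction sketch for illustration. Only SimpleRNN and the initializer classes named in the signature above are taken from this page; the hyperparameter values are arbitrary.

rnn = DNN::Layers::SimpleRNN.new(
  64,                                                       # num_units
  stateful: true,                                           # carry hidden state across batches
  return_sequences: false,                                  # emit only the last time step
  activation: DNN::Layers::Tanh.new,
  weight_initializer: DNN::Initializers::RandomNormal.new,
  bias_initializer: DNN::Initializers::Zeros.new
)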

Instance Attribute Details

#activation ⇒ Object (readonly)

Returns the value of attribute activation.



# File 'lib/dnn/core/layers/rnn_layers.rb', line 178

def activation
  @activation
end

Instance Method Details

#build(input_shape) ⇒ Object



# File 'lib/dnn/core/layers/rnn_layers.rb', line 205

def build(input_shape)
  super
  num_prev_units = input_shape[1]
  # Allocate input-to-hidden weights, hidden-to-hidden (recurrent) weights, and bias.
  @weight.data = Xumo::SFloat.new(num_prev_units, @num_units)
  @recurrent_weight.data = Xumo::SFloat.new(@num_units, @num_units)
  @bias.data = Xumo::SFloat.new(@num_units) if @bias
  init_weight_and_bias
end
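
A hedged shape sketch, assuming (as the code above and #create_hidden_layer suggest) that input_shape is [time_length, num_prev_units]; the concrete numbers are invented for illustration.

rnn = DNN::Layers::SimpleRNN.new(64)
rnn.build([28, 16])                  # 28 time steps, 16 features per step
rnn.weight.data.shape                # => [16, 64]  input-to-hidden
rnn.recurrent_weight.data.shape      # => [64, 64]  hidden-to-hidden
rnn.bias.data.shape                  # => [64]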

#create_hidden_layer ⇒ Object



# File 'lib/dnn/core/layers/rnn_layers.rb', line 214

def create_hidden_layer
  # One SimpleRNNCell per time step, all sharing the same weight, recurrent weight, bias, and activation.
  @hidden_layers = Array.new(@time_length) { SimpleRNNCell.new(@weight, @recurrent_weight, @bias, @activation) }
end

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/layers/rnn_layers.rb', line 222

def load_hash(hash)
  initialize(hash[:num_units],
             stateful: hash[:stateful],
             return_sequences: hash[:return_sequences],
             activation: Layers::Layer.from_hash(hash[:activation]),
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias])
end

#to_hash ⇒ Object



# File 'lib/dnn/core/layers/rnn_layers.rb', line 218

def to_hash
  super(activation: @activation.to_hash)
end
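
A hedged serialization round-trip sketch: #to_hash captures the layer configuration (including the activation, via super), and the from_hash class method inherited from Layer rebuilds an equivalent, unbuilt layer. The assumption here is that from_hash dispatches to #load_hash as shown above.

rnn = DNN::Layers::SimpleRNN.new(32, return_sequences: true)
hash = rnn.to_hash
restored = DNN::Layers::SimpleRNN.from_hash(hash)
restored.num_units          # => 32
restored.return_sequences   # => true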