Class: DNN::Layers::RNN

Inherits:
Connection
Defined in:
lib/dnn/core/rnn_layers.rb

Overview

Superclass of all RNN layer classes.

Direct Known Subclasses

GRU, LSTM, SimpleRNN

Instance Attribute Summary

Attributes inherited from Connection

#bias, #bias_initializer, #bias_regularizer, #weight, #weight_initializer, #weight_regularizer

Attributes inherited from HasParamLayer

#trainable

Attributes inherited from Layer

#input_shape, #name

Instance Method Summary

Methods inherited from Connection

#use_bias

Methods inherited from Layer

#built?, #call, call, from_hash

Constructor Details

#initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: Initializers::RandomNormal.new, recurrent_weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, recurrent_weight_regularizer: nil, bias_regularizer: nil, use_bias: true) ⇒ RNN

Returns a new instance of RNN.

Parameters:

  • num_nodes (Integer)

    Number of nodes.

  • stateful (Boolean) (defaults to: false)

    Whether to maintain the hidden state between batches.

  • return_sequences (Boolean) (defaults to: true)

    If set to false, only the output of the last cell is returned.

  • weight_initializer (DNN::Initializers::Initializer) (defaults to: Initializers::RandomNormal.new)

    Weight initializer.

  • recurrent_weight_initializer (DNN::Initializers::Initializer) (defaults to: Initializers::RandomNormal.new)

    Recurrent weight initializer.

  • bias_initializer (DNN::Initializers::Initializer) (defaults to: Initializers::Zeros.new)

    Bias initializer.

  • weight_regularizer (DNN::Regularizers::Regularizer | NilClass) (defaults to: nil)

    Weight regularizer.

  • recurrent_weight_regularizer (DNN::Regularizers::Regularizer | NilClass) (defaults to: nil)

    Recurrent weight regularizer.

  • bias_regularizer (DNN::Regularizers::Regularizer | NilClass) (defaults to: nil)

    Bias regularizer.

  • use_bias (Boolean) (defaults to: true)

    Whether to use bias.



# File 'lib/dnn/core/rnn_layers.rb', line 19

def initialize(num_nodes,
               stateful: false,
               return_sequences: true,
               weight_initializer: Initializers::RandomNormal.new,
               recurrent_weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               recurrent_weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true)
  super(weight_initializer: weight_initializer, bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer, bias_regularizer: bias_regularizer, use_bias: use_bias)
  @num_nodes = num_nodes
  @stateful = stateful
  @return_sequences = return_sequences
  @layers = []
  @hidden = Param.new
  @recurrent_weight = Param.new(nil, Xumo::SFloat[0])
  @recurrent_weight_initializer = recurrent_weight_initializer
  @recurrent_weight_regularizer = recurrent_weight_regularizer
end
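
RNN itself is an abstract superclass, so in practice you instantiate one of its subclasses (GRU, LSTM, or SimpleRNN), which share this constructor signature. A minimal, hypothetical usage sketch:

# Hypothetical sketch: a stateful LSTM layer with 64 nodes that returns
# only the output of the last time step.
require "dnn"

rnn = DNN::Layers::LSTM.new(64, stateful: true, return_sequences: false)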

Instance Attribute Details

#hidden ⇒ Object (readonly)

Returns the value of attribute hidden.



# File 'lib/dnn/core/rnn_layers.rb', line 8

def hidden
  @hidden
end

#num_nodes ⇒ Object (readonly)

Returns the value of attribute num_nodes.



# File 'lib/dnn/core/rnn_layers.rb', line 6

def num_nodes
  @num_nodes
end

#recurrent_weight ⇒ Object (readonly)

Returns the value of attribute recurrent_weight.



# File 'lib/dnn/core/rnn_layers.rb', line 7

def recurrent_weight
  @recurrent_weight
end

#recurrent_weight_initializer ⇒ Object (readonly)

Returns the value of attribute recurrent_weight_initializer.



# File 'lib/dnn/core/rnn_layers.rb', line 11

def recurrent_weight_initializer
  @recurrent_weight_initializer
end

#recurrent_weight_regularizer ⇒ Object (readonly)

Returns the value of attribute recurrent_weight_regularizer.



# File 'lib/dnn/core/rnn_layers.rb', line 12

def recurrent_weight_regularizer
  @recurrent_weight_regularizer
end

#return_sequences ⇒ Object (readonly)

Returns the value of attribute return_sequences.



# File 'lib/dnn/core/rnn_layers.rb', line 10

def return_sequences
  @return_sequences
end

#stateful ⇒ Object (readonly)

Returns the value of attribute stateful.



# File 'lib/dnn/core/rnn_layers.rb', line 9

def stateful
  @stateful
end

Instance Method Details

#backward(dh2s) ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 63

def backward(dh2s)
  unless @return_sequences
    dh = dh2s
    dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
    dh2s[true, -1, false] = dh
  end
  dxs = Xumo::SFloat.zeros(@xs_shape)
  dh = 0
  (dh2s.shape[1] - 1).downto(0) do |t|
    dh2 = dh2s[true, t, false]
    dx, dh = @layers[t].backward(dh2 + dh)
    dxs[true, t, false] = dx
  end
  dxs
end

#build(input_shape) ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 41

def build(input_shape)
  unless input_shape.length == 2
    raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 2 dimensional."
  end
  super
  @time_length = @input_shape[0]
end

#forward(xs) ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 49

def forward(xs)
  @xs_shape = xs.shape
  hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
  h = @stateful && @hidden.data ? @hidden.data : Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
  xs.shape[1].times do |t|
    x = xs[true, t, false]
    @layers[t].trainable = @trainable
    h = @layers[t].forward(x, h)
    hs[true, t, false] = h
  end
  @hidden.data = h
  @return_sequences ? hs : h
end
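
A hedged sketch of the shapes involved, assuming a layer that has already been built inside a model (so its per-time-step cells exist) with the hypothetical values input_shape [10, 8] (time_length 10, 8 features) and num_nodes 16:

# Shape sketch under the assumptions above; xs is a batch of 32 sequences.
xs = Xumo::SFloat.new(32, 10, 8).rand
hs = rnn.forward(xs)
hs.shape  # => [32, 10, 16] when return_sequences is true, [32, 16] otherwise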

#get_params ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 108

def get_params
  { weight: @weight, recurrent_weight: @recurrent_weight, bias: @bias, hidden: @hidden }
end

#load_hash(hash) ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 95

def load_hash(hash)
  initialize(hash[:num_nodes],
             stateful: hash[:stateful],
             return_sequences: hash[:return_sequences],
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias])
end
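
Together with #to_hash and the from_hash class method inherited from Layer, this allows a layer's configuration to be round-tripped through a hash. A hedged sketch (assuming an LSTM instance rnn as in the constructor example above):

# Serialization sketch: dump the configuration to a hash, then rebuild an
# equivalent (untrained) layer from it.
hash = rnn.to_hash
restored = DNN::Layers::LSTM.from_hash(hash)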

#output_shape ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 79

def output_shape
  @return_sequences ? [@time_length, @num_nodes] : [@num_nodes]
end
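
For example, with a time length of 10 and num_nodes of 16 (hypothetical values):

rnn.output_shape  # => [10, 16] if return_sequences is true, [16] otherwise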

#regularizers ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 117

def regularizers
  regularizers = []
  regularizers << @weight_regularizer if @weight_regularizer
  regularizers << @recurrent_weight_regularizer if @recurrent_weight_regularizer
  regularizers << @bias_regularizer if @bias_regularizer
  regularizers
end

#reset_state ⇒ Object

Reset the hidden state of the RNN.



# File 'lib/dnn/core/rnn_layers.rb', line 113

def reset_state
  @hidden.data = @hidden.data.fill(0) if @hidden.data
end
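
When stateful is true, the hidden state carries over between successive forward calls, so reset it before feeding an unrelated sequence. A hedged sketch (assuming the rnn instance from the constructor example):

# Clear the carried-over hidden state of a stateful layer before starting
# a new, unrelated sequence.
rnn.reset_state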

#to_hash(merge_hash = nil) ⇒ Object



# File 'lib/dnn/core/rnn_layers.rb', line 83

def to_hash(merge_hash = nil)
  hash = {
    num_nodes: @num_nodes,
    stateful: @stateful,
    return_sequences: @return_sequences,
    recurrent_weight_initializer: @recurrent_weight_initializer.to_hash,
    recurrent_weight_regularizer: @recurrent_weight_regularizer&.to_hash,
  }
  hash.merge!(merge_hash) if merge_hash
  super(hash)
end