Class: DNN::Layers::RNN

Inherits:
HasParamLayer show all
Includes:
Activations, Initializers
Defined in:
lib/dnn/core/rnn_layers.rb

Overview

Super class of all RNN classes.

Direct Known Subclasses

GRU, LSTM, SimpleRNN

Instance Attribute Summary collapse

Attributes inherited from HasParamLayer

#grads, #params, #trainable

Instance Method Summary collapse

Methods inherited from HasParamLayer

#build, #update

Methods inherited from Layer

#build, #built?, #prev_layer

Constructor Details

#initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) ⇒ RNN

Returns a new instance of RNN.



14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
# File 'lib/dnn/core/rnn_layers.rb', line 14

# Common RNN setup shared by SimpleRNN / LSTM / GRU.
#
# @param num_nodes [Integer] number of hidden units.
# @param stateful [Boolean] carry the hidden state across #forward calls.
# @param return_sequences [Boolean] return every timestep's state, not just the last.
# @param weight_initializer [Object, nil] falls back to RandomNormal when nil.
# @param bias_initializer [Object, nil] falls back to Zeros when nil.
# @param weight_decay [Numeric] L2 penalty coefficient used by #ridge.
def initialize(num_nodes, stateful: false, return_sequences: true,
               weight_initializer: nil, bias_initializer: nil, weight_decay: 0)
  super()
  @num_nodes = num_nodes
  @stateful = stateful
  @return_sequences = return_sequences
  # Library defaults kick in only when no initializer is supplied.
  @weight_initializer = weight_initializer || RandomNormal.new
  @bias_initializer = bias_initializer || Zeros.new
  @weight_decay = weight_decay
  @layers = [] # per-timestep cell layers, populated elsewhere
  @h = nil     # hidden state; stays nil until the first #forward
end

Instance Attribute Details

#hObject

Returns the value of attribute h.



9
10
11
# File 'lib/dnn/core/rnn_layers.rb', line 9

# Hidden state left by the last #forward pass (nil before the first call).
def h; @h; end

#num_nodesObject (readonly)

Returns the value of attribute num_nodes.



10
11
12
# File 'lib/dnn/core/rnn_layers.rb', line 10

# Number of hidden units in this layer (read-only).
def num_nodes; @num_nodes; end

#statefulObject (readonly)

Returns the value of attribute stateful.



11
12
13
# File 'lib/dnn/core/rnn_layers.rb', line 11

# Whether the hidden state persists across #forward calls (read-only).
def stateful; @stateful; end

#weight_decayObject (readonly)

Returns the value of attribute weight_decay.



12
13
14
# File 'lib/dnn/core/rnn_layers.rb', line 12

# L2 regularization coefficient consumed by #ridge (read-only).
def weight_decay; @weight_decay; end

Instance Method Details

#backward(dh2s) ⇒ Object



44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
# File 'lib/dnn/core/rnn_layers.rb', line 44

# Back-propagation through time over the cached timestep layers.
#
# @param dh2s [Xumo::SFloat] upstream gradient: the full (batch, time, nodes)
#   sequence when return_sequences is on, otherwise only the final step's
#   (batch, nodes) gradient.
# @return [Xumo::SFloat] gradient w.r.t. the input sequence, shaped like the
#   xs seen in #forward (@xs_shape).
def backward(dh2s)
  # Zero the parameter gradients before the backward sweep.
  # NOTE(review): the per-timestep layers appear to accumulate into @grads —
  # confirm against the cell implementation.
  @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
  @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
  @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
  unless @return_sequences
    # Only the last step produced output: expand to a full-length gradient
    # sequence that is zero everywhere except the final timestep.
    dh = dh2s
    dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
    dh2s[true, -1, false] = dh
  end
  dxs = Xumo::SFloat.zeros(@xs_shape)
  dh = 0
  # Iterate the timesteps in reverse with downto instead of building a
  # throwaway index Array via (0...n).to_a.reverse.
  (dh2s.shape[1] - 1).downto(0) do |t|
    dh2 = dh2s[true, t, false]
    dx, dh = @layers[t].backward(dh2 + dh)
    dxs[true, t, false] = dx
  end
  dxs
end

#forward(xs) ⇒ Object



31
32
33
34
35
36
37
38
39
40
41
42
# File 'lib/dnn/core/rnn_layers.rb', line 31

# Runs the RNN cells over an entire input sequence.
#
# @param xs [Xumo::SFloat] input batch — assumed (batch, time, features);
#   confirm against the caller.
# @return [Xumo::SFloat] all hidden states (batch, time, nodes) when
#   return_sequences is on, otherwise only the final state (batch, nodes).
def forward(xs)
  @xs_shape = xs.shape
  batch_size = xs.shape[0]
  hs = Xumo::SFloat.zeros(batch_size, @time_length, @num_nodes)
  # Resume from the previous batch's state only when stateful and one exists.
  h = if @stateful && @h
        @h
      else
        Xumo::SFloat.zeros(batch_size, @num_nodes)
      end
  xs.shape[1].times do |t|
    x = xs[true, t, false]
    h = @layers[t].forward(x, h)
    hs[true, t, false] = h
  end
  @h = h # kept for the stateful path above
  @return_sequences ? hs : h
end

#init_paramsObject



89
90
91
# File 'lib/dnn/core/rnn_layers.rb', line 89

# Caches the sequence length from the previous layer's output shape so
# #forward and #shape can size their buffers.
# NOTE(review): presumably invoked during #build (inherited from
# HasParamLayer) — confirm in the superclass.
def init_params
  @time_length = prev_layer.shape[0]
end

#ridgeObject



81
82
83
84
85
86
87
# File 'lib/dnn/core/rnn_layers.rb', line 81

# L2 (ridge) regularization penalty over both weight matrices.
# @return [Numeric] 0 when weight decay is disabled.
def ridge
  return 0 unless @weight_decay > 0
  squared_norms = (@params[:weight]**2).sum + (@params[:weight2]**2).sum
  0.5 * (@weight_decay * squared_norms)
end

#shapeObject



77
78
79
# File 'lib/dnn/core/rnn_layers.rb', line 77

# Output shape (without the batch dimension): the whole sequence when
# return_sequences is on, otherwise just the final hidden vector.
def shape
  if @return_sequences
    [@time_length, @num_nodes]
  else
    [@num_nodes]
  end
end

#to_hash(merge_hash = nil) ⇒ Object



63
64
65
66
67
68
69
70
71
72
73
74
75
# File 'lib/dnn/core/rnn_layers.rb', line 63

# Serializes this layer's configuration (initializers included) into a Hash.
#
# @param merge_hash [Hash, nil] extra entries a subclass wants to include.
# @return [Hash]
def to_hash(merge_hash = nil)
  config = {}
  config[:class] = self.class.name
  config[:num_nodes] = @num_nodes
  config[:stateful] = @stateful
  config[:return_sequences] = @return_sequences
  config[:weight_initializer] = @weight_initializer.to_hash
  config[:bias_initializer] = @bias_initializer.to_hash
  config[:weight_decay] = @weight_decay
  config.merge!(merge_hash) if merge_hash
  config
end