Class: DNN::Layers::Dense

Inherits:
HasParamLayer
Includes:
Initializers
Defined in:
lib/dnn/core/layers.rb

Instance Attribute Summary

Attributes inherited from HasParamLayer

#grads, #params, #trainable

Class Method Summary

Instance Method Summary

Methods inherited from HasParamLayer

#build, #update

Methods inherited from Layer

#build, #built?, #prev_layer

Constructor Details

#initialize(num_nodes, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) ⇒ Dense

Returns a new instance of Dense.



# File 'lib/dnn/core/layers.rb', line 118

def initialize(num_nodes,
               weight_initializer: nil,
               bias_initializer: nil,
               weight_decay: 0)
  super()
  @num_nodes = num_nodes
  @weight_initializer = (weight_initializer || RandomNormal.new)
  @bias_initializer = (bias_initializer || Zeros.new)
  @weight_decay = weight_decay
end
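
A minimal construction sketch, assuming the gem is required as "dnn" and that RandomNormal and Zeros live under DNN::Initializers (as the Includes line above suggests); omitting the initializer arguments falls back to RandomNormal and Zeros, as the constructor shows.

require "dnn"

# Hypothetical usage: a 64-node fully connected layer with explicit
# initializers and a small L2 penalty.
layer = DNN::Layers::Dense.new(64,
                               weight_initializer: DNN::Initializers::RandomNormal.new,
                               bias_initializer: DNN::Initializers::Zeros.new,
                               weight_decay: 1.0e-4)
layer.num_nodes    # => 64
layer.weight_decay # => 0.0001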

Instance Attribute Details

#num_nodes ⇒ Object (readonly)

Returns the value of attribute num_nodes.



# File 'lib/dnn/core/layers.rb', line 108

def num_nodes
  @num_nodes
end

#weight_decay ⇒ Object (readonly)

Returns the value of attribute weight_decay.



# File 'lib/dnn/core/layers.rb', line 109

def weight_decay
  @weight_decay
end

Class Method Details

.load_hash(hash) ⇒ Object



# File 'lib/dnn/core/layers.rb', line 111

def self.load_hash(hash)
  self.new(hash[:num_nodes],
           weight_initializer: Util.load_hash(hash[:weight_initializer]),
           bias_initializer: Util.load_hash(hash[:bias_initializer]),
           weight_decay: hash[:weight_decay])
end
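
A hedged round-trip sketch, continuing the example above: .load_hash rebuilds a layer from the hash produced by #to_hash (see below), so the restored layer should report the same configuration.

hash = layer.to_hash
restored = DNN::Layers::Dense.load_hash(hash)
restored.num_nodes == layer.num_nodes       # => true
restored.weight_decay == layer.weight_decay # => true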

Instance Method Details

#backward(dout) ⇒ Object



# File 'lib/dnn/core/layers.rb', line 134

def backward(dout)
  @grads[:weight] = @x.transpose.dot(dout)
  if @weight_decay > 0
    dridge = @weight_decay * @params[:weight]
    @grads[:weight] += dridge
  end
  @grads[:bias] = dout.sum(0)
  dout.dot(@params[:weight].transpose)
end
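
Gradient shape summary, assuming Numo::NArray batches (an assumption; this page does not name the array backend). With n samples, in input features and num_nodes outputs:

# @x              : (n, in)          input saved by #forward
# dout            : (n, num_nodes)   gradient arriving from the next layer
# @grads[:weight] : (in, num_nodes)  @x.transpose.dot(dout), plus weight_decay * W when weight_decay > 0
# @grads[:bias]   : (num_nodes)      dout.sum(0), summed over the batch
# return value    : (n, in)          dout.dot(W.transpose), handed to the previous layer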

#forward(x) ⇒ Object



# File 'lib/dnn/core/layers.rb', line 129

def forward(x)
  @x = x
  @x.dot(@params[:weight]) + @params[:bias]
end
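
A small numeric sketch of the affine transform x.dot(W) + b, assuming Numo::NArray arrays and hand-assigned weights; in real use the weight and bias parameters are created by #build, not set directly.

require "numo/narray"

x = Numo::SFloat[[1, 2], [3, 4]]        # batch of 2 samples, 2 features
w = Numo::SFloat[[1, 0, 1], [0, 1, 1]]  # 2 inputs -> 3 nodes
b = Numo::SFloat[0.5, 0.5, 0.5]
x.dot(w) + b                            # same computation as #forward
# => [[1.5, 2.5, 3.5], [3.5, 4.5, 7.5]]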

#ridge ⇒ Object



# File 'lib/dnn/core/layers.rb', line 148

def ridge
  if @weight_decay > 0
    0.5 * @weight_decay * (@params[:weight]**2).sum
  else
    0
  end
end
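
#ridge returns the L2 (weight decay) penalty 0.5 * weight_decay * sum(W**2), presumably added to the loss when weight_decay > 0. A worked example under the same Numo assumption:

weight_decay = 0.1
weight = Numo::SFloat[2, 1]
0.5 * weight_decay * (weight**2).sum  # => 0.25  (0.5 * 0.1 * (4 + 1))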

#shape ⇒ Object



# File 'lib/dnn/core/layers.rb', line 144

def shape
  [@num_nodes]
end

#to_hash ⇒ Object



# File 'lib/dnn/core/layers.rb', line 156

def to_hash
  super({num_nodes: @num_nodes,
         weight_initializer: @weight_initializer.to_hash,
         bias_initializer: @bias_initializer.to_hash,
         weight_decay: @weight_decay})
end
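
Continuing the construction sketch, the serialized hash exposes the constructor arguments (the superclass may merge in further keys, such as the layer's class name; an assumption):

hash = layer.to_hash
hash[:num_nodes]          # => 64
hash[:weight_decay]       # => 0.0001
hash[:weight_initializer] # hash form of the initializer, consumed by Dense.load_hash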