Class: DNN::Layers::Dense

Inherits:
HasParamLayer show all
Includes:
Initializers
Defined in:
lib/dnn/core/layers.rb

Instance Attribute Summary collapse

Attributes inherited from HasParamLayer

#grads, #params

Instance Method Summary collapse

Methods inherited from HasParamLayer

#init, #update

Methods inherited from Layer

#init, #prev_layer

Constructor Details

#initialize(num_nodes, weight_initializer: nil, bias_initializer: nil, weight_decay: 0) ⇒ Dense

Returns a new instance of Dense.



80
81
82
83
84
85
86
87
88
89
# File 'lib/dnn/core/layers.rb', line 80

# Builds a fully-connected (dense) layer.
#
# num_nodes          - Integer count of output units.
# weight_initializer - initializer for the weight matrix; defaults to RandomNormal.
# bias_initializer   - initializer for the bias vector; defaults to Zeros.
# weight_decay       - L2 regularization coefficient; 0 disables the penalty.
def initialize(num_nodes, weight_initializer: nil, bias_initializer: nil, weight_decay: 0)
  super()
  @num_nodes = num_nodes
  @weight_decay = weight_decay
  @weight_initializer = weight_initializer || RandomNormal.new
  @bias_initializer = bias_initializer || Zeros.new
end

Instance Attribute Details

#num_nodes ⇒ Object (readonly)

Returns the value of attribute num_nodes.



77
78
79
# File 'lib/dnn/core/layers.rb', line 77

# Reader for the number of output units of this layer.
def num_nodes; @num_nodes; end

#weight_decay ⇒ Object (readonly)

Returns the value of attribute weight_decay.



78
79
80
# File 'lib/dnn/core/layers.rb', line 78

# Reader for the L2 regularization coefficient (0 means no decay).
def weight_decay; @weight_decay; end

Instance Method Details

#backward(dout) ⇒ Object



96
97
98
99
100
101
102
103
104
# File 'lib/dnn/core/layers.rb', line 96

# Backward pass of the dense layer.
# Stores the weight gradient (plus the L2 decay term when enabled) and the
# bias gradient in @grads, then returns the gradient w.r.t. the layer input.
def backward(dout)
  weight = @params[:weight]
  dweight = @x.transpose.dot(dout)
  # Add the ridge (L2) penalty gradient only when decay is active.
  dweight += @weight_decay * weight if @weight_decay > 0
  @grads[:weight] = dweight
  @grads[:bias] = dout.sum(0)
  dout.dot(weight.transpose)
end

#forward(x) ⇒ Object



91
92
93
94
# File 'lib/dnn/core/layers.rb', line 91

# Forward pass: caches the input for backprop, then applies the affine
# transform x·W + b.
def forward(x)
  @x = x
  weight = @params[:weight]
  bias = @params[:bias]
  x.dot(weight) + bias
end

#shapeObject



106
107
108
# File 'lib/dnn/core/layers.rb', line 106

# Output shape of the layer: a single dimension of @num_nodes units.
def shape; [@num_nodes]; end