Class: DNN::Losses::SoftmaxCrossEntropy

Inherits: Loss < Object (show all)
Includes:
DNN::Layers::MergeLayerNode
Defined in:
lib/dnn/core/losses.rb

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods included from DNN::Layers::MergeLayerNode

#backward, #forward

Methods inherited from Loss

call, #call, #clean, #forward, from_hash, #loss, #regularizers_backward, #regularizers_forward

Constructor Details

#initialize(eps: 1e-7) ⇒ SoftmaxCrossEntropy

Returns a new instance of SoftmaxCrossEntropy.

Parameters:

  • eps (Float) (defaults to: 1e-7)

    Small constant added to the softmax output before taking the log, to avoid NaN (log of zero).



150
151
152
# File 'lib/dnn/core/losses.rb', line 150

# Builds the loss with a numerical-stability constant.
#
# @param eps [Float] small value added to the softmax output before the
#   log in forward_node, preventing log(0) = -Infinity / NaN.
def initialize(eps: 1e-7)
  @eps = eps
end

Instance Attribute Details

#epsObject

Returns the value of attribute eps.



139
140
141
# File 'lib/dnn/core/losses.rb', line 139

# Reader for the numerical-stability constant used inside the log.
#
# @return [Float] value of attribute eps.
def eps
  @eps
end

Class Method Details

.softmax(y) ⇒ Object Also known as: activation



142
143
144
# File 'lib/dnn/core/losses.rb', line 142

# Row-wise softmax over axis 1.
#
# Subtracts the per-row maximum from the logits before exponentiating.
# This leaves the result mathematically unchanged (the shift cancels in
# the ratio) but prevents Float overflow to Infinity/NaN for large
# logits, which the naive exp(y) / sum(exp(y)) form suffers from.
# Also computes the exponentials once instead of twice.
#
# @param y [Xumo::NArray] batch of logits; axis 1 is the class axis
#   (assumed 2-D (batch, classes) from the axis-1 reduction — TODO confirm).
# @return [Xumo::NArray] probabilities summing to 1 along axis 1.
def softmax(y)
  shifted_exp = Xumo::NMath.exp(y - y.max(1, keepdims: true))
  shifted_exp / shifted_exp.sum(1, keepdims: true)
end

Instance Method Details

#backward_node(d) ⇒ Object



160
161
162
# File 'lib/dnn/core/losses.rb', line 160

# Gradient of the softmax cross-entropy loss w.r.t. the logits.
#
# Uses the softmax output (@x) and targets (@t) cached by forward_node.
# The division by the leading dimension matches the mean(0) taken in the
# forward pass, so the gradient is averaged over the batch.
#
# @param d [Numeric, Xumo::NArray] upstream gradient.
# @return [Xumo::NArray] gradient (softmax - targets) scaled by d / batch size.
def backward_node(d)
  batch_size = @x.shape[0]
  grad = @x - @t
  grad * d / batch_size
end

#forward_node(y, t) ⇒ Object



154
155
156
157
158
# File 'lib/dnn/core/losses.rb', line 154

# Softmax cross-entropy between logits y and targets t.
#
# Caches the targets and the softmax output in @t / @x so backward_node
# can compute the gradient without recomputing the softmax.
# @eps is added before the log to avoid log(0) = -Infinity.
#
# @param y [Xumo::NArray] logits.
# @param t [Xumo::NArray] target distribution (presumably one-hot — TODO confirm).
# @return [Float] mean-over-batch cross-entropy loss.
def forward_node(y, t)
  @t = t
  @x = SoftmaxCrossEntropy.softmax(y)
  log_prob = Xumo::NMath.log(@x + @eps)
  -(t * log_prob).mean(0).sum
end

#load_hash(hash) ⇒ Object



168
169
170
# File 'lib/dnn/core/losses.rb', line 168

# Restores this loss's configuration from a serialized hash (the
# counterpart of #to_hash) by re-running the constructor.
#
# NOTE(review): a hash without :eps yields eps: nil here, not the 1e-7
# keyword default — presumably hashes always come from #to_hash; confirm.
#
# @param hash [Hash] serialized configuration containing :eps.
def load_hash(hash)
  eps_value = hash[:eps]
  initialize(eps: eps_value)
end

#to_hashObject



164
165
166
# File 'lib/dnn/core/losses.rb', line 164

# Serializes this loss's configuration.
#
# Passes eps: @eps up to the parent Loss#to_hash, which builds the
# final hash (counterpart of #load_hash).
#
# @return [Hash] serialized configuration including :eps.
def to_hash
  super(eps: @eps)
end