Class: DNN::Activations::ELU

Inherits:
Layers::Layer show all
Defined in:
lib/dnn/core/activations.rb

Instance Attribute Summary collapse

Attributes inherited from Layers::Layer

#input_shape, #name

Instance Method Summary collapse

Methods inherited from Layers::Layer

#build, #built?, #call, call, from_hash, #output_shape

Constructor Details

#initialize(alpha = 1.0) ⇒ ELU

Returns a new instance of ELU.

Parameters:

  • alpha (Float) (defaults to: 1.0)

    The coefficient applied in the negative region: the output is alpha * (exp(x) - 1) for x < 0.



110
111
112
113
# File 'lib/dnn/core/activations.rb', line 110

# Builds an ELU activation layer.
#
# @param alpha [Float] coefficient for the negative region (defaults to 1.0);
#   stored for use by #forward / #backward.
def initialize(alpha = 1.0)
  super()
  @alpha = alpha
end

Instance Attribute Details

#alpha ⇒ Object (readonly)

Returns the value of attribute alpha.



107
108
109
# File 'lib/dnn/core/activations.rb', line 107

# Read-only accessor for the ELU negative-region coefficient.
#
# @return [Float] the value of attribute alpha.
def alpha
  @alpha
end

Instance Method Details

#backward(dy) ⇒ Object



126
127
128
129
130
131
132
133
# File 'lib/dnn/core/activations.rb', line 126

# Backward pass of ELU: the local gradient is 1 where the cached input
# @x was non-negative and alpha * exp(x) where it was negative.
#
# @param dy incoming gradient from the next layer.
# @return element-wise product of dy and the ELU derivative.
def backward(dy)
  # Mask of ones on the non-negative region (derivative of identity).
  grad_pos = Xumo::SFloat.ones(@x.shape)
  grad_pos[@x < 0] = 0
  # Mask of ones on the negative region, scaled to alpha * exp(x).
  grad_neg = Xumo::SFloat.zeros(@x.shape)
  grad_neg[@x < 0] = 1
  grad_neg *= @alpha * Xumo::NMath.exp(@x)
  dy * (grad_pos + grad_neg)
end

#forward(x) ⇒ Object



115
116
117
118
119
120
121
122
123
124
# File 'lib/dnn/core/activations.rb', line 115

# Forward pass of ELU: returns x where x >= 0 and
# alpha * (exp(x) - 1) where x < 0. Caches the input for #backward.
#
# @param x input array (Xumo::SFloat).
# @return activated array of the same shape as x.
def forward(x)
  @x = x
  # Identity branch, masked to the non-negative region.
  pos_part = Xumo::SFloat.zeros(x.shape)
  pos_part[x >= 0] = 1
  pos_part *= x
  # Saturating branch alpha * (exp(x) - 1), masked to the negative region.
  neg_part = Xumo::SFloat.zeros(x.shape)
  neg_part[x < 0] = 1
  neg_part *= @alpha * Xumo::NMath.exp(x) - @alpha
  pos_part + neg_part
end

#load_hash(hash) ⇒ Object



139
140
141
# File 'lib/dnn/core/activations.rb', line 139

# Restores the layer's state from a serialized hash by re-running the
# constructor with the stored alpha value.
#
# @param hash [Hash] serialized layer state containing :alpha.
def load_hash(hash)
  loaded_alpha = hash[:alpha]
  initialize(loaded_alpha)
end

#to_hash ⇒ Object



135
136
137
# File 'lib/dnn/core/activations.rb', line 135

# Serializes the layer to a hash, merging :alpha into whatever the
# parent class's to_hash produces.
#
# @return [Hash] serialized layer state including :alpha.
def to_hash
  super(alpha: @alpha)
end