Class: DNN::Layers::ELU

Inherits:
Layer
  • Object
show all
Includes:
LayerNode
Defined in:
lib/dnn/core/layers/activations.rb

Instance Attribute Summary collapse

Attributes inherited from Layer

#input_shape, #output_shape

Instance Method Summary collapse

Methods included from LayerNode

#forward

Methods inherited from Layer

#<<, #build, #built?, #call, call, #clean, #compute_output_shape, #forward, from_hash

Constructor Details

#initialize(alpha = 1.0) ⇒ ELU

Returns a new instance of ELU.

Parameters:

  • alpha (Float) (defaults to: 1.0)

    The coefficient applied on the negative side: the output is alpha * (exp(x) - 1) when the input is negative.



119
120
121
122
# File 'lib/dnn/core/layers/activations.rb', line 119

# Creates a new ELU activation layer.
# @param alpha [Float] Coefficient scaling the negative-side output
#   +alpha * (exp(x) - 1)+ (see #forward_node). Defaults to 1.0.
def initialize(alpha = 1.0)
  super()
  @alpha = alpha
end

Instance Attribute Details

#alpha ⇒ Object (readonly)

Returns the value of attribute alpha.



116
117
118
# File 'lib/dnn/core/layers/activations.rb', line 116

def alpha
  @alpha
end

Instance Method Details

#backward_node(dy) ⇒ Object



135
136
137
138
139
140
141
142
# File 'lib/dnn/core/layers/activations.rb', line 135

# Backward pass of ELU: the local derivative is 1 where the cached
# input is non-negative and +alpha * exp(x)+ where it is negative.
# @param dy [Xumo::SFloat] upstream gradient.
# @return [Xumo::SFloat] gradient with respect to the layer input.
def backward_node(dy)
  negative = @x < 0
  # Derivative contribution from the identity region (x >= 0).
  pos_grad = Xumo::SFloat.ones(@x.shape)
  pos_grad[negative] = 0
  # Derivative contribution from the exponential region (x < 0).
  neg_grad = Xumo::SFloat.zeros(@x.shape)
  neg_grad[negative] = 1
  neg_grad *= @alpha * Xumo::NMath.exp(@x)
  dy * (pos_grad + neg_grad)
end

#forward_node(x) ⇒ Object



124
125
126
127
128
129
130
131
132
133
# File 'lib/dnn/core/layers/activations.rb', line 124

# Forward pass of ELU: returns +x+ where +x >= 0+ and
# +alpha * (exp(x) - 1)+ where +x < 0+. Caches the input in +@x+
# for use by #backward_node.
# @param x [Xumo::SFloat] layer input.
# @return [Xumo::SFloat] activated output, same shape as +x+.
def forward_node(x)
  @x = x
  # Identity branch, masked to the non-negative entries.
  identity_part = Xumo::SFloat.zeros(x.shape)
  identity_part[x >= 0] = 1
  identity_part *= x
  # Exponential branch, masked to the negative entries.
  exp_part = Xumo::SFloat.zeros(x.shape)
  exp_part[x < 0] = 1
  exp_part *= @alpha * Xumo::NMath.exp(x) - @alpha
  identity_part + exp_part
end

#load_hash(hash) ⇒ Object



148
149
150
# File 'lib/dnn/core/layers/activations.rb', line 148

# Restores the layer configuration from a serialized hash
# (the counterpart of #to_hash) by re-running the constructor.
# @param hash [Hash] expected to contain the +:alpha+ key.
def load_hash(hash)
  initialize(hash[:alpha])
end

#to_hash ⇒ Object



144
145
146
# File 'lib/dnn/core/layers/activations.rb', line 144

# Serializes the layer configuration, contributing +:alpha+ to the
# hash built by the superclass.
# NOTE(review): presumably Layer#to_hash merges the passed pairs into
# the base hash — confirm against lib/dnn/core/layers/basic_layers.
# @return [Hash]
def to_hash
  super(alpha: @alpha)
end