Class: BackProp::Neuron

Inherits:
Object
Defined in:
lib/perceptron.rb

Constant Summary collapse

# Available activation functions for Value objects: maps the option name
# accepted by Neuron#initialize to the Value instance method that
# Neuron#apply invokes (via send) on the weighted sum.
# Frozen so the shared lookup table cannot be mutated at runtime.
ACTIVATION = {
  tanh: :tanh,
  sigmoid: :sigmoid,
  relu: :relu,
}.freeze

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(input_count, activation: :relu) ⇒ Neuron

Returns a new instance of Neuron.



14
15
16
17
18
# File 'lib/perceptron.rb', line 14

# Build a neuron with +input_count+ randomly initialized weights and a
# random bias, each a Value drawn from the range -1.0..1.0.
#
# @param input_count [Integer] number of inputs; one weight is created per input
# @param activation [Symbol] key into ACTIVATION; an unknown key raises KeyError
def initialize(input_count, activation: :relu)
  random_value = -> { Value.new(rand(-1.0..1.0)) }
  @weights = Array.new(input_count) { random_value.call }
  @bias = random_value.call
  @activation = ACTIVATION.fetch(activation)
end

Instance Attribute Details

#activation ⇒ Object (readonly)

Returns the value of attribute activation.



12
13
14
# File 'lib/perceptron.rb', line 12

# Reader for the activation function name: a Symbol naming the Value
# method (e.g. :tanh) that #apply sends to the weighted sum.
def activation
  @activation
end

#bias ⇒ Object (readonly)

Returns the value of attribute bias.



12
13
14
# File 'lib/perceptron.rb', line 12

# Reader for the bias term: a Value added to the weighted input sum in #apply.
def bias
  @bias
end

#weights ⇒ Object (readonly)

Returns the value of attribute weights.



12
13
14
# File 'lib/perceptron.rb', line 12

# Reader for the weight Values: one per input, multiplied elementwise
# against the input vector in #apply.
def weights
  @weights
end

Instance Method Details

#apply(x = 0) ⇒ Object



20
21
22
23
24
25
26
# File 'lib/perceptron.rb', line 20

# Compute the neuron's output: activation(weights . x + bias).
#
# @param x [Enumerable, Numeric] inputs; a bare scalar is broadcast so every
#   weight sees the same value
# @return [Value] the activated weighted sum
def apply(x = 0)
  # Broadcast a scalar input across all weights.
  x = Array.new(@weights.size) { x } unless x.is_a?(Enumerable)
  acc = Value.new(0)
  @weights.each_with_index { |w, i| acc = acc + (w * x[i]) }
  (acc + @bias).send(@activation)
end

#descend(step_size) ⇒ Object



28
29
30
31
32
33
# File 'lib/perceptron.rb', line 28

# Nudge every learnable parameter (each weight and the bias) against its
# stored gradient — one step of gradient descent.
#
# @param step_size [Numeric] the learning rate
# @return [self] so calls can be chained
def descend(step_size)
  @weights.each { |w| w.value += -1 * step_size * w.gradient }
  @bias.value += -1 * step_size * @bias.gradient
  self
end

#inspect ⇒ Object



39
40
41
42
43
# File 'lib/perceptron.rb', line 39

def inspect
  fmt = "% .3f|% .3f"
  @weights.map { |w| format(fmt, w.value, w.gradient) }.join("\t") +
    "\t" + format(fmt, @bias.value, @bias.gradient)
end

#to_s ⇒ Object



35
36
37
# File 'lib/perceptron.rb', line 35

def to_s
  format("N(%s)\t(%s %s)", @weights.join(', '), @bias, @activation)
end