Class: NeuralNetwork

Inherits: Object (show all)
Defined in:
lib/NeuralNet.rb

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(inputNodes: 0, hiddenNodes: [], outputNodes: 0, learningRate: 0.01, activation: "sigmoid", oneHot: false, reluFactor: 1, momentum: 0.0) ⇒ NeuralNetwork

Returns a new instance of NeuralNetwork.



8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# File 'lib/NeuralNet.rb', line 8

# Builds a fully-connected feed-forward network.
#
# inputNodes   - Integer count of input features.
# hiddenNodes  - Array of Integers, one entry per hidden layer (its size).
# outputNodes  - Integer count of output units.
# learningRate - Float step size (stored; applied during training).
# activation   - String name of the hidden activation method ("sigmoid", "relu", ...).
# oneHot       - When true, the output layer uses softmax with its own delta rule.
# reluFactor   - Slope factor used only when activation == "relu".
# momentum     - Float momentum coefficient for weight updates.
def initialize(inputNodes: 0, hiddenNodes: [], outputNodes: 0,
  learningRate: 0.01, activation: "sigmoid", oneHot: false,
  reluFactor: 1, momentum: 0.0)
  @nInputs       = inputNodes
  @nHidden       = hiddenNodes
  @nHiddenLayers = hiddenNodes.length
  @nOutputs      = outputNodes
  @learningRate  = learningRate
  @momentum      = momentum

  @hiddenWeights     = []
  @hiddenBias        = []
  @prevHWeightDeltas = []  # momentum accumulators for hidden weights
  @prevHBiasDeltas   = []  # momentum accumulators for hidden biases

  # First hidden layer connects to the inputs.
  # Weights/biases are drawn uniformly from [-1, 1).
  fan_in, fan_out = @nInputs, @nHidden[0]
  @hiddenWeights[0] = DFloat.new([fan_out, fan_in]).rand * 2 - 1
  @hiddenBias[0]    = DFloat.new([fan_out, 1]).rand * 2 - 1

  # Hidden-layer momentum terms start at zero.
  @prevHWeightDeltas[0] = DFloat.zeros([fan_out, fan_in])
  @prevHBiasDeltas[0]   = DFloat.zeros([fan_out, 1])

  # Remaining hidden layers connect to the previous hidden layer.
  1.upto(@nHiddenLayers - 1) do |layer|
    fan_in, fan_out = @nHidden[layer - 1], @nHidden[layer]
    @hiddenWeights[layer]     = DFloat.new([fan_out, fan_in]).rand * 2 - 1
    @hiddenBias[layer]        = DFloat.new([fan_out, 1]).rand * 2 - 1
    @prevHWeightDeltas[layer] = DFloat.zeros([fan_out, fan_in])
    @prevHBiasDeltas[layer]   = DFloat.zeros([fan_out, 1])
  end

  # Output layer connects to the last hidden layer.
  last_hidden = @nHidden[@nHiddenLayers - 1]
  @outputWeights = DFloat.new([@nOutputs, last_hidden]).rand * 2 - 1
  @outputBias    = DFloat.new([@nOutputs, 1]).rand * 2 - 1

  # Output-layer momentum terms start at zero.
  @prevOWeightDeltas = DFloat.zeros([@nOutputs, last_hidden])
  @prevOBiasDeltas   = DFloat.zeros([@nOutputs, 1])

  # Activation functions are dispatched by name via Object#method at runtime.
  @hiddenActivation     = activation
  @hiddenActivationDerv = "#{activation}_prime"
  @reluFactor = reluFactor.to_f if activation == "relu"

  if oneHot
    # One-hot targets: softmax output with its dedicated delta rule.
    @outputActivation = "softmax"
    @outDel = "softmax_out_delta"
  else
    # Otherwise the output layer reuses the hidden activation.
    @outputActivation     = @hiddenActivation
    @outputActivationDerv = @hiddenActivationDerv
    @outDel = "_out_delta"
  end
end

Class Method Details

.load(path) ⇒ Object



105
106
107
108
109
110
111
112
# File 'lib/NeuralNet.rb', line 105

# Restores a previously saved network from +path+ (see #save).
#
# path - String filesystem path to a Marshal dump.
#
# Returns the deserialized NeuralNetwork.
# Raises Errno::ENOENT when the file does not exist.
#
# SECURITY NOTE(review): Marshal.load can execute arbitrary code when fed
# attacker-controlled data — only load files you created with #save.
def self.load(path)
  raise Errno::ENOENT, path unless File.exist?(path)

  Marshal.load(File.binread(path))
end

Instance Method Details

#predict(data) ⇒ Object



93
94
95
96
97
98
99
100
101
102
103
# File 'lib/NeuralNet.rb', line 93

# Runs a forward pass only (no weight updates) and returns the network's
# output for one sample.
#
# data - Array of input values; converted to a column vector internally.
#
# Returns a flat Array of output activations.
def predict(data)
  signal = DFloat[data].transpose
  # Propagate the signal through each hidden layer in turn.
  (0...@nHiddenLayers).each do |layer|
    weighted = @hiddenWeights[layer].dot(signal) + @hiddenBias[layer]
    signal   = method(@hiddenActivation).call(weighted)
  end
  # Final layer, flattened to a plain Ruby Array.
  final = @outputWeights.dot(signal) + @outputBias
  method(@outputActivation).call(final).flatten.to_a
end

#save(path) ⇒ Object



114
115
116
# File 'lib/NeuralNet.rb', line 114

def save(path)
  File.binwrite(path,Marshal.dump(self))
end

#train!(data, label) ⇒ Object



61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# File 'lib/NeuralNet.rb', line 61

# Performs one online training step (single sample): forward pass, then
# backpropagation with momentum, mutating all weight/bias matrices in place.
#
# data  - Array of input values; converted to a column vector.
# label - Array of target values; converted to a column vector.
#
# NOTE(review): @learningRate never appears in these updates — raw deltas
# are applied directly. Presumably the scaling is folded into the
# activation-derivative / *_out_delta helpers defined elsewhere in the
# file — TODO confirm.
def train!(data,label)
  x = DFloat[data].transpose
  y =  DFloat[label].transpose
  activations = [x]
  #feed forward
  for i in(0...@nHiddenLayers)
    x1 = method(@hiddenActivation).call(@hiddenWeights[i].dot(activations[-1])+@hiddenBias[i])
    activations.push(x1)
  end
  output = method(@outputActivation).call(@outputWeights.dot(activations[-1])+@outputBias)
  #backpropagation
  diff = output - y
  # Output error term via the delta rule selected in #initialize
  # ("softmax_out_delta" for one-hot targets, "_out_delta" otherwise).
  outdelta = method(@outDel).call(output,diff)

  # Momentum: new step = momentum * previous step + current gradient term.
  @prevOBiasDeltas = @momentum * @prevOBiasDeltas + outdelta
  @prevOWeightDeltas = @momentum * @prevOWeightDeltas + outdelta.dot(activations[-1].transpose)

  @outputBias -= @prevOBiasDeltas
  @outputWeights -= @prevOWeightDeltas

  # NOTE(review): the hidden-layer delta below is computed from
  # @outputWeights AFTER the update above; classical backprop uses the
  # pre-update weights. Confirm this ordering is intentional.
  delta = @outputWeights.transpose.dot(outdelta)
  (@nHiddenLayers-1).downto(0) do |i|
    # Element-wise product with the derivative of layer i's activation output.
    delta = delta*(method(@hiddenActivationDerv).call(activations[i+1]))
    @prevHWeightDeltas[i] = @momentum * @prevHWeightDeltas[i] + delta.dot(activations[i].transpose)
    @prevHBiasDeltas[i] = @momentum * @prevHBiasDeltas[i] + delta

    @hiddenWeights[i] -= @prevHWeightDeltas[i]
    @hiddenBias[i] -= @prevHBiasDeltas[i]
    # Propagate the error one layer back (again using updated weights).
    delta = @hiddenWeights[i].transpose.dot(delta)
  end
end