Module: NeuralNetworkRb

Defined in:
lib/neural_network_rb/error.rb,
lib/neural_network_rb/mnist.rb,
lib/neural_network_rb/narray.rb,
lib/neural_network_rb/builder.rb,
lib/neural_network_rb/version.rb,
lib/neural_network_rb/activations.rb,
lib/neural_network_rb/mnist_image.rb,
lib/neural_network_rb/neural_network.rb,
lib/neural_network_rb/activations/dot.rb,
lib/neural_network_rb/activations/relu.rb,
lib/neural_network_rb/embeddings/one_hot.rb,
lib/neural_network_rb/activations/sigmoid.rb,
lib/neural_network_rb/activations/activations.rb,
lib/neural_network_rb/loss/cross_entropy_fetch.rb,
lib/neural_network_rb/loss/softmax_cross_entropy.rb

Defined Under Namespace

Modules: Activations, Embeddings, Loss
Classes: MNIST, MNISTImage, NeuralNetwork

Constant Summary

VERSION = "0.2.0"
E = Math.exp(1)

Class Method Summary

.accuracy(values, labels) ⇒ Object
.cross_entropy(values, labels) ⇒ Object
.l1error(v1, v2) ⇒ Object
.l2error(v1, v2) ⇒ Object
.plain_diff(v1, v2) ⇒ Object
.rows(data, obj) ⇒ Object
.shuffle(data, target, seed) ⇒ Object
.sigmoid(array) ⇒ Object
.sigmoid_prime(x) ⇒ Object
.softmax(array) ⇒ Object
.split(data, ratio) ⇒ Object

Class Method Details

.accuracy(values, labels) ⇒ Object



# File 'lib/neural_network_rb/error.rb', line 19

# Fraction of samples whose arg-max score matches the label.
def accuracy(values, labels)
  size = values.shape[0]
  hits = 0
  size.times do |i|
    # A hit when the label equals the index of the row's largest value.
    hits += 1 if labels[i] == values[i, true].max_index
  end
  hits.to_f / size
end
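
A minimal usage sketch (values are illustrative; assumes the gem's usual require entry point and numo-narray):

require 'neural_network_rb'

scores = Numo::DFloat[[0.1, 0.9], [0.8, 0.2]]  # one row of class scores per sample
labels = Numo::Int32[1, 0]                     # correct class index per sample
NeuralNetworkRb.accuracy(scores, labels)       # => 1.0, both arg-maxes match their label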

.cross_entropy(values, labels) ⇒ Object



# File 'lib/neural_network_rb/error.rb', line 15

# Cross-entropy between one-hot labels and predicted probabilities.
# Note: the sum is divided by values.ndim (the number of dimensions),
# not by the batch size.
def cross_entropy(values, labels)
  -(labels * values.map { |x| Math.log(x) }).sum / values.ndim
end
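
A hand-checkable sketch (illustrative values): with one-hot labels, only the log-probability of each correct class survives the product.

probs  = Numo::DFloat[[0.9, 0.1], [0.2, 0.8]]  # predicted probabilities
onehot = Numo::DFloat[[1.0, 0.0], [0.0, 1.0]]  # one-hot labels
NeuralNetworkRb.cross_entropy(probs, onehot)
# => -(Math.log(0.9) + Math.log(0.8)) / 2 ≈ 0.164 (ndim happens to equal the batch size here)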

.l1error(v1, v2) ⇒ Object



# File 'lib/neural_network_rb/error.rb', line 7

# Sum of absolute differences (L1 distance).
def l1error(v1, v2)
  (v1 - v2).abs.sum
end

.l2error(v1, v2) ⇒ Object



# File 'lib/neural_network_rb/error.rb', line 3

# Sum of squared differences (squared L2 distance).
def l2error(v1, v2)
  ((v1 - v2)**2).sum
end

.plain_diff(v1, v2) ⇒ Object



# File 'lib/neural_network_rb/error.rb', line 11

# Element-wise difference, with no reduction.
def plain_diff(v1, v2)
  v1 - v2
end
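
A quick comparison of the three difference measures above (illustrative vectors):

v1 = Numo::DFloat[1.0, 2.0, 3.0]
v2 = Numo::DFloat[1.5, 2.0, 2.0]
NeuralNetworkRb.l1error(v1, v2)    # => 1.5  (0.5 + 0.0 + 1.0)
NeuralNetworkRb.l2error(v1, v2)    # => 1.25 (0.25 + 0.0 + 1.0)
NeuralNetworkRb.plain_diff(v1, v2) # => Numo::DFloat[-0.5, 0.0, 1.0]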

.rows(data, obj) ⇒ Object



# File 'lib/neural_network_rb/narray.rb', line 16

# Select rows along the first axis, keeping every remaining axis intact.
def rows(data, obj)
  data[obj, *Array.new(data.ndim - 1, true)]
end
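
A sketch of row selection (illustrative matrix); both ranges and index arrays work:

m = Numo::DFloat[[1, 2], [3, 4], [5, 6]]
NeuralNetworkRb.rows(m, 0..1)              # first two rows
NeuralNetworkRb.rows(m, Numo::Int32[2, 0]) # rows 2 and 0, in that order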

.shuffle(data, target, seed) ⇒ Object



# File 'lib/neural_network_rb/narray.rb', line 5

# Shuffle data and target with the same seeded permutation so rows stay paired.
def shuffle(data, target, seed)
  sample_size = data.shape[0]
  # Deterministically shuffled index vector derived from the given seed.
  new_order = Numo::DFloat[*(0..sample_size - 1).to_a.shuffle(random: Random.new(seed))]
  [rows(data, new_order), rows(target, new_order)]
end
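
A sketch showing that the seed makes the permutation reproducible (illustrative data):

data   = Numo::DFloat[[1, 1], [2, 2], [3, 3]]
target = Numo::DFloat[10, 20, 30]
shuffled_data, shuffled_target = NeuralNetworkRb.shuffle(data, target, 42)
# Re-running with seed 42 reproduces the same ordering, and row i of
# shuffled_data still corresponds to entry i of shuffled_target.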

.sigmoid(array) ⇒ Object



# File 'lib/neural_network_rb/activations.rb', line 6

# Logistic function, applied after shifting each row by its own maximum.
# Note: unlike softmax, the sigmoid is not shift-invariant, so the shift
# changes the output; the row's largest entry always maps to 0.5.
def sigmoid(array)
  max_array = array.max(1)
  array.map_with_index { |a, i| a - max_array[i] }
       .map { |x| 1 / (1 + E**(-x)) }
end
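
Because of the row-max shift, a sketch with illustrative input behaves as follows:

NeuralNetworkRb.sigmoid(Numo::DFloat[[0.0, 2.0]])
# The row max is 2.0, so this computes the logistic of [-2.0, 0.0]:
# => approximately [[0.119, 0.5]]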

.sigmoid_prime(x) ⇒ Object



# File 'lib/neural_network_rb/activations.rb', line 12

# Derivative of the sigmoid: s * (1 - s).
def sigmoid_prime(x)
  s = sigmoid(x)
  s * (1 - s)
end
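
A numeric sketch of the derivative (illustrative input): since sigmoid above maps each row's maximum to 0.5, the derivative at that position is 0.25, the logistic derivative's largest value.

NeuralNetworkRb.sigmoid_prime(Numo::DFloat[[0.0, 2.0]])
# => approximately [[0.105, 0.25]]  (0.119 * 0.881 and 0.5 * 0.5)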

.softmax(array) ⇒ Object



# File 'lib/neural_network_rb/activations.rb', line 17

# Row-wise softmax, shifted by each row's maximum for numerical stability
# (softmax is shift-invariant, so the result is unchanged).
def softmax(array)
  max_array = array.max(1)
  exp_array = array.map_with_index { |a, i| a - max_array[i] }.map { |x| Math.exp(x) }
  # Normalize each row so its entries sum to 1.
  sum_array = exp_array.sum(1)
  exp_array.map_with_index { |a, i| a / sum_array[i] }
end
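
A sketch with hand-checkable numbers (illustrative logits); each output row sums to 1:

NeuralNetworkRb.softmax(Numo::DFloat[[1.0, 2.0, 3.0]])
# => approximately [[0.090, 0.245, 0.665]]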

.split(data, ratio) ⇒ Object



# File 'lib/neural_network_rb/narray.rb', line 11

# Split rows into two sets along the first axis at the given ratio.
def split(data, ratio)
  sample_size = (data.shape[0] * ratio).to_i
  [rows(data, 0..sample_size - 1), rows(data, sample_size..-1)]
end