Class: NekonekoGen::MLP

Inherits:
Classifier
Defined in:
lib/nekoneko_gen/mlp.rb

Overview

Multi Layer Perceptron
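
The sketch below is illustrative only and not taken from the library; the training data, feature keys, and the require path are assumptions. Feature vectors passed to #update are sparse Hashes of feature key => value, labels are integers in 0...k, and #update returns the 0/1 misclassification loss for the example.

require 'nekoneko_gen'   # assumed entry point for the gem

# made-up training data: sparse feature Hashes and integer labels in 0...k
data = [
  [{ 'ruby' => 1.0, 'gem'  => 2.0 }, 0],
  [{ 'cat'  => 1.0, 'meow' => 1.0 }, 1],
  [{ 'dog'  => 1.0, 'bark' => 3.0 }, 2],
]

mlp = NekonekoGen::MLP.new(3, {:c => 8})   # 3 classes, 8 hidden units via options[:c]
mlp.default_iteration.times do
  errors = data.map {|vec, label| mlp.update(vec, label) }.reduce(:+)
  break if errors == 0.0
end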

Constant Summary

IR = 0.4
HR = 0.1
NOISE_VAR = 0.3
MARGIN = 0.2
DEFAULT_ITERATION = 40

Instance Attribute Summary

Attributes inherited from Classifier

#k

Instance Method Summary

Constructor Details

#initialize(k, options) ⇒ MLP

Returns a new instance of MLP.



# File 'lib/nekoneko_gen/mlp.rb', line 16

def initialize(k, options)
  @k = k
  # a single output unit is enough for binary classification
  @output_units = @k == 2 ? 1 : @k
  @hidden_units = (options[:c] || default_hidden_unit).to_i
  @input = []
  @hidden = []
  @input_bias = []
  @hidden_bias = []
  # input-to-hidden weights: one sparse Hash per hidden unit, small random init
  @hidden_units.times do |i|
    @input[i] = Hash.new {|hash, key| hash[key] = default_value }
    @input_bias[i] = default_value
  end
  # hidden-to-output weights: dense Arrays, small random init
  @output_units.times do |i|
    @hidden[i] = []
    @hidden_units.times do |j|
      @hidden[i][j] = default_value
    end
    @hidden_bias[i] = default_value
  end
end
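
As an illustration only (the argument values are made up): k is the number of classes, a single output unit is used when k == 2, and options[:c] overrides the default hidden-layer size.

binary = NekonekoGen::MLP.new(2, {})           # 1 output unit, 2 hidden units by default
multi  = NekonekoGen::MLP.new(4, {:c => 16})   # 4 output units, 16 hidden units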

Instance Method Details

#classify_method_code(lang) ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 146

def classify_method_code(lang)
  lang ||= :ruby
  case lang
  when :ruby
  else
    raise NotImplementedError
  end
  <<CODE
  def self.classify(vec)
input_y = []
HIDDEN_UNITS.times do |i|
  input_y[i] = sigmoid(INPUT_BIAS[i] +
                       INPUT_W[i].values_at(*vec).compact.reduce(0.0, :+))
end
if (K == 2)
  HIDDEN_BIAS[0] +
    input_y.zip(HIDDEN_W[0]).map{|a, b| a * b }.reduce(:+) > 0.0 ? 0 : 1
else
  K.times.map{|i|
    [HIDDEN_BIAS[i] + input_y.zip(HIDDEN_W[i]).map{|a, b| a * b }.reduce(:+), i]
  }.max.pop
end
  end
  def self.sigmoid(a)
1.0 / (1.0 + Math.exp(-a))
  end
CODE
end
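
The emitted source defines a classify class method that takes an Array of the feature keys present in the example and returns the predicted class index. The constants it reads (HIDDEN_UNITS, INPUT_BIAS, INPUT_W, HIDDEN_BIAS, HIDDEN_W) come from #parameter_code; K is assumed to be emitted elsewhere by the generator. A hypothetical call against already-generated code:

label = GeneratedClassifier.classify(['ruby', 'gem'])   # module name is made up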

#default_hidden_unit ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 13

def default_hidden_unit
  @k
end

#default_iteration ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 124

def default_iteration
  DEFAULT_ITERATION
end

#default_value ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 118

def default_value
  # uniform random initial weight in [-0.5, 0.5)
  (rand - 0.5)
end

#features(i = -1) ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 112

def features(i = -1)
  # total number of stored input-layer weight entries (the argument is unused)
  @input.map{|v| v.size }.reduce(:+)
end

#noise ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 121

def noise
  # zero-mean Gaussian noise via the Box-Muller transform, scaled by NOISE_VAR
  (Math.sqrt(-2.0 * Math.log(rand)) * Math.sin(2.0 * Math::PI * rand)) * NOISE_VAR
end

#parameter_code(lang, index_converter = lambda{|i| i}) ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 127

def parameter_code(lang, index_converter = lambda{|i| i})
  lang ||= :ruby
  case lang
  when :ruby
  else
    raise NotImplementedError
  end
  
  wvec = @input.map {|w|
    w.reduce({}) {|h, kv| h[index_converter.call(kv[0])] = kv[1]; h }
  }
  <<CODE
  HIDDEN_UNITS = #{@hidden_units}
  INPUT_BIAS = #{@input_bias.inspect}
  HIDDEN_BIAS = #{@hidden_bias.inspect}
  INPUT_W = JSON.load(#{wvec.to_json.inspect})
  HIDDEN_W = #{@hidden.inspect}
CODE
end
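
Purely as a sketch (the wrapping module name, the require, the K constant, and the eval-based assembly are assumptions; the real generator presumably handles this itself), the output of #parameter_code and #classify_method_code can be combined into evaluable Ruby source. Note that #parameter_code serializes INPUT_W through JSON, so the generated classifier sees the feature keys in their JSON form (Symbol or Integer keys become Strings).

require 'json'

# mlp is a trained NekonekoGen::MLP instance, e.g. from the training sketch above
src  = "module MyClassifier\n"
src << "  K = #{mlp.k}\n"                     # assumed: K is emitted elsewhere in the real generator
src << mlp.parameter_code(:ruby)
src << mlp.classify_method_code(:ruby)
src << "end\n"
eval(src)

MyClassifier.classify(['ruby', 'gem'])   # => predicted class index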

#sigmoid(a) ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 115

def sigmoid(a)
  1.0 / (1.0 + Math.exp(-a))
end

#update(vec, label) ⇒ Object



# File 'lib/nekoneko_gen/mlp.rb', line 36

def update(vec, label)
  input_y = []
  hidden_y = []
  output_y = []

  # forward pass: sigmoid hidden layer (with additive training noise), then sigmoid output layer
  input_y = @hidden_units.times.map do |i|
    w = @input[i]
    sigmoid(@input_bias[i] + vec.map{|k, v| w[k] * v}.reduce(:+) + noise)
  end
  hidden_y = @output_units.times.map do |i|
    @hidden_bias[i] + input_y.zip(@hidden[i]).map{|a, b| a * b }.reduce(:+)
  end
  output_y = @output_units.times.map do |i|
    sigmoid(hidden_y[i])
  end

  # train only on misclassified examples or correct ones that miss the MARGIN
  loss = 0.0
  dotrain = false
  if (@output_units == 1)
    if (output_y[0] > 0.5)
      l = 0
    else
      l = 1
    end
    if (label == 0)
      if (output_y[0] < 1.0 - MARGIN)
        dotrain = true
      end
    else
      if (output_y[0] > MARGIN)
        dotrain = true
      end
    end
    loss = (label == l) ? 0.0 : 1.0
  else
    max_p, l = output_y.each_with_index.max
    if (l == label)
      if (max_p < 1.0 - MARGIN)
        dotrain = true
      end
    else
      loss = 1.0
      dotrain = true
    end
  end
  if (dotrain)
    # backpropagation: error signal at each output unit...
    output_bp = @output_units.times.map do |i|
      y = hidden_y[i]
      yt = (label == i) ? 1.0 : 0.0
      expy = Math.exp(y)
      -((2.0 * yt - 1.0) * expy + yt) / (Math.exp(2.0 * y) + 2.0 * expy + 1.0)
    end
    # ...pushed back through the hidden layer's sigmoid
    hidden_bp = @hidden_units.times.map do |j|
      y = 0.0
      @output_units.times do |i|
        y += output_bp[i] * @hidden[i][j]
      end
      y * (1.0 - input_y[j]) * input_y[j]
    end
    # gradient step on hidden-to-output weights and biases (rate HR)
    @output_units.times do |j|
      hidden = @hidden[j]
      @hidden_units.times do |i|
        hidden[i] -= HR * input_y[i] * output_bp[j]
      end
      @hidden_bias[j] -= HR * output_bp[j]
    end
    # gradient step on input-to-hidden weights and biases (rate IR)
    @hidden_units.times do |i|
      input = @input[i]
      vec.each do |k, v|
        input[k] -= IR * v * hidden_bp[i]
      end
      @input_bias[i] -= IR * hidden_bp[i]
    end
  end
  loss  # 0/1 misclassification loss for this example
end
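
For illustration only (the feature keys and label are made up, and mlp is assumed to be an existing instance): a call runs one forward pass, applies a gradient step unless the correct class already scores at least 1.0 - MARGIN, and returns the 0/1 loss for that example.

loss = mlp.update({'ruby' => 1.0, 'gem' => 2.0}, 0)
loss   # => 0.0 if the example was classified correctly, 1.0 otherwise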