Class: MLP
- Inherits: Object
- Defined in: lib/nn.rb
Instance Attribute Summary
- #layers ⇒ Object (readonly)
  Returns the value of attribute layers.
Instance Method Summary
- #calc(inputs) ⇒ Object
- #initialize(*layers_config) ⇒ MLP (constructor)
  A new instance of MLP.
- #inspect ⇒ Object
- #parameters ⇒ Object
- #print_pass(learning_rate, loss, pass, passes, learning_rate_precision = 2, loss_precision = 10) ⇒ Object
- #reset_params ⇒ Object
- #set_activation_function(activation_function) ⇒ Object
- #set_params(params) ⇒ Object
- #show_params(in_words = false) ⇒ Object
- #to_s ⇒ Object
- #zero_grad ⇒ Object
Constructor Details
#initialize(*layers_config) ⇒ MLP
# File 'lib/nn.rb', line 96

def initialize(*layers_config)
  number_of_layers = layers_config.size - 1 # last param is the activation function
  act_array = validate_act_array(layers_config.last, number_of_layers)

  @layers = Array.new(number_of_layers - 1) # input layer is not really a layer object
  (number_of_layers - 1).times do |i|
    @layers[i] = Layer.new(layers_config[i], layers_config[i + 1], act_array[i])
  end
  @layers_config = layers_config
end
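A usage sketch: the integer arguments are the sizes of the input layer, hidden layers, and output layer, and the last argument is the activation function. Passing a single symbol such as :tanh is an assumption inferred from the validate_act_array call; the exact accepted forms live in that (private) method.

nn = MLP.new(3, 4, 4, 1, :tanh) # 3 inputs, two hidden layers of 4 neurons, 1 output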
Instance Attribute Details
#layers ⇒ Object (readonly)
Returns the value of attribute layers.
# File 'lib/nn.rb', line 139

def layers
  @layers
end
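Note that the input layer is not stored as a Layer object, so for the MLP.new(3, 4, 4, 1, :tanh) network from the constructor example:

nn.layers.size                       # => 3 (hidden and output layers only)
nn.layers.map { |l| l.neurons.size } # => [4, 4, 1]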
Instance Method Details
#calc(inputs) ⇒ Object
# File 'lib/nn.rb', line 208

def calc(inputs)
  out = inputs
  self.layers.each do |layer|
    out = layer.calc(out) # chain the results forward, layer by layer
  end
  out.size == 1 ? out[0] : out # for convenience
end
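A forward-pass sketch with the network from the constructor example. Calling #value on the result assumes the outputs are Value-like objects with a #value reader, as the loss handling in #print_pass suggests:

y = nn.calc([2.0, 3.0, -1.0]) # one number per input neuron
y.value                       # single output, unwrapped from its 1-element array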
#inspect ⇒ Object
# File 'lib/nn.rb', line 141

def inspect
  lay = @layers_config[0..-2].join(", ") # slice to remove last element
  act = @layers_config.last.inspect
  "MLP(#{lay}, #{act})"
end
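The string mirrors the constructor arguments, e.g.:

nn = MLP.new(3, 4, 4, 1, :tanh)
nn.inspect # => "MLP(3, 4, 4, 1, :tanh)"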
#parameters ⇒ Object
# File 'lib/nn.rb', line 151

def parameters
  params = []
  self.layers.each { |layer| params += layer.parameters }
  params
end
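This flattens every trainable parameter into one array, which is what #zero_grad iterates over. Assuming each neuron holds one weight per input plus a bias, as #show_params displays, the count for MLP.new(3, 4, 4, 1, :tanh) works out to (3*4 + 4) + (4*4 + 4) + (4*1 + 1):

nn.parameters.size # => 41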
#print_pass(learning_rate, loss, pass, passes, learning_rate_precision = 2, loss_precision = 10) ⇒ Object
# File 'lib/nn.rb', line 216

def print_pass(learning_rate, loss, pass, passes, learning_rate_precision = 2, loss_precision = 10)
  passes_format = "%#{passes.digits.length}d"
  learning_rate_format = "%.#{learning_rate_precision}f"
  loss_format = "%.#{loss_precision}f"
  puts "Pass #{passes_format % (pass + 1)} => Learning rate: #{learning_rate_format % learning_rate} => Loss: #{loss_format % loss.value}"
end
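The pass argument is zero-based (the method prints pass + 1), and the pass number is padded to the width of passes. A sketch, with a hypothetical loss object responding to #value:

nn.print_pass(0.1, loss, 2, 100)
# e.g. "Pass   3 => Learning rate: 0.10 => Loss: 0.0421873311"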
#reset_params ⇒ Object
# File 'lib/nn.rb', line 187

def reset_params
  self.layers.each { |layer| layer.reset_params }
end
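This simply delegates to each Layer; presumably Layer#reset_params re-initializes its neurons' weights and biases, discarding anything learned:

nn.reset_params # start training from scratch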
#set_activation_function(activation_function) ⇒ Object
# File 'lib/nn.rb', line 199

def set_activation_function(activation_function)
  act_array = validate_act_array(activation_function, @layers_config.size - 1)
  self.layers.each_with_index { |layer, i| layer.set_activation_function(act_array[i]) }
end
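Given the validate_act_array call, a single symbol applied to every layer should work; accepting a per-layer array is an assumption based on how act_array is indexed:

nn.set_activation_function(:relu)                     # same activation everywhere
nn.set_activation_function([:relu, :relu, :sigmoid])  # one entry per layer (assumed)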
#set_params(params) ⇒ Object
# File 'lib/nn.rb', line 191

def set_params(params)
  params.each_with_index do |layer, li|
    layer.each_with_index do |neuron, ni|
      self.layers[li].neurons[ni].set_params(neuron)
    end
  end
end
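The nesting is layers, then neurons, then one flat array per neuron. Based on the bracket layout that #show_params prints, each neuron array appears to be [bias, weight, weight, ...], though Neuron#set_params is not shown here, so treat the ordering as an assumption:

tiny = MLP.new(2, 1, :tanh) # 2 inputs, one output neuron
tiny.set_params([           # one layer...
  [                         # ...holding one neuron
    [0.0, 0.5, -0.5]        # [bias, w1, w2] (assumed order)
  ]
])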
#show_params(in_words = false) ⇒ Object
# File 'lib/nn.rb', line 157

def show_params(in_words = false)
  if in_words
    n = @layers_config.first
    puts "Layer 0: (#{n} input#{n > 1 ? "s" : ""})"
    self.layers.each_with_index do |layer, i|
      n = layer.neurons.size
      puts "Layer #{i + 1}: (#{n} neuron#{n > 1 ? "s" : ""}, #{layer.activation_function.inspect} activation)"
      layer.neurons.each_with_index do |neuron, ii|
        n = neuron.weights.size
        puts "\tNeuron #{ii + 1}: (#{n} weight#{n > 1 ? "s" : ""})"
        puts "\t\tBias: #{neuron.bias.value}"
        w = neuron.weights.map { |v| v.value }.join(", ")
        puts "\t\tWeights: #{w}"
      end
    end
  else
    n = @layers_config.first
    self.layers.each_with_index do |layer, i|
      n = layer.neurons.size
      puts "["
      layer.neurons.each_with_index do |neuron, ii|
        w = neuron.weights.map { |v| v.value }.join(", ")
        puts "\t[ #{neuron.bias.value}, #{w} #{ii == layer.neurons.size - 1 ? ']' : '],'}"
      end
      puts i == self.layers.size - 1 ? "]" : "],"
    end
  end
  nil
end
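The default form prints the same nested-array shape that #set_params consumes, so the output can be pasted back in. With in_words = true it prints labels instead; continuing the #set_params example above, the expected output would be:

tiny.show_params(true)
# Layer 0: (2 inputs)
# Layer 1: (1 neuron, :tanh activation)
#     Neuron 1: (2 weights)
#         Bias: 0.0
#         Weights: 0.5, -0.5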
#to_s ⇒ Object
# File 'lib/nn.rb', line 147

def to_s
  inspect
end
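Because #to_s delegates to #inspect, puts and string interpolation print the same readable form:

puts nn # prints MLP(3, 4, 4, 1, :tanh)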
#zero_grad ⇒ Object
# File 'lib/nn.rb', line 204

def zero_grad
  self.parameters.each { |p| p.grad = 0.0 }
end
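Gradients accumulate across backward passes, so a training loop clears them before each new one. A hedged sketch: total_loss is a hypothetical helper, and loss.backward plus the value= writer assume a micrograd-style Value API that this class appears built on but that is not shown in this source:

learning_rate = 0.1
passes = 50
passes.times do |pass|
  loss = total_loss(nn, xs, ys) # hypothetical helper returning a Value-like loss
  nn.zero_grad                  # reset p.grad to 0.0 on every parameter
  loss.backward                 # assumed Value#backward fills in gradients
  nn.parameters.each { |p| p.value -= learning_rate * p.grad } # assumes a value= writer
  nn.print_pass(learning_rate, loss, pass, passes)
end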