Class: MLP
- Inherits: Object
- Defined in: lib/nn.rb
Instance Attribute Summary
- #layers ⇒ Object (readonly): Returns the value of attribute layers.
Instance Method Summary
- #calc(inputs) ⇒ Object
- #initialize(*layers_config) ⇒ MLP (constructor): A new instance of MLP.
- #inspect ⇒ Object
- #parameters ⇒ Object
- #reset_params ⇒ Object
- #set_params(params) ⇒ Object
- #show_params(in_words = false) ⇒ Object
- #to_s ⇒ Object
- #zero_grad ⇒ Object
Constructor Details
#initialize(*layers_config) ⇒ MLP
Returns a new instance of MLP. The variadic layers_config lists the layer sizes (input layer first); the last argument is the activation function, given either as a single Symbol or as an Array of Symbols with one entry per non-input layer.
# File 'lib/nn.rb', line 85

def initialize(*layers_config)
  number_of_layers = layers_config.size - 1 # last param is the activation function

  act = layers_config.last
  if !act.is_a?(Symbol) and !act.is_a?(Array)
    raise "Activation function must be passed as the last parameter: #{act.class} expected Symbol or Array of Symbols"
  end

  single_activation_function = nil
  if act.is_a?(Symbol)
    single_activation_function = act
  else # is Array
    if not act.all? { |item| item.is_a?(Symbol) }
      raise "Array with activation functions must contain symbols: #{act}"
    end
    if act.size == 1
      single_activation_function = act.first
    elsif act.size != number_of_layers - 1
      raise "Array size does not match number of layers with activation functions: #{act.size} expected #{number_of_layers - 1}"
    end
  end

  @layers = Array.new(number_of_layers - 1) # input layer is not really a layer object

  (number_of_layers - 1).times do |i|
    @layers[i] = Layer.new(layers_config[i], layers_config[i + 1], single_activation_function.nil? ? act[i] : single_activation_function)
  end

  @layers_config = layers_config
end
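A minimal usage sketch, assuming lib/nn.rb is on the load path and that the activation symbols used here (:tanh, :relu) are ones the Layer class accepts:

require_relative "lib/nn" # adjust the path as needed

# 3 inputs -> two hidden layers of 4 neurons -> 1 output, all layers using :tanh
mlp = MLP.new(3, 4, 4, 1, :tanh)

# Same topology with one activation per non-input layer; the array must have
# layers_config.size - 2 entries (3 here) or the constructor raises
mlp2 = MLP.new(3, 4, 4, 1, [:relu, :relu, :tanh])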
Instance Attribute Details
#layers ⇒ Object (readonly)
Returns the value of attribute layers.
# File 'lib/nn.rb', line 122

def layers
  @layers
end
Instance Method Details
#calc(inputs) ⇒ Object
# File 'lib/nn.rb', line 186

def calc(inputs)
  out = inputs
  self.layers.each do |layer|
    out = layer.calc(out) # chain the results forward, layer by layer
  end
  out.size == 1 ? out[0] : out # for convenience
end
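A short forward-pass sketch, assuming a network built as in the constructor example above and plain numeric inputs:

mlp = MLP.new(3, 4, 4, 1, :tanh)
out = mlp.calc([1.0, -2.0, 0.5])
# The last layer has a single neuron, so calc returns that one value
# rather than a one-element array.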
#inspect ⇒ Object
# File 'lib/nn.rb', line 124

def inspect
  lay = @layers_config[0..-2].join(", ") # slice to remove last element
  act = @layers_config.last.inspect
  "MLP(#{lay}, #{act})"
end
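For example, the configuration from the constructor sketch above renders as:

MLP.new(3, 4, 4, 1, :tanh).inspect
# => "MLP(3, 4, 4, 1, :tanh)"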
#parameters ⇒ Object
# File 'lib/nn.rb', line 134

def parameters
  params = []
  self.layers.each { |layer| params += layer.parameters }
  params
end
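parameters flattens every layer's parameters into a single array, which is convenient for counting them or applying a gradient step. A sketch only: it assumes each parameter responds to #grad (as #zero_grad relies on) and has a writable #value, which is an assumption since this class only reads #value:

mlp = MLP.new(3, 4, 4, 1, :tanh)
puts mlp.parameters.size
# 41 expected here, assuming each neuron contributes its weights plus one bias:
# (3*4 + 4) + (4*4 + 4) + (4*1 + 1)

learning_rate = 0.01
mlp.parameters.each { |p| p.value -= learning_rate * p.grad } # plain SGD step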
#reset_params ⇒ Object
# File 'lib/nn.rb', line 170

def reset_params
  self.layers.each { |layer| layer.reset_params }
end
#set_params(params) ⇒ Object
# File 'lib/nn.rb', line 174

def set_params(params)
  params.each_with_index do |layer, li|
    layer.each_with_index do |neuron, ni|
      self.layers[li].neurons[ni].set_params(neuron)
    end
  end
end
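set_params expects a nested array: one entry per layer, one entry per neuron, with each per-neuron array handed to Neuron#set_params. Neuron#set_params is not shown on this page, so the [bias, weight1, weight2, ...] layout below mirrors the compact output of #show_params and should be treated as an assumption:

mlp = MLP.new(2, 2, 1, :tanh)
mlp.set_params([
  [ [0.1, 0.5, -0.5],    # layer 1, neuron 1: bias, then 2 weights
    [0.0, 0.3, 0.7] ],   # layer 1, neuron 2
  [ [-0.2, 1.0, -1.0] ]  # layer 2, single neuron
])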
#show_params(in_words = false) ⇒ Object
# File 'lib/nn.rb', line 140

def show_params(in_words = false)
  if in_words
    n = @layers_config.first
    puts "Layer 0: (#{n} input#{n > 1 ? "s" : ""})"
    self.layers.each_with_index do |layer, i|
      n = layer.neurons.size
      puts "Layer #{i + 1}: (#{n} neuron#{n > 1 ? "s" : ""}, #{layer.activation_function.inspect} activation)"
      layer.neurons.each_with_index do |neuron, ii|
        n = neuron.weights.size
        puts "\tNeuron #{ii + 1}: (#{n} weight#{n > 1 ? "s" : ""})"
        puts "\t\tBias: #{neuron.bias.value}"
        w = neuron.weights.map { |v| v.value }.join(", ")
        puts "\t\tWeights: #{w}"
      end
    end
  else
    n = @layers_config.first
    self.layers.each_with_index do |layer, i|
      n = layer.neurons.size
      puts "["
      layer.neurons.each_with_index do |neuron, ii|
        w = neuron.weights.map { |v| v.value }.join(", ")
        puts "\t[ #{neuron.bias.value}, #{w} #{ii == layer.neurons.size - 1 ? ']' : '],'}"
      end
      puts i == self.layers.size - 1 ? "]" : "],"
    end
  end
  nil
end
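Illustrative output for a small network; the weight and bias values depend on initialization, so the numbers below are placeholders:

mlp = MLP.new(2, 2, 1, :tanh)
mlp.show_params(true)
# Layer 0: (2 inputs)
# Layer 1: (2 neurons, :tanh activation)
#	Neuron 1: (2 weights)
#		Bias: 0.12
#		Weights: -0.34, 0.56
#	Neuron 2: (2 weights)
#		...
# Layer 2: (1 neuron, :tanh activation)
#	...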
#to_s ⇒ Object
# File 'lib/nn.rb', line 130

def to_s
  inspect
end
#zero_grad ⇒ Object
# File 'lib/nn.rb', line 182

def zero_grad
  self.parameters.each { |p| p.grad = 0.0 }
end
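zero_grad clears the accumulated gradient of every parameter and is typically called once per training step. A minimal sketch of where it sits; the loss computation and backward pass are outside this class's API and are deliberately omitted:

mlp = MLP.new(3, 4, 1, :tanh)

10.times do
  mlp.zero_grad                           # clear gradients from the previous step
  prediction = mlp.calc([1.0, 2.0, -1.0]) # forward pass
  # ... compute the loss from prediction, backpropagate, then adjust mlp.parameters ...
end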