Class: DNN::Model
- Inherits: Object
- Defined in: lib/dnn/core/model.rb
Instance Attribute Summary
- #batch_size ⇒ Object (readonly): Returns the value of attribute batch_size.
- #layers ⇒ Object (readonly): Returns the value of attribute layers.
- #optimizer ⇒ Object (readonly): Returns the value of attribute optimizer.
- #training ⇒ Object (readonly): Returns the value of attribute training.
Class Method Summary
- .load(file_name) ⇒ Object
Instance Method Summary
- #<<(layer) ⇒ Object
- #accurate(x, y, batch_size = nil, &batch_proc) ⇒ Object
- #compile(optimizer) ⇒ Object
- #initialize ⇒ Model (constructor): A new instance of Model.
- #predict(x) ⇒ Object
- #save(file_name) ⇒ Object
- #test(x, y, batch_size = nil, &batch_proc) ⇒ Object
- #train(x, y, epochs, batch_size: 1, batch_proc: nil, verbose: true, &epoch_proc) ⇒ Object
- #train_on_batch(x, y, batch_size, &batch_proc) ⇒ Object
Constructor Details
#initialize ⇒ Model
Returns a new instance of Model.
# File 'lib/dnn/core/model.rb', line 12

def initialize
  @layers = []
  @optimizer = nil
  @batch_size = nil
end
Instance Attribute Details
#batch_size ⇒ Object (readonly)
Returns the value of attribute batch_size.
# File 'lib/dnn/core/model.rb', line 5

def batch_size
  @batch_size
end
#layers ⇒ Object (readonly)
Returns the value of attribute layers.
# File 'lib/dnn/core/model.rb', line 3

def layers
  @layers
end
#optimizer ⇒ Object (readonly)
Returns the value of attribute optimizer.
# File 'lib/dnn/core/model.rb', line 4

def optimizer
  @optimizer
end
#training ⇒ Object (readonly)
Returns the value of attribute training.
# File 'lib/dnn/core/model.rb', line 6

def training
  @training
end
Class Method Details
.load(file_name) ⇒ Object
# File 'lib/dnn/core/model.rb', line 8

def self.load(file_name)
  Marshal.load(File.binread(file_name))
end
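A minimal usage sketch: .load deserializes a model that was previously written with #save via Marshal. The file name below is hypothetical.

model = DNN::Model.load("trained_model.marshal")
out = model.predict(x_test)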
Instance Method Details
#<<(layer) ⇒ Object
# File 'lib/dnn/core/model.rb', line 22

def <<(layer)
  unless layer.is_a?(Layers::Layer)
    raise DNN_TypeError.new("layer is not an instance of the DNN::Layers::Layer class.")
  end
  @layers << layer
end
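For example, a network is assembled by pushing layers onto the model in order. The layer classes below (InputLayer, Dense, Sigmoid) are assumed to exist in the library's Layers module; they are not documented on this page.

model = DNN::Model.new
model << DNN::Layers::InputLayer.new(784)
model << DNN::Layers::Dense.new(64)
model << DNN::Layers::Sigmoid.new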
#accurate(x, y, batch_size = nil, &batch_proc) ⇒ Object
# File 'lib/dnn/core/model.rb', line 91

def accurate(x, y, batch_size = nil, &batch_proc)
  @batch_size = batch_size if batch_size
  correct = 0
  (x.shape[0].to_f / @batch_size).ceil.times do |i|
    x_batch = SFloat.zeros(@batch_size, *x.shape[1..-1])
    y_batch = SFloat.zeros(@batch_size, *y.shape[1..-1])
    @batch_size.times do |j|
      k = i * @batch_size + j
      break if k >= x.shape[0]
      x_batch[j, false] = x[k, false]
      y_batch[j, false] = y[k, false]
    end
    x_batch, y_batch = batch_proc.call(x_batch, y_batch) if batch_proc
    out = forward(x_batch, false)
    @batch_size.times do |j|
      correct += 1 if out[j, true].max_index == y_batch[j, true].max_index
    end
  end
  correct.to_f / x.shape[0]
end
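Note that the final mini-batch is zero-padded via SFloat.zeros when the number of rows is not a multiple of the batch size, and a row counts as correct when the argmax of the network output matches the argmax of the label row, so y is expected to be one-hot encoded. A usage sketch with placeholder test arrays:

acc = model.accurate(x_test, y_test, 100)
puts "test accuracy: #{acc}"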
#compile(optimizer) ⇒ Object
# File 'lib/dnn/core/model.rb', line 29

def compile(optimizer)
  unless optimizer.is_a?(Optimizers::Optimizer)
    raise DNN_TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
  end
  layers_check
  @optimizer = optimizer
  @layers.each do |layer|
    layer.init(self)
  end
  layers_shape_check
end
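compile wires the optimizer into the model and initializes every layer, so it must be called once before training. A sketch, assuming the library ships an SGD optimizer under Optimizers (the class name is an assumption, not confirmed by this page):

model.compile(DNN::Optimizers::SGD.new)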
#predict(x) ⇒ Object
# File 'lib/dnn/core/model.rb', line 112

def predict(x)
  forward(x, false)
end
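predict runs a forward pass with the training flag set to false, so any layers that behave differently at inference time (dropout-style layers, for example) act accordingly. A sketch with a placeholder input batch:

out = model.predict(x_test)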
#save(file_name) ⇒ Object
# File 'lib/dnn/core/model.rb', line 18

def save(file_name)
  File.binwrite(file_name, Marshal.dump(self))
end
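save serializes the whole model object, parameters included, with Marshal.dump. A hypothetical save/load roundtrip:

model.save("model.marshal")
restored = DNN::Model.load("model.marshal")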
#test(x, y, batch_size = nil, &batch_proc) ⇒ Object
# File 'lib/dnn/core/model.rb', line 84

def test(x, y, batch_size = nil, &batch_proc)
  @batch_size = batch_size if batch_size
  acc = accurate(x, y, @batch_size, &batch_proc)
  puts "accurate: #{acc}"
  acc
end
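test delegates to #accurate and prints the result. The optional block is forwarded as batch_proc, which lets you preprocess each mini-batch; it must return the [x_batch, y_batch] pair. A sketch that hypothetically rescales pixel values:

acc = model.test(x_test, y_test, 100) do |x_batch, y_batch|
  [x_batch / 255.0, y_batch]
end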
#train(x, y, epochs, batch_size: 1, batch_proc: nil, verbose: true, &epoch_proc) ⇒ Object
# File 'lib/dnn/core/model.rb', line 41

def train(x, y, epochs, batch_size: 1, batch_proc: nil, verbose: true, &epoch_proc)
  @batch_size = batch_size
  num_train_data = x.shape[0]
  (1..epochs).each do |epoch|
    puts "【 epoch #{epoch}/#{epochs} 】" if verbose
    (num_train_data.to_f / @batch_size).ceil.times do |index|
      x_batch, y_batch = Util.get_minibatch(x, y, @batch_size)
      loss = train_on_batch(x_batch, y_batch, @batch_size, &batch_proc)
      if loss.nan?
        puts "\nloss is nan" if verbose
        return
      end
      num_trained_data = (index + 1) * batch_size
      num_trained_data = num_trained_data > num_train_data ? num_train_data : num_trained_data
      log = "\r"
      20.times do |i|
        if i < num_trained_data * 20 / num_train_data
          log << "■"
        else
          log << "・"
        end
      end
      log << " #{num_trained_data}/#{num_train_data} loss: #{loss}"
      print log if verbose
    end
    puts "" if verbose
    epoch_proc.call(epoch) if epoch_proc
  end
end
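A sketch of a full training run. batch_proc (here a lambda that hypothetically normalizes inputs) is applied to every mini-batch before the update, and the block is invoked as epoch_proc after each epoch, which is a convenient place to evaluate on held-out data:

normalize = ->(x_batch, y_batch) { [x_batch / 255.0, y_batch] }
model.train(x_train, y_train, 10, batch_size: 100, batch_proc: normalize) do |epoch|
  model.test(x_test, y_test, 100)
end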
#train_on_batch(x, y, batch_size, &batch_proc) ⇒ Object
# File 'lib/dnn/core/model.rb', line 75

def train_on_batch(x, y, batch_size, &batch_proc)
  @batch_size = batch_size
  x, y = batch_proc.call(x, y) if batch_proc
  forward(x, true)
  backward(y)
  @layers.each { |layer| layer.update if layer.respond_to?(:update) }
  @layers[-1].loss(y)
end
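train_on_batch runs one forward pass, one backward pass, and one parameter update, then returns the loss computed by the last layer, so it can drive a custom training loop. A sketch using Util.get_minibatch, the same sampling helper #train uses (assumed to be reachable as DNN::Util):

1000.times do
  x_batch, y_batch = DNN::Util.get_minibatch(x_train, y_train, 100)
  loss = model.train_on_batch(x_batch, y_batch, 100)
end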