Class: DNN::Model

Inherits:
Object
Includes:
Numo
Defined in:
lib/dnn/core/model.rb

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize ⇒ Model

Returns a new instance of Model.



# File 'lib/dnn/core/model.rb', line 13

def initialize
  @layers = []
  @optimizer = nil
  @batch_size = nil
  @compiled = false
end
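
A new model starts empty: layers are appended with #<< and the model is finalized with #compile before training. A minimal sketch (InputLayer, Dense, and SGD are assumed to be provided by the library's Layers and Optimizers modules; they are not defined in this file):

model = DNN::Model.new
model << DNN::Layers::InputLayer.new(784)
model << DNN::Layers::Dense.new(10)
model.compile(DNN::Optimizers::SGD.new)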

Instance Attribute Details

#batch_size ⇒ Object (readonly)

Returns the value of attribute batch_size.



# File 'lib/dnn/core/model.rb', line 10

def batch_size
  @batch_size
end

#layers ⇒ Object

Returns the value of attribute layers.



# File 'lib/dnn/core/model.rb', line 8

def layers
  @layers
end

#optimizer ⇒ Object (readonly)

Returns the value of attribute optimizer.



# File 'lib/dnn/core/model.rb', line 9

def optimizer
  @optimizer
end

#training ⇒ Object (readonly)

Returns the value of attribute training.



# File 'lib/dnn/core/model.rb', line 11

def training
  @training
end

Class Method Details

.load(file_name) ⇒ Object



# File 'lib/dnn/core/model.rb', line 20

def self.load(file_name)
  Marshal.load(File.binread(file_name))
end
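
.load restores a model that was written with #save. A usage sketch (the path is illustrative):

model = DNN::Model.load("trained/model.marshal")
out = model.predict(x_test)

Since Marshal can instantiate arbitrary objects, only load files you produced yourself.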

.load_json(json_str) ⇒ Object



# File 'lib/dnn/core/model.rb', line 24

def self.load_json(json_str)
  hash = JSON.parse(json_str, symbolize_names: true)
  model = self.new
  model.layers = hash[:layers].map { |hash_layer| Util.load_hash(hash_layer) }
  model.compile(Util.load_hash(hash[:optimizer]))
  model
end
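
.load_json rebuilds the layer stack and optimizer from a JSON string produced by #to_json and compiles the model; the weights are restored separately with #load_json_params. A sketch (file names are illustrative):

model = DNN::Model.load_json(File.read("model.json"))
model.load_json_params(File.read("params.json"))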

Instance Method Details

#<<(layer) ⇒ Object



# File 'lib/dnn/core/model.rb', line 65

def <<(layer)
  unless layer.is_a?(Layers::Layer)
    raise DNN_TypeError.new("layer is not an instance of the DNN::Layers::Layer class.")
  end
  @layers << layer
  self
end
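
Because #<< returns self, layers can be appended in a chain (layer classes assumed as above):

model = DNN::Model.new
model << DNN::Layers::InputLayer.new(2) << DNN::Layers::Dense.new(1)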

#accurate(x, y, batch_size = nil, &batch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 138

def accurate(x, y, batch_size = nil, &batch_proc)
  @batch_size = batch_size if batch_size
  correct = 0
  (x.shape[0].to_f / @batch_size).ceil.times do |i|
    x_batch = SFloat.zeros(@batch_size, *x.shape[1..-1])
    y_batch = SFloat.zeros(@batch_size, *y.shape[1..-1])
    @batch_size.times do |j|
      k = i * @batch_size + j
      break if k >= x.shape[0]
      x_batch[j, false] = x[k, false]
      y_batch[j, false] = y[k, false]
    end
    x_batch, y_batch = batch_proc.call(x_batch, y_batch) if batch_proc
    out = forward(x_batch, false)
    @batch_size.times do |j|
      correct += 1 if out[j, true].max_index == y_batch[j, true].max_index
    end
  end
  correct.to_f / x.shape[0]
end
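
Accuracy is computed by comparing max_index of each output row against max_index of the matching label row, so y must be one-hot encoded. A sketch (x_test and y_test are illustrative Numo arrays):

acc = model.accurate(x_test, y_test, 100)
puts "test accuracy: #{acc}"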

#backward(y) ⇒ Object



# File 'lib/dnn/core/model.rb', line 171

def backward(y)
  dout = y
  @layers[0..-1].reverse.each do |layer|
    dout = layer.backward(dout)
  end
  dout
end

#compile(optimizer) ⇒ Object



# File 'lib/dnn/core/model.rb', line 73

def compile(optimizer)
  unless optimizer.is_a?(Optimizers::Optimizer)
    raise DNN_TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
  end
  @compiled = true
  layers_check
  @optimizer = optimizer
  @layers.each do |layer|
    layer.build(self)
  end
  layers_shape_check
end
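
#compile must be called once before training: it stores the optimizer, builds every layer against the model, and checks the layer shapes. A sketch (SGD assumed as above):

model.compile(DNN::Optimizers::SGD.new)
model.compiled? # => true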

#compiled? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dnn/core/model.rb', line 86

def compiled?
  @compiled
end

#forward(x, training) ⇒ Object



# File 'lib/dnn/core/model.rb', line 163

def forward(x, training)
  @training = training
  @layers.each do |layer|
    x = layer.forward(x)
  end
  x
end

#load_json_params(json_str) ⇒ Object



# File 'lib/dnn/core/model.rb', line 32

def load_json_params(json_str)
  has_param_layers_params = JSON.parse(json_str, symbolize_names: true)
  has_param_layers_index = 0
  @layers.each do |layer|
    next unless layer.is_a?(HasParamLayer)
    hash_params = has_param_layers_params[has_param_layers_index]
    hash_params.each do |key, param|
      layer.params[key] = SFloat.cast(param)
    end
    has_param_layers_index += 1
  end
end

#params_to_json ⇒ Object



# File 'lib/dnn/core/model.rb', line 57

def params_to_json
  has_param_layers = @layers.select { |layer| layer.is_a?(HasParamLayer) }
  has_param_layers_params = has_param_layers.map do |layer|
    layer.params.map { |key, param| [key, param.to_a] }.to_h
  end
  JSON.dump(has_param_layers_params)
end
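
#params_to_json and #load_json_params form a round trip: together with #to_json they let a model be saved as two human-readable JSON files instead of one Marshal binary. A sketch (file names are illustrative):

File.write("model.json", model.to_json)
File.write("params.json", model.params_to_json)

restored = DNN::Model.load_json(File.read("model.json"))
restored.load_json_params(File.read("params.json"))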

#predict(x) ⇒ Object



# File 'lib/dnn/core/model.rb', line 159

def predict(x)
  forward(x, false)
end
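
#predict runs a forward pass with training disabled, so layers that behave differently at inference time (dropout or batch normalization, if present) use their inference path. The input must include the batch dimension:

x = Numo::SFloat.cast([[0, 1], [1, 0]]) # a batch of two samples
out = model.predict(x)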

#save(file_name) ⇒ Object



# File 'lib/dnn/core/model.rb', line 45

def save(file_name)
  # Create the parent directory if the path contains one and it does not exist yet.
  if (match = file_name.match(%r`(.*)/.+$`))
    dir_name = match[1]
    Dir.mkdir(dir_name) unless Dir.exist?(dir_name)
  end
  File.binwrite(file_name, Marshal.dump(self))
end
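
#save marshals the whole model (layers, optimizer, and parameters) into a single binary file; .load is its counterpart. A sketch (path illustrative):

model.save("trained/model.marshal")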

#to_json ⇒ Object



# File 'lib/dnn/core/model.rb', line 51

def to_json
  hash_layers = @layers.map { |layer| layer.to_hash }
  hash = {version: VERSION, layers: hash_layers, optimizer: @optimizer.to_hash}
  JSON.dump(hash)
end

#train(x, y, epochs, batch_size: 1, test: nil, verbose: true, batch_proc: nil, &epoch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 90

def train(x, y, epochs,
          batch_size: 1,
          test: nil,
          verbose: true,
          batch_proc: nil,
          &epoch_proc)
  @batch_size = batch_size
  num_train_data = x.shape[0]
  (1..epochs).each do |epoch|
    puts "【 epoch #{epoch}/#{epochs}" if verbose
    (num_train_data.to_f / @batch_size).ceil.times do |index|
      x_batch, y_batch = Util.get_minibatch(x, y, @batch_size)
      loss = train_on_batch(x_batch, y_batch, @batch_size, &batch_proc)
      if loss.nan?
        puts "\nloss is nan" if verbose
        return
      end
      num_trained_data = (index + 1) * batch_size
      num_trained_data = num_trained_data > num_train_data ? num_train_data : num_trained_data
      log = "\r"
      # Build a 20-character progress bar for this epoch.
      20.times do |i|
        if i < num_trained_data * 20 / num_train_data
          log << "■"
        else
          log << "・"
        end
      end
      log << "  #{num_trained_data}/#{num_train_data} loss: #{loss}"
      print log if verbose
    end
    if verbose && test
      acc = accurate(test[0], test[1], batch_size, &batch_proc)
      print "  accurate: #{acc}"
    end
    puts "" if verbose
    epoch_proc.call(epoch) if epoch_proc
  end
end
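
A typical call. The optional test pair reports accuracy after every epoch, and the block (epoch_proc) runs once per epoch; all data names below are illustrative:

model.train(x_train, y_train, 10,
            batch_size: 100,
            test: [x_test, y_test]) do |epoch|
  model.save("trained/epoch#{epoch}.marshal")
end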

#train_on_batch(x, y, batch_size, &batch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 129

def train_on_batch(x, y, batch_size, &batch_proc)
  @batch_size = batch_size
  x, y = batch_proc.call(x, y) if batch_proc
  forward(x, true)
  backward(y)
  @layers.each { |layer| layer.update if layer.respond_to?(:update) }
  @layers[-1].loss(y)
end
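
#train_on_batch performs one forward/backward/update cycle and returns the loss for that batch, which makes it the building block for custom training loops. A sketch of a hand-rolled loop (Util.get_minibatch is the same helper #train uses; data names are illustrative):

1000.times do |step|
  x_batch, y_batch = DNN::Util.get_minibatch(x_train, y_train, 32)
  loss = model.train_on_batch(x_batch, y_batch, 32)
  puts "step #{step}: loss = #{loss}" if step % 100 == 0
end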