Class: DNN::Model

Inherits:
Object
Includes:
Numo
Defined in:
lib/dnn/core/model.rb

Overview

This class represents a neural network model. It holds an ordered stack of layers, is compiled with an optimizer, and provides methods for training, evaluation, prediction, and serialization.

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize ⇒ Model

Returns a new instance of Model.



# File 'lib/dnn/core/model.rb', line 12

def initialize
  @layers = []
  @optimizer = nil
  @batch_size = nil
  @compiled = false
end
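
A minimal construction sketch; a new model starts with an empty layer stack and is not yet compiled:

model = DNN::Model.new
model.layers     #=> []
model.compiled?  #=> false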

Instance Attribute Details

#batch_size ⇒ Object (readonly)

Returns the value of attribute batch_size.



# File 'lib/dnn/core/model.rb', line 10

def batch_size
  @batch_size
end

#layers ⇒ Object

Returns the value of attribute layers.



# File 'lib/dnn/core/model.rb', line 8

def layers
  @layers
end

#optimizer ⇒ Object (readonly)

Returns the value of attribute optimizer.



# File 'lib/dnn/core/model.rb', line 9

def optimizer
  @optimizer
end

Class Method Details

.load(file_name) ⇒ Object



# File 'lib/dnn/core/model.rb', line 19

def self.load(file_name)
  Marshal.load(File.binread(file_name))
end
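
Usage sketch; "trained_model.marshal" is a hypothetical path previously written by #save. Marshal data is unsafe to load from untrusted sources, so only load files you created yourself:

model = DNN::Model.load("trained_model.marshal")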

.load_json(json_str) ⇒ Object



# File 'lib/dnn/core/model.rb', line 23

def self.load_json(json_str)
  hash = JSON.parse(json_str, symbolize_names: true)
  model = self.new
  model.layers = hash[:layers].map { |hash_layer| Util.load_hash(hash_layer) }
  model.compile(Util.load_hash(hash[:optimizer]))
  model
end
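
Round-trip sketch; .load_json rebuilds the layer stack and recompiles with the stored optimizer, while the weights appear to be restored separately through #load_json_params ("model.json" and "params.json" are hypothetical paths):

model = DNN::Model.load_json(File.read("model.json"))
model.load_json_params(File.read("params.json"))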

Instance Method Details

#<<(layer) ⇒ Object



# File 'lib/dnn/core/model.rb', line 64

def <<(layer)
  unless layer.is_a?(Layers::Layer)
    raise DNN_TypeError.new("layer is not an instance of the DNN::Layers::Layer class.")
  end
  @layers << layer
  self
end
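
Because #<< returns self, layer additions can be chained. The layer class names below are assumptions; use whatever Layers::Layer subclasses your build of DNN provides:

model = DNN::Model.new
model << DNN::Layers::InputLayer.new(784)
model << DNN::Layers::Dense.new(100)
model << DNN::Activations::ReLU.new

Passing anything that is not a Layers::Layer raises DNN_TypeError.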

#accurate(x, y, batch_size = nil, &batch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 141

def accurate(x, y, batch_size = nil, &batch_proc)
  unless batch_size
    if @batch_size
      # Use the stored batch size, capped at the number of samples.
      batch_size = @batch_size < x.shape[0] ? @batch_size : x.shape[0]
    else
      batch_size = 1
    end
  end
  correct = 0
  (x.shape[0].to_f / batch_size).ceil.times do |i|
    x_batch = SFloat.zeros(batch_size, *x.shape[1..-1])
    y_batch = SFloat.zeros(batch_size, *y.shape[1..-1])
    batch_size.times do |j|
      k = i * batch_size + j
      break if k >= x.shape[0]
      x_batch[j, false] = x[k, false]
      y_batch[j, false] = y[k, false]
    end
    x_batch, y_batch = batch_proc.call(x_batch, y_batch) if batch_proc
    out = forward(x_batch, false)
    batch_size.times do |j|
      # Skip the zero-padded rows of the final partial batch.
      break if i * batch_size + j >= x.shape[0]
      correct += 1 if out[j, true].max_index == y_batch[j, true].max_index
    end
  end
  correct.to_f / x.shape[0]
end
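
Evaluation sketch, assuming x_test and y_test are Numo::SFloat arrays with one-hot labels (rows are compared by max_index):

acc = model.accurate(x_test, y_test, 100)
puts "accuracy: #{acc}"   # fraction of correct predictions, 0.0 to 1.0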

#backward(y) ⇒ Object



# File 'lib/dnn/core/model.rb', line 180

def backward(y)
  dout = y
  # Propagate the gradient through the layers in reverse order.
  @layers.reverse_each do |layer|
    dout = layer.backward(dout)
  end
  dout
end

#compile(optimizer) ⇒ Object



# File 'lib/dnn/core/model.rb', line 72

def compile(optimizer)
  unless optimizer.is_a?(Optimizers::Optimizer)
    raise DNN_TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
  end
  @compiled = true
  layers_check
  @optimizer = optimizer
  @layers.each do |layer|
    layer.build(self)
  end
  layers_shape_check
end
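
Compilation sketch; SGD here is an assumed Optimizers::Optimizer subclass. Compile after all layers are added, since compile builds each layer and checks the stack:

model.compile(DNN::Optimizers::SGD.new)
model.compiled?  #=> true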

#compiled? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dnn/core/model.rb', line 85

def compiled?
  @compiled
end

#forward(x, training) ⇒ Object



# File 'lib/dnn/core/model.rb', line 172

def forward(x, training)
  @training = training
  @layers.each do |layer|
    x = layer.forward(x)
  end
  x
end

#load_json_params(json_str) ⇒ Object



# File 'lib/dnn/core/model.rb', line 31

def load_json_params(json_str)
  has_param_layers_params = JSON.parse(json_str, symbolize_names: true)
  has_param_layers_index = 0
  @layers.each do |layer|
    next unless layer.is_a?(HasParamLayer)
    hash_params = has_param_layers_params[has_param_layers_index]
    hash_params.each do |key, param|
      layer.params[key] = SFloat.cast(param)
    end
    has_param_layers_index += 1
  end
end
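
Restoration sketch; parameters are matched to HasParamLayer instances in order, so the model must already have the same layer structure that produced the JSON ("params.json" is a hypothetical path):

model.load_json_params(File.read("params.json"))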

#params_to_json ⇒ Object



# File 'lib/dnn/core/model.rb', line 56

def params_to_json
  has_param_layers = @layers.select { |layer| layer.is_a?(HasParamLayer) }
  has_param_layers_params = has_param_layers.map do |layer|
    layer.params.map { |key, param| [key, param.to_a] }.to_h
  end
  JSON.dump(has_param_layers_params)
end
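
Export sketch pairing the two JSON serializers, with structure and weights written to separate files:

File.write("model.json", model.to_json)           # layer and optimizer config
File.write("params.json", model.params_to_json)   # weights of HasParamLayer layers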

#predict(x) ⇒ Object



# File 'lib/dnn/core/model.rb', line 168

def predict(x)
  forward(x, false)
end
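
#predict is #forward with training disabled, so any layer that branches on #training? runs in inference mode. Sketch:

out = model.predict(x_test)
out[0, true].max_index   # predicted class index for the first sample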

#save(file_name) ⇒ Object



# File 'lib/dnn/core/model.rb', line 44

def save(file_name)
  # File.dirname avoids a nil match error when file_name has no directory part.
  dir_name = File.dirname(file_name)
  Dir.mkdir(dir_name) unless Dir.exist?(dir_name)
  File.binwrite(file_name, Marshal.dump(self))
end
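
Save sketch; the file is a Marshal dump of the whole model, weights and optimizer state included, and the target directory is created if missing:

model.save("models/mnist.marshal")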

#to_json ⇒ Object



# File 'lib/dnn/core/model.rb', line 50

def to_json
  hash_layers = @layers.map { |layer| layer.to_hash }
  hash = {version: VERSION, layers: hash_layers, optimizer: @optimizer.to_hash}
  JSON.dump(hash)
end
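
Sketch; the JSON records the library version plus the layer and optimizer configuration hashes, which is what .load_json consumes:

File.write("model.json", model.to_json)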

#train(x, y, epochs, batch_size: 1, test: nil, verbose: true, batch_proc: nil, &epoch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 93

def train(x, y, epochs,
          batch_size: 1,
          test: nil,
          verbose: true,
          batch_proc: nil,
          &epoch_proc)
  @batch_size = batch_size
  num_train_data = x.shape[0]
  (1..epochs).each do |epoch|
    puts "【 epoch #{epoch}/#{epochs} 】" if verbose
    (num_train_data.to_f / @batch_size).ceil.times do |index|
      x_batch, y_batch = Util.get_minibatch(x, y, @batch_size)
      loss = train_on_batch(x_batch, y_batch, @batch_size, &batch_proc)
      if loss.nan?
        puts "\nloss is nan" if verbose
        return
      end
      num_trained_data = (index + 1) * batch_size
      num_trained_data = num_trained_data > num_train_data ? num_train_data : num_trained_data
      # Render a 20-slot progress bar.
      log = "\r"
      20.times do |i|
        if i < num_trained_data * 20 / num_train_data
          log << "="
        else
          log << "_"
        end
      end
      log << "  #{num_trained_data}/#{num_train_data} loss: #{loss}"
      print log if verbose
    end
    if verbose && test
      acc = accurate(test[0], test[1], batch_size, &batch_proc)
      print "  accurate: #{acc}"
    end
    puts "" if verbose
    epoch_proc.call(epoch) if epoch_proc
  end
end
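
End-to-end training sketch; x_train, y_train, x_test, and y_test are assumed to be Numo::SFloat arrays with one-hot labels, and the epoch and batch numbers are arbitrary:

model.train(x_train, y_train, 10,
            batch_size: 100,
            test: [x_test, y_test]) do |epoch|
  # epoch_proc: called at the end of every epoch
  model.save("models/epoch#{epoch}.marshal") if epoch == 10
end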

#train_on_batch(x, y, batch_size, &batch_proc) ⇒ Object



# File 'lib/dnn/core/model.rb', line 132

def train_on_batch(x, y, batch_size, &batch_proc)
  @batch_size = batch_size
  x, y = batch_proc.call(x, y) if batch_proc
  forward(x, true)
  backward(y)
  @layers.each { |layer| layer.update if layer.respond_to?(:update) }
  @layers[-1].loss(y)
end
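
Custom-loop sketch for callers that manage their own batching; the block is the batch_proc and can transform a batch just before it is used:

loss = model.train_on_batch(x_batch, y_batch, 100) do |x, y|
  [x / 255.0, y]   # e.g. scale pixel values on the fly
end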

#training? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dnn/core/model.rb', line 89

def training?
  @training
end