Class: DNN::Layers::Conv2D
- Inherits: HasParamLayer
  - Object
  - Layer
  - HasParamLayer
  - DNN::Layers::Conv2D
- Includes: Initializers, Convert
- Defined in: lib/dnn/core/layers.rb
Instance Attribute Summary
Attributes inherited from HasParamLayer
Class Method Summary
- .load_hash(hash) ⇒ Object
Instance Method Summary
- #backward(dout) ⇒ Object
- #build(model) ⇒ Object
- #forward(x) ⇒ Object
- #initialize(num_filters, filter_width, filter_height, weight_initializer: nil, bias_initializer: nil, strides: [1, 1], padding: false, weight_decay: 0) ⇒ Conv2D (constructor): A new instance of Conv2D.
- #shape ⇒ Object
- #to_hash ⇒ Object
Methods inherited from HasParamLayer
Methods inherited from Layer
Constructor Details
#initialize(num_filters, filter_width, filter_height, weight_initializer: nil, bias_initializer: nil, strides: [1, 1], padding: false, weight_decay: 0) ⇒ Conv2D
Returns a new instance of Conv2D.
# File 'lib/dnn/core/layers.rb', line 225

def initialize(num_filters, filter_width, filter_height,
               weight_initializer: nil,
               bias_initializer: nil,
               strides: [1, 1],
               padding: false,
               weight_decay: 0)
  super()
  @num_filters = num_filters
  @filter_width = filter_width
  @filter_height = filter_height
  @weight_initializer = (weight_initializer || RandomNormal.new)
  @bias_initializer = (bias_initializer || Zeros.new)
  @strides = strides
  @padding = padding
  @weight_decay = weight_decay
end
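A minimal usage sketch, using only the constructor documented here; the filter count and sizes are illustrative values, not defaults:

# Hypothetical example: a 3x3 convolution with 16 filters, unit strides,
# "same"-style padding and a small L2 weight decay. Initializers fall back
# to RandomNormal (weights) and Zeros (bias) when not given.
conv = DNN::Layers::Conv2D.new(16, 3, 3,
                               strides: [1, 1],
                               padding: true,
                               weight_decay: 1e-4)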
Class Method Details
.load_hash(hash) ⇒ Object
# File 'lib/dnn/core/layers.rb', line 242

def self.load_hash(hash)
  Conv2D.new(hash[:num_filters], hash[:filter_width], hash[:filter_height],
             weight_initializer: Util.load_hash(hash[:weight_initializer]),
             bias_initializer: Util.load_hash(hash[:bias_initializer]),
             strides: hash[:strides],
             padding: hash[:padding],
             weight_decay: hash[:weight_decay])
end
Instance Method Details
#backward(dout) ⇒ Object
# File 'lib/dnn/core/layers.rb', line 270

def backward(dout)
  dout = dout.reshape(dout.shape[0..2].reduce(:*), dout.shape[3])
  @grads[:weight] = @col.transpose.dot(dout)
  if @weight_decay > 0
    dridge = @weight_decay * @params[:weight]
    @grads[:weight] += dridge
  end
  @grads[:bias] = dout.sum(0)
  dcol = dout.dot(@params[:weight].transpose)
  dx = col2im(dcol, @x_shape, @out_width, @out_height, @filter_width, @filter_height, @strides)
  @padding ? back_padding(dx, @pad) : dx
end
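Note on the dridge term: when weight_decay is positive, the layer presumably contributes an L2 (ridge) penalty of the form (weight_decay / 2) * ||W||^2 to the loss, whose gradient with respect to the weights is weight_decay * W; that product is what gets added to @grads[:weight] above. The loss-side bookkeeping for the penalty is not shown in this excerpt.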
#build(model) ⇒ Object
# File 'lib/dnn/core/layers.rb', line 251

def build(model)
  super
  prev_width, prev_height = prev_layer.shape[0..1]
  @out_width, @out_height = out_size(prev_width, prev_height, @filter_width, @filter_height, @strides)
  if @padding
    @pad = [prev_width - @out_width, prev_height - @out_height]
    @out_width = prev_width
    @out_height = prev_height
  end
end
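The out_size helper is not part of this listing. For a standard "valid" convolution it would be expected to follow the usual output-size formula; the sketch below is an assumption, not the library's code:

# Assumed behavior of out_size: the conventional valid-convolution size
# formula with integer division. When padding is true, build then overrides
# these values so the output matches the input size ("same" padding).
def out_size(prev_w, prev_h, fil_w, fil_h, strides)
  out_w = (prev_w - fil_w) / strides[0] + 1
  out_h = (prev_h - fil_h) / strides[1] + 1
  [out_w, out_h]
end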
#forward(x) ⇒ Object
# File 'lib/dnn/core/layers.rb', line 262

def forward(x)
  x = padding(x, @pad) if @padding
  @x_shape = x.shape
  @col = im2col(x, @out_width, @out_height, @filter_width, @filter_height, @strides)
  out = @col.dot(@params[:weight])
  out.reshape(x.shape[0], @out_width, @out_height, out.shape[3])
end
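Reading the im2col-based forward pass in terms of shapes, assuming the conventional im2col layout and the batch/width/height/channel ordering suggested by the final reshape:

# Assumed shapes in the im2col formulation:
#   x                 -> [batch, width, height, channels]
#   @col              -> [batch * out_width * out_height, filter_width * filter_height * channels]
#   @params[:weight]  -> [filter_width * filter_height * channels, num_filters]
#   @col.dot(weight)  -> [batch * out_width * out_height, num_filters]
#   reshaped output   -> [batch, out_width, out_height, num_filters]

Since the matmul result is 2-D, out.shape[3] evaluates to nil; Numo's reshape appears to infer a nil dimension from the remaining size, so the last axis resolves to num_filters.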
#shape ⇒ Object
# File 'lib/dnn/core/layers.rb', line 283

def shape
  [@out_width, @out_height, @num_filters]
end
#to_hash ⇒ Object
# File 'lib/dnn/core/layers.rb', line 287

def to_hash
  {
    name: self.class.name,
    num_filters: @num_filters,
    filter_width: @filter_width,
    filter_height: @filter_height,
    weight_initializer: @weight_initializer.to_hash,
    bias_initializer: @bias_initializer.to_hash,
    strides: @strides,
    padding: @padding,
    weight_decay: @weight_decay,
  }
end
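The hash produced here is the input expected by .load_hash above, so a layer can be round-tripped through its serialized form; a minimal sketch using only the methods documented on this page:

# Hypothetical round trip through the serialized hash representation.
conv    = DNN::Layers::Conv2D.new(16, 3, 3)
hash    = conv.to_hash
rebuilt = DNN::Layers::Conv2D.load_hash(hash)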