Class: DNN::Layers::BatchNormalization

Inherits:
HasParamLayer
Defined in:
lib/dnn/core/layers.rb

Instance Attribute Summary

Attributes inherited from HasParamLayer

#grads, #params

Instance Method Summary

Methods inherited from HasParamLayer

#init, #initialize, #update

Methods inherited from Layer

#init, #prev_layer, #shape

Constructor Details

This class inherits a constructor from DNN::Layers::HasParamLayer

Instance Method Details

#backward(dout) ⇒ Object



# File 'lib/dnn/core/layers.rb', line 345

def backward(dout)
  # Gradients of the learnable shift (beta) and scale (gamma) parameters.
  @grads[:beta] = dout.sum(0)
  @grads[:gamma] = (@xn * dout).sum(0)
  # Backpropagate through the scale, the normalization, the variance and the mean.
  dxn = @params[:gamma] * dout
  dxc = dxn / @std
  dstd = -((dxn * @xc) / (@std**2)).sum(0)
  dvar = 0.5 * dstd / @std
  dxc += (2.0 / @model.batch_size) * @xc * dvar
  dmean = dxc.sum(0)
  # Gradient with respect to the layer input (implicit return).
  dxc - dmean / @model.batch_size
end
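
For reference, a sketch of the gradients this method produces, assuming a batch of size N, upstream gradient \delta = dout, and the statistics cached by #forward:

  \frac{\partial L}{\partial \beta} = \sum_i \delta_i, \qquad
  \frac{\partial L}{\partial \gamma} = \sum_i \hat{x}_i \, \delta_i, \qquad
  \frac{\partial L}{\partial x_i} = \frac{\gamma}{\sqrt{\sigma^2 + \epsilon}}
    \left( \delta_i - \frac{1}{N} \sum_j \delta_j - \frac{\hat{x}_i}{N} \sum_j \hat{x}_j \, \delta_j \right)

The intermediate variables dxn, dxc, dstd, dvar and dmean build up this same result step by step rather than in closed form.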

#forward(x) ⇒ Object



# File 'lib/dnn/core/layers.rb', line 336

def forward(x)
  # Normalize the batch to zero mean and unit variance (1e-7 guards against division by zero).
  @mean = x.mean(0)
  @xc = x - @mean
  @var = (@xc**2).mean(0)
  @std = NMath.sqrt(@var + 1e-7)
  @xn = @xc / @std
  # Scale and shift with the learnable gamma and beta parameters.
  @params[:gamma] * @xn + @params[:beta]
end
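
In equation form, #forward applies the standard batch-normalization transform over the batch axis, with \epsilon = 1e-7:

  \mu = \frac{1}{N} \sum_i x_i, \qquad
  \sigma^2 = \frac{1}{N} \sum_i (x_i - \mu)^2, \qquad
  \hat{x}_i = \frac{x_i - \mu}{\sqrt{\sigma^2 + \epsilon}}, \qquad
  y_i = \gamma \, \hat{x}_i + \beta

The intermediates @mean, @xc, @var, @std and @xn are cached on the instance so that #backward can reuse them.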