Method: Transformers::Distilbert::TransformerBlock#initialize

Defined in:
lib/transformers/models/distilbert/modeling_distilbert.rb

#initialize(config) ⇒ TransformerBlock

Returns a new instance of TransformerBlock. The constructor checks that config.n_heads evenly divides config.dim, then builds the block's sub-modules: a multi-head self-attention layer (selected via config._attn_implementation), a feed-forward network (FFN), and a LayerNorm after each.



# File 'lib/transformers/models/distilbert/modeling_distilbert.rb', line 183

def initialize(config)
  super()

  # The number of attention heads must evenly divide the hidden dimension
  if config.dim % config.n_heads != 0
    raise ArgumentError, "config.n_heads #{config.n_heads} must divide config.dim #{config.dim} evenly"
  end

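  # Multi-head self-attention and its layer norm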
  @attention = DISTILBERT_ATTENTION_CLASSES[config._attn_implementation].new(config)
  @sa_layer_norm = Torch::NN::LayerNorm.new(config.dim, eps: 1e-12)

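  # Position-wise feed-forward network and its layer norm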
  @ffn = FFN.new(config)
  @output_layer_norm = Torch::NN::LayerNorm.new(config.dim, eps: 1e-12)
end
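
For context, here is a minimal construction sketch. It is illustrative only: the config class name and keyword arguments below are assumptions modeled on the library's DistilBERT configuration (check configuration_distilbert.rb for the exact signature), and in normal use the block is built internally by the model classes rather than instantiated by hand.

require "transformers-rb"

# Hypothetical config values; any object exposing the attributes the
# constructor reads (dim, n_heads, _attn_implementation, plus the fields
# used by FFN and the attention class) would work. The keyword names below
# are assumptions, not a documented signature.
config = Transformers::Distilbert::DistilBertConfig.new(
  dim: 768,         # hidden size; must be evenly divisible by n_heads
  n_heads: 12,      # 768 % 12 == 0, so the ArgumentError is not raised
  hidden_dim: 3072  # inner size of the FFN
)

block = Transformers::Distilbert::TransformerBlock.new(config)

With dim: 768 and n_heads: 12 the divisibility check passes; a combination such as dim: 768 with n_heads: 10 would raise the ArgumentError shown above.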