Class: Torch::NN::ConvNd

Inherits:
Module
  • Object
show all
Defined in:
lib/torch/nn/convnd.rb

Direct Known Subclasses

Conv1d, Conv2d, Conv3d

Instance Attribute Summary collapse

Attributes inherited from Module

#training

Instance Method Summary collapse

Methods inherited from Module

#_apply, #add_module, #apply, #buffers, #call, #children, #cpu, #cuda, #deep_dup, #double, #eval, #float, #forward, #half, #inspect, #load_state_dict, #method_missing, #modules, #named_buffers, #named_children, #named_modules, #named_parameters, #parameters, #register_buffer, #register_parameter, #requires_grad!, #respond_to?, #share_memory, #state_dict, #to, #train, #type, #zero_grad

Methods included from Utils

#_activation_fn, #_clones, #_ntuple, #_pair, #_quadrupal, #_single, #_triple

Constructor Details

#initialize(in_channels, out_channels, kernel_size, stride, padding, dilation, transposed, output_padding, groups, bias, padding_mode) ⇒ ConvNd

Returns a new instance of ConvNd.

Raises:

  • (ArgumentError)


6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# File 'lib/torch/nn/convnd.rb', line 6

# Shared constructor for the N-dimensional convolution layers
# (Conv1d / Conv2d / Conv3d). Validates the channel/group relationship,
# records the layer configuration, allocates the weight (and optional
# bias) parameters, and delegates initialization to reset_parameters.
#
# Raises ArgumentError when in_channels or out_channels is not evenly
# divisible by groups.
def initialize(in_channels, out_channels, kernel_size, stride, padding, dilation, transposed, output_padding, groups, bias, padding_mode)
  super()
  raise ArgumentError, "in_channels must be divisible by groups" unless (in_channels % groups).zero?
  raise ArgumentError, "out_channels must be divisible by groups" unless (out_channels % groups).zero?

  @in_channels = in_channels
  @out_channels = out_channels
  @kernel_size = kernel_size
  @stride = stride
  @padding = padding
  @dilation = dilation
  @transposed = transposed
  @output_padding = output_padding
  @groups = groups
  @padding_mode = padding_mode

  # Transposed convolutions swap the channel axes of the weight tensor.
  weight_shape =
    if transposed
      [in_channels, out_channels / groups, *kernel_size]
    else
      [out_channels, in_channels / groups, *kernel_size]
    end
  @weight = Parameter.new(Tensor.new(*weight_shape))

  if bias
    @bias = Parameter.new(Tensor.new(out_channels))
  else
    # Explicitly register the missing bias as nil.
    register_parameter("bias", nil)
  end

  reset_parameters
end

Dynamic Method Handling

This class handles dynamic methods through the method_missing method in the class Torch::NN::Module

Instance Attribute Details

#dilationObject (readonly)

Returns the value of attribute dilation.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the dilation configured at construction time.
def dilation
  instance_variable_get(:@dilation)
end

#groupsObject (readonly)

Returns the value of attribute groups.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the number of channel groups configured at construction time.
def groups
  instance_variable_get(:@groups)
end

#in_channelsObject (readonly)

Returns the value of attribute in_channels.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the number of input channels configured at construction time.
def in_channels
  instance_variable_get(:@in_channels)
end

#kernel_sizeObject (readonly)

Returns the value of attribute kernel_size.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the kernel size configured at construction time.
def kernel_size
  instance_variable_get(:@kernel_size)
end

#out_channelsObject (readonly)

Returns the value of attribute out_channels.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the number of output channels configured at construction time.
def out_channels
  instance_variable_get(:@out_channels)
end

#output_padddingObject (readonly)

Returns the value of attribute output_paddding (note: the reader name misspells output_padding).



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the output padding configured at construction time.
#
# Bug fix: the original reader was named output_paddding and returned
# @output_paddding, an instance variable that is never assigned —
# initialize stores @output_padding — so it always returned nil. This
# reader returns the variable that is actually set, and the misspelled
# name is kept as an alias for backward compatibility.
def output_padding
  @output_padding
end
alias output_paddding output_padding

#paddingObject (readonly)

Returns the value of attribute padding.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the padding configured at construction time.
def padding
  instance_variable_get(:@padding)
end

#padding_modeObject (readonly)

Returns the value of attribute padding_mode.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the padding mode configured at construction time.
def padding_mode
  instance_variable_get(:@padding_mode)
end

#strideObject (readonly)

Returns the value of attribute stride.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the stride configured at construction time.
def stride
  instance_variable_get(:@stride)
end

#transposedObject (readonly)

Returns the value of attribute transposed.



4
5
6
# File 'lib/torch/nn/convnd.rb', line 4

# Reader for the transposed flag configured at construction time.
def transposed
  instance_variable_get(:@transposed)
end

Instance Method Details

#reset_parametersObject



33
34
35
36
37
38
39
40
# File 'lib/torch/nn/convnd.rb', line 33

# Re-initializes the layer's parameters in place: the weight via Kaiming
# uniform (a = sqrt(5)) and, when a bias parameter exists, the bias via a
# uniform draw in [-1/sqrt(fan_in), 1/sqrt(fan_in)] where fan_in is
# computed from the weight tensor.
def reset_parameters
  Init.kaiming_uniform!(@weight, a: Math.sqrt(5))
  return unless @bias

  fan_in, _fan_out = Init._calculate_fan_in_and_fan_out(@weight)
  limit = 1 / Math.sqrt(fan_in)
  Init.uniform!(@bias, a: -limit, b: limit)
end