Method List
- #<< DNN::Model
- #accurate DNN::Model
- #backward DNN::Activations::SigmoidWithLoss
- #backward DNN::Activations::ReLU
- #backward DNN::Layers::Conv2D
- #backward DNN::Layers::Reshape
- #backward DNN::Activations::Tanh
- #backward DNN::Activations::Sigmoid
- #backward DNN::Layers::InputLayer
- #backward DNN::Layers::MaxPool2D
- #backward DNN::Layers::BatchNormalization
- #backward DNN::Layers::Dense
- #backward DNN::Activations::IdentityWithLoss
- #backward DNN::Activations::SoftmaxWithLoss
- #backward DNN::Layers::Layer
- #backward DNN::Activations::LeakyReLU
- #backward DNN::Layers::Dropout
- #backward DNN::Layers::Flatten
- #batch_size DNN::Model
- #beta1 DNN::Optimizers::Adam
- #beta2 DNN::Optimizers::Adam
- #col2im DNN::Layers::Convert
- #compile DNN::Model
- dir DNN::CIFAR10
- download DNN::MNIST
- downloads DNN::MNIST
- #forward DNN::Activations::SoftmaxWithLoss
- #forward DNN::Activations::IdentityWithLoss
- #forward DNN::Layers::MaxPool2D
- #forward DNN::Activations::LeakyReLU
- #forward DNN::Layers::Reshape
- #forward DNN::Layers::BatchNormalization
- #forward DNN::Layers::Dropout
- #forward DNN::Activations::SigmoidFunction
- #forward DNN::Layers::Flatten
- #forward DNN::Activations::ReLU
- #forward DNN::Layers::Layer
- #forward DNN::Layers::Conv2D
- #forward DNN::Layers::InputLayer
- #forward DNN::Activations::Tanh
- #forward DNN::Layers::Dense
- get_minibatch DNN::Util
- #grads DNN::Layers::HasParamLayer
- #im2col DNN::Layers::Convert
- #init DNN::Layers::HasParamLayer
- #init DNN::Layers::Layer
- #init DNN::Layers::MaxPool2D
- #init DNN::Layers::Conv2D
- #init_param DNN::Initializers::He
- #init_param DNN::Initializers::Xavier
- #init_param DNN::Initializers::Initializer
- #init_param DNN::Initializers::Zeros
- #init_param DNN::Initializers::RandomNormal
- #initialize DNN::Layers::InputLayer
- #initialize DNN::Layers::HasParamLayer
- #initialize DNN::Optimizers::AdaGrad
- #initialize DNN::Initializers::RandomNormal
- #initialize DNN::Model
- #initialize DNN::Optimizers::Adam
- #initialize DNN::Optimizers::RMSProp
- #initialize DNN::DNN_GradUnfairError
- #initialize DNN::Layers::MaxPool2D
- #initialize DNN::Layers::Dropout
- #initialize DNN::Layers::Dense
- #initialize DNN::Layers::Reshape
- #initialize DNN::Layers::Conv2D
- #initialize DNN::Activations::LeakyReLU
- #initialize DNN::Optimizers::Optimizer
- #initialize DNN::Optimizers::SGD
- #layers DNN::Model
- #learning_rate DNN::Optimizers::Optimizer
- load DNN::Model
- load_images DNN::MNIST
- load_labels DNN::MNIST
- load_test DNN::MNIST
- load_test DNN::CIFAR10
- load_train DNN::CIFAR10
- load_train DNN::MNIST
- #loss DNN::Activations::SoftmaxWithLoss
- #loss DNN::Activations::IdentityWithLoss
- #loss DNN::Activations::SigmoidWithLoss
- mnist_dir DNN::MNIST
- #momentum DNN::Optimizers::SGD
- #muse DNN::Optimizers::RMSProp
- #num_nodes DNN::Layers::Dense
- numerical_grad DNN::Util
- #optimizer DNN::Model
- #padding DNN::Layers::Convert
- #params DNN::Layers::HasParamLayer
- #predict DNN::Model
- #prev_layer DNN::Layers::Layer
- read DNN::ImageIO
- #save DNN::Model
- #shape DNN::Layers::Reshape
- #shape DNN::Layers::Flatten
- #shape DNN::Layers::MaxPool2D
- #shape DNN::Layers::Conv2D
- #shape DNN::Layers::Dense
- #shape DNN::Layers::InputLayer
- #shape DNN::Layers::Layer
- #test DNN::Model
- to_categorical DNN::Util
- #train DNN::Model
- #train_on_batch DNN::Model
- #training DNN::Model
- #update DNN::Optimizers::AdaGrad
- #update DNN::Optimizers::RMSProp
- #update DNN::Optimizers::Adam
- #update DNN::Optimizers::Optimizer
- #update DNN::Optimizers::SGD
- #update DNN::Layers::HasParamLayer
- url_to_file_name DNN::MNIST
- #weight_decay DNN::Layers::Dense
- write DNN::ImageIO
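
The DNN::Model entries above (#<<, #compile, #train, #predict, #accurate) suggest a build-compile-train workflow. The following is a minimal sketch of that workflow, assuming the DNN::MNIST and DNN::Util helpers listed above; the require paths, input preprocessing, and the exact argument order of #train and #accurate are illustrative assumptions, not taken from this index.

```ruby
# Sketch only: method names come from the list above, signatures are assumed.
require "dnn"
require "dnn/lib/mnist"   # assumed require path for the DNN::MNIST helper

include DNN::Layers
include DNN::Activations
include DNN::Optimizers

# Load the MNIST training and test sets (see load_train / load_test above).
x_train, y_train = DNN::MNIST.load_train
x_test,  y_test  = DNN::MNIST.load_test

# One-hot encode the labels with DNN::Util.to_categorical (listed above).
y_train = DNN::Util.to_categorical(y_train, 10)
y_test  = DNN::Util.to_categorical(y_test, 10)

# Build a small fully connected network with Model#<<.
model = DNN::Model.new
model << InputLayer.new(784)   # assumes images are flattened to 784 floats
model << Dense.new(256)
model << ReLU.new
model << Dense.new(10)
model << SoftmaxWithLoss.new

# Attach an optimizer and train; epoch count and batch_size keyword are assumed.
model.compile(Adam.new)
model.train(x_train, y_train, 10, batch_size: 100)

puts model.accurate(x_test, y_test)   # assumed evaluation call per #accurate
```

Per the #save and load entries above, a trained model can presumably be written to disk with model.save and restored with DNN::Model.load, though the file format is not documented in this index.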