Documentation ¶
Index ¶
- func RandomID(n int) string
- func SoftMaxLog(values []float64) []float64
- type BaseLayer
- func (l *BaseLayer) Activate(input *tensor.Tensor) error
- func (l *BaseLayer) BackPropagate(err *tensor.Tensor) error
- func (l *BaseLayer) GetDebugInfo() []*debug.LayerInfo
- func (l *BaseLayer) GetInputSize() []int
- func (l *BaseLayer) GetOutputSize() []int
- func (l *BaseLayer) GetParamGradPointers() ([]*float64, []*float64)
- func (l *BaseLayer) ID() string
- func (l *BaseLayer) Init(inputSize, outputSize []int) error
- type ConvolutionalLayer
- func (l *ConvolutionalLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)
- func (l *ConvolutionalLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)
- func (l *ConvolutionalLayer) ConvIm2Col(data, ker, out *tensor.Tensor, ...)
- func (l *ConvolutionalLayer) CreateSlave() weight.Layer
- type DenseLayer
- type FFNet
- func (n *FFNet) Activate(input *tensor.Tensor) (*tensor.Tensor, error)
- func (n *FFNet) AddLayer(layer weight.Layer, parents ...string) error
- func (n *FFNet) BackPropagate(input *tensor.Tensor) (*tensor.Tensor, error)
- func (n *FFNet) CreateSlave() weight.Layer
- func (n *FFNet) End() error
- func (n *FFNet) GetDebugInfo() []*debug.LayerInfo
- func (n *FFNet) GetInputSize() []int
- func (n *FFNet) GetOutputSize() []int
- func (n *FFNet) GetParamGradPointers() ([]*float64, []*float64)
- func (n *FFNet) ID() string
- type FFNode
- type LeakyReLULayer
- type PoolLayer
- type ReLULayer
- type ReshaperLayer
- type SigmoidLayer
- type SoftmaxLayer
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func SoftMaxLog ¶
Types ¶
type BaseLayer ¶
type BaseLayer struct {
// contains filtered or unexported fields
}
func (*BaseLayer) GetDebugInfo ¶
func (*BaseLayer) GetInputSize ¶
func (*BaseLayer) GetOutputSize ¶
func (*BaseLayer) GetParamGradPointers ¶
type ConvolutionalLayer ¶
type ConvolutionalLayer struct { BaseLayer // contains filtered or unexported fields }
func NewConvolutionalLayer ¶
func NewConvolutionalLayer(inputWidth, inputHeight, inputDepth, nKernels, kernelPadX, kernelPadY, strideX, strideY, padX, padY int) *ConvolutionalLayer
NewConvolutionalLayer creates a new ConvolutionalLayer with the given geometry
inputWidth: input width inputHeight: input height inputDepth: input depth nKernels: number of kernels kernelPadX: kernel width pad kernelPadY: kernel height pad strideX: stride in x strideY: stride in y padX: padding in x padY: padding in y
Instead of passing the kernel size, you specify the size as a padding of a kernel of one pixel. This is to avoid the possibility of a kernel size that has no center pixel. It's also easier to create a layer that does not change the input size if kernelPad = pad
The filter cannot be bigger than the image so: kernelPadX*2+1 <= inputWidth && kernelPadY*2+1 <= inputHeight The stride must divide the image in equal integer parts: ((inputWidth + padX*2) - (1+kernelPadX*2)) % (strideX+1) == 0 && ((inputHeight + padY*2) - (1+kernelPadY*2)) % (strideY+1) == 0 If you want the output area to be the same as the input: kernelPadX == padX && kernelPadY == padY More padding than spatial extent makes no sense so: padX <= kernelPadX && padY <= kernelPadY
func NewSquareConvolutionalLayer ¶
func NewSquareConvolutionalLayer(inputSize, inputDepth, nKernels, kernelPad, stride, padding int) *ConvolutionalLayer
func (*ConvolutionalLayer) Activate ¶
Activate takes an input tensor and computes an output tensor where each value is the sum of the convolutions of the different input depths using different kernels.
func (*ConvolutionalLayer) BackPropagate ¶
func (*ConvolutionalLayer) ConvIm2Col ¶
func (l *ConvolutionalLayer) ConvIm2Col(data, ker, out *tensor.Tensor, kernelSizeX, kernelSizeY, padX, padY, strideX, strideY int)
func (*ConvolutionalLayer) CreateSlave ¶
func (l *ConvolutionalLayer) CreateSlave() weight.Layer
CreateSlave creates a slave of the ConvolutionalLayer. See EnslaverLayer in package weight for more information on layer slaves.
type DenseLayer ¶
type DenseLayer struct {
BaseLayer
}
DenseLayer computes each output using a weighted sum of all inputs plus a bias
func NewDenseLayer ¶
func NewDenseLayer(inputSize, outputSize []int) *DenseLayer
NewDenseLayer creates a new DenseLayer
func (*DenseLayer) Activate ¶
Activate takes an input tensor and computes an output tensor where each value is the weighted sum of all the input values.
func (*DenseLayer) BackPropagate ¶
func (*DenseLayer) CreateSlave ¶
func (l *DenseLayer) CreateSlave() weight.Layer
CreateSlave creates a slave of the DenseLayer. See EnslaverLayer in package weight for more information on layer slaves.
func (*DenseLayer) GetNumberOfInputs ¶
func (l *DenseLayer) GetNumberOfInputs() int
func (*DenseLayer) GetNumberOfNeurons ¶
func (l *DenseLayer) GetNumberOfNeurons() int
type FFNet ¶
type FFNet struct {
// contains filtered or unexported fields
}
FFNet is a generic feedforward network. It can include any number of branches, but they cannot form a loop.
func NewCRPBlock ¶
NewCRPBlock creates a block composed of convolutional, ReLU and pool layers
func NewCRPBlocks ¶
func (*FFNet) Activate ¶
Activate takes an input tensor and passes it through all the layers in the network following the node connections.
func (*FFNet) BackPropagate ¶
func (*FFNet) CreateSlave ¶
CreateSlave creates a slave of the FFNet. See EnslaverLayer in package weight for more information on layer slaves.
func (*FFNet) GetDebugInfo ¶
func (*FFNet) GetInputSize ¶
func (*FFNet) GetOutputSize ¶
func (*FFNet) GetParamGradPointers ¶
type FFNode ¶
type FFNode struct {
// contains filtered or unexported fields
}
FFNode is a node to be used with FFNet
func (*FFNode) Activate ¶
func (n *FFNode) Activate()
Activate waits for the parent nodes to send their outputs, computes the sum of them and passes it to the underlying layer's Activate, then sends the result to all children.
func (*FFNode) BackPropagate ¶
func (n *FFNode) BackPropagate()
BackPropagate waits for the child nodes to send their propagated errors, computes the sum of them and passes it to the underlying layer's BackPropagate, then propagates the result to all parents.
type LeakyReLULayer ¶
type LeakyReLULayer struct {
BaseLayer
}
func NewLeakyReLULayer ¶
func NewLeakyReLULayer(size ...int) *LeakyReLULayer
func (*LeakyReLULayer) BackPropagate ¶
func (*LeakyReLULayer) CreateSlave ¶
func (l *LeakyReLULayer) CreateSlave() weight.Layer
type PoolLayer ¶
type PoolLayer struct { BaseLayer // contains filtered or unexported fields }
func NewPoolLayer ¶
func (*PoolLayer) BackPropagate ¶
func (*PoolLayer) CreateSlave ¶
func (*PoolLayer) GetParamGradPointers ¶
type ReLULayer ¶
type ReLULayer struct {
BaseLayer
}
func NewReLULayer ¶
func (*ReLULayer) BackPropagate ¶
func (*ReLULayer) CreateSlave ¶
type ReshaperLayer ¶
type ReshaperLayer struct {
BaseLayer
}
func NewReshaperLayer ¶
func NewReshaperLayer(inputSize []int, outputSize []int) *ReshaperLayer
func (*ReshaperLayer) BackPropagate ¶
func (*ReshaperLayer) CreateSlave ¶
func (l *ReshaperLayer) CreateSlave() weight.Layer
type SigmoidLayer ¶
type SigmoidLayer struct {
BaseLayer
}
func NewSigmoidLayer ¶
func NewSigmoidLayer(size ...int) *SigmoidLayer
func (*SigmoidLayer) BackPropagate ¶
func (*SigmoidLayer) CreateSlave ¶
func (l *SigmoidLayer) CreateSlave() weight.Layer
type SoftmaxLayer ¶
type SoftmaxLayer struct {
BaseLayer
}
func NewSoftmaxLayer ¶
func NewSoftmaxLayer(size ...int) *SoftmaxLayer
func (*SoftmaxLayer) BackPropagate ¶
func (*SoftmaxLayer) CreateSlave ¶
func (l *SoftmaxLayer) CreateSlave() weight.Layer