Documentation ¶
Overview ¶
Package layer provides the layers for sequential models.
Index ¶
- Variables
- func AsBatch() func(Layer)
- func AsType(dtype t.Dtype) func(Layer)
- func WithLayerOpts(opts ...CompileOpt) func(*Chain)
- func WithSharedChainLearnables(shared *Chain) func(*Chain)
- func WithSharedLearnables(shared Layer) func(Layer)
- type ActivationFn
- type Chain
- type ChainOpt
- type CompileOpt
- type Config
- type Conv2D
- type Dropout
- type FC
- type Flatten
- type Layer
- type LeakyReLUActivation
- type LinearActivation
- type MaxPooling2D
- type ReLUActivation
- type Reshape
- type SigmoidActivation
- type SoftmaxActivation
- type TanhActivation
Constants ¶
This section is empty.
Variables ¶
var LeakyReLU = &LeakyReLUActivation{0.01}
LeakyReLU is the default leaky ReLU activation.
var Linear = &LinearActivation{}
Linear activation.
var ReLU = &ReLUActivation{}
ReLU activation.
var Sigmoid = &SigmoidActivation{}
Sigmoid activation function.
var Softmax = &SoftmaxActivation{}
Softmax is the default softmax activation.
var Tanh = &TanhActivation{}
Tanh activation.
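These variables are ready-made activations meant to be assigned directly to a config's Activation field. A minimal sketch (the layer sizes are arbitrary):

fc := layer.FC{Input: 128, Output: 10, Activation: layer.Softmax}
_ = fc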
Functions ¶
func WithLayerOpts ¶
func WithLayerOpts(opts ...CompileOpt) func(*Chain)
WithLayerOpts adds the given layer opts to all layers.
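A sketch of applying a chain-wide option, assuming CompileOpt's underlying type is the func(Layer) returned by AsBatch and that the returned func(*Chain) is applied directly to a chain value:

chain := layer.Chain{
    Layers: []layer.Config{
        layer.FC{Input: 784, Output: 10},
    },
}

// Record AsBatch for every layer the chain compiles.
layer.WithLayerOpts(layer.AsBatch())(&chain)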
func WithSharedChainLearnables ¶
func WithSharedChainLearnables(shared *Chain) func(*Chain)
WithSharedChainLearnables shares the learnables from another chain.
func WithSharedLearnables ¶
func WithSharedLearnables(shared Layer) func(Layer)
WithSharedLearnables shares the learnables from another layer.
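A sketch of sharing weights between two compiled layers (e.g. for a target network), assuming g aliases gorgonia.org/gorgonia and that the func(Layer) returned here converts to CompileOpt:

graph := g.NewGraph()

var conf layer.Config = layer.FC{Input: 10, Output: 2}
conf = conf.ApplyDefaults()

base := conf.Compile(graph)

// A second layer whose weights point at base's learnables.
twin := conf.Clone().Compile(graph, layer.WithSharedLearnables(base))
_ = twin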
Types ¶
type ActivationFn ¶
type ActivationFn interface {
    // Fwd is a forward pass through x.
    Fwd(x *g.Node) (*g.Node, error)

    // Clone the activation.
    Clone() ActivationFn
}
ActivationFn is an activation function.
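Any type with these two methods can be used where an ActivationFn is expected. A minimal sketch of a custom activation built from Gorgonia ops (Swish is hypothetical, not part of this package):

// Swish is a hypothetical custom activation: x * sigmoid(x).
type Swish struct{}

// Fwd is a forward pass through x.
func (s *Swish) Fwd(x *g.Node) (*g.Node, error) {
    sig, err := g.Sigmoid(x)
    if err != nil {
        return nil, err
    }
    return g.HadamardProd(x, sig)
}

// Clone the activation.
func (s *Swish) Clone() layer.ActivationFn {
    return &Swish{}
}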
type Chain ¶
type Chain struct {
    // Layers are the layers to chain together.
    Layers []Config
    // contains filtered or unexported fields
}
Chain of layers.
func (*Chain) Learnables ¶
func (c *Chain) Learnables() g.Nodes
Learnables are all of the learnable parameters in the chain.
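A minimal sketch of assembling a chain (sizes are arbitrary; compiling the chain is left to the surrounding model):

chain := layer.Chain{
    Layers: []layer.Config{
        layer.FC{Input: 784, Output: 128},
        layer.FC{Input: 128, Output: 10, Activation: layer.Softmax},
    },
}

// Once the layers are compiled, Learnables gathers every
// trainable node, e.g. to hand to an optimizer.
_ = chain.Learnables()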
type Config ¶
type Config interface {
    // Compile the layer.
    Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

    // ApplyDefaults to the config.
    ApplyDefaults() Config

    // Validate the config.
    Validate() error

    // Clone the layer config.
    Clone() Config
}
Config is the config for a layer.
type Conv2D ¶
type Conv2D struct {
    // Input channels.
    // required
    Input int

    // Output channels.
    // required
    Output int

    // Height of the filter.
    // required
    Height int

    // Width of the filter.
    // required
    Width int

    // Name of the layer.
    Name string

    // Activation function for the layer.
    // Defaults to ReLU
    Activation ActivationFn

    // Pad
    // Defaults to (1, 1)
    Pad []int

    // Stride
    // Defaults to (1, 1)
    Stride []int

    // Dilation
    // Defaults to (1, 1)
    Dilation []int

    // Init function for the weights.
    // Defaults to GlorotN(1)
    Init g.InitWFn
}
Conv2D is a 2D convolution.
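A sketch of a 3x3 convolution config, assuming Conv2D satisfies Config with a value receiver like the other configs here:

conv := layer.Conv2D{
    Input:  1,  // e.g. a single grayscale channel
    Output: 32, // number of filters
    Height: 3,
    Width:  3,
}
// Unset fields fall back to the defaults above:
// ReLU activation, (1, 1) pad/stride/dilation, GlorotN(1) init.
var conf layer.Config = conv
conf = conf.ApplyDefaults()
_ = conf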
type Dropout ¶
type Dropout struct {
    // Probability of dropping out.
    // Defaults to 0.6
    Probability float64
}
Dropout implements layer dropout.
func (Dropout) ApplyDefaults ¶
func (d Dropout) ApplyDefaults() Config
ApplyDefaults applies defaults to the layer.
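A sketch of dropout between two fully connected layers:

chain := layer.Chain{
    Layers: []layer.Config{
        layer.FC{Input: 256, Output: 128},
        layer.Dropout{Probability: 0.5}, // zero value would default to 0.6
        layer.FC{Input: 128, Output: 10},
    },
}
_ = chain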
type FC ¶
type FC struct {
    // Input is the number of units in the input.
    // required
    Input int

    // Output is the number of units in the output.
    // required
    Output int

    // Name of the layer.
    Name string

    // Activation is the activation function.
    // Defaults to ReLU
    Activation ActivationFn

    // Init is the init function.
    // Defaults to GlorotN(1)
    Init g.InitWFn

    // NoBias indicates that the layer should not use a bias.
    // Defaults to true.
    NoBias bool

    // BiasInit is the init function for the bias.
    // Defaults to GlorotN(1)
    BiasInit g.InitWFn
}
FC is a fully connected layer of neurons.
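A sketch of a fully connected config with its optional fields spelled out, assuming g aliases gorgonia.org/gorgonia:

fc := layer.FC{
    Input:      100,
    Output:     10,
    Name:       "out",
    Activation: layer.Sigmoid,
    Init:       g.GlorotN(1), // explicit here, but matches the default
}
_ = fc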
type Flatten ¶
type Flatten struct{}
Flatten reshapes the incoming tensor to be flat, preserving the batch.
func (Flatten) ApplyDefaults ¶
func (f Flatten) ApplyDefaults() Config
ApplyDefaults to the flatten layer.
type Layer ¶
type Layer interface {
    // Fwd is a forward pass through the layer.
    Fwd(x *g.Node) (*g.Node, error)

    // Learnables returns all learnable nodes within this layer.
    Learnables() g.Nodes

    // Clone the layer.
    Clone() Layer

    // Graph returns the graph for this layer.
    Graph() *g.ExprGraph
}
Layer in a network.
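A sketch of compiling a config into a Layer and running a forward pass, assuming g aliases gorgonia.org/gorgonia:

graph := g.NewGraph()

var conf layer.Config = layer.FC{Input: 4, Output: 2}
l := conf.ApplyDefaults().Compile(graph)

// A 1x4 input row; the layer produces a 1x2 output node.
x := g.NewMatrix(graph, g.Float32, g.WithShape(1, 4), g.WithName("x"))
y, err := l.Fwd(x)
if err != nil {
    // handle the error
}
_ = y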
type LeakyReLUActivation ¶
type LeakyReLUActivation struct {
// contains filtered or unexported fields
}
LeakyReLUActivation is a leaky relu activation layer.
func NewLeakyReLU ¶
func NewLeakyReLU(alpha float64) *LeakyReLUActivation
NewLeakyReLU returns a new leaky relu activation layer.
func (*LeakyReLUActivation) Clone ¶
func (r *LeakyReLUActivation) Clone() ActivationFn
Clone the activation.
func (*LeakyReLUActivation) Compile ¶
func (r *LeakyReLUActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*LeakyReLUActivation) Learnables ¶
func (r *LeakyReLUActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.
type LinearActivation ¶
type LinearActivation struct{}
LinearActivation is a linear (identity) activation layer.
func (*LinearActivation) Clone ¶
func (l *LinearActivation) Clone() ActivationFn
Clone the activation.
func (*LinearActivation) Compile ¶
func (l *LinearActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*LinearActivation) Learnables ¶
func (l *LinearActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.
type MaxPooling2D ¶
type MaxPooling2D struct {
    // Shape of the kernel.
    // Defaults to (2, 2)
    Kernel t.Shape

    // Pad
    // Defaults to (0, 0)
    Pad []int

    // Stride
    // Defaults to (2, 2)
    Stride []int

    // Name
    Name string
}
MaxPooling2D implements the max pooling 2d function.
func (MaxPooling2D) ApplyDefaults ¶
func (m MaxPooling2D) ApplyDefaults() Config
ApplyDefaults applies defaults to the layer.
func (MaxPooling2D) Compile ¶
func (m MaxPooling2D) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer
Compile the config as a layer.
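A sketch of pooling inside a small convolutional stack; the shape arithmetic assumes the defaults listed above:

// Assuming 28x28 single-channel input: the conv keeps 28x28 with its
// default (1, 1) pad/stride, and pooling halves it to 14x14.
chain := layer.Chain{
    Layers: []layer.Config{
        layer.Conv2D{Input: 1, Output: 32, Height: 3, Width: 3},
        layer.MaxPooling2D{}, // defaults: (2, 2) kernel, (2, 2) stride
        layer.Flatten{},
        layer.FC{Input: 32 * 14 * 14, Output: 10, Activation: layer.Softmax},
    },
}
_ = chain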
type ReLUActivation ¶
type ReLUActivation struct{}
ReLUActivation is a relu activation layer.
func (*ReLUActivation) Compile ¶
func (r *ReLUActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*ReLUActivation) Learnables ¶
func (r *ReLUActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.
type Reshape ¶
Reshape the incoming tensor.
func (Reshape) ApplyDefaults ¶
func (r Reshape) ApplyDefaults() Config
ApplyDefaults to the reshape layer.
type SigmoidActivation ¶
type SigmoidActivation struct{}
SigmoidActivation is a sigmoid activation layer.
func NewSigmoid ¶
func NewSigmoid() *SigmoidActivation
NewSigmoid returns a new sigmoid activation layer.
func (*SigmoidActivation) Clone ¶
func (s *SigmoidActivation) Clone() ActivationFn
Clone the activation.
func (*SigmoidActivation) Compile ¶
func (s *SigmoidActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*SigmoidActivation) Learnables ¶
func (s *SigmoidActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.
type SoftmaxActivation ¶
type SoftmaxActivation struct {
// contains filtered or unexported fields
}
SoftmaxActivation is a softmax activation layer.
func NewSoftmax ¶
func NewSoftmax(axis ...int) *SoftmaxActivation
NewSoftmax returns a new softmax activation layer.
func (*SoftmaxActivation) Clone ¶
func (s *SoftmaxActivation) Clone() ActivationFn
Clone the activation.
func (*SoftmaxActivation) Compile ¶
func (s *SoftmaxActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*SoftmaxActivation) Learnables ¶
func (s *SoftmaxActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.
type TanhActivation ¶
type TanhActivation struct{}
TanhActivation is a tanh activation layer.
func (*TanhActivation) Compile ¶
func (t *TanhActivation) Compile(x *g.Node, opts ...CompileOpt)
Compile the layer.
func (*TanhActivation) Learnables ¶
func (t *TanhActivation) Learnables() (n g.Nodes)
Learnables returns all learnable nodes within this layer.