Documentation ¶
Index ¶
- Variables
- type ConvLayer
- func (l *ConvLayer) Backward()
- func (l *ConvLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *ConvLayer) MarshalJSON() ([]byte, error)
- func (l *ConvLayer) OutDepth() int
- func (l *ConvLayer) OutSx() int
- func (l *ConvLayer) OutSy() int
- func (l *ConvLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *ConvLayer) UnmarshalJSON(b []byte) error
- type DropoutLayer
- func (l *DropoutLayer) Backward()
- func (l *DropoutLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *DropoutLayer) MarshalJSON() ([]byte, error)
- func (l *DropoutLayer) OutDepth() int
- func (l *DropoutLayer) OutSx() int
- func (l *DropoutLayer) OutSy() int
- func (l *DropoutLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *DropoutLayer) UnmarshalJSON(b []byte) error
- type FullyConnLayer
- func (l *FullyConnLayer) Backward()
- func (l *FullyConnLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *FullyConnLayer) MarshalJSON() ([]byte, error)
- func (l *FullyConnLayer) OutDepth() int
- func (l *FullyConnLayer) OutSx() int
- func (l *FullyConnLayer) OutSy() int
- func (l *FullyConnLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *FullyConnLayer) UnmarshalJSON(b []byte) error
- type InputLayer
- func (l *InputLayer) Backward()
- func (l *InputLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *InputLayer) MarshalJSON() ([]byte, error)
- func (l *InputLayer) OutDepth() int
- func (l *InputLayer) OutSx() int
- func (l *InputLayer) OutSy() int
- func (l *InputLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *InputLayer) UnmarshalJSON(b []byte) error
- type Layer
- type LayerDef
- type LayerType
- type LocalResponseNormalizationLayer
- func (l *LocalResponseNormalizationLayer) Backward()
- func (l *LocalResponseNormalizationLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *LocalResponseNormalizationLayer) MarshalJSON() ([]byte, error)
- func (l *LocalResponseNormalizationLayer) OutDepth() int
- func (l *LocalResponseNormalizationLayer) OutSx() int
- func (l *LocalResponseNormalizationLayer) OutSy() int
- func (l *LocalResponseNormalizationLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *LocalResponseNormalizationLayer) UnmarshalJSON(b []byte) error
- type LossData
- type LossLayer
- type MagicNet
- type MaxoutLayer
- func (l *MaxoutLayer) Backward()
- func (l *MaxoutLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *MaxoutLayer) MarshalJSON() ([]byte, error)
- func (l *MaxoutLayer) OutDepth() int
- func (l *MaxoutLayer) OutSx() int
- func (l *MaxoutLayer) OutSy() int
- func (l *MaxoutLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *MaxoutLayer) UnmarshalJSON(b []byte) error
- type Net
- func (n *Net) Backward(y LossData) float64
- func (n *Net) CostLoss(v *Vol, y LossData) float64
- func (n *Net) Forward(v *Vol, isTraining bool) *Vol
- func (n *Net) MakeLayers(defs []LayerDef, r *rand.Rand)
- func (n *Net) ParamsAndGrads() []ParamsAndGrads
- func (n *Net) Prediction() int
- func (n *Net) UnmarshalJSON(b []byte) error
- type ParamsAndGrads
- type PoolLayer
- func (l *PoolLayer) Backward()
- func (l *PoolLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *PoolLayer) MarshalJSON() ([]byte, error)
- func (l *PoolLayer) OutDepth() int
- func (l *PoolLayer) OutSx() int
- func (l *PoolLayer) OutSy() int
- func (l *PoolLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *PoolLayer) UnmarshalJSON(b []byte) error
- type RegressionLayer
- func (l *RegressionLayer) Backward()
- func (l *RegressionLayer) BackwardLoss(y LossData) float64
- func (l *RegressionLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *RegressionLayer) MarshalJSON() ([]byte, error)
- func (l *RegressionLayer) OutDepth() int
- func (l *RegressionLayer) OutSx() int
- func (l *RegressionLayer) OutSy() int
- func (l *RegressionLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *RegressionLayer) UnmarshalJSON(b []byte) error
- type ReluLayer
- func (l *ReluLayer) Backward()
- func (l *ReluLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *ReluLayer) MarshalJSON() ([]byte, error)
- func (l *ReluLayer) OutDepth() int
- func (l *ReluLayer) OutSx() int
- func (l *ReluLayer) OutSy() int
- func (l *ReluLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *ReluLayer) UnmarshalJSON(b []byte) error
- type SVMLayer
- func (l *SVMLayer) Backward()
- func (l *SVMLayer) BackwardLoss(y LossData) float64
- func (l *SVMLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *SVMLayer) MarshalJSON() ([]byte, error)
- func (l *SVMLayer) OutDepth() int
- func (l *SVMLayer) OutSx() int
- func (l *SVMLayer) OutSy() int
- func (l *SVMLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *SVMLayer) UnmarshalJSON(b []byte) error
- type SigmoidLayer
- func (l *SigmoidLayer) Backward()
- func (l *SigmoidLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *SigmoidLayer) MarshalJSON() ([]byte, error)
- func (l *SigmoidLayer) OutDepth() int
- func (l *SigmoidLayer) OutSx() int
- func (l *SigmoidLayer) OutSy() int
- func (l *SigmoidLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *SigmoidLayer) UnmarshalJSON(b []byte) error
- type SoftmaxLayer
- func (l *SoftmaxLayer) Backward()
- func (l *SoftmaxLayer) BackwardLoss(y LossData) float64
- func (l *SoftmaxLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *SoftmaxLayer) MarshalJSON() ([]byte, error)
- func (l *SoftmaxLayer) OutDepth() int
- func (l *SoftmaxLayer) OutSx() int
- func (l *SoftmaxLayer) OutSy() int
- func (l *SoftmaxLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *SoftmaxLayer) UnmarshalJSON(b []byte) error
- type TanhLayer
- func (l *TanhLayer) Backward()
- func (l *TanhLayer) Forward(v *Vol, isTraining bool) *Vol
- func (l *TanhLayer) MarshalJSON() ([]byte, error)
- func (l *TanhLayer) OutDepth() int
- func (l *TanhLayer) OutSx() int
- func (l *TanhLayer) OutSy() int
- func (l *TanhLayer) ParamsAndGrads() []ParamsAndGrads
- func (l *TanhLayer) UnmarshalJSON(b []byte) error
- type Trainer
- func NewTrainer(net *Net, opts TrainerOptions) *Trainer
- type TrainerMethod
- func (i TrainerMethod) String() string
- type TrainerOptions
- type TrainingResult
- type Vol
- func (v *Vol) Add(x, y, d int, value float64)
- func (v *Vol) AddFrom(v2 *Vol)
- func (v *Vol) AddFromScaled(v2 *Vol, a float64)
- func (v *Vol) AddGrad(x, y, d int, value float64)
- func (v *Vol) Augment(crop, dx, dy int, fliplr bool) *Vol
- func (v *Vol) Clone() *Vol
- func (v *Vol) CloneAndZero() *Vol
- func (v *Vol) Get(x, y, d int) float64
- func (v *Vol) GetGrad(x, y, d int) float64
- func (v *Vol) Set(x, y, d int, value float64)
- func (v *Vol) SetConst(a float64)
- func (v *Vol) SetGrad(x, y, d int, value float64)
- func (v *Vol) UnmarshalJSON(b []byte) error
Constants ¶
This section is empty.
Variables ¶
var DefaultTrainerOptions = TrainerOptions{
    LearningRate: 0.01,
    L1Decay:      0.0,
    L2Decay:      0.0,
    BatchSize:    1,
    Method:       MethodSGD,
    Momentum:     0.9,
    Ro:           0.95,
    Eps:          1e-8,
    Beta1:        0.9,
    Beta2:        0.999,
}
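A sketch of deriving custom options from the defaults (the package name convnet comes from the MagicNet doc below; the import path and helper name are assumptions):

// newAdamTrainer is a hypothetical helper: copy the defaults,
// override a few fields, and build a trainer from them.
func newAdamTrainer(net *convnet.Net) *convnet.Trainer {
    opts := convnet.DefaultTrainerOptions
    opts.Method = convnet.MethodAdam
    opts.LearningRate = 0.001
    opts.BatchSize = 16
    return convnet.NewTrainer(net, opts)
}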
Functions ¶
This section is empty.
Types ¶
type ConvLayer ¶
type ConvLayer struct {
// contains filtered or unexported fields
}
func (*ConvLayer) Backward ¶
func (l *ConvLayer) Backward()
func (*ConvLayer) Forward ¶
func (l *ConvLayer) Forward(v *Vol, isTraining bool) *Vol
func (*ConvLayer) MarshalJSON ¶
func (l *ConvLayer) MarshalJSON() ([]byte, error)
func (*ConvLayer) OutDepth ¶
func (l *ConvLayer) OutDepth() int
func (*ConvLayer) OutSx ¶
func (l *ConvLayer) OutSx() int
func (*ConvLayer) OutSy ¶
func (l *ConvLayer) OutSy() int
func (*ConvLayer) ParamsAndGrads ¶
func (l *ConvLayer) ParamsAndGrads() []ParamsAndGrads
func (*ConvLayer) UnmarshalJSON ¶
func (l *ConvLayer) UnmarshalJSON(b []byte) error
type DropoutLayer ¶
type DropoutLayer struct {
// contains filtered or unexported fields
}
An inefficient dropout layer. Note that this is not the most efficient implementation, since the layer before has already computed all of these activations and now we are just going to drop them :( The same goes for the backward pass. Also, to be efficient at test time we could equivalently be clever and upscale during training and copy pointers during testing.
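A dropout layer is typically introduced through a LayerDef (see the LayerDef type below). A sketch of the JSON form, assuming the convnetjs-style "dropout" type string, which this page does not confirm:

{"type": "dropout", "drop_prob": 0.5}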
func (*DropoutLayer) Backward ¶
func (l *DropoutLayer) Backward()
func (*DropoutLayer) Forward ¶
func (l *DropoutLayer) Forward(v *Vol, isTraining bool) *Vol
func (*DropoutLayer) MarshalJSON ¶
func (l *DropoutLayer) MarshalJSON() ([]byte, error)
func (*DropoutLayer) OutDepth ¶
func (l *DropoutLayer) OutDepth() int
func (*DropoutLayer) OutSx ¶
func (l *DropoutLayer) OutSx() int
func (*DropoutLayer) OutSy ¶
func (l *DropoutLayer) OutSy() int
func (*DropoutLayer) ParamsAndGrads ¶
func (l *DropoutLayer) ParamsAndGrads() []ParamsAndGrads
func (*DropoutLayer) UnmarshalJSON ¶
func (l *DropoutLayer) UnmarshalJSON(b []byte) error
type FullyConnLayer ¶
type FullyConnLayer struct {
// contains filtered or unexported fields
}
func (*FullyConnLayer) Backward ¶
func (l *FullyConnLayer) Backward()
func (*FullyConnLayer) Forward ¶
func (l *FullyConnLayer) Forward(v *Vol, isTraining bool) *Vol
func (*FullyConnLayer) MarshalJSON ¶
func (l *FullyConnLayer) MarshalJSON() ([]byte, error)
func (*FullyConnLayer) OutDepth ¶
func (l *FullyConnLayer) OutDepth() int
func (*FullyConnLayer) OutSx ¶
func (l *FullyConnLayer) OutSx() int
func (*FullyConnLayer) OutSy ¶
func (l *FullyConnLayer) OutSy() int
func (*FullyConnLayer) ParamsAndGrads ¶
func (l *FullyConnLayer) ParamsAndGrads() []ParamsAndGrads
func (*FullyConnLayer) UnmarshalJSON ¶
func (l *FullyConnLayer) UnmarshalJSON(b []byte) error
type InputLayer ¶
type InputLayer struct {
// contains filtered or unexported fields
}
func (*InputLayer) Backward ¶
func (l *InputLayer) Backward()
func (*InputLayer) Forward ¶
func (l *InputLayer) Forward(v *Vol, isTraining bool) *Vol
func (*InputLayer) MarshalJSON ¶
func (l *InputLayer) MarshalJSON() ([]byte, error)
func (*InputLayer) OutDepth ¶
func (l *InputLayer) OutDepth() int
func (*InputLayer) OutSx ¶
func (l *InputLayer) OutSx() int
func (*InputLayer) OutSy ¶
func (l *InputLayer) OutSy() int
func (*InputLayer) ParamsAndGrads ¶
func (l *InputLayer) ParamsAndGrads() []ParamsAndGrads
func (*InputLayer) UnmarshalJSON ¶
func (l *InputLayer) UnmarshalJSON(b []byte) error
type LayerDef ¶
type LayerDef struct {
    Type           LayerType `json:"type"`
    NumNeurons     int       `json:"num_neurons"`
    NumClasses     int       `json:"num_classes"`
    BiasPref       float64   `json:"bias_pref"`
    BiasPrefZero   bool      `json:"-"`
    Activation     LayerType `json:"activation"`
    GroupSize      int       `json:"group_size"`
    GroupSizeZero  bool      `json:"-"`
    DropProb       float64   `json:"drop_prob"`
    DropProbZero   bool      `json:"-"`
    InSx           int       `json:"in_sx"`
    InSy           int       `json:"in_sy"`
    InDepth        int       `json:"in_depth"`
    OutSx          int       `json:"out_sx"`
    OutSy          int       `json:"out_sy"`
    OutDepth       int       `json:"out_depth"`
    L1DecayMul     float64   `json:"l1_decay_mul"`
    L1DecayMulZero bool      `json:"-"`
    L2DecayMul     float64   `json:"l2_decay_mul"`
    L2DecayMulZero bool      `json:"-"`
    Sx             int       `json:"sx"`
    SxZero         bool      `json:"-"`
    Sy             int       `json:"sy"`
    SyZero         bool      `json:"-"`
    Pad            int       `json:"pad"`
    PadZero        bool      `json:"-"`
    Stride         int       `json:"stride"`
    StrideZero     bool      `json:"-"`
    Filters        int       `json:"filters"`
    K              float64   `json:"k"`
    N              int       `json:"n"`
    Alpha          float64   `json:"alpha"`
    Beta           float64   `json:"beta"`
}
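The paired *Zero fields (BiasPrefZero, SxZero, ...) are excluded from JSON and presumably mark fields that were explicitly set to zero, since the zero value is otherwise ambiguous in Go; that reading is an assumption. A sketch of a convolution definition built directly in Go (Type and Activation take the package's LayerType constants, which this page does not list):

def := convnet.LayerDef{
    // Type: the package's conv LayerType (not listed on this page)
    Sx:      5, // 5x5 filters
    Filters: 16,
    Stride:  1,
    Pad:     2,
}
_ = def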
type LocalResponseNormalizationLayer ¶
type LocalResponseNormalizationLayer struct {
// contains filtered or unexported fields
}
Local Response Normalization over a window, along the depth of the volume.
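This layer is configured through the K, N, Alpha, and Beta fields of LayerDef. A sketch in the JSON form, using AlexNet-style parameters (the "lrn" type string is an assumption, not confirmed by this page):

{"type": "lrn", "k": 2, "n": 5, "alpha": 0.0001, "beta": 0.75}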
func (*LocalResponseNormalizationLayer) Backward ¶
func (l *LocalResponseNormalizationLayer) Backward()
func (*LocalResponseNormalizationLayer) Forward ¶
func (l *LocalResponseNormalizationLayer) Forward(v *Vol, isTraining bool) *Vol
func (*LocalResponseNormalizationLayer) MarshalJSON ¶
func (l *LocalResponseNormalizationLayer) MarshalJSON() ([]byte, error)
func (*LocalResponseNormalizationLayer) OutDepth ¶
func (l *LocalResponseNormalizationLayer) OutDepth() int
func (*LocalResponseNormalizationLayer) OutSx ¶
func (l *LocalResponseNormalizationLayer) OutSx() int
func (*LocalResponseNormalizationLayer) OutSy ¶
func (l *LocalResponseNormalizationLayer) OutSy() int
func (*LocalResponseNormalizationLayer) ParamsAndGrads ¶
func (l *LocalResponseNormalizationLayer) ParamsAndGrads() []ParamsAndGrads
func (*LocalResponseNormalizationLayer) UnmarshalJSON ¶
func (l *LocalResponseNormalizationLayer) UnmarshalJSON(b []byte) error
type MagicNet ¶
type MagicNet struct{}
MagicNet takes data, a list of convnet.Vol, and labels, which for now are assumed to be class indices 0..K. MagicNet then:

- creates data folds for cross-validation
- samples candidate networks
- evaluates candidate networks on all data folds
- produces predictions by model-averaging the best networks
type MaxoutLayer ¶
type MaxoutLayer struct {
// contains filtered or unexported fields
}
Implements the Maxout nonlinearity, which computes x -> max(x), where x is a vector of size group_size. Ideally, of course, the input size should be exactly divisible by group_size.
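For example, a fully connected layer with maxout groups of 2 reduces its 20 neurons to 10 outputs. A sketch in the JSON form (the type strings are assumed from convnetjs, not confirmed by this page):

{"type": "fc", "num_neurons": 20, "activation": "maxout", "group_size": 2}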
func (*MaxoutLayer) Backward ¶
func (l *MaxoutLayer) Backward()
func (*MaxoutLayer) Forward ¶
func (l *MaxoutLayer) Forward(v *Vol, isTraining bool) *Vol
func (*MaxoutLayer) MarshalJSON ¶
func (l *MaxoutLayer) MarshalJSON() ([]byte, error)
func (*MaxoutLayer) OutDepth ¶
func (l *MaxoutLayer) OutDepth() int
func (*MaxoutLayer) OutSx ¶
func (l *MaxoutLayer) OutSx() int
func (*MaxoutLayer) OutSy ¶
func (l *MaxoutLayer) OutSy() int
func (*MaxoutLayer) ParamsAndGrads ¶
func (l *MaxoutLayer) ParamsAndGrads() []ParamsAndGrads
func (*MaxoutLayer) UnmarshalJSON ¶
func (l *MaxoutLayer) UnmarshalJSON(b []byte) error
type Net ¶
type Net struct {
Layers []Layer `json:"layers"`
}
Net manages a set of layers. For now the constraints are a simple linear order of layers, with the first layer an input layer and the last layer a cost layer.
func (*Net) Backward ¶
func (n *Net) Backward(y LossData) float64
func (*Net) CostLoss ¶
func (n *Net) CostLoss(v *Vol, y LossData) float64
func (*Net) Forward ¶
func (n *Net) Forward(v *Vol, isTraining bool) *Vol
Forward propagates the input through the network. The trainer passes isTraining = true; when this function is called from outside (not from the trainer), pass isTraining = false for prediction mode.
func (*Net) MakeLayers ¶
func (n *Net) MakeLayers(defs []LayerDef, r *rand.Rand)
MakeLayers takes a list of layer definitions and creates the network layer objects.
func (*Net) ParamsAndGrads ¶
func (n *Net) ParamsAndGrads() []ParamsAndGrads
ParamsAndGrads accumulates parameters and gradients for the entire network.
func (*Net) Prediction ¶
func (n *Net) Prediction() int
Prediction is a convenience function that returns the argmax prediction, assuming the last layer of the net is a softmax.
func (*Net) UnmarshalJSON ¶
func (n *Net) UnmarshalJSON(b []byte) error
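A minimal end-to-end sketch. The import path and the convnetjs-style layer type strings are assumptions, not confirmed by this page; consult the package source for the actual LayerType values.

package main

import (
    "encoding/json"
    "fmt"
    "log"
    "math/rand"

    convnet "example.com/convnet" // hypothetical import path
)

func main() {
    // Layer definitions as JSON, assuming LayerType unmarshals from
    // convnetjs-style type strings ("input", "fc", "softmax", ...).
    blob := []byte(`[
        {"type": "input", "out_sx": 8, "out_sy": 8, "out_depth": 1},
        {"type": "fc", "num_neurons": 20, "activation": "relu"},
        {"type": "softmax", "num_classes": 10}
    ]`)
    var defs []convnet.LayerDef
    if err := json.Unmarshal(blob, &defs); err != nil {
        log.Fatal(err)
    }

    var net convnet.Net
    net.MakeLayers(defs, rand.New(rand.NewSource(42)))

    // An 8x8x1 input volume built from Vol's exported fields.
    v := &convnet.Vol{
        Sx: 8, Sy: 8, Depth: 1,
        W:  make([]float64, 8*8),
        Dw: make([]float64, 8*8),
    }

    out := net.Forward(v, false) // prediction mode
    fmt.Println("predicted class:", net.Prediction())
    fmt.Println("probabilities:", out.W)
}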
type ParamsAndGrads ¶
type PoolLayer ¶
type PoolLayer struct {
// contains filtered or unexported fields
}
func (*PoolLayer) Backward ¶
func (l *PoolLayer) Backward()
func (*PoolLayer) Forward ¶
func (l *PoolLayer) Forward(v *Vol, isTraining bool) *Vol
func (*PoolLayer) MarshalJSON ¶
func (l *PoolLayer) MarshalJSON() ([]byte, error)
func (*PoolLayer) OutDepth ¶
func (l *PoolLayer) OutDepth() int
func (*PoolLayer) OutSx ¶
func (l *PoolLayer) OutSx() int
func (*PoolLayer) OutSy ¶
func (l *PoolLayer) OutSy() int
func (*PoolLayer) ParamsAndGrads ¶
func (l *PoolLayer) ParamsAndGrads() []ParamsAndGrads
func (*PoolLayer) UnmarshalJSON ¶
func (l *PoolLayer) UnmarshalJSON(b []byte) error
type RegressionLayer ¶
type RegressionLayer struct {
// contains filtered or unexported fields
}
Implements an L2 regression cost layer: it penalizes sum_i ||x_i - y_i||^2, where x is its input and y is the user-provided slice of "correct" values.
func (*RegressionLayer) Backward ¶
func (l *RegressionLayer) Backward()
func (*RegressionLayer) BackwardLoss ¶
func (l *RegressionLayer) BackwardLoss(y LossData) float64
func (*RegressionLayer) Forward ¶
func (l *RegressionLayer) Forward(v *Vol, isTraining bool) *Vol
func (*RegressionLayer) MarshalJSON ¶
func (l *RegressionLayer) MarshalJSON() ([]byte, error)
func (*RegressionLayer) OutDepth ¶
func (l *RegressionLayer) OutDepth() int
func (*RegressionLayer) OutSx ¶
func (l *RegressionLayer) OutSx() int
func (*RegressionLayer) OutSy ¶
func (l *RegressionLayer) OutSy() int
func (*RegressionLayer) ParamsAndGrads ¶
func (l *RegressionLayer) ParamsAndGrads() []ParamsAndGrads
func (*RegressionLayer) UnmarshalJSON ¶
func (l *RegressionLayer) UnmarshalJSON(b []byte) error
type ReluLayer ¶
type ReluLayer struct {
// contains filtered or unexported fields
}
Implements the ReLU nonlinearity elementwise, x -> max(0, x). The output is in [0, inf).
func (*ReluLayer) Backward ¶
func (l *ReluLayer) Backward()
func (*ReluLayer) Forward ¶
func (l *ReluLayer) Forward(v *Vol, isTraining bool) *Vol
func (*ReluLayer) MarshalJSON ¶
func (l *ReluLayer) MarshalJSON() ([]byte, error)
func (*ReluLayer) OutDepth ¶
func (l *ReluLayer) OutDepth() int
func (*ReluLayer) OutSx ¶
func (l *ReluLayer) OutSx() int
func (*ReluLayer) OutSy ¶
func (l *ReluLayer) OutSy() int
func (*ReluLayer) ParamsAndGrads ¶
func (l *ReluLayer) ParamsAndGrads() []ParamsAndGrads
func (*ReluLayer) UnmarshalJSON ¶
func (l *ReluLayer) UnmarshalJSON(b []byte) error
type SVMLayer ¶
type SVMLayer struct {
// contains filtered or unexported fields
}
func (*SVMLayer) Backward ¶
func (l *SVMLayer) Backward()
func (*SVMLayer) BackwardLoss ¶
func (l *SVMLayer) BackwardLoss(y LossData) float64
func (*SVMLayer) Forward ¶
func (l *SVMLayer) Forward(v *Vol, isTraining bool) *Vol
func (*SVMLayer) MarshalJSON ¶
func (l *SVMLayer) MarshalJSON() ([]byte, error)
func (*SVMLayer) OutDepth ¶
func (l *SVMLayer) OutDepth() int
func (*SVMLayer) OutSx ¶
func (l *SVMLayer) OutSx() int
func (*SVMLayer) OutSy ¶
func (l *SVMLayer) OutSy() int
func (*SVMLayer) ParamsAndGrads ¶
func (l *SVMLayer) ParamsAndGrads() []ParamsAndGrads
func (*SVMLayer) UnmarshalJSON ¶
func (l *SVMLayer) UnmarshalJSON(b []byte) error
type SigmoidLayer ¶
type SigmoidLayer struct {
// contains filtered or unexported fields
}
Implements the sigmoid nonlinearity elementwise, x -> 1/(1+e^(-x)), so the output is between 0 and 1.
func (*SigmoidLayer) Backward ¶
func (l *SigmoidLayer) Backward()
func (*SigmoidLayer) Forward ¶
func (l *SigmoidLayer) Forward(v *Vol, isTraining bool) *Vol
func (*SigmoidLayer) MarshalJSON ¶
func (l *SigmoidLayer) MarshalJSON() ([]byte, error)
func (*SigmoidLayer) OutDepth ¶
func (l *SigmoidLayer) OutDepth() int
func (*SigmoidLayer) OutSx ¶
func (l *SigmoidLayer) OutSx() int
func (*SigmoidLayer) OutSy ¶
func (l *SigmoidLayer) OutSy() int
func (*SigmoidLayer) ParamsAndGrads ¶
func (l *SigmoidLayer) ParamsAndGrads() []ParamsAndGrads
func (*SigmoidLayer) UnmarshalJSON ¶
func (l *SigmoidLayer) UnmarshalJSON(b []byte) error
type SoftmaxLayer ¶
type SoftmaxLayer struct {
// contains filtered or unexported fields
}
This is a classifier with N discrete classes, from 0 to N-1. It receives N incoming numbers and computes the softmax function (exponentiates and normalizes them so they sum to 1, as probabilities should).
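After a prediction-mode forward pass, the output volume is 1x1xN and its W slice holds the class probabilities. A sketch, reusing net and v from the Net example above:

out := net.Forward(v, false)
sum := 0.0
for class, p := range out.W {
    fmt.Printf("P(class=%d) = %.4f\n", class, p)
    sum += p
}
fmt.Printf("total = %.4f\n", sum) // ~1.0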
func (*SoftmaxLayer) Backward ¶
func (l *SoftmaxLayer) Backward()
func (*SoftmaxLayer) BackwardLoss ¶
func (l *SoftmaxLayer) BackwardLoss(y LossData) float64
func (*SoftmaxLayer) Forward ¶
func (l *SoftmaxLayer) Forward(v *Vol, isTraining bool) *Vol
func (*SoftmaxLayer) MarshalJSON ¶
func (l *SoftmaxLayer) MarshalJSON() ([]byte, error)
func (*SoftmaxLayer) OutDepth ¶
func (l *SoftmaxLayer) OutDepth() int
func (*SoftmaxLayer) OutSx ¶
func (l *SoftmaxLayer) OutSx() int
func (*SoftmaxLayer) OutSy ¶
func (l *SoftmaxLayer) OutSy() int
func (*SoftmaxLayer) ParamsAndGrads ¶
func (l *SoftmaxLayer) ParamsAndGrads() []ParamsAndGrads
func (*SoftmaxLayer) UnmarshalJSON ¶
func (l *SoftmaxLayer) UnmarshalJSON(b []byte) error
type TanhLayer ¶
type TanhLayer struct {
// contains filtered or unexported fields
}
Implements the tanh nonlinearity elementwise, x -> tanh(x), so the output is between -1 and 1.
func (*TanhLayer) Backward ¶
func (l *TanhLayer) Backward()
func (*TanhLayer) Forward ¶
func (l *TanhLayer) Forward(v *Vol, isTraining bool) *Vol
func (*TanhLayer) MarshalJSON ¶
func (l *TanhLayer) MarshalJSON() ([]byte, error)
func (*TanhLayer) OutDepth ¶
func (l *TanhLayer) OutDepth() int
func (*TanhLayer) OutSx ¶
func (l *TanhLayer) OutSx() int
func (*TanhLayer) OutSy ¶
func (l *TanhLayer) OutSy() int
func (*TanhLayer) ParamsAndGrads ¶
func (l *TanhLayer) ParamsAndGrads() []ParamsAndGrads
func (*TanhLayer) UnmarshalJSON ¶
func (l *TanhLayer) UnmarshalJSON(b []byte) error
type Trainer ¶
type Trainer struct {
    Net *Net
    TrainerOptions
    // contains filtered or unexported fields
}
func NewTrainer ¶
func NewTrainer(net *Net, opts TrainerOptions) *Trainer
type TrainerMethod ¶
type TrainerMethod int
const (
    MethodSGD        TrainerMethod = iota // sgd
    MethodAdam                            // adam
    MethodADAGrad                         // adagrad
    MethodADADelta                        // adadelta
    MethodWindowGrad                      // windowgrad
    MethodNetsterov                       // netsterov
)
func (TrainerMethod) String ¶
func (i TrainerMethod) String() string
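The lowercase names in the constant comments above suggest stringer's -linecomment form, so printing a TrainerMethod through fmt yields that name; a sketch under that assumption:

fmt.Println(convnet.MethodADAGrad) // prints "adagrad", assuming -linecomment strings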
type TrainerOptions ¶
type TrainingResult ¶
type Vol ¶
type Vol struct {
    Sx    int       `json:"sx"`
    Sy    int       `json:"sy"`
    Depth int       `json:"depth"`
    W     []float64 `json:"w"`
    Dw    []float64 `json:"-"`
}
Vol is the basic building block of all data in a net. It is essentially just a 3D volume of numbers, with a width (Sx), height (Sy), and depth (Depth). It holds the data for all filters, all volumes, and all weights, and also stores all gradients w.r.t. the data. When constructing a Vol, c is optionally a value to initialize the volume with; if c is missing, the Vol is filled with random numbers.
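A sketch of building a small volume by hand; since no constructor is shown on this page, the exported fields are filled in directly:

v := &convnet.Vol{
    Sx: 1, Sy: 1, Depth: 3,
    W:  []float64{0.1, 0.2, 0.3}, // values
    Dw: make([]float64, 3),       // gradients
}
v.Set(0, 0, 2, 0.9)         // overwrite the value at (x=0, y=0, d=2)
fmt.Println(v.Get(0, 0, 2)) // 0.9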
func (*Vol) Add ¶
func (v *Vol) Add(x, y, d int, value float64)
func (*Vol) AddFrom ¶
func (v *Vol) AddFrom(v2 *Vol)
func (*Vol) AddFromScaled ¶
func (v *Vol) AddFromScaled(v2 *Vol, a float64)
func (*Vol) AddGrad ¶
func (v *Vol) AddGrad(x, y, d int, value float64)
func (*Vol) Augment ¶
func (v *Vol) Augment(crop, dx, dy int, fliplr bool) *Vol
Augment is a volume utility intended for use with data augmentation. crop is the size of the output; dx and dy are the offsets of the shift w.r.t. the incoming volume; fliplr says whether we also want to flip left<->right.

Note: when converting from convnetjs, dx and dy default to a random number in [0, v.Sx - crop) and [0, v.Sy - crop) respectively.
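A sketch of a typical augmentation step: shift a 32x32 volume by a random offset, crop it to 24x24, and flip left-right half the time (v is a *convnet.Vol with Sx = Sy = 32):

dx := rand.Intn(32 - 24) // offsets in [0, Sx-crop), as noted above
dy := rand.Intn(32 - 24)
aug := v.Augment(24, dx, dy, rand.Intn(2) == 0)
_ = aug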
func (*Vol) Clone ¶
func (v *Vol) Clone() *Vol
func (*Vol) CloneAndZero ¶
func (v *Vol) CloneAndZero() *Vol
func (*Vol) Get ¶
func (v *Vol) Get(x, y, d int) float64
func (*Vol) GetGrad ¶
func (v *Vol) GetGrad(x, y, d int) float64
func (*Vol) Set ¶
func (v *Vol) Set(x, y, d int, value float64)
func (*Vol) SetConst ¶
func (v *Vol) SetConst(a float64)
func (*Vol) SetGrad ¶
func (v *Vol) SetGrad(x, y, d int, value float64)
func (*Vol) UnmarshalJSON ¶
func (v *Vol) UnmarshalJSON(b []byte) error
Source Files ¶
Directories ¶
Path | Synopsis
---|---
cnnutil | Package cnnutil contains various utility functions.
cnnvis | Package cnnvis contains various utility functions.