Documentation ¶
Overview ¶
Package nnet contains routines for constructing, training and testing neural networks.
Index ¶
- Variables
- func Bprop(q num.Queue, layers []Layer, grad *num.Array, work [3]num.Buffer) *num.Array
- func CheckErr(err error)
- func CopyParams(q num.Queue, src, dst []Layer, sync bool)
- func Download(rawurl, dir string) error
- func FileExists(name string) bool
- func FormatBytes(n int) string
- func FormatDuration(d time.Duration) string
- func Fprop(q num.Queue, layers []Layer, input *num.Array, work num.Buffer, trainMode bool) *num.Array
- func InitLogger(model string, flags int) error
- func LoadData(model string) (d map[string]Data, err error)
- func MemoryProfile(verbose bool, train, test *Network)
- func NewData(classes []string, shape []int, labels []int32, inputs []float32) *data
- func ParamLayers(desc string, layers []Layer, callback func(desc string, l ParamLayer))
- func SaveDataFile(d Data, name string) error
- func SetSeed(seed int64) *rand.Rand
- func StatsHeaders(d map[string]Data) []string
- func Train(net *Network, dset *Dataset, test Tester)
- func TrainEpoch(net *Network, dset *Dataset, epoch int, pred []int32) (batchLoss []float64, trainError float64)
- type AMSGrad
- type Activation
- type Adam
- type Add
- type BatchNorm
- type BatchNormLayer
- type Config
- func (c Config) AddLayers(layers ...ConfigLayer) Config
- func (c Config) Copy() Config
- func (c Config) DatasetConfig(test bool) DatasetOptions
- func (c Config) Fields() []string
- func (c Config) Get(key string) interface{}
- func (c Config) OptimiserParams(epoch, samples int) (learningRate, weightDecay float32)
- func (c Config) Save(name string) error
- func (c Config) SetString(key, val string) (Config, error)
- func (c Config) String() string
- type ConfigLayer
- type Conv
- type Data
- type Dataset
- type DatasetOptions
- type Dropout
- type Flatten
- type InitType
- type Layer
- type LayerConfig
- type LayerGroup
- type Linear
- type Nesterov
- type Network
- func (n *Network) BatchError(batch int, dset *Dataset, y, yPred *num.Array, pred []int32) float64
- func (n *Network) BatchLoss(yOneHot, yPred *num.Array) float64
- func (n *Network) Error(dset *Dataset, pred []int32) float64
- func (n *Network) InitWeights(rng *rand.Rand)
- func (n *Network) Memory() int
- func (n *Network) MemoryProfile(name string) string
- func (n *Network) OutLayer() OutputLayer
- func (n *Network) PrintWeights()
- func (n *Network) Release()
- func (n *Network) String() string
- type Optimiser
- type OptimiserType
- type OptionList
- type OutputLayer
- type ParamLayer
- type Pool
- type RMSprop
- type SGD
- type Stats
- type TestBase
- func (t *TestBase) Epilogue() bool
- func (t *TestBase) Init(dev num.Device, conf Config, data map[string]Data, rng *rand.Rand) *TestBase
- func (t *TestBase) Network() *Network
- func (t *TestBase) Predict(train *Dataset) *TestBase
- func (t *TestBase) Release()
- func (t *TestBase) Reset()
- func (t *TestBase) Test(net *Network, epoch int, batchLoss []float64, trainError float64, ...) bool
- type Tester
Constants ¶
This section is empty.
Variables ¶
Functions ¶
func CopyParams ¶
Copy weights and bias arrays from src to dst
func Download ¶
Download data from url and save it to directory dir. If the file suffix is .gz then gunzip the data. If the file type is .tar then untar the files.
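For example (a sketch; the URL and target directory are placeholders):

    // Fetch a gzipped file into ./data; it is gunzipped automatically because of the .gz suffix.
    err := nnet.Download("http://example.com/mnist/train-images-idx3-ubyte.gz", "data")
    nnet.CheckErr(err)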
func FormatDuration ¶
func Fprop ¶
func Fprop(q num.Queue, layers []Layer, input *num.Array, work num.Buffer, trainMode bool) *num.Array
Feed forward the input to get the predicted output
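A sketch of calling Fprop directly on an existing network; using net.WorkSpace[0] as the scratch buffer is an assumption based on the Network struct fields (Train and TrainEpoch normally drive this for you):

    // q is the num.Queue the network was built on and input holds one batch of samples.
    // trainMode false requests inference behaviour from layers such as dropout.
    yPred := nnet.Fprop(q, net.Layers, input, net.WorkSpace[0], false)
    // yPred points to the output of the final layer.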
func ParamLayers ¶
func ParamLayers(desc string, layers []Layer, callback func(desc string, l ParamLayer))
Call function on each of the ParamLayers in the network
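For instance, the callback can be used to sum the learned parameters across the network (a sketch; passing an empty desc prefix is an assumption about how the prefix is used):

    total := 0
    nnet.ParamLayers("", net.Layers, func(desc string, l nnet.ParamLayer) {
        total += l.NumWeights()
    })
    // total now holds the number of weights and biases in the network.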
func SaveDataFile ¶
Encode in gob format and save to file under DataDir
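A sketch, assuming d holds a value implementing the Data interface and the file name is illustrative:

    // Writes d in gob format to the file "mnist_train" under DataDir.
    err := nnet.SaveDataFile(d, "mnist_train")
    nnet.CheckErr(err)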
func StatsHeaders ¶
Types ¶
type AMSGrad ¶
type AMSGrad struct {
    LearningRate float32
    Beta1        float32
    Beta2        float32
    Epsilon      float32
    Work         num.Buffer
}
AMSGrad optimiser with adaptive learning rate
type Activation ¶
type Activation struct{ Atype string }
Sigmoid, tanh or relu activation layer, implements OutputLayer interface.
func (Activation) Marshal ¶
func (c Activation) Marshal() LayerConfig
func (Activation) String ¶
func (c Activation) String() string
func (Activation) Type ¶
func (c Activation) Type() string
type Adam ¶
type Adam struct {
    LearningRate float32
    Beta1        float32
    Beta2        float32
    Epsilon      float32
    Iter         int
    Work1        num.Buffer
    Work2        num.Buffer
}
Adam optimiser with adaptive learning rate
type Add ¶
type Add struct {
X, Y []LayerConfig
}
Add two sets of layers, used in a residual network block. The output is X(input) + Y(input); if Y is nil then the output is X(input) + input (identity shortcut).
func AddLayer ¶
func AddLayer(X, Y []ConfigLayer) Add
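A sketch of a residual-style block; the layer sizes are illustrative, the block's input is assumed to already have 64 features so the identity shortcut matches, and it is assumed that Add satisfies ConfigLayer like the other layer config types:

    // Main path X: linear -> relu -> linear; shortcut Y: nil, i.e. the identity.
    block := nnet.AddLayer(
        []nnet.ConfigLayer{nnet.Linear{Nout: 64}, nnet.Activation{Atype: "relu"}, nnet.Linear{Nout: 64}},
        nil,
    )
    // Append the block to an existing config (conf is a nnet.Config built elsewhere).
    conf = conf.AddLayers(block, nnet.Activation{Atype: "relu"})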
func (Add) Marshal ¶
func (c Add) Marshal() LayerConfig
type BatchNorm ¶
type BatchNorm struct{ AvgFactor, Epsilon float64 }
Batch normalisation layer.
func (BatchNorm) Marshal ¶
func (c BatchNorm) Marshal() LayerConfig
type BatchNormLayer ¶
type BatchNormLayer interface {
    ParamLayer
    Stats() (w, b, runMean, runVar *num.Array)
}
BatchNormLayer stores the scale, shift, running mean and variance
type Config ¶
type Config struct {
    DataSet      string
    Optimiser    OptimiserType
    Eta          float64
    EtaDecay     float64
    EtaDecayStep int
    Lambda       float64
    Momentum     float64
    WeightInit   InitType
    Bias         float64
    Shuffle      bool
    Normalise    bool
    Distort      bool
    TrainRuns    int
    TrainBatch   int
    TestBatch    int
    MaxEpoch     int
    MaxSeconds   int
    MaxSamples   int
    LogEvery     int
    StopAfter    int
    ExtraEpochs  int
    ValidEMA     float64
    MinLoss      float64
    RandSeed     int64
    DebugLevel   int
    UseGPU       bool
    Profile      bool
    MemProfile   bool
    Layers       []LayerConfig
}
Training configuration settings
func LoadConfig ¶
Load network configuration from a JSON file under DataDir.
func (Config) AddLayers ¶
func (c Config) AddLayers(layers ...ConfigLayer) Config
Append layers to the config struct
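A sketch of building and saving a small fully connected configuration (the field values, layer sizes and file name are illustrative):

    conf := nnet.Config{
        DataSet:    "mnist",
        Optimiser:  nnet.SGDOpt,
        Eta:        0.1,
        TrainBatch: 100,
        TestBatch:  1000,
        MaxEpoch:   20,
    }.AddLayers(
        nnet.Linear{Nout: 100},
        nnet.Activation{Atype: "relu"},
        nnet.Linear{Nout: 10},
    )
    nnet.CheckErr(conf.Save("mnist_mlp.conf"))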
func (Config) DatasetConfig ¶
func (c Config) DatasetConfig(test bool) DatasetOptions
func (Config) OptimiserParams ¶
Get learning rate and weight decay
type ConfigLayer ¶
type ConfigLayer interface {
    Marshal() LayerConfig
    Type() string
    String() string
}
type Conv ¶
Convolutional layer, implements ParamLayer interface.
func (Conv) Marshal ¶
func (c Conv) Marshal() LayerConfig
type Data ¶
type Data interface {
    Len() int
    Classes() []string
    ClassSize() int
    Shape() []int
    Label(index []int, label []int32)
    Input(index []int, buf []float32, t *img.Transformer)
    Image(ix int, channel string) *img.Image
    Encode(w io.Writer) error
    Decode(r io.Reader) error
}
Data interface type represents the raw data for a training or test set
func LoadDataFile ¶
Decode data from file in gob format under DataDir
type Dataset ¶
type Dataset struct {
    Data
    Samples   int
    BatchSize int
    Batches   int
    sync.WaitGroup
    // contains filtered or unexported fields
}
Dataset type encapsulates a set of training, test or validation data.
func NewDataset ¶
Create a new Dataset struct, allocate array buffers and set the batch size and maxSamples
type DatasetOptions ¶
Config options for dataset
type Dropout ¶
type Dropout struct{ Ratio float64 }
Dropout layer, randomly drops given ratio of nodes.
func (Dropout) Marshal ¶
func (c Dropout) Marshal() LayerConfig
type Flatten ¶
type Flatten struct{}
Flatten layer reshapes from 4 to 2 dimensions.
func (Flatten) Marshal ¶
func (c Flatten) Marshal() LayerConfig
type Layer ¶
type Layer interface {
    ConfigLayer
    Init(q num.Queue, inShape []int, opts num.LayerOpts, seed int64, cfg *Config) (workSize, inSize int)
    InShape() []int
    OutShape() []int
    Fprop(q num.Queue, in *num.Array, work num.Buffer, trainMode bool) *num.Array
    Bprop(q num.Queue, grad, dsrc *num.Array, work [3]num.Buffer) *num.Array
    Output() *num.Array
    Memory() (weights, outputs, temp int)
    BpropData() bool
    Release()
}
Layer interface type represents one layer of the neural net.
type LayerConfig ¶
type LayerConfig struct {
    Type string
    Data json.RawMessage
}
Layer configuration details
func (LayerConfig) Unmarshal ¶
func (l LayerConfig) Unmarshal() Layer
Unmarshal JSON data and construct new layer
type LayerGroup ¶
LayerGroup is a compound layer made up of multiple layers
type Linear ¶
type Linear struct{ Nout int }
Linear fully connected layer, implements ParamLayer interface.
func (Linear) Marshal ¶
func (c Linear) Marshal() LayerConfig
type Network ¶
type Network struct {
    Config
    Layers    []Layer
    InShape   []int
    WorkSpace [3]num.Buffer
    // contains filtered or unexported fields
}
Network type represents a multilayer neural network model.
func New ¶
func New(queue num.Queue, conf Config, batchSize int, inShape []int, bprop bool, rng *rand.Rand) *Network
New function creates a new network with the given layers. If bprop is true then allocate memory for back propagation.
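A sketch of constructing a network; the num.Queue comes from the num package (not documented here) and the input shape is illustrative:

    rng := nnet.SetSeed(conf.RandSeed)
    // queue is a num.Queue created via the num package for the chosen device.
    net := nnet.New(queue, conf, conf.TrainBatch, []int{28, 28, 1}, true, rng)
    net.InitWeights(rng)
    defer net.Release()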
func (*Network) BatchError ¶
Get the total error for this batch; if pred is not nil then save the predicted values.
func (*Network) Error ¶
Calculate the error from the predicted versus actual values. If the pred slice is not nil then also return the predicted output classes.
func (*Network) InitWeights ¶
Initialise network weights using a linear or normal distribution. Weights for each layer are scaled by 1/sqrt(nin)
func (*Network) MemoryProfile ¶
Print profile of allocated memory
type OptimiserType ¶
type OptimiserType int
Optimiser type
const (
    SGDOpt OptimiserType = iota
    NesterovOpt
    RMSpropOpt
    AdamOpt
    AMSGradOpt
)
func NewOptType ¶
func NewOptType(name string) (OptimiserType, error)
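Look up an optimiser type by name, e.g. (the exact name strings are assumed; Options lists the valid choices):

    opt, err := nnet.NewOptType("adam")
    nnet.CheckErr(err)
    conf.Optimiser = opt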
func (OptimiserType) ExtraArrays ¶
func (t OptimiserType) ExtraArrays() int
func (OptimiserType) Options ¶
func (t OptimiserType) Options() []string
func (OptimiserType) String ¶
func (t OptimiserType) String() string
type OptionList ¶
type OutputLayer ¶
OutputLayer is the final layer in the stack
type ParamLayer ¶
type ParamLayer interface {
    Layer
    Params() (W, B *num.Array)
    InitParams(q num.Queue, init InitType, bias float64, rng *rand.Rand)
    WeightDecay(q num.Queue, decay float32)
    UpdateParams(q num.Queue, opt Optimiser)
    Copy(q num.Queue, layer Layer)
    Export(q num.Queue) []uint32
    Import(q num.Queue, vec []uint32)
    NumWeights() int
}
ParamLayer is a layer which may have weight and bias parameters
type Pool ¶
Max pooling layer, should follow conv layer.
func (Pool) Marshal ¶
func (c Pool) Marshal() LayerConfig
type Stats ¶
type Stats struct {
    Epoch     int
    AvgLoss   float64
    Loss      []float64
    Error     []float64
    BestSince int
    TrainTime time.Duration
    Elapsed   time.Duration
}
Training statistics
func (Stats) FormatElapsed ¶
func (Stats) FormatError ¶
func (Stats) FormatLoss ¶
type TestBase ¶
type TestBase struct {
    Net     *Network
    Data    map[string]*Dataset
    Pred    map[string][]int32
    Stats   []Stats
    Headers []string
    // contains filtered or unexported fields
}
Tester which evaluates the loss and error for each of the data sets and updates the stats.
func NewTestBase ¶
func NewTestBase() *TestBase
Create a new base class which implements the Tester interface.
func (*TestBase) Init ¶
func (t *TestBase) Init(dev num.Device, conf Config, data map[string]Data, rng *rand.Rand) *TestBase
Initialise the test dataset, network and other configuration.
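A sketch of wiring everything together for a training run; dev is a num.Device from the num package, and construction of the training *Dataset via NewDataset is elided since its full signature is not shown above:

    data, err := nnet.LoadData(conf.DataSet)
    nnet.CheckErr(err)
    tester := nnet.NewTestBase().Init(dev, conf, data, rng)
    defer tester.Release()
    // trainDset is a *Dataset built from the training portion of data, e.g. via NewDataset.
    nnet.Train(net, trainDset, tester)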