Documentation ¶
Index ¶
- func MaxReLU(maxReturnedValue float64) func(float64) float64
- func ReLU(f float64) float64
- func Sigmoid(f float64) float64
- type ActivationFunc
- type CostFunc
- type CrossEntropy
- type DataPoint
- type HyperParameters
- type LayerLvl0
- type LayerOptimized
- func (layer LayerOptimized) ApplyGradients(learnRate, regularization, momentum float64)
- func (l LayerOptimized) Dims() (input, output int)
- func (layer LayerOptimized) StoreOutputs(inputs []float64) (weightOut, activations []float64)
- func (layer LayerOptimized) UpdateGradients(learnData layerLearnData)
- type LayerSetup
- type MeanSquaredError
- type Model2D
- type NetworkLvl0
- func (nn NetworkLvl0) CalculateOutputs(input []float64) []float64
- func (nn NetworkLvl0) Classify(expectedOutput, input []float64) (classification int, cost float64)
- func (nn NetworkLvl0) Cost(trainingData []DataPoint) (totalCost float64)
- func (nn NetworkLvl0) Dims() (input, output int)
- func (nn *NetworkLvl0) Export() (setup []LayerSetup)
- type NetworkOptimized
- func (nn *NetworkOptimized) Classify(inputs []float64) (prediction int, outputs []float64)
- func (nn *NetworkOptimized) Dims() (numIn, numOut int)
- func (nn *NetworkOptimized) Export() (exported []LayerSetup)
- func (nn *NetworkOptimized) Import(layers []LayerSetup, fn func() ActivationFunc)
- func (nn *NetworkOptimized) Learn(trainingData []DataPoint, learnRate, regularization, momentum float64)
- func (nn *NetworkOptimized) StoreOutputs(firstInputs []float64) []float64
- func (nn *NetworkOptimized) UpdateGradients(data DataPoint, learnData []layerLearnData)
- type Relu
- type Sigmd
- type SoftMax
- type TrainerLvl0
Examples ¶
- NetworkLvl0 (Mnist)
- NetworkLvl0 (TwoD)
- NetworkOptimized (TwoD)
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func MaxReLU ¶
func MaxReLU(maxReturnedValue float64) func(float64) float64
func ReLU ¶
func ReLU(f float64) float64
func Sigmoid ¶
func Sigmoid(f float64) float64
Types ¶
type ActivationFunc ¶
type CrossEntropy ¶
type CrossEntropy struct {
// contains filtered or unexported fields
}
func (*CrossEntropy) CalculateFromInputs ¶
func (cross *CrossEntropy) CalculateFromInputs(pred, expected []float64, stride int)
func (*CrossEntropy) Derivative ¶
func (cross *CrossEntropy) Derivative(index int) float64
func (*CrossEntropy) TotalCost ¶
func (cross *CrossEntropy) TotalCost() float64
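Based only on the signatures above, here is a minimal sketch of exercising a CrossEntropy value on a single prediction. Two assumptions not confirmed by the package docs: that the zero value is ready for use, and that stride is the per-sample output width.

var cost neurus.CrossEntropy
pred := []float64{0.9, 0.1} // network outputs for one sample
expected := []float64{1, 0} // one-hot label
// stride is assumed here to be the per-sample output width.
cost.CalculateFromInputs(pred, expected, len(pred))
fmt.Println("total cost:", cost.TotalCost())
for i := range pred {
	// Per-output-node gradient of the cost.
	fmt.Println("derivative at", i, "=", cost.Derivative(i))
}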
type DataPoint ¶
type DataPoint struct {
	Input          []float64
	ExpectedOutput []float64
}
func MNISTToDatapoints ¶
type HyperParameters ¶
type HyperParameters struct {
	LayerSizes       []int
	Activation       ActivationFunc
	OutputActivation ActivationFunc
	Cost             CostFunc
	LearnRateInitial float64
	LearnRateDecay   float64
	MiniBatchSize    int
	Momentum         float64
	Regularization   float64
}
func NewHyperParameters ¶
func NewHyperParameters(layerSizes []int) HyperParameters
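NewHyperParameters presumably fills the struct with defaults for the given layer sizes (the NetworkOptimized example below reads LearnRateInitial without ever setting it). Individual fields can be overridden afterwards; a minimal sketch, with illustrative rather than recommended values:

layerSizes := []int{2, 3, 2}
params := neurus.NewHyperParameters(layerSizes)
// Override selected fields before training. These values are
// arbitrary illustrations, not tuned recommendations.
params.MiniBatchSize = 10
params.Momentum = 0.9
params.Regularization = 0.1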
type LayerLvl0 ¶
type LayerLvl0 struct {
// contains filtered or unexported fields
}
func (LayerLvl0) CalculateOutputs ¶
CalculateOutputs runs the inputs through the layer and returns the layer's output values.
type LayerOptimized ¶
type LayerOptimized struct {
// contains filtered or unexported fields
}
func (LayerOptimized) ApplyGradients ¶
func (layer LayerOptimized) ApplyGradients(learnRate, regularization, momentum float64)
ApplyGradients (a.k.a. ApplyAllGradients) applies the layer's accumulated gradients to its weights and biases using the given learn rate, regularization and momentum.
func (LayerOptimized) Dims ¶
func (l LayerOptimized) Dims() (input, output int)
func (LayerOptimized) StoreOutputs ¶
func (layer LayerOptimized) StoreOutputs(inputs []float64) (weightOut, activations []float64)
StoreOutputs stores the result of passing inputs through the layer in weightOut and activations. It is the equivalent of LayerLvl0's CalculateOutputs.
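A hedged sketch of the contract implied by the signature: one forward pass through a single layer yields both the pre-activation weighted sums and the activated outputs. The helper name and the role of weightOut in backpropagation are assumptions:

// feedLayer is a hypothetical helper, not part of the package.
func feedLayer(layer neurus.LayerOptimized, inputs []float64) []float64 {
	weightOut, activations := layer.StoreOutputs(inputs)
	_ = weightOut // pre-activation sums, presumably reused by UpdateGradients
	return activations
}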
func (LayerOptimized) UpdateGradients ¶
func (layer LayerOptimized) UpdateGradients(learnData layerLearnData)
type LayerSetup ¶
func (LayerSetup) Dims ¶
func (ls LayerSetup) Dims() (numNodesIn, numNodesOut int)
type MeanSquaredError ¶
type MeanSquaredError struct {
// contains filtered or unexported fields
}
func (*MeanSquaredError) CalculateFromInputs ¶
func (mse *MeanSquaredError) CalculateFromInputs(pred, expected []float64, stride int)
func (*MeanSquaredError) Derivative ¶
func (mse *MeanSquaredError) Derivative(index int) float64
func (*MeanSquaredError) TotalCost ¶
func (mse *MeanSquaredError) TotalCost() float64
type Model2D ¶
func (Model2D) AddScatter ¶
func (Model2D) ColorModel ¶
func (Model2D) Generate2DData ¶
type NetworkLvl0 ¶
type NetworkLvl0 struct {
// contains filtered or unexported fields
}
NetworkLvl0 is the most basic functional neural network (for some definition of "most") that may have an arbitrary number of layers or nodes. Directly ported from Sebastian Lague's "How to Create a Neural Network (and Train it to Identify Doodles)" https://www.youtube.com/watch?v=hfMk-kjRv4c.
Example (Mnist) ¶
package main

import (
	"fmt"
	"math/rand"
	"strconv"

	"github.com/soypat/neurus"
	"github.com/soypat/neurus/mnist"
)

func main() {
	const (
		batchSize = 10
		h         = 0.0001
		learnRate = 0.05
		epochs    = 12
	)
	// cpuProfile, _ := os.Create("cpu.pprof")
	// pprof.StartCPUProfile(cpuProfile)
	// defer pprof.StopCPUProfile()
	mnistTrain, mnistTest, _ := mnist.Load64()
	if batchSize > len(mnistTrain) {
		panic("use smaller batch size. Max: " + strconv.Itoa(len(mnistTrain)))
	}
	nn := neurus.NewNetworkLvl0(neurus.ReLU, mnist.PixelCount, 16, 10)
	trainingData := neurus.MNISTToDatapoints(mnistTrain)
	testData := neurus.MNISTToDatapoints(mnistTest[:100])
	initialCost := nn.Cost(testData)
	trainer := neurus.NewTrainerFromNetworkLvl0(nn)
	for epoch := 0; epoch < epochs; epoch++ {
		fmt.Printf("epoch %d, cost: %0.5f\n", epoch, nn.Cost(testData))
		startIdx := rand.Intn(len(trainingData) - batchSize)
		miniBatch := trainingData[startIdx : startIdx+batchSize]
		trainer.TrainLvl0(nn, miniBatch, h, learnRate)
	}
	fmt.Printf("start cost:%0.5f, end cost: %0.5f", initialCost, nn.Cost(testData))
	for i := range testData {
		expected := mnistTest[i]
		class, cost := nn.Classify(testData[i].ExpectedOutput, testData[i].Input)
		fmt.Println(expected.Num, class, cost)
	}
}
Output: start cost:
Example (TwoD) ¶
const (
	batchSize = 10
	h         = 0.0001
	learnRate = 0.05
	epochs    = 200000
	numPrints = 10
)
m := neurus.NewModel2D(2, basic2DClassifier)
trainData := m.Generate2DData(400)
testData := m.Generate2DData(100)
fp, _ := os.Create("canon.png")
m.AddScatter(trainData)
png.Encode(fp, m)
fp.Close()
nn := neurus.NewNetworkLvl0(neurus.Sigmoid, 2, 2, 2, 2)
initialCost := nn.Cost(testData)
trainer := neurus.NewTrainerFromNetworkLvl0(nn)
for epoch := 0; epoch < epochs; epoch++ {
	startIdx := rand.Intn(len(trainData) - batchSize)
	miniBatch := trainData[startIdx : startIdx+batchSize]
	trainer.TrainLvl0(nn, miniBatch, h, learnRate)
	if (epoch+1)%(epochs/numPrints) == 0 {
		fmt.Printf("epoch %d, cost: %0.5f\n", epoch, nn.Cost(testData))
	}
}
fmt.Printf("start cost:%0.5f, end cost: %0.5f", initialCost, nn.Cost(testData))
// for i := range testData {
// 	x, y := testData[i].Input[0], testData[i].Input[1]
// 	expectedClass := canonClassifier(x, y)
// 	class, cost := nn.Classify(testData[i].ExpectedOutput, testData[i].Input)
// 	fmt.Println(expectedClass, class, cost)
// }
m.Classifier = func(x, y float64) int {
	return maxf(nn.CalculateOutputs([]float64{x, y}))
}
fp, _ = os.Create("nn.png")
png.Encode(fp, m)
fp.Close()
fp, _ = os.Create("nn.json")
b, _ := json.Marshal(nn.Export())
fp.Write(b)
fp.Close()
Output: start cost:
func NewNetworkLvl0 ¶
func NewNetworkLvl0(activationFunction func(float64) float64, layerSizes ...int) NetworkLvl0
NewNetworkLvl0 creates a new NetworkLvl0 with randomized layers using the math/rand standard library package. To vary randomization use rand.Seed().
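A minimal sketch of reproducible construction (the layer sizes are arbitrary):

rand.Seed(42) // fixed seed for reproducible weight initialization
nn := neurus.NewNetworkLvl0(neurus.Sigmoid, 2, 4, 2)
in, out := nn.Dims()
fmt.Println("dims:", in, out) // dims: 2 2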
func (NetworkLvl0) CalculateOutputs ¶
func (nn NetworkLvl0) CalculateOutputs(input []float64) []float64
CalculateOutputs runs the inputs through the network and returns the output values. This is also known as feeding the inputs forward through the network (a feedforward pass).
func (NetworkLvl0) Classify ¶
func (nn NetworkLvl0) Classify(expectedOutput, input []float64) (classification int, cost float64)
Classify runs the inputs through the network and returns the index of the output node with the highest value, along with the cost computed against expectedOutput.
func (NetworkLvl0) Cost ¶
func (nn NetworkLvl0) Cost(trainingData []DataPoint) (totalCost float64)
Cost calculates the total cost or loss of the training data being passed through the neural network.
func (NetworkLvl0) Dims ¶
func (nn NetworkLvl0) Dims() (input, output int)
Dims returns the input and output dimension of the neural network.
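The CalculateOutputs, Classify and Cost methods above together cover inference and evaluation. A hedged evaluation-loop sketch, assuming nn and testData were built as in the examples above and that labels are one-hot ExpectedOutput slices:

correct := 0
for _, dp := range testData {
	class, _ := nn.Classify(dp.ExpectedOutput, dp.Input)
	// Assumed one-hot labels: the expected class is the index of
	// the largest value in ExpectedOutput.
	want := 0
	for i, v := range dp.ExpectedOutput {
		if v > dp.ExpectedOutput[want] {
			want = i
		}
	}
	if class == want {
		correct++
	}
}
fmt.Printf("accuracy: %d/%d, cost: %0.5f\n", correct, len(testData), nn.Cost(testData))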
func (*NetworkLvl0) Export ¶
func (nn *NetworkLvl0) Export() (setup []LayerSetup)
type NetworkOptimized ¶
type NetworkOptimized struct {
	Cost CostFunc
	// contains filtered or unexported fields
}
Example (TwoD) ¶
package main

import (
	"encoding/json"
	"fmt"
	"image/png"
	"math/rand"
	"os"

	"github.com/soypat/neurus"
)

func main() {
	const (
		epochs    = 7
		numPrints = 10
	)
	activation := func() neurus.ActivationFunc { return new(neurus.Sigmd) }
	// Generate 2D model data and model graph.
	m := neurus.NewModel2D(2, basic2DClassifier)
	trainData := m.Generate2DData(400)
	// testData := m.Generate2DData(100)
	fp, _ := os.Create("canonopt.png")
	m.AddScatter(trainData)
	png.Encode(fp, m)
	fp.Close()
	// Create neural network and hyperparameters.
	layerSizes := []int{2, 2, 2, 2}
	nn := neurus.NewNetworkOptimized(layerSizes, activation, &neurus.MeanSquaredError{}, rand.NewSource(1))
	var importedModel []neurus.LayerSetup
	fp, err := os.Open("nn.json")
	if err != nil {
		panic(err)
	}
	err = json.NewDecoder(fp).Decode(&importedModel)
	if err != nil {
		panic(err)
	}
	nn.Import(importedModel, activation)
	var initialCost float64
	params := neurus.NewHyperParameters(layerSizes)
	params.MiniBatchSize = 10
	// Perform first learn iteration.
	nn.Learn(trainData, params.LearnRateInitial, params.Regularization, params.Momentum)
	initialCost = nn.Cost.TotalCost()
	batchSize := params.MiniBatchSize
	if true {
		for epoch := 0; epoch < epochs; epoch++ {
			startIdx := rand.Intn(len(trainData) - batchSize)
			miniBatch := trainData[startIdx : startIdx+batchSize]
			nn.Learn(miniBatch, params.LearnRateInitial, params.Regularization, params.Momentum)
			if (epoch+1)%(epochs/numPrints+1) == 0 {
				fmt.Printf("epoch %d, cost: %0.5f\n", epoch, nn.Cost.TotalCost())
			}
		}
	}
	fmt.Printf("start cost:%0.5f, end cost: %0.5f", initialCost, nn.Cost.TotalCost())
	m.Classifier = func(x, y float64) int {
		class, _ := nn.Classify([]float64{x, y})
		return class
	}
	fp, _ = os.Create("nnopt.png")
	png.Encode(fp, m)
	fp.Close()
}

var basic2DClassifier = func(x, y float64) int {
	if -x*x+0.5 > y {
		return 1
	}
	return 0
}
Output: start cost:
func NewNetworkOptimized ¶
func NewNetworkOptimized(layerSizes []int, fn func() ActivationFunc, cost CostFunc, src rand.Source) *NetworkOptimized
func (*NetworkOptimized) Classify ¶
func (nn *NetworkOptimized) Classify(inputs []float64) (prediction int, outputs []float64)
func (*NetworkOptimized) Dims ¶
func (nn *NetworkOptimized) Dims() (numIn, numOut int)
func (*NetworkOptimized) Export ¶
func (nn *NetworkOptimized) Export() (exported []LayerSetup)
func (*NetworkOptimized) Import ¶
func (nn *NetworkOptimized) Import(layers []LayerSetup, fn func() ActivationFunc)
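Export and Import make trained networks portable: the example above decodes a JSON []LayerSetup written by the NetworkLvl0 example and loads it with Import. A minimal roundtrip sketch; the file name is illustrative, and activation is the constructor closure from the example above:

// Save: serialize the current layer setup to JSON.
b, err := json.Marshal(nn.Export())
if err != nil {
	panic(err)
}
os.WriteFile("model.json", b, 0o644)

// Load: decode into []LayerSetup and import into a fresh network,
// passing the same activation constructor used originally.
var layers []neurus.LayerSetup
b, _ = os.ReadFile("model.json")
if err := json.Unmarshal(b, &layers); err != nil {
	panic(err)
}
nn2 := neurus.NewNetworkOptimized([]int{2, 2, 2, 2}, activation, &neurus.MeanSquaredError{}, rand.NewSource(1))
nn2.Import(layers, activation)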
func (*NetworkOptimized) Learn ¶
func (nn *NetworkOptimized) Learn(trainingData []DataPoint, learnRate, regularization, momentum float64)
func (*NetworkOptimized) StoreOutputs ¶
func (nn *NetworkOptimized) StoreOutputs(firstInputs []float64) []float64
func (*NetworkOptimized) UpdateGradients ¶
func (nn *NetworkOptimized) UpdateGradients(data DataPoint, learnData []layerLearnData)
type Relu ¶
type Relu struct {
	Inflection float64
	// contains filtered or unexported fields
}
func (*Relu) CalculateFromInputs ¶
func (*Relu) Derivative ¶
type Sigmd ¶
type Sigmd struct {
// contains filtered or unexported fields
}
func (*Sigmd) CalculateFromInputs ¶
func (*Sigmd) Derivative ¶
type SoftMax ¶
type SoftMax struct {
// contains filtered or unexported fields
}
func (*SoftMax) CalculateFromInputs ¶
func (*SoftMax) Derivative ¶
type TrainerLvl0 ¶
type TrainerLvl0 struct {
// contains filtered or unexported fields
}
func NewTrainerFromNetworkLvl0 ¶
func NewTrainerFromNetworkLvl0(nn NetworkLvl0) (tr TrainerLvl0)
func (TrainerLvl0) TrainLvl0 ¶
func (tr TrainerLvl0) TrainLvl0(nn NetworkLvl0, trainingData []DataPoint, h, learnRate float64)
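A hedged single-pass sketch over contiguous mini-batches, assuming nn and trainingData were built as in the NetworkLvl0 examples; the constant values are taken from those examples:

trainer := neurus.NewTrainerFromNetworkLvl0(nn)
const (
	batchSize = 10
	h         = 0.0001 // step parameter, value as used in the examples above
	learnRate = 0.05
)
for start := 0; start+batchSize <= len(trainingData); start += batchSize {
	trainer.TrainLvl0(nn, trainingData[start:start+batchSize], h, learnRate)
}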