go-ml

module
v0.0.0-...-4560c35 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 16, 2020 License: Apache-2.0

README

CircleCI Maintainability Test Coverage Go Report Card License

var mnistConv0 = nn.Sequence(
	nn.Convolution{Channels: 24, Kernel: mx.Dim(3, 3), Activation: nn.ReLU},
	nn.MaxPool{Kernel: mx.Dim(2, 2), Stride: mx.Dim(2, 2)},
	nn.Convolution{Channels: 32, Kernel: mx.Dim(5, 5), Activation: nn.ReLU, BatchNorm: true},
	nn.MaxPool{Kernel: mx.Dim(2, 2), Stride: mx.Dim(2, 2)},
	nn.FullyConnected{Size: 32, Activation: nn.Swish, BatchNorm: true, Dropout: 0.33},
	nn.FullyConnected{Size: 10, Activation: nn.Softmax})

func Test_mnistConv0(t *testing.T) {
	modelFile := iokit.File(fu.ModelPath("mnist_test_conv0.zip"))

	report := nn.Model{
		Network:   mnistConv0,
		Optimizer: nn.Adam{Lr: .001},
		Loss:      nn.CrossEntropyLoss{},
		Input:     mx.Dim(1, 28, 28),
		Seed:      42,
		BatchSize: 32,
		//Context:   mx.GPU,
	}.Feed(model.Dataset{
		Source:   mnist.Data.RandomFlag(model.TestCol, 42, 0.2),
		Label:    model.LabelCol,
		Test:     model.TestCol,
		Features: []string{"Image"},
	}).LuckyTrain(model.Training{
		Iterations: 15,
		ModelFile:  modelFile,
		Metrics:    model.Classification{Accuracy: 0.983},
		Score:      model.ErrorScore,
	})

	fmt.Println(report.TheBest, report.Score)
	fmt.Println(report.History.Round(5))
	assert.Assert(t, model.Accuracy(report.Test) >= 0.98)

	net1 := nn.LuckyObjectify(modelFile) //.Gpu()
	lr := model.LuckyEvaluate(mnist.T10k, model.LabelCol, net1, 32, model.Classification{})
	fmt.Println(lr.Round(5))
	assert.Assert(t, model.Accuracy(lr) >= 0.98)
}

Directories

Path Synopsis
dataset
fu
hyperopt
Package hyperopt implements SMBO/TPE hyper-parameter optimization for ML models Many thanks to Masashi SHIBATA for his excellent work on goptuna I used github.com/c-bata/goptuna as a reference implementation for the paper 'Algorithms for Hyper-Parameter Optimization' https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf TPE sampler mostly derived from goptuna.
Package hyperopt implements SMBO/TPE hyper-parameter optimization for ML models Many thanks to Masashi SHIBATA for his excellent work on goptuna I used github.com/c-bata/goptuna as a reference implementation for the paper 'Algorithms for Hyper-Parameter Optimization' https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf TPE sampler mostly derived from goptuna.
nn
mx
vae
Package vae implements Auto-Encoding Variational Bayes Algorithm https://arxiv.org/pdf/1312.6114.pdf
Package vae implements Auto-Encoding Variational Bayes Algorithm https://arxiv.org/pdf/1312.6114.pdf
Package tables implements immutable tables abstraction
Package tables implements immutable tables abstraction
csv
rdb
xgb

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL