package optim

v1.1.1. Published: Jul 5, 2021. License: Apache-2.0.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func ParseNamedTrainedTensors

func ParseNamedTrainedTensors(vs *nn.VarStore) map[string]nn.Var
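Judging by the name and signature, ParseNamedTrainedTensors collects the trainable variables held by a VarStore into a map keyed by variable name. A minimal sketch of using it to list those names; this module's nn and optim packages plus "fmt" are assumed to be imported:

// listTrainable prints the names of the trainable tensors tracked by vs.
func listTrainable(vs *nn.VarStore) {
	for name := range optim.ParseNamedTrainedTensors(vs) {
		fmt.Println("trainable:", name)
	}
}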

Types

type Adadelta

type Adadelta struct {
	*BaseOptimizer
	*AdadeltaConfig
}

The state has length 2: index 0 is square_avg, index 1 is acc_delta.

func (*Adadelta) Step

func (a *Adadelta) Step()

type AdadeltaConfig

type AdadeltaConfig struct {
	Rho float64
	Eps float64
	Wd  float64
}

func DefaultAdadeltaConfig

func DefaultAdadeltaConfig() *AdadeltaConfig

func NewDefaultAdadeltaConfig

func NewDefaultAdadeltaConfig(rho, eps, wd float64) *AdadeltaConfig

NewDefaultAdadeltaConfig creates an AdadeltaConfig with the specified values.

func (*AdadeltaConfig) Build

func (c *AdadeltaConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*AdadeltaConfig) GetAssNames

func (c *AdadeltaConfig) GetAssNames() []string

type Adagrad

type Adagrad struct {
	*BaseOptimizer
	*AdagradConfig
}

func (*Adagrad) Step

func (a *Adagrad) Step()

The state has length 1: index 0 is sum.

type AdagradConfig

type AdagradConfig struct {
	LrDecay float64
	Eps     float64
	Wd      float64
}

func DefaultAdagradConfig

func DefaultAdagradConfig() *AdagradConfig

func NewAdagradConfig

func NewAdagradConfig(alpha, eps, wd float64) *AdagradConfig

NewAdagradConfig creates an AdagradConfig with the specified values.

func (*AdagradConfig) Build

func (c *AdagradConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*AdagradConfig) GetAssNames

func (c *AdagradConfig) GetAssNames() []string

type Adam

type Adam struct {
	*BaseOptimizer
	*AdamConfig
}

func (*Adam) Step

func (a *Adam) Step()

The state has length 2: index 0 is exp_avg, index 1 is exp_avg_sq. If Amsgrad is set, the length is 3, with index 2 holding max_exp_avg_sq.

type AdamConfig

type AdamConfig struct {
	Beta1   float64
	Beta2   float64
	Wd      float64
	Eps     float64
	Amsgrad bool
}

func DefaultAdamConfig

func DefaultAdamConfig() *AdamConfig

DefaultAdamConfig creates AdamConfig with default values

func DefaultAdamWConfig

func DefaultAdamWConfig() *AdamConfig

DefaultAdamWConfig creates an AdamConfig with default values for AdamW.

func NewAdamConfig

func NewAdamConfig(beta1, beta2, wd float64, eps float64, amsgrad bool) *AdamConfig

NewAdamConfig creates AdamConfig with specified values
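For example, the Adam settings most often quoted in the literature map onto this constructor as follows. The values are illustrative, and passing a nil stateDict to Build is assumed to mean "start from fresh optimizer state":

// buildAdam constructs an Adam optimizer with textbook hyperparameters.
func buildAdam(vs *nn.VarStore) optim.Optimizer {
	// beta1=0.9, beta2=0.999, no weight decay, eps=1e-8, Amsgrad off
	cfg := optim.NewAdamConfig(0.9, 0.999, 0.0, 1e-8, false)
	return cfg.Build(vs, nil, 1e-3) // lr=1e-3; nil stateDict assumed fresh
}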

func (*AdamConfig) Build

func (c *AdamConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*AdamConfig) GetAssNames

func (c *AdamConfig) GetAssNames() []string

type AdamW

type AdamW struct {
	*BaseOptimizer
	*AdamWConfig
}

func (*AdamW) Step

func (a *AdamW) Step()

type AdamWConfig

type AdamWConfig struct {
	Beta1   float64
	Beta2   float64
	Wd      float64
	Eps     float64
	Amsgrad bool
}

func NewAdamWConfig

func NewAdamWConfig(beta1, beta2, wd, eps float64, amsgrad bool) *AdamWConfig

NewAdamWConfig creates an AdamWConfig with the specified values.

func (*AdamWConfig) Build

func (c *AdamWConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*AdamWConfig) GetAssNames

func (c *AdamWConfig) GetAssNames() []string

type BaseOptimizer

type BaseOptimizer struct {
	VS             *nn.VarStore
	States         map[string][]*ts.Tensor
	TrainedTensors map[string]nn.Var
	LR             float64
	StepCount      int64
}

func (BaseOptimizer) ClipGradNorm

func (opt BaseOptimizer) ClipGradNorm(max float64)

ClipGradNorm clips the gradient L2 norm over all trainable parameters (marked TODO in the source).

The norm is computed over all gradients together, as if they were concatenated into a single vector.
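A sketch of where ClipGradNorm would sit in a training step. The backward call is an assumption borrowed from gotch's ts package (MustBackward); adjust it to whatever this module's ts package actually exposes:

// trainStepClipped runs one update with global-norm gradient clipping.
func trainStepClipped(opt *optim.Adam, loss *ts.Tensor) {
	opt.ZeroGrad()        // clear gradients from the previous step
	loss.MustBackward()   // assumed backward call; the name may differ here
	opt.ClipGradNorm(5.0) // cap the combined L2 norm of all grads at 5.0
	opt.Step()            // apply the update using the clipped gradients
}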

func (*BaseOptimizer) ResetStepCount

func (opt *BaseOptimizer) ResetStepCount()

ResetStepCount sets the step count to zero.

func (*BaseOptimizer) StateDict

func (b *BaseOptimizer) StateDict() map[string][]*ts.Tensor
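StateDict returns the per-variable optimizer state (the tensors described in the per-optimizer notes above), and Build accepts such a map, which suggests the checkpoint/resume round trip sketched below; actually persisting the tensors between processes is left to the caller and elided:

// saveAndResume captures optimizer state and rebuilds an optimizer from it.
func saveAndResume(opt optim.Optimizer, vs *nn.VarStore) optim.Optimizer {
	states := opt.StateDict() // map[string][]*ts.Tensor keyed by variable name
	// ... serialize states here if it must outlive the process ...
	return optim.DefaultAdamConfig().Build(vs, states, 1e-3)
}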

func (*BaseOptimizer) Step

func (opt *BaseOptimizer) Step()

Step performs an optimization step, updating the tracked tensors based on their gradients.

func (*BaseOptimizer) ZeroGrad

func (opt *BaseOptimizer) ZeroGrad()

ZeroGrad zeroes the gradient for the tensors tracked by this optimizer.

type Optimizer

type Optimizer interface {
	StateDict() map[string][]*ts.Tensor
	Step()
	ZeroGrad()
}

Optimizer is the interface implemented by optimizers that run gradient descent.

type OptimizerConfig

type OptimizerConfig interface {
	Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer
	GetAssNames() []string
}

OptimizerConfig defines optimizer configurations. These configs can be used to build an optimizer.
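Putting the two interfaces together, a training loop plausibly looks like the sketch below. Only the optim calls come from the signatures above; the loss closure and the MustBackward call are placeholders for this module's actual forward/backward API:

// train runs a fixed number of optimization steps over lossFn.
func train(vs *nn.VarStore, lossFn func() *ts.Tensor) {
	var cfg optim.OptimizerConfig = optim.DefaultAdamConfig()
	opt := cfg.Build(vs, nil, 1e-3) // nil stateDict assumed to mean fresh state

	for step := 0; step < 100; step++ {
		loss := lossFn()    // forward pass producing a scalar loss
		opt.ZeroGrad()      // zero the gradients of all tracked tensors
		loss.MustBackward() // assumed backward call, as in gotch's ts package
		opt.Step()          // update the tensors from their gradients
	}
}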

type RMSProp

type RMSProp struct {
	*BaseOptimizer
	*RMSPropConfig
}

func (*RMSProp) Step

func (r *RMSProp) Step()

The state has length 1: square_avg. If Momentum > 0, momentum_buffer is appended; if Centered is set, grad_avg is appended.

type RMSPropConfig

type RMSPropConfig struct {
	Alpha    float64
	Eps      float64
	Wd       float64
	Momentum float64
	Centered bool
}

func DefaultRMSPropConfig

func DefaultRMSPropConfig() *RMSPropConfig

DefaultRMSPropConfig creates a RMSPropConfig with default values.

func NewRMSPropConfig

func NewRMSPropConfig(alpha, eps, wd, momentum float64, centered bool) *RMSPropConfig

NewRMSPropConfig creates RMSPropConfig with specified values

func (*RMSPropConfig) Build

func (c *RMSPropConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*RMSPropConfig) GetAssNames

func (c *RMSPropConfig) GetAssNames() []string

type SGD

type SGD struct {
	*BaseOptimizer
	*SGDConfig
}

func (*SGD) Step

func (s *SGD) Step()

The state has length 1.

type SGDConfig

type SGDConfig struct {
	Momentum  float64
	Dampening float64
	Wd        float64
	Nesterov  bool
}

SGDConfig holds parameters for building the SGD (Stochastic Gradient Descent) optimizer.

func DefaultSGDConfig

func DefaultSGDConfig() *SGDConfig

DefaultSGDConfig creates SGDConfig with default values.

func NewSGDConfig

func NewSGDConfig(momentum, dampening, wd float64, nesterov bool) *SGDConfig

NewSGDConfig creates the configuration for an SGD optimizer with the specified values.
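For instance, classic Nesterov-momentum settings would be written as below. The values are illustrative; note that PyTorch requires zero dampening when Nesterov is enabled, and this package may behave the same:

// buildSGD constructs an SGD optimizer with Nesterov momentum.
func buildSGD(vs *nn.VarStore) optim.Optimizer {
	// momentum=0.9, dampening=0 (expected with Nesterov), wd=1e-4
	cfg := optim.NewSGDConfig(0.9, 0.0, 1e-4, true)
	return cfg.Build(vs, nil, 0.01) // lr=0.01; nil stateDict assumed fresh
}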

func (*SGDConfig) Build

func (c *SGDConfig) Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer

func (*SGDConfig) GetAssNames

func (c *SGDConfig) GetAssNames() []string
