Documentation ¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
Types ¶
type Adadelta ¶
type Adadelta struct { *BaseOptimizer *AdadeltaConfig }
State has length 2: index 0 is square_avg, index 1 is acc_delta.
type AdadeltaConfig ¶
func DefaultAdadeltaConfig ¶
func DefaultAdadeltaConfig() *AdadeltaConfig
func NewDefaultAdadeltaConfig ¶
func NewDefaultAdadeltaConfig(rho, eps, wd float64) *AdadeltaConfig
NewDefaultAdadeltaConfig creates AdadeltaConfig with specified values
func (*AdadeltaConfig) GetAssNames ¶
func (c *AdadeltaConfig) GetAssNames() []string
type Adagrad ¶
type Adagrad struct { *BaseOptimizer *AdagradConfig }
type AdagradConfig ¶
func DefaultAdagradConfig ¶
func DefaultAdagradConfig() *AdagradConfig
func NewAdagradConfig ¶
func NewAdagradConfig(alpha, eps, wd float64) *AdagradConfig
NewAdagradConfig creates AdagradConfig with specified values
func (*AdagradConfig) GetAssNames ¶
func (c *AdagradConfig) GetAssNames() []string
type Adam ¶
type Adam struct { *BaseOptimizer *AdamConfig }
type AdamConfig ¶
func DefaultAdamConfig ¶
func DefaultAdamConfig() *AdamConfig
DefaultAdamConfig creates AdamConfig with default values
func DefaultAdamWConfig ¶
func DefaultAdamWConfig() *AdamConfig
DefaultAdamWConfig creates AdamConfig with default values suitable for AdamW
func NewAdamConfig ¶
func NewAdamConfig(beta1, beta2, wd float64, eps float64, amsgrad bool) *AdamConfig
NewAdamConfig creates AdamConfig with specified values
func (*AdamConfig) GetAssNames ¶
func (c *AdamConfig) GetAssNames() []string
type AdamW ¶
type AdamW struct { *BaseOptimizer *AdamWConfig }
type AdamWConfig ¶
func NewAdamWConfig ¶
func NewAdamWConfig(beta1, beta2, wd, eps float64, amsgrad bool) *AdamWConfig
NewAdamWConfig creates AdamWConfig with specified values
func (*AdamWConfig) GetAssNames ¶
func (c *AdamWConfig) GetAssNames() []string
type BaseOptimizer ¶
type BaseOptimizer struct { VS *nn.VarStore States map[string][]*ts.Tensor TrainedTensors map[string]nn.Var LR float64 StepCount int64 }
func (BaseOptimizer) ClipGradNorm ¶
func (opt BaseOptimizer) ClipGradNorm(max float64)
TODO. ClipGradNorm clips gradient L2 norm over all trainable parameters.
The norm is computed over all gradients together, as if they were concatenated into a single vector.
func (*BaseOptimizer) ResetStepCount ¶
func (opt *BaseOptimizer) ResetStepCount()
ResetStepCount set step count to zero.
func (*BaseOptimizer) Step ¶
func (opt *BaseOptimizer) Step()
Step performs an optimization step, updating the tracked tensors based on their gradients.
func (*BaseOptimizer) ZeroGrad ¶
func (opt *BaseOptimizer) ZeroGrad()
ZeroGrad zeroes the gradient for the tensors tracked by this optimizer.
type OptimizerConfig ¶
type OptimizerConfig interface { Build(vs *nn.VarStore, stateDict map[string][]*ts.Tensor, lr float64) Optimizer GetAssNames() []string }
OptimizerConfig defines Optimizer configurations. These configs can be used to build optimizer.
type RMSProp ¶
type RMSProp struct { *BaseOptimizer *RMSPropConfig }
type RMSPropConfig ¶
func DefaultRMSPropConfig ¶
func DefaultRMSPropConfig() *RMSPropConfig
DefaultRMSPropConfig creates RMSPropConfig with default values
func NewRMSPropConfig ¶
func NewRMSPropConfig(alpha, eps, wd, momentum float64, centered bool) *RMSPropConfig
NewRMSPropConfig creates RMSPropConfig with specified values
func (*RMSPropConfig) GetAssNames ¶
func (c *RMSPropConfig) GetAssNames() []string
type SGD ¶
type SGD struct { *BaseOptimizer *SGDConfig }
type SGDConfig ¶
SGDConfig holds parameters for building the SGD (Stochastic Gradient Descent) optimizer.
func DefaultSGDConfig ¶
func DefaultSGDConfig() *SGDConfig
DefaultSGDConfig creates SGDConfig with default values.
func NewSGDConfig ¶
NewSGDConfig creates the configuration for a SGD optimizer with specified values