pvlv

package
v1.1.17 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Nov 24, 2020 License: BSD-3-Clause Imports: 16 Imported by: 2

README

PVLV: Primary Value, Learned Value

TODO: computational-level overview of implementation goes here!

Documentation

Index

Constants

View Source
const NoUSTimeIn = 320

Variables

View Source
var (
	TraceVars       = []string{"NTr", "Tr"}
	SynapseVarProps = map[string]string{
		"NTr": `auto-scale:"+"`,
		"Tr":  `auto-scale:"+"`,
	}
	TraceVarsMap   map[string]int
	SynapseVarsAll []string
)
View Source
var (
	// ModNeuronVars are the modulator neurons plus some custom variables that sub-types use for their
	// algo-specific cases -- need a consistent set of overall network-level vars for display / generic
	// interface.
	ModNeuronVars = []string{
		DA.String(), ACh.String(), SE.String(),
		ModAct.String(), ModLevel.String(), ModNet.String(), ModLrn.String(),
		PVAct.String(),
	}
	ModNeuronVarsMap map[string]int
	ModNeuronVarsAll []string
)
View Source
var ContextInShape = []int{20, 3}

Context

View Source
var CtxMap = map[string]Context{
	CtxA.String():    CtxA,
	CtxA_B.String():  CtxA_B,
	CtxA_C.String():  CtxA_C,
	CtxB.String():    CtxB,
	CtxB_B.String():  CtxB_B,
	CtxB_C.String():  CtxB_C,
	CtxC.String():    CtxC,
	CtxC_B.String():  CtxC_B,
	CtxC_C.String():  CtxC_C,
	CtxD.String():    CtxD,
	CtxD_B.String():  CtxD_B,
	CtxD_C.String():  CtxD_C,
	CtxE.String():    CtxE,
	CtxE_B.String():  CtxE_B,
	CtxE_C.String():  CtxE_C,
	CtxF.String():    CtxF,
	CtxF_B.String():  CtxF_B,
	CtxF_C.String():  CtxF_C,
	CtxU.String():    CtxU,
	CtxU_B.String():  CtxU_B,
	CtxU_C.String():  CtxU_C,
	CtxV.String():    CtxV,
	CtxV_B.String():  CtxV_B,
	CtxV_C.String():  CtxV_C,
	CtxW.String():    CtxW,
	CtxW_B.String():  CtxW_B,
	CtxW_C.String():  CtxW_C,
	CtxX.String():    CtxX,
	CtxX_B.String():  CtxX_B,
	CtxX_C.String():  CtxX_C,
	CtxY.String():    CtxY,
	CtxY_B.String():  CtxY_B,
	CtxY_C.String():  CtxY_C,
	CtxZ.String():    CtxZ,
	CtxZ_B.String():  CtxZ_B,
	CtxZ_C.String():  CtxZ_C,
	CtxAX.String():   CtxAX,
	CtxAX_B.String(): CtxAX_B,
	CtxAX_C.String(): CtxAX_C,
	CtxAB.String():   CtxAB,
	CtxAB_B.String(): CtxAB_B,
	CtxAB_C.String(): CtxAB_C,
	CtxBY.String():   CtxBY,
	CtxBY_B.String(): CtxBY_B,
	CtxBY_C.String(): CtxBY_C,
	CtxCD.String():   CtxCD,
	CtxCD_B.String(): CtxCD_B,
	CtxCD_C.String(): CtxCD_C,
	CtxCX.String():   CtxCX,
	CtxCX_B.String(): CtxCX_B,
	CtxCX_C.String(): CtxCX_C,
	CtxCY.String():   CtxCY,
	CtxCY_B.String(): CtxCY_B,
	CtxCY_C.String(): CtxCY_C,
	CtxCZ.String():   CtxCZ,
	CtxCZ_B.String(): CtxCZ_B,
	CtxCZ_C.String(): CtxCZ_C,
	CtxDU.String():   CtxDU,
}
View Source
var CtxRe, _ = regexp.Compile("([ABCDEFUVWXYZ])([ABCDEFUVWXYZ]?)_?([ABCDEFUVWXYZ]?)")

var StimRe, _ = regexp.Compile("([ABCDEFUVWXYZ])([ABCDEFUVWXYZ]?)_(Rf|NR)")

View Source
var KiT_AcqExt = kit.Enums.AddEnum(NAcqExt, kit.NotBitFlag, nil)
View Source
var KiT_BlAmygLayer = kit.Types.AddType(&BlAmygLayer{}, nil)
View Source
var KiT_CElAmygLayer = kit.Types.AddType(&CElAmygLayer{}, nil)
View Source
var KiT_Context = kit.Enums.AddEnum(NContexts+1, kit.NotBitFlag, nil)
View Source
var KiT_DALrnRule = kit.Enums.AddEnum(DALrnRuleN, kit.NotBitFlag, nil)
View Source
var KiT_DaRType = kit.Enums.AddEnum(DaRTypeN, kit.NotBitFlag, nil)
View Source
var KiT_LHbRMTgLayer = kit.Types.AddType(&LHbRMTgLayer{}, leabra.LayerProps)
View Source
var KiT_MSNLayer = kit.Types.AddType(&MSNLayer{}, leabra.LayerProps)
View Source
var KiT_ModLayer = kit.Types.AddType(&ModLayer{}, nil)
View Source
var KiT_ModNeuron = kit.Types.AddType(&ModNeuron{}, nil)
View Source
var KiT_ModNeuronVar = kit.Enums.AddEnum(ModNeuronVarsN, kit.NotBitFlag, nil)
View Source
var KiT_ModParams = kit.Types.AddType(&ModParams{}, nil)
View Source
var KiT_Modulators = kit.Types.AddType(&Modulators{}, nil)
View Source
var KiT_Network = kit.Types.AddType(&Network{}, NetworkProps)
View Source
var KiT_PPTgLayer = kit.Types.AddType(&PPTgLayer{}, leabra.LayerProps)
View Source
var KiT_Stim = kit.Enums.AddEnum(StimN+1, kit.NotBitFlag, nil)
View Source
var KiT_StriatalCompartment = kit.Enums.AddEnum(NSComp, kit.NotBitFlag, nil)
View Source
var KiT_Tick = kit.Enums.AddEnum(TickN+1, kit.NotBitFlag, nil)
View Source
var KiT_Valence = kit.Enums.AddEnum(ValenceN, kit.NotBitFlag, nil)
View Source
var NegSMap = map[string]NegUS{
	Shock.String():    Shock,
	Nausea.String():   Nausea,
	Sharp.String():    Sharp,
	OtherNeg.String(): OtherNeg,
}
View Source
var NetworkProps = leabra.NetworkProps
View Source
var PosSMap = map[string]PosUS{
	Water.String():    Water,
	Food.String():     Food,
	Mate.String():     Mate,
	OtherPos.String(): OtherPos,
}
View Source
var StimInShape = []int{12, 1}

Stim : conditioned stimuli

View Source
var StimMap = map[string]Stim{
	StmA.String():    StmA,
	StmB.String():    StmB,
	StmC.String():    StmC,
	StmD.String():    StmD,
	StmE.String():    StmE,
	StmF.String():    StmF,
	StmU.String():    StmU,
	StmV.String():    StmV,
	StmW.String():    StmW,
	StmX.String():    StmX,
	StmY.String():    StmY,
	StmZ.String():    StmZ,
	StmNone.String(): StmNone,
	"":               StmNone,
}
View Source
var StmGrpMap = map[Stim]int{
	StmNone: 0,
	StmA:    1,
	StmB:    2,
	StmC:    3,
	StmD:    1,
	StmE:    2,
	StmF:    3,
	StmX:    4,
	StmU:    4,
	StmY:    5,
	StmV:    5,
	StmZ:    6,
	StmW:    7,
}
View Source
var TickMap = map[string]Tick{
	T0.String():      T0,
	T1.String():      T1,
	T2.String():      T2,
	T3.String():      T3,
	T4.String():      T4,
	T5.String():      T5,
	T6.String():      T6,
	T7.String():      T7,
	T8.String():      T8,
	T9.String():      T9,
	TckNone.String(): TckNone,
}
View Source
var USInShape = []int{4}
View Source
var USNone = US(PosUSNone)
View Source
var USTRe, _ = regexp.Compile("([ABCDEFUVWXYZ]?)_?(Pos|Neg)US([0123])_t([01234])")
View Source
var USTimeInShape = []int{16, 2, 4, 5}

USTimeIn

View Source
var ValMap = map[string]Valence{
	POS.String(): POS,
	NEG.String(): NEG,
}

Functions

func NeuronVarIdxByName

func NeuronVarIdxByName(varNm string) (int, error)

NeuronVarIdxByName returns the index of the variable in the Neuron, or error

func OneHotUS

func OneHotUS(us US) int

func SynapseVarByName

func SynapseVarByName(varNm string) (int, error)

SynapseVarByName returns the index of the variable in the Synapse, or error

func Tensor

func Tensor(us US) etensor.Tensor

func TensorScaled

func TensorScaled(us US, scale float32) etensor.Tensor

func TotalAct

func TotalAct(ly emer.Layer) float32

func TraceVarByName

func TraceVarByName(varNm string) (int, error)

TraceVarByName returns the index of the trace variable by name, or error (copied from SynapseVarByName)

Types

type AcqExt

type AcqExt int
const (
	Acq AcqExt = iota
	Ext
	NAcqExt
)

func (AcqExt) String

func (i AcqExt) String() string

type AmygModPrjn

type AmygModPrjn struct {
	leabra.Prjn
	SetScale    bool        `` /* 550-byte string literal not displayed */
	SetScaleMin float32     `desc:"minimum scale value for SetScale projections"`
	SetScaleMax float32     `desc:"maximum scale value for SetScale projections"`
	InitWtVal   float32     `` /* 303-byte string literal not displayed */
	DALRGain    float32     `desc:"gain multiplier on abs(DA) learning rate multiplier"`
	DALRBase    float32     `` /* 176-byte string literal not displayed */
	DALrnThr    float32     `` /* 234-byte string literal not displayed */
	ActDeltaThr float32     `` /* 207-byte string literal not displayed */
	ActLrnMod   bool        `desc:"if true, recv unit deep_lrn value modulates learning"`
	ActLrnThr   float32     `desc:"only ru->deep_lrn values > this get to learn - 0.05f seems to work okay"`
	DaMod       DaModParams `desc:"parameters for dopaminergic modulation"`
}

func (*AmygModPrjn) AsAmygModPrjn

func (pj *AmygModPrjn) AsAmygModPrjn() *AmygModPrjn

func (*AmygModPrjn) DWt

func (pj *AmygModPrjn) DWt()

Compute DA-modulated weight changes for amygdala layers

func (*AmygModPrjn) Defaults

func (pj *AmygModPrjn) Defaults()

func (*AmygModPrjn) GaussScale

func (pj *AmygModPrjn) GaussScale(_, _ int, _, _ *etensor.Shape) float32

GaussScale returns gaussian weight value for given unit indexes in given send and recv layers according to Gaussian Sigma and MaxWt.

func (*AmygModPrjn) InitWts

func (pj *AmygModPrjn) InitWts()

type AvgMaxModLayer

type AvgMaxModLayer interface {
	AvgMaxMod(*leabra.Time)
}

type BlAmygLayer

type BlAmygLayer struct {
	ModLayer `desc:"modulation state"`
	Valence  Valence               `desc:"positive or negative valence"`
	ILI      interinhib.InterInhib `desc:"inter-layer inhibition parameters and state"`
}

func (*BlAmygLayer) AsBlAmygLayer

func (ly *BlAmygLayer) AsBlAmygLayer() *BlAmygLayer

func (*BlAmygLayer) Build

func (ly *BlAmygLayer) Build() error

func (*BlAmygLayer) Defaults

func (ly *BlAmygLayer) Defaults()

func (*BlAmygLayer) GetMonitorVal

func (ly *BlAmygLayer) GetMonitorVal(data []string) float64

func (*BlAmygLayer) InhibFmGeAct

func (ly *BlAmygLayer) InhibFmGeAct(ltime *leabra.Time)

InhibFmGeAct computes inhibition Gi from Ge and Act averages within relevant Pools

type CElAmygLayer

type CElAmygLayer struct {
	ModLayer
	CElTyp     CElAmygLayerType `desc:"basic parameters determining what type CEl layer this is"`
	AcqDeepMod bool             `` /* 216-byte string literal not displayed */
}

func (*CElAmygLayer) AsCElAmygLayer

func (ly *CElAmygLayer) AsCElAmygLayer() *CElAmygLayer

func (*CElAmygLayer) Build

func (ly *CElAmygLayer) Build() error

func (*CElAmygLayer) Defaults

func (ly *CElAmygLayer) Defaults()

type CElAmygLayerType

type CElAmygLayerType struct {
	AcqExt  AcqExt  `desc:"acquisition or extinction"`
	Valence Valence `desc:"positive or negative DA valence"`
}

type Context

type Context int
const (
	CtxA      Context = iota // A
	CtxA_B                   // A_B
	CtxA_C                   // A_C
	CtxB                     // B
	CtxB_B                   // B_B
	CtxB_C                   // B_C
	CtxC                     // C
	CtxC_B                   // C_B
	CtxC_C                   // C_C
	CtxD                     // D
	CtxD_B                   // D_B
	CtxD_C                   // D_C
	CtxE                     // E
	CtxE_B                   // E_B
	CtxE_C                   // E_C
	CtxF                     // F
	CtxF_B                   // F_B
	CtxF_C                   // F_C
	CtxU                     // U
	CtxU_B                   // U_B
	CtxU_C                   // U_C
	CtxV                     // V
	CtxV_B                   // V_B
	CtxV_C                   // V_C
	CtxW                     // W
	CtxW_B                   // W_B
	CtxW_C                   // W_C
	CtxX                     // X
	CtxX_B                   // X_B
	CtxX_C                   // X_C
	CtxY                     // Y
	CtxY_B                   // Y_B
	CtxY_C                   // Y_C
	CtxZ                     // Z
	CtxZ_B                   // Z_B
	CtxZ_C                   // Z_C
	CtxAX                    // AX
	CtxAX_B                  // AX_B
	CtxAX_C                  // AX_C
	CtxAB                    // AB
	CtxAB_B                  // AB_B
	CtxAB_C                  // AB_C
	CtxBY                    // BY
	CtxBY_B                  // BY_B
	CtxBY_C                  // BY_C
	CtxCD                    // CD
	CtxCD_B                  // CD_B
	CtxCD_C                  // CD_C
	CtxCX                    // CX
	CtxCX_B                  // CX_B
	CtxCX_C                  // CX_C
	CtxCY                    // CY
	CtxCY_B                  // CY_B
	CtxCY_C                  // CY_C
	CtxCZ                    // CZ
	CtxCZ_B                  // CZ_B
	CtxCZ_C                  // CZ_C
	CtxDU                    // DU
	CtxNone                  // NoContext
	NContexts = CtxNone
)

func (Context) Empty

func (ctx Context) Empty() bool

func (Context) FromString

func (ctx Context) FromString(s string) Inputs

func (Context) Int

func (ctx Context) Int() int

func (Context) OneHot

func (ctx Context) OneHot() int

func (Context) Parts

func (ctx Context) Parts() []int

func (Context) String

func (i Context) String() string

func (Context) Tensor

func (ctx Context) Tensor() etensor.Tensor

func (Context) TensorScaled

func (ctx Context) TensorScaled(scale float32) etensor.Tensor

type DALrnRule

type DALrnRule int
const (
	DAHebbVS DALrnRule = iota
	TraceNoThalVS
	DALrnRuleN
)

func (DALrnRule) String

func (i DALrnRule) String() string

type DaModParams added in v1.1.12

type DaModParams struct {
	On        bool    `desc:"whether to use dopamine modulation"`
	RecepType DaRType `inactive:"+" desc:"dopamine receptor type, D1 or D2"`
	BurstGain float32 `` /* 173-byte string literal not displayed */
	DipGain   float32 `` /* 241-byte string literal not displayed */
}

type DaRType

type DaRType int

DaRType for D1R and D2R dopamine receptors

const (
	// D1R primarily expresses Dopamine D1 Receptors -- dopamine is excitatory and bursts of dopamine lead to increases in synaptic weight, while dips lead to decreases -- direct pathway in dorsal striatum
	D1R DaRType = iota

	// D2R primarily expresses Dopamine D2 Receptors -- dopamine is inhibitory and bursts of dopamine lead to decreases in synaptic weight, while dips lead to increases -- indirect pathway in dorsal striatum
	D2R

	DaRTypeN
)

func (DaRType) String

func (i DaRType) String() string

type DelInhState

type DelInhState struct {
	GePrvQ   float32 `desc:"netin from previous quarter, used for delayed inhibition"`
	GePrvTrl float32 `desc:"netin from previous \"trial\" (alpha cycle), used for delayed inhibition"`
}

DelInhState contains extra variables for MSNLayer neurons -- stored separately

type DelayedInhibParams

type DelayedInhibParams struct {
	Active bool    `desc:"add in a portion of inhibition from previous time period"`
	PrvQ   float32 `desc:"proportion of per-unit net input on previous gamma-frequency quarter to add in as inhibition"`
	PrvTrl float32 `desc:"proportion of per-unit net input on previous trial to add in as inhibition"`
}

Delayed inhibition for matrix compartment layers

type IAmygPrjn

type IAmygPrjn interface {
	AsAmygModPrjn() *AmygModPrjn
}

type IBlAmygLayer

type IBlAmygLayer interface {
	AsBlAmygLayer() *BlAmygLayer
}

type ICElAmygLayer

type ICElAmygLayer interface {
	AsCElAmygLayer() *CElAmygLayer
}

type IMSNLayer

type IMSNLayer interface {
	AsMSNLayer() *MSNLayer
}

type IMSNPrjn

type IMSNPrjn interface {
	AsMSNPrjn() *MSNPrjn
}

type IModLayer

type IModLayer interface {
	AsMod() *ModLayer
}

type INetwork

type INetwork interface {
	AsLeabra() *leabra.Network
}

type ISetScalePrjn

type ISetScalePrjn interface {
	InitWts()
}

type IUS

type IUS interface {
	Val() Valence
	String() string
	Int() int
}

US, either positive or negative Valence

type Inputs

type Inputs interface {
	Empty() bool
	FromString(s string) Inputs
	OneHot() int
	Tensor() etensor.Tensor
	TensorScaled(scale float32) etensor.Tensor
}
var Fooey Inputs = POS // for testing

type LHBRMTgInternalState added in v1.1.12

type LHBRMTgInternalState struct {
	VSPatchPosD1   float32
	VSPatchPosD2   float32
	VSPatchNegD1   float32
	VSPatchNegD2   float32
	VSMatrixPosD1  float32
	VSMatrixPosD2  float32
	VSMatrixNegD1  float32
	VSMatrixNegD2  float32
	PosPV          float32
	NegPV          float32
	VSPatchPosNet  float32
	VSPatchNegNet  float32
	VSMatrixPosNet float32
	VSMatrixNegNet float32
	NetPos         float32
	NetNeg         float32
}

type LHbRMTgGains

type LHbRMTgGains struct {
	All                float32 `desc:"final overall gain on everything"`
	VSPatchPosD1       float32 `desc:"patch D1 APPETITIVE pathway - versus pos PV outcomes"`
	VSPatchPosD2       float32 `desc:"patch D2 APPETITIVE pathway versus vspatch_pos_D1"`
	VSPatchPosDisinhib float32 `desc:"proportion of positive reward prediction error (RPE) to use if RPE results from a predicted omission of positive"`
	VSMatrixPosD1      float32 `desc:"gain on VS matrix D1 APPETITIVE guys"`
	VSMatrixPosD2      float32 `desc:"VS matrix D2 APPETITIVE"`
	VSPatchNegD1       float32 `desc:"VS patch D1 pathway versus neg PV outcomes"`
	VSPatchNegD2       float32 `desc:"VS patch D2 pathway versus vspatch_neg_D1"`
	VSMatrixNegD1      float32 `desc:"VS matrix D1 AVERSIVE"`
	VSMatrixNegD2      float32 `desc:"VS matrix D2 AVERSIVE"`
}

Gain constants for LHbRMTg inputs

type LHbRMTgLayer

type LHbRMTgLayer struct {
	leabra.Layer
	RcvFrom       emer.LayNames
	Gains         LHbRMTgGains         `view:"inline"`
	PVNegDiscount float32              `` /* 180-byte string literal not displayed */
	InternalState LHBRMTgInternalState // for debugging
}

func AddLHbRMTgLayer

func AddLHbRMTgLayer(nt *Network, name string) *LHbRMTgLayer

func (*LHbRMTgLayer) ActFmG

func (ly *LHbRMTgLayer) ActFmG(ltime *leabra.Time)

func (*LHbRMTgLayer) Build

func (ly *LHbRMTgLayer) Build() error

func (*LHbRMTgLayer) Defaults

func (ly *LHbRMTgLayer) Defaults()

func (*LHbRMTgLayer) GetMonitorVal

func (ly *LHbRMTgLayer) GetMonitorVal(data []string) float64

type MSNLayer

type MSNLayer struct {
	ModLayer
	Compartment StriatalCompartment `inactive:"+" desc:"patch or matrix"`
	DIState     []DelInhState       `desc:"slice of delayed inhibition state for this layer."`
	DIParams    DelayedInhibParams  `view:"no-inline add-fields"`
}

func AddMSNLayer

func AddMSNLayer(nt *Network, name string, nY, nX, nNeurY, nNeurX int, cpmt StriatalCompartment, da DaRType) *MSNLayer

AddMSNLayer adds an MSNLayer of given size, with given name. nY = number of pools in Y dimension, nX is pools in X dimension, and each pool has nNeurY, nNeurX neurons. da gives the DaReceptor type (D1R = Go, D2R = NoGo)

func (*MSNLayer) AlphaCycInit

func (ly *MSNLayer) AlphaCycInit()

func (*MSNLayer) AsMSNLayer

func (ly *MSNLayer) AsMSNLayer() *MSNLayer

func (*MSNLayer) AsMod

func (ly *MSNLayer) AsMod() *ModLayer

func (*MSNLayer) Build

func (ly *MSNLayer) Build() error

Build constructs the layer state, including calling Build on the projections you MUST have properly configured the Inhib.Pool.On setting by this point to properly allocate Pools for the unit groups if necessary.

func (*MSNLayer) ClearMSNTrace

func (ly *MSNLayer) ClearMSNTrace()

func (*MSNLayer) Defaults

func (ly *MSNLayer) Defaults()

func (*MSNLayer) GetDA

func (ly *MSNLayer) GetDA() float32

func (*MSNLayer) GetMonitorVal

func (ly *MSNLayer) GetMonitorVal(data []string) float64

func (*MSNLayer) InhibFmGeAct

func (ly *MSNLayer) InhibFmGeAct(ltime *leabra.Time)

InhibFmGeAct computes inhibition Gi from Ge and Act averages within relevant Pools. This is here for matrix delayed inhibition, not needed otherwise

func (*MSNLayer) InitActs

func (ly *MSNLayer) InitActs()

func (*MSNLayer) ModsFmInc added in v1.1.12

func (ly *MSNLayer) ModsFmInc(_ *leabra.Time)

func (*MSNLayer) PoolDelayedInhib

func (ly *MSNLayer) PoolDelayedInhib(pl *leabra.Pool)

func (*MSNLayer) QuarterInitPrvs

func (ly *MSNLayer) QuarterInitPrvs(ltime *leabra.Time)

func (*MSNLayer) RecvPrjnVals

func (ly *MSNLayer) RecvPrjnVals(vals *[]float32, varNm string, sendLay emer.Layer, sendIdx1D int, prjnType string) error

func (*MSNLayer) SendPrjnVals

func (ly *MSNLayer) SendPrjnVals(vals *[]float32, varNm string, recvLay emer.Layer, recvIdx1D int, prjnType string) error

func (*MSNLayer) SetDA

func (ly *MSNLayer) SetDA(da float32)

type MSNParams

type MSNParams struct {
	Compartment StriatalCompartment `inactive:"+" desc:"patch or matrix"`
}

Parameters for Dorsal Striatum Medium Spiny Neuron computation

type MSNPrjn

type MSNPrjn struct {
	leabra.Prjn
	LearningRule DALrnRule
	Trace        MSNTraceParams `view:"inline" desc:"special parameters for striatum trace learning"`
	TrSyns       []TraceSyn     `desc:"trace synaptic state values, ordered by the sending layer units which owns them -- one-to-one with SConIdx array"`
	SLActVar     string         `desc:"sending layer activation variable name"`
	RLActVar     string         `desc:"receiving layer activation variable name"`
	MaxVSActMod  float32        `` /* 230-byte string literal not displayed */
	DaMod        DaModParams    `desc:"parameters for dopaminergic modulation"`
}

MSNPrjn does dopamine-modulated, for striatum-like layers

func (*MSNPrjn) AsMSNPrjn

func (pj *MSNPrjn) AsMSNPrjn() *MSNPrjn

func (*MSNPrjn) Build

func (pj *MSNPrjn) Build() error

func (*MSNPrjn) ClearTrace

func (pj *MSNPrjn) ClearTrace()

func (*MSNPrjn) DWt

func (pj *MSNPrjn) DWt()

DWt computes the weight change (learning) -- on sending projections.

func (*MSNPrjn) Defaults

func (pj *MSNPrjn) Defaults()

func (*MSNPrjn) InitWts

func (pj *MSNPrjn) InitWts()

func (*MSNPrjn) SynVal

func (pj *MSNPrjn) SynVal(varNm string, sidx, ridx int) float32

func (*MSNPrjn) SynVal1D

func (pj *MSNPrjn) SynVal1D(varIdx int, synIdx int) float32

SynVal1D returns value of given variable index (from SynVarIdx) on given SynIdx. Returns NaN on invalid index. This is the core synapse var access method used by other methods, so it is the only one that needs to be updated for derived layer types.

func (*MSNPrjn) SynVarIdx

func (pj *MSNPrjn) SynVarIdx(varNm string) (int, error)

type MSNTraceParams

type MSNTraceParams struct {
	Deriv       bool    `` /* 305-byte string literal not displayed */
	Decay       float32 `` /* 294-byte string literal not displayed */
	GateLRScale float32 `desc:"learning rate scale factor, if "`
}

Params for for trace-based learning

func (*MSNTraceParams) Defaults

func (tp *MSNTraceParams) Defaults()

func (*MSNTraceParams) MSNActLrnFactor

func (tp *MSNTraceParams) MSNActLrnFactor(act float32) float32

MSNActLrnFactor returns the multiplicative factor for the level of MSN activation. If Deriv is true, the factor is 2 * act * (1-act) -- the factor of 2 compensates for the otherwise reduced learning from these factors. Otherwise it is just act.

type ModLayer

type ModLayer struct {
	leabra.Layer
	ModNeurs     []ModNeuron     `desc:"neuron-level modulation state"`
	ModPools     []ModPool       `desc:"pools for maintaining aggregate values"`
	ModReceivers []ModRcvrParams `desc:"layer names and scale values for mods sent from this layer"`
	ModParams    `desc:"parameters shared by all modulator receiver layers"`
	DaMod        DaModParams `desc:"parameters for dopaminergic modulation"`
	Modulators   `desc:"layer-level neuromodulator levels"`
}

func (*ModLayer) ActFmG

func (ly *ModLayer) ActFmG(_ *leabra.Time)

func (*ModLayer) AddModReceiver

func (ly *ModLayer) AddModReceiver(rcvr ModReceiver, scale float32)

func (*ModLayer) AsLeabra

func (ly *ModLayer) AsLeabra() *leabra.Layer

Get a pointer to the generic Leabra portion of the layer

func (*ModLayer) AsMod

func (ly *ModLayer) AsMod() *ModLayer

AsMod returns a pointer to the ModLayer portion of the layer

func (*ModLayer) AvgMaxMod

func (ly *ModLayer) AvgMaxMod(_ *leabra.Time)

func (*ModLayer) Build

func (ly *ModLayer) Build() error

func (*ModLayer) ClearModActs

func (ly *ModLayer) ClearModActs()

func (*ModLayer) ClearModLevels

func (ly *ModLayer) ClearModLevels()

func (*ModLayer) DALrnFmDA

func (ly *ModLayer) DALrnFmDA(da float32) float32

DALrnFmDA returns effective learning dopamine value from given raw DA value applying Burst and Dip Gain factors, and then reversing sign for D2R. GetDa in cemer

func (*ModLayer) Defaults

func (ly *ModLayer) Defaults()

func (*ModLayer) GScaleFmAvgAct

func (ly *ModLayer) GScaleFmAvgAct()

func (*ModLayer) GetDA

func (ly *ModLayer) GetDA() float32

Functions for rl.DALayer

func (*ModLayer) GetMonitorVal

func (ly *ModLayer) GetMonitorVal(data []string) float64

Retrieve a value for a trace of some quantity, possibly more than just a variable

func (*ModLayer) Init

func (ly *ModLayer) Init()

func (*ModLayer) InitActs

func (ly *ModLayer) InitActs()

func (*ModLayer) ModSendValue

func (ly *ModLayer) ModSendValue(ni int32) float32

func (*ModLayer) ModUnitVals

func (ly *ModLayer) ModUnitVals(vals *[]float32, varNm string) error

func (*ModLayer) ModsFmInc

func (ly *ModLayer) ModsFmInc(_ *leabra.Time)

func (*ModLayer) ReceiveMods

func (ly *ModLayer) ReceiveMods(sender ModSender, scale float32)

func (*ModLayer) SendMods

func (ly *ModLayer) SendMods(_ *leabra.Time)

func (*ModLayer) SetDA

func (ly *ModLayer) SetDA(da float32)

func (*ModLayer) UnitVal1D

func (ly *ModLayer) UnitVal1D(varIdx int, idx int) float32

UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.

func (*ModLayer) UnitValByIdx

func (ly *ModLayer) UnitValByIdx(vidx ModNeuronVar, idx int) float32

UnitValByIdx returns value of given variable by variable index and flat neuron index (from layer or neuron-specific one).

func (*ModLayer) UnitVals

func (ly *ModLayer) UnitVals(vals *[]float32, varNm string) error

UnitVals fills in values of given variable name on unit, for each unit in the layer, into given float32 slice (only resized if not big enough). Returns error on invalid var name.

func (*ModLayer) UnitValsTensor

func (ly *ModLayer) UnitValsTensor(tsr etensor.Tensor, varNm string) error

UnitValsTensor returns values of given variable name on unit, for each unit in the layer, as a float32 tensor in same shape as layer units.

func (*ModLayer) UnitVarIdx

func (ly *ModLayer) UnitVarIdx(varNm string) (int, error)

UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.

func (*ModLayer) UnitVarNames

func (ly *ModLayer) UnitVarNames() []string

UnitVarNames returns a list of variable names available on the units in this layer Mod returns *layer level* vars

func (*ModLayer) UpdateParams

func (ly *ModLayer) UpdateParams()

type ModNeuron

type ModNeuron struct {
	Modulators `desc:"neuron-level modulator activation"`
	ModAct     float32 `desc:"activity level for modulation"`
	ModLevel   float32 `desc:"degree of full modulation to apply"`
	ModNet     float32 `desc:"modulation input from sender"`
	ModLrn     float32 `desc:"multiplier for DA modulation of learning rate"`
	PVAct      float32 `desc:"direct activation from US"`
}

func (*ModNeuron) InitActs

func (mnr *ModNeuron) InitActs()

func (*ModNeuron) VarByIndex

func (mnr *ModNeuron) VarByIndex(idx int) float32

VarByIndex returns variable using index (0 = first variable in NeuronVars list)

func (*ModNeuron) VarByName

func (mnr *ModNeuron) VarByName(varNm string) (float32, error)

VarByName returns variable by name, or error

func (*ModNeuron) VarNames

func (mnr *ModNeuron) VarNames() []string

type ModNeuronVar

type ModNeuronVar int

ModNeuronVars are indexes into extra neuron-level variables

const (
	DA ModNeuronVar = iota
	ACh
	SE
	ModAct
	ModLevel
	ModNet
	ModLrn
	PVAct
	Cust1
	ModNeuronVarsN
)

func (ModNeuronVar) String

func (i ModNeuronVar) String() string

type ModParams

type ModParams struct {
	Minus            float32 `` /* 145-byte string literal not displayed */
	Plus             float32 `` /* 144-byte string literal not displayed */
	NegGain          float32 `` /* 214-byte string literal not displayed */
	PosGain          float32 `` /* 214-byte string literal not displayed */
	ActModZero       bool    `` /* 222-byte string literal not displayed */
	ModNetThreshold  float32 `` /* 348-byte string literal not displayed */
	ModSendThreshold float32 `desc:"threshold for including neuron activation in total to send (for ModNet)"`
	IsModSender      bool    `desc:"does this layer send modulation to other layers?"`
	IsModReceiver    bool    `desc:"does this layer receive modulation from other layers?"`
	IsPVReceiver     bool    `desc:"does this layer receive a direct PV input?"`
}

func (*ModParams) Gain

func (dm *ModParams) Gain(da, gain float32, plusPhase bool) float32

Gain returns da-modulated gain value

func (*ModParams) Ge

func (dm *ModParams) Ge(da, ge float32, plusPhase bool) float32

Ge returns da-modulated ge value

type ModPool

type ModPool struct {
	ModNetStats      minmax.AvgMax32
	ModSent          float32 `desc:"modulation level transmitted to receiver layers"`
	ModSendThreshold float32 `desc:"threshold for sending modulation. values below this are not added to the pool-level total"`
}

type ModRcvrParams

type ModRcvrParams struct {
	RcvName string  `desc:"name of receiving layer"`
	Scale   float32 `desc:"scale factor for modulation to this receiver"`
}

type ModReceiver

type ModReceiver interface {
	ModsFmInc(ltime *leabra.Time)
	ReceiveMods(sender ModSender, scale float32)
}

type ModSender

type ModSender interface {
	SendMods(ltime *leabra.Time)
	ModSendValue(ni int32) float32
}

type Modulators

type Modulators struct {
	DA  float32 `desc:"current dopamine level for this layer"`
	ACh float32 `desc:"current acetylcholine level for this layer"`
	SE  float32 `desc:"current serotonin level for this layer"`
}

func (*Modulators) InitActs

func (ml *Modulators) InitActs()

type NegUS

type NegUS US
const (
	Shock NegUS = iota
	Nausea
	Sharp
	OtherNeg
	NegUSNone // NoNegUS
	NNegUS    = NegUSNone
)

func (NegUS) FromString

func (neg NegUS) FromString(s string) NegUS

func (NegUS) Int

func (neg NegUS) Int() int

func (NegUS) NegUSEmpty

func (neg NegUS) NegUSEmpty() bool

func (NegUS) OneHot

func (neg NegUS) OneHot() int

func (NegUS) String

func (i NegUS) String() string

func (NegUS) Tensor

func (neg NegUS) Tensor() etensor.Tensor

func (NegUS) Val

func (neg NegUS) Val() Valence

type Network

type Network struct {
	leabra.Network
}

func (*Network) AddBlAmygLayer

func (nt *Network) AddBlAmygLayer(name string, nY, nX, nNeurY, nNeurX int, val Valence, dar DaRType, lTyp emer.LayerType) *BlAmygLayer

func (*Network) AddCElAmygLayer

func (nt *Network) AddCElAmygLayer(name string, nY, nX, nNeurY, nNeurX int,
	acqExt AcqExt, val Valence, dar DaRType) *CElAmygLayer

Add a CentroLateral Amygdala layer with specified 2D geometry, acquisition/extinction, valence, and DA receptor type

func (*Network) AddMSNLayer

func (nt *Network) AddMSNLayer(name string, nY, nX, nNeurY, nNeurX int, cpmt StriatalCompartment, da DaRType) *MSNLayer

AddMSNLayer adds an MSNLayer of given size, with given name. nY = number of pools in Y dimension, nX is pools in X dimension, and each pool has nNeurY, nNeurX neurons. da gives the DaReceptor type (D1R = Go, D2R = NoGo)

func (*Network) AddVTALayer

func (nt *Network) AddVTALayer(name string, val Valence) *VTALayer

Add a positive or negative valence VTA layer

func (*Network) AsLeabra

func (nt *Network) AsLeabra() *leabra.Network

func (*Network) AvgMaxMod

func (nt *Network) AvgMaxMod(ltime *leabra.Time)

func (*Network) ClearMSNTraces

func (nt *Network) ClearMSNTraces(_ *leabra.Time)

func (*Network) ClearModActs

func (nt *Network) ClearModActs(_ *leabra.Time)

func (*Network) ConnectLayersActMod

func (nt *Network) ConnectLayersActMod(sender ModSender, rcvr ModReceiver, scale float32)

func (*Network) Cycle

func (nt *Network) Cycle(ltime *leabra.Time)

func (*Network) CycleImpl

func (nt *Network) CycleImpl(ltime *leabra.Time)

func (*Network) InitActs

func (nt *Network) InitActs()

func (*Network) QuarterInitPrvs

func (nt *Network) QuarterInitPrvs(ltime *leabra.Time)

func (*Network) RecvModInc

func (nt *Network) RecvModInc(ltime *leabra.Time)

func (*Network) SendMods

func (nt *Network) SendMods(ltime *leabra.Time)

func (*Network) SynVarNames

func (nt *Network) SynVarNames() []string

func (*Network) SynVarProps

func (nt *Network) SynVarProps() map[string]string

SynVarProps returns properties for variables

func (*Network) UnitVarNames

func (nt *Network) UnitVarNames() []string

UnitVarNames returns a list of variable names available on the units in this layer

type PPTgLayer

type PPTgLayer struct {
	leabra.Layer
	Ge              float32
	GePrev          float32
	SendAct         float32
	DA              float32
	DNetGain        float32 `desc:"gain on input activation"`
	ActThreshold    float32 `desc:"activation threshold for passing through"`
	ClampActivation bool    `desc:"clamp activation directly, after applying gain"`
}

The PPTg passes on a positively-rectified version of its input signal.

func AddPPTgLayer

func AddPPTgLayer(nt *Network, name string, nY, nX int) *PPTgLayer

Add a Pedunculopontine Tegmentum (PPTg) layer. Acts as a positive rectifier for its inputs.

func (*PPTgLayer) ActFmG

func (ly *PPTgLayer) ActFmG(_ *leabra.Time)

func (*PPTgLayer) Build

func (ly *PPTgLayer) Build() error

func (*PPTgLayer) Defaults

func (ly *PPTgLayer) Defaults()

func (*PPTgLayer) GetDA

func (ly *PPTgLayer) GetDA() float32

func (*PPTgLayer) GetMonitorVal

func (ly *PPTgLayer) GetMonitorVal(data []string) float64

func (*PPTgLayer) InitActs

func (ly *PPTgLayer) InitActs()

func (*PPTgLayer) QuarterFinal

func (ly *PPTgLayer) QuarterFinal(ltime *leabra.Time)

func (*PPTgLayer) SetDA

func (ly *PPTgLayer) SetDA(da float32)

type PVLayer

// PVLayer is a primary value input layer. It sends activation directly to
// its receivers, bypassing the standard mechanisms.
type PVLayer struct {
	leabra.Layer
	Net           *Network      // containing network, for access to receiving layers
	SendPVQuarter int           // NOTE(review): presumably the quarter on which PV activation is sent (see SendPVAct/CyclePost) -- confirm
	PVReceivers   emer.LayNames // names of layers that receive PV activation directly (see AddPVReceiver)
}

Primary Value input layer. Sends activation directly to its receivers, bypassing the standard mechanisms.

func AddPVLayer

func AddPVLayer(nt *Network, name string, nY, nX int, typ emer.LayerType) *PVLayer

func (*PVLayer) AddPVReceiver

func (ly *PVLayer) AddPVReceiver(lyNm string)

func (*PVLayer) Build

func (ly *PVLayer) Build() error

func (*PVLayer) CyclePost

func (ly *PVLayer) CyclePost(ltime *leabra.Time)

func (*PVLayer) GetMonitorVal

func (ly *PVLayer) GetMonitorVal(data []string) float64

func (*PVLayer) SendPVAct

func (ly *PVLayer) SendPVAct()

type PackedUSTimeState

// PackedUSTimeState encodes the fields of a USTimeState in a single int64
// (see Pack and Unpack).
type PackedUSTimeState int64

// USTimeNone is the empty/absent packed state.
const USTimeNone PackedUSTimeState = 0

func PUSTFromString

func PUSTFromString(s string) PackedUSTimeState

func (PackedUSTimeState) Empty

func (ps PackedUSTimeState) Empty() bool

func (PackedUSTimeState) FromString

func (pus PackedUSTimeState) FromString(s string) PackedUSTimeState

func (PackedUSTimeState) Shape

func (ps PackedUSTimeState) Shape() []int

func (PackedUSTimeState) Stim

func (ps PackedUSTimeState) Stim() Stim

func (PackedUSTimeState) String

func (ps PackedUSTimeState) String() string

func (PackedUSTimeState) Tensor

func (ps PackedUSTimeState) Tensor() etensor.Tensor

func (PackedUSTimeState) TensorScaled

func (ps PackedUSTimeState) TensorScaled(scale float32) etensor.Tensor

func (PackedUSTimeState) US

func (ps PackedUSTimeState) US() US

func (PackedUSTimeState) USTimeIn

func (ps PackedUSTimeState) USTimeIn() Tick

func (PackedUSTimeState) Unpack

func (ps PackedUSTimeState) Unpack() USTimeState

func (PackedUSTimeState) Valence

func (ps PackedUSTimeState) Valence() Valence

type PosUS

type PosUS US

Positive and negative valence subtypes of US (unconditioned stimulus).

// Positive (appetitive) US values; PosUSNone marks absence.
const (
	Water PosUS = iota
	Food
	Mate
	OtherPos
	PosUSNone // NoPosUS
	NPosUS    = PosUSNone // number of actual positive US values
)

func (PosUS) FromString

func (pos PosUS) FromString(s string) PosUS

func (PosUS) Int

func (pos PosUS) Int() int

func (PosUS) OneHot

func (pos PosUS) OneHot() int

func (PosUS) PosUSEmpty

func (pos PosUS) PosUSEmpty() bool

func (PosUS) String

func (i PosUS) String() string

func (PosUS) Tensor

func (pos PosUS) Tensor() etensor.Tensor

func (PosUS) Val

func (pos PosUS) Val() Valence

type Stim

// Stim is a conditioned stimulus (CS) identifier; trailing comments give
// the string form of each value.
type Stim int
const (
	StmA    Stim = iota // A
	StmB                // B
	StmC                // C
	StmD                // D
	StmE                // E
	StmF                // F
	StmU                // U
	StmV                // V
	StmW                // W
	StmX                // X
	StmY                // Y
	StmZ                // Z
	StmNone             // NoStim
	StimN   = StmNone // alias marking the count/none boundary
)

func (Stim) Empty

func (stm Stim) Empty() bool

func (Stim) FromString

func (stm Stim) FromString(s string) Inputs

func (Stim) OneHot

func (stm Stim) OneHot() int

func (Stim) String

func (i Stim) String() string

func (Stim) Tensor

func (stm Stim) Tensor() etensor.Tensor

func (Stim) TensorScaled

func (stm Stim) TensorScaled(scale float32) etensor.Tensor

type StriatalCompartment

// StriatalCompartment distinguishes the patch vs. matrix compartments
// of the striatum.
type StriatalCompartment int
const (
	PATCH StriatalCompartment = iota
	MATRIX
	NSComp // number of compartments
)

func (StriatalCompartment) String

func (i StriatalCompartment) String() string

type Tick

type Tick int

Tick is the within-trial timestep index.

// Within-trial timestep values T0-T9; TckNone marks absence.
const (
	T0 Tick = iota
	T1
	T2
	T3
	T4
	T5
	T6
	T7
	T8
	T9
	TckNone
	TickN = TckNone // alias marking the count/none boundary
)

func (Tick) Empty

func (t Tick) Empty() bool

func (Tick) FromString

func (t Tick) FromString(s string) Inputs

func (Tick) Int

func (t Tick) Int() int

func (Tick) OneHot

func (t Tick) OneHot() int

func (Tick) String

func (i Tick) String() string

func (Tick) Tensor

func (t Tick) Tensor() etensor.Tensor

func (Tick) TensorScaled

func (t Tick) TensorScaled(scale float32) etensor.Tensor

type TraceSyn

// TraceSyn holds extra synaptic state for trace projections.
type TraceSyn struct {
	NTr float32 `` // NOTE(review): new trace value for this synapse -- original desc tag elided in this rendering; confirm against source
	Tr  float32 `` // NOTE(review): accumulated trace driving learning -- original desc tag elided in this rendering; confirm against source
}

TraceSyn holds extra synaptic state for trace projections

func (*TraceSyn) SetVarByIndex

func (tr *TraceSyn) SetVarByIndex(idx int, val float32)

func (*TraceSyn) SetVarByName

func (tr *TraceSyn) SetVarByName(varNm string, val float32) error

SetVarByName sets synapse variable to given value

func (*TraceSyn) VarByIndex

func (tr *TraceSyn) VarByIndex(idx int) float32

VarByIndex returns variable using index (0 = first variable in SynapseVars list)

func (*TraceSyn) VarNames

func (tr *TraceSyn) VarNames() []string

type US

type US int

func (US) Empty

func (us US) Empty() bool

func (US) FromString

func (us US) FromString(s string) Inputs

func (US) Int

func (us US) Int() int

func (US) OneHot

func (us US) OneHot() int

func (US) String

func (us US) String() string

func (US) Tensor

func (us US) Tensor() etensor.Tensor

func (US) TensorScaled

func (us US) TensorScaled(scale float32) etensor.Tensor
// TensorScaled returns the tensor representation of this positive US,
// delegating to the package-level TensorScaled helper with the reciprocal
// of scale. NOTE(review): the inversion (1/scale) is intentional here only
// if the helper multiplies by its argument -- confirm against the helper.
func (pos PosUS) TensorScaled(scale float32) etensor.Tensor {
	inv := 1.0 / scale
	return TensorScaled(pos, inv)
}

// TensorScaled returns the tensor representation of this negative US,
// delegating to the package-level TensorScaled helper with the reciprocal
// of scale.
func (neg NegUS) TensorScaled(scale float32) etensor.Tensor {
	inv := 1.0 / scale
	return TensorScaled(neg, inv)
}

func (US) Val

func (us US) Val() Valence

type USTimeState

// USTimeState conjoins a stimulus, US, valence, and within-trial timestep
// into a single state record (packable via Pack/PackedUSTimeState).
type USTimeState struct {
	Stm Stim    `desc:"CS value"`
	US  US      `desc:"a US value or absent (USNone)"`
	Val Valence `desc:"PV d, POS, NEG, or absent (ValNone)"` // NOTE(review): desc tag text "PV d," looks garbled -- confirm intended wording in source
	Tck Tick    `desc:"Within-trial timestep"`
}

func USTFromString

func USTFromString(uss string) USTimeState

func (USTimeState) Coords

func (usts USTimeState) Coords() []int

func (USTimeState) CoordsString

func (usts USTimeState) CoordsString() string

func (USTimeState) Empty

func (usts USTimeState) Empty() bool

func (USTimeState) EnumVal

func (usts USTimeState) EnumVal() int

func (USTimeState) OneHot

func (usts USTimeState) OneHot(scale float32) etensor.Tensor

func (USTimeState) Pack

func (usts USTimeState) Pack() PackedUSTimeState

func (USTimeState) String

func (usts USTimeState) String() string

func (USTimeState) Tensor

func (usts USTimeState) Tensor() etensor.Tensor

func (USTimeState) TensorScaleAndAdd

func (usts USTimeState) TensorScaleAndAdd(scale float32, other USTimeState) etensor.Tensor

func (USTimeState) TensorScaled

func (usts USTimeState) TensorScaled(scale float32) etensor.Tensor

func (USTimeState) TsrOffset

func (usts USTimeState) TsrOffset() []int

type VTADAGains

// VTADAGains holds gain constants for the various inputs to the VTA.
type VTADAGains struct {
	DA                float32 `desc:"overall multiplier for dopamine values"`
	PPTg              float32 `desc:"gain on bursts from PPTg"`
	LHb               float32 `desc:"gain on dips/bursts from LHbRMTg"`
	PV                float32 `desc:"gain on positive PV component of total phasic DA signal (net after subtracting VSPatchIndir (PVi) shunt signal)"`
	PVIBurstShunt     float32 `desc:"gain on VSPatch projection that shunts bursting in VTA (for VTAp = VSPatchPosD1, for VTAn = VSPatchNegD2)"`
	PVIAntiBurstShunt float32 `desc:"gain on VSPatch projection that opposes shunting of bursting in VTA (for VTAp = VSPatchPosD2, for VTAn = VSPatchNegD1)"`
	PVIDipShunt       float32 `` // NOTE(review): gain on VSPatch projection shunting dipping in VTA -- original desc tag elided in this rendering; confirm against source
	PVIAntiDipShunt   float32 `desc:"gain on VSPatch projection that opposes the shunting of dipping in VTA (currently only VTAp supported = VSPatchNegD1)"`
}

Gain constants for inputs to the VTA

func (*VTADAGains) Defaults

func (dag *VTADAGains) Defaults()

type VTALayer

// VTALayer computes the phasic dopamine (DA) signal for one valence,
// positive (VTAp) or negative (VTAn).
type VTALayer struct {
	rl.ClampDaLayer
	SendVal       float32 // NOTE(review): presumably the final DA value sent on; mirrors VTAState.SendVal -- confirm
	Valence       Valence    `desc:"VTA layer DA valence, positive or negative"`
	TonicDA       float32    `desc:"set a tonic 'dopamine' (DA) level (offset to add to da values)"`
	DAGains       VTADAGains `view:"inline" desc:"gains for various VTA inputs"`
	RecvFrom      map[string]emer.Layer // input layers this VTA layer reads from, keyed by layer name
	InternalState VTAState `desc:"input values--for debugging only"`
}

VTA internal state

func AddVTALayer

func AddVTALayer(nt *Network, name string, val Valence) *VTALayer

func (*VTALayer) ActFmG

func (ly *VTALayer) ActFmG(ltime *leabra.Time)

func (*VTALayer) Build

func (ly *VTALayer) Build() error

func (*VTALayer) CyclePost

func (ly *VTALayer) CyclePost(_ *leabra.Time)

func (*VTALayer) Defaults

func (ly *VTALayer) Defaults()

func (*VTALayer) GetMonitorVal

func (ly *VTALayer) GetMonitorVal(data []string) float64

For monitoring during run. Includes values beyond the scope of neuron fields.

func (*VTALayer) VTAAct

func (ly *VTALayer) VTAAct(ltime *leabra.Time)

func (*VTALayer) VTAActN

func (ly *VTALayer) VTAActN(_ *leabra.Time)

VTAn activation

func (*VTALayer) VTAActP

func (ly *VTALayer) VTAActP(_ *leabra.Time)

VTAp activation

type VTAState added in v1.1.12

// VTAState holds the VTA layer's received input values and intermediate
// DA computations -- for monitoring and debugging only.
type VTAState struct {
	PPTgDAp    float32 // NOTE(review): presumably burst drive from PPTg -- confirm
	LHbDA      float32 // NOTE(review): presumably dip/burst drive from LHbRMTg -- confirm
	PosPVAct   float32
	VSPosPVI   float32
	VSNegPVI   float32
	BurstLHbDA float32
	DipLHbDA   float32
	TotBurstDA float32
	TotDipDA   float32
	NetDipDA   float32
	NetDA      float32 // NOTE(review): presumably net phasic DA after combining burst and dip components -- confirm
	SendVal    float32 // NOTE(review): presumably the value actually sent to receivers -- confirm
}

For monitoring and debugging only; holds the values received from all inputs.

type Valence

type Valence int

Valence: positive (POS), negative (NEG), or none (ValNone).

// Valence values; ValNone marks absence.
const (
	ValNone Valence = iota // NoValence
	POS
	NEG
	ValenceN
)

func (Valence) Empty

func (val Valence) Empty() bool

func (Valence) FromString

func (val Valence) FromString(s string) Inputs

func (Valence) Negate

func (val Valence) Negate() Valence

func (Valence) OneHot

func (val Valence) OneHot() int

func (Valence) String

func (i Valence) String() string

func (Valence) Tensor

func (val Valence) Tensor() etensor.Tensor

func (Valence) TensorScaled

func (val Valence) TensorScaled(scale float32) etensor.Tensor

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL