Documentation ¶
Index ¶
- Variables
- func AddBG(nt *axon.Network, prefix string, ...) (mtxGo, mtxNo, cin, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal axon.AxonLayer)
- func AddBGPy(nt *axon.Network, prefix string, ...) []axon.AxonLayer
- func ConnectToMatrix(nt *axon.Network, send, recv emer.Layer, pat prjn.Pattern) emer.Prjn
- func PCoreNeuronVarIdxByName(varNm string) (int, error)
- func STNNeuronVarIdxByName(varNm string) (int, error)
- type CINLayer
- func (ly *CINLayer) ActFmG(ltime *axon.Time)
- func (ly *CINLayer) Build() error
- func (ly *CINLayer) CyclePost(ltime *axon.Time)
- func (ly *CINLayer) Defaults()
- func (ly *CINLayer) GetACh() float32
- func (ly *CINLayer) MaxAbsRew() float32
- func (ly *CINLayer) SetACh(ach float32)
- func (ly *CINLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *CINLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *CINLayer) UnitVarNum() int
- type CaParams
- type DaModParams
- type DaReceptors
- type GPLayer
- type GPLays
- type GPiLayer
- type Layer
- func (ly *Layer) ActFmG(ltime *axon.Time)
- func (ly *Layer) ActLrnFmPhasicMax()
- func (ly *Layer) Build() error
- func (ly *Layer) Defaults()
- func (ly *Layer) InitActLrn()
- func (ly *Layer) InitActs()
- func (ly *Layer) InitPhasicMax()
- func (ly *Layer) MaxPhasicMax() float32
- func (ly *Layer) NewState()
- func (ly *Layer) PCoreNeuronByIdx(idx int) *PCoreNeuron
- func (ly *Layer) PhasicMaxAvgByPool(pli int) float32
- func (ly *Layer) PhasicMaxFmAct(ltime *axon.Time)
- func (ly *Layer) PhasicMaxMaxByPool(pli int) float32
- func (ly *Layer) UnitVal1D(varIdx int, idx int) float32
- func (ly *Layer) UnitVarIdx(varNm string) (int, error)
- func (ly *Layer) UnitVarNum() int
- type MatrixLayer
- func (ly *MatrixLayer) ActFmG(ltime *axon.Time)
- func (ly *MatrixLayer) DAActLrn(ltime *axon.Time)
- func (ly *MatrixLayer) Defaults()
- func (ly *MatrixLayer) GetACh() float32
- func (ly *MatrixLayer) InitActs()
- func (ly *MatrixLayer) SetACh(ach float32)
- func (ly *MatrixLayer) ThalLayer() (*VThalLayer, error)
- func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *MatrixLayer) UnitVarNum() int
- type MatrixParams
- type MatrixPrjn
- func (pj *MatrixPrjn) Build() error
- func (pj *MatrixPrjn) ClearTrace()
- func (pj *MatrixPrjn) DWt(ltime *axon.Time)
- func (pj *MatrixPrjn) Defaults()
- func (pj *MatrixPrjn) InitWts()
- func (pj *MatrixPrjn) SynVal1D(varIdx int, synIdx int) float32
- func (pj *MatrixPrjn) SynVarIdx(varNm string) (int, error)
- func (pj *MatrixPrjn) SynVarNames() []string
- func (pj *MatrixPrjn) SynVarNum() int
- type MatrixTraceParams
- type Network
- func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, ...) (mtxGo, mtxNo, cin, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal axon.AxonLayer)
- func (nt *Network) ConnectToMatrix(send, recv emer.Layer, pat prjn.Pattern) emer.Prjn
- func (nt *Network) SynVarNames() []string
- func (nt *Network) UnitVarNames() []string
- type PCoreLayer
- type PCoreNeuron
- type STNLayer
- func (ly *STNLayer) ActFmG(ltime *axon.Time)
- func (ly *STNLayer) Build() error
- func (ly *STNLayer) Defaults()
- func (ly *STNLayer) InitActs()
- func (ly *STNLayer) NewState()
- func (ly *STNLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *STNLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *STNLayer) UnitVarNum() int
- func (ly *STNLayer) UpdateParams()
- type STNNeuron
- type TraceSyn
- type VThalLayer
Constants ¶
This section is empty.
Variables ¶
var ( // NeuronVars are extra neuron variables for pcore -- union across all types NeuronVars = []string{"DA", "ActLrn", "PhasicMax", "DALrn", "ACh", "SKCai", "SKCaM", "Gsk"} // NeuronVarsAll is the pcore collection of all neuron-level vars NeuronVarsAll []string // SynVarsAll is the pcore collection of all synapse-level vars (includes TraceSynVars) SynVarsAll []string )
var ( PCoreNeuronVars = []string{"ActLrn", "PhasicMax"} PCoreNeuronVarsMap map[string]int )
var ( STNNeuronVars = []string{"SKCai", "SKCaM", "Gsk"} STNNeuronVarsMap map[string]int )
var KiT_CINLayer = kit.Types.AddType(&CINLayer{}, axon.LayerProps)
var KiT_DaReceptors = kit.Enums.AddEnum(DaReceptorsN, kit.NotBitFlag, nil)
var KiT_GPLayer = kit.Types.AddType(&GPLayer{}, axon.LayerProps)
var KiT_GPLays = kit.Enums.AddEnum(GPLaysN, kit.NotBitFlag, nil)
var KiT_GPiLayer = kit.Types.AddType(&GPiLayer{}, axon.LayerProps)
var KiT_Layer = kit.Types.AddType(&Layer{}, axon.LayerProps)
var KiT_MatrixLayer = kit.Types.AddType(&MatrixLayer{}, axon.LayerProps)
var KiT_MatrixPrjn = kit.Types.AddType(&MatrixPrjn{}, axon.PrjnProps)
var KiT_Network = kit.Types.AddType(&Network{}, NetworkProps)
var KiT_STNLayer = kit.Types.AddType(&STNLayer{}, axon.LayerProps)
var KiT_VThalLayer = kit.Types.AddType(&VThalLayer{}, axon.LayerProps)
var NetworkProps = axon.NetworkProps
var TraceSynVars = []string{"NTr"}
Functions ¶
func AddBG ¶
func AddBG(nt *axon.Network, prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, cin, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal axon.AxonLayer)
AddBG adds MtxGo, No, CIN, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi, and VThal layers, with given optional prefix. Assumes that a 4D structure will be used, with Pools representing separable gating domains. All GP / STN layers have gpNeur neurons. Appropriate PoolOneToOne connections are made between layers, using standard styles. space is the spacing between layers (2 typical)
func AddBGPy ¶
func AddBGPy(nt *axon.Network, prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) []axon.AxonLayer
AddBGPy adds MtxGo, No, CIN, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi, and VThal layers, with given optional prefix. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Only Matrix has more than 1 unit per Pool by default. Appropriate PoolOneToOne connections are made between layers, using standard styles. space is the spacing between layers (2 typical). Py is the Python version, which returns the layers as a slice
func ConnectToMatrix ¶
ConnectToMatrix adds a MatrixPrjn (trace-based, dopamine-modulated projection) from given sending layer to a matrix layer
func PCoreNeuronVarIdxByName ¶ added in v1.5.1
PCoreNeuronVarIdxByName returns the index of the variable in the PCoreNeuron, or error
func STNNeuronVarIdxByName ¶
STNNeuronVarIdxByName returns the index of the variable in the STNNeuron, or error
Types ¶
type CINLayer ¶
type CINLayer struct { axon.Layer RewThr float32 `` /* 164-byte string literal not displayed */ RewLays emer.LayNames `desc:"Reward-representing layer(s) from which this computes ACh as Max absolute value"` SendACh rl.SendACh `desc:"list of layers to send acetylcholine to"` ACh float32 `desc:"acetylcholine value for this layer"` }
CINLayer (cholinergic interneuron) reads reward signals from named source layer(s) and sends the Max absolute value of that activity as the positively-rectified non-prediction-discounted reward signal computed by CINs, and sent as an acetylcholine (ACh) signal. To handle positive-only reward signals, need to include both a reward prediction and reward outcome layer.
func AddCINLayer ¶
AddCINLayer adds a CINLayer, with a single neuron.
func (*CINLayer) Build ¶
Build constructs the layer state, including calling Build on the projections.
func (*CINLayer) CyclePost ¶
CyclePost is called at end of Cycle We use it to send ACh, which will then be active for the next cycle of processing.
func (*CINLayer) MaxAbsRew ¶
MaxAbsRew returns the maximum absolute value of reward layer activations
func (*CINLayer) UnitVal1D ¶
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*CINLayer) UnitVarIdx ¶
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*CINLayer) UnitVarNum ¶
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
type CaParams ¶
type CaParams struct { SKCa chans.SKCaParams `view:"inline" desc:"small-conductance calcium-activated potassium channel"` CaD bool `desc:"use CaD timescale (delayed) calcium signal -- for STNs -- else use CaP (faster) for STNp"` CaScale float32 `desc:"scaling factor applied to input Ca to bring into proper range of these dynamics"` ThetaInit bool `desc:"initialize Ca, KCa values at start of every ThetaCycle (i.e., behavioral trial)"` }
CaParams control the calcium dynamics in STN neurons. The SKCa small-conductance calcium-gated potassium channel produces the pausing function as a consequence of rapid bursting.
type DaModParams ¶
type DaModParams struct { On bool `desc:"whether to use dopamine modulation"` ModGain bool `viewif:"On" desc:"modulate gain instead of Ge excitatory synaptic input"` Minus float32 `` /* 145-byte string literal not displayed */ Plus float32 `` /* 144-byte string literal not displayed */ NegGain float32 `` /* 208-byte string literal not displayed */ PosGain float32 `` /* 208-byte string literal not displayed */ }
DaModParams are parameters for effects of dopamine (Da) based modulation, typically adding a Da-based term to the Ge excitatory synaptic input. Plus-phase = learning effects relative to minus-phase "performance" dopamine effects
func (*DaModParams) Defaults ¶
func (dm *DaModParams) Defaults()
func (*DaModParams) Gain ¶
func (dm *DaModParams) Gain(da, gain float32, plusPhase bool) float32
Gain returns da-modulated gain value
func (*DaModParams) GainModOn ¶
func (dm *DaModParams) GainModOn() bool
GainModOn returns true if modulating Gain
func (*DaModParams) Ge ¶
func (dm *DaModParams) Ge(da, ge float32, plusPhase bool) float32
Ge returns da-modulated ge value
func (*DaModParams) GeModOn ¶
func (dm *DaModParams) GeModOn() bool
GeModOn returns true if modulating Ge
type DaReceptors ¶
type DaReceptors int
DaReceptors for D1R and D2R dopamine receptors
const ( // D1R primarily expresses Dopamine D1 Receptors -- dopamine is excitatory and bursts of dopamine lead to increases in synaptic weight, while dips lead to decreases -- direct pathway in dorsal striatum D1R DaReceptors = iota // D2R primarily expresses Dopamine D2 Receptors -- dopamine is inhibitory and bursts of dopamine lead to decreases in synaptic weight, while dips lead to increases -- indirect pathway in dorsal striatum D2R DaReceptorsN )
func (*DaReceptors) FromString ¶
func (i *DaReceptors) FromString(s string) error
func (DaReceptors) MarshalJSON ¶
func (ev DaReceptors) MarshalJSON() ([]byte, error)
func (DaReceptors) String ¶
func (i DaReceptors) String() string
func (*DaReceptors) UnmarshalJSON ¶
func (ev *DaReceptors) UnmarshalJSON(b []byte) error
type GPLayer ¶
GPLayer represents a globus pallidus layer, including: GPeOut, GPeIn, GPeTA (arkypallidal), and GPi (see GPLays for type). Typically just a single unit per Pool representing a given stripe.
func AddGPeLayer ¶
AddGPeLayer adds a GPLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
type GPLays ¶
type GPLays int
GPLays for GPLayer type
const ( // GPeOut is Outer layer of GPe neurons, receiving inhibition from MtxGo GPeOut GPLays = iota // GPeIn is Inner layer of GPe neurons, receiving inhibition from GPeOut and MtxNo GPeIn // GPeTA is arkypallidal layer of GPe neurons, receiving inhibition from GPeIn // and projecting inhibition to Mtx GPeTA // GPi is the inner globus pallidus, functionally equivalent to SNr, // receiving from MtxGo and GPeIn, and sending inhibition to VThal GPi GPLaysN )
func (*GPLays) FromString ¶
func (GPLays) MarshalJSON ¶
func (*GPLays) UnmarshalJSON ¶
type GPiLayer ¶
type GPiLayer struct {
GPLayer
}
GPiLayer represents the GPi / SNr output nucleus of the BG. It gets inhibited by the MtxGo and GPeIn layers, and its minimum activation during this inhibition is recorded in ActLrn, for learning. Typically just a single unit per Pool representing a given stripe.
func AddGPiLayer ¶
AddGPiLayer adds a GPiLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
type Layer ¶
type Layer struct { rl.Layer PhasicMaxCycMin int `desc:"minimum cycle after which phasic maximum activity is recorded"` PCoreNeurs []PCoreNeuron `` /* 146-byte string literal not displayed */ }
Layer is the basic pcore layer, which has a DA dopamine value from rl.Layer and tracks the phasic maximum activation during the gating window.
func (*Layer) ActLrnFmPhasicMax ¶ added in v1.5.1
func (ly *Layer) ActLrnFmPhasicMax()
ActLrnFmPhasicMax sets ActLrn to PhasicMax
func (*Layer) InitActLrn ¶ added in v1.5.1
func (ly *Layer) InitActLrn()
InitActLrn initializes the ActLrn to 0
func (*Layer) InitPhasicMax ¶ added in v1.5.1
func (ly *Layer) InitPhasicMax()
InitPhasicMax initializes the PhasicMax to 0
func (*Layer) MaxPhasicMax ¶ added in v1.5.1
MaxPhasicMax returns the maximum PhasicMax across the layer
func (*Layer) PCoreNeuronByIdx ¶ added in v1.5.1
func (ly *Layer) PCoreNeuronByIdx(idx int) *PCoreNeuron
PCoreNeuronByIdx returns neuron at given index
func (*Layer) PhasicMaxAvgByPool ¶ added in v1.5.1
PhasicMaxAvgByPool returns the average PhasicMax value by given pool index. Pool index 0 is whole layer, 1 is first sub-pool, etc
func (*Layer) PhasicMaxFmAct ¶ added in v1.5.1
PhasicMaxFmAct computes PhasicMax from Activation
func (*Layer) PhasicMaxMaxByPool ¶ added in v1.5.1
PhasicMaxMaxByPool returns the maximum PhasicMax value by given pool index. Pool index 0 is whole layer, 1 is first sub-pool, etc
func (*Layer) UnitVal1D ¶
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*Layer) UnitVarIdx ¶
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*Layer) UnitVarNum ¶
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
type MatrixLayer ¶
type MatrixLayer struct { Layer DaR DaReceptors `desc:"dominant type of dopamine receptor -- D1R for Go pathway, D2R for NoGo"` Matrix MatrixParams `view:"inline" desc:"matrix parameters"` DALrn float32 `inactive:"+" desc:"effective learning dopamine value for this layer: reflects DaR and Gains"` ACh float32 `` /* 190-byte string literal not displayed */ }
MatrixLayer represents the dorsal matrisome MSN's that are the main Go / NoGo gating units in BG. D1R = Go, D2R = NoGo.
func AddMatrixLayer ¶
func AddMatrixLayer(nt *axon.Network, name string, nPoolsY, nPoolsX, nNeurY, nNeurX int, da DaReceptors) *MatrixLayer
AddMatrixLayer adds a MatrixLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. da gives the DaReceptor type (D1R = Go, D2R = NoGo)
func (*MatrixLayer) ActFmG ¶
func (ly *MatrixLayer) ActFmG(ltime *axon.Time)
ActFmG computes rate-code activation from Ge, Gi, Gl conductances and updates learning running-average activations from that Act. Matrix extends to call DALrnFmDA and updates PhasicMax -> ActLrn
func (*MatrixLayer) DAActLrn ¶
func (ly *MatrixLayer) DAActLrn(ltime *axon.Time)
DAActLrn sets effective learning dopamine value from given raw DA value, applying Burst and Dip Gain factors, and then reversing sign for D2R. Also sets ActLrn based on whether corresponding VThal stripe fired above ThalThr -- flips sign of learning for stripe firing vs. not.
func (*MatrixLayer) Defaults ¶
func (ly *MatrixLayer) Defaults()
func (*MatrixLayer) GetACh ¶
func (ly *MatrixLayer) GetACh() float32
func (*MatrixLayer) InitActs ¶
func (ly *MatrixLayer) InitActs()
func (*MatrixLayer) SetACh ¶
func (ly *MatrixLayer) SetACh(ach float32)
func (*MatrixLayer) ThalLayer ¶
func (ly *MatrixLayer) ThalLayer() (*VThalLayer, error)
func (*MatrixLayer) UnitVal1D ¶
func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*MatrixLayer) UnitVarIdx ¶
func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*MatrixLayer) UnitVarNum ¶ added in v1.5.1
func (ly *MatrixLayer) UnitVarNum() int
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
type MatrixParams ¶
type MatrixParams struct { ThalLay string `desc:"name of VThal layer -- needed to get overall gating output action"` ThalThr float32 `` /* 183-byte string literal not displayed */ Deriv bool `` /* 328-byte string literal not displayed */ BurstGain float32 `` /* 237-byte string literal not displayed */ DipGain float32 `` /* 237-byte string literal not displayed */ }
MatrixParams has parameters for Dorsal Striatum Matrix computation These are the main Go / NoGo gating units in BG driving updating of PFC WM in PBWM
func (*MatrixParams) Defaults ¶
func (mp *MatrixParams) Defaults()
func (*MatrixParams) LrnFactor ¶
func (mp *MatrixParams) LrnFactor(act float32) float32
LrnFactor returns the multiplicative factor for level of msn activation. If Deriv is true, returns 2 * act * (1-act) -- the factor of 2 compensates for the otherwise reduced learning from these factors. Otherwise, returns just act.
type MatrixPrjn ¶
type MatrixPrjn struct { axon.Prjn Trace MatrixTraceParams `view:"inline" desc:"special parameters for matrix trace learning"` TrSyns []TraceSyn `desc:"trace synaptic state values, ordered by the sending layer units which owns them -- one-to-one with SConIdx array"` }
MatrixPrjn does dopamine-modulated, gated trace learning, for Matrix learning in PBWM context
func (*MatrixPrjn) Build ¶
func (pj *MatrixPrjn) Build() error
func (*MatrixPrjn) ClearTrace ¶
func (pj *MatrixPrjn) ClearTrace()
func (*MatrixPrjn) DWt ¶
func (pj *MatrixPrjn) DWt(ltime *axon.Time)
DWt computes the weight change (learning) -- on sending projections.
func (*MatrixPrjn) Defaults ¶
func (pj *MatrixPrjn) Defaults()
func (*MatrixPrjn) InitWts ¶
func (pj *MatrixPrjn) InitWts()
func (*MatrixPrjn) SynVal1D ¶
func (pj *MatrixPrjn) SynVal1D(varIdx int, synIdx int) float32
SynVal1D returns value of given variable index (from SynVarIdx) on given SynIdx. Returns NaN on invalid index. This is the core synapse var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*MatrixPrjn) SynVarIdx ¶
func (pj *MatrixPrjn) SynVarIdx(varNm string) (int, error)
SynVarIdx returns the index of given variable within the synapse, according to *this prjn's* SynVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*MatrixPrjn) SynVarNames ¶ added in v1.5.1
func (pj *MatrixPrjn) SynVarNames() []string
func (*MatrixPrjn) SynVarNum ¶
func (pj *MatrixPrjn) SynVarNum() int
SynVarNum returns the number of synapse-level variables for this prjn. This is needed for extending indexes in derived types.
type MatrixTraceParams ¶
type MatrixTraceParams struct { CurTrlDA bool `` /* 277-byte string literal not displayed */ Decay float32 `` /* 168-byte string literal not displayed */ }
MatrixTraceParams for trace-based learning in the MatrixPrjn. A trace of synaptic co-activity is formed, and then modulated by dopamine whenever it occurs. This bridges the temporal gap between gating activity and subsequent activity, and is based biologically on synaptic tags. Trace is reset at time of reward based on ACh level from CINs.
func (*MatrixTraceParams) Defaults ¶
func (tp *MatrixTraceParams) Defaults()
type Network ¶
pcore.Network has methods for configuring specialized PCore network components. PCore = Pallidal Core model of BG
func (*Network) AddBG ¶
func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, cin, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal axon.AxonLayer)
AddBG adds MtxGo, No, CIN, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi, and VThal layers, with given optional prefix. Assumes that a 4D structure will be used, with Pools representing separable gating domains. All GP / STN layers have gpNeur neurons. Appropriate PoolOneToOne connections are made between layers, using standard styles. space is the spacing between layers (2 typical)
func (*Network) ConnectToMatrix ¶
ConnectToMatrix adds a MatrixPrjn (trace-based, dopamine-modulated projection) from given sending layer to a matrix layer
func (*Network) SynVarNames ¶
SynVarNames returns the names of all the variables on the synapses in this network.
func (*Network) UnitVarNames ¶
UnitVarNames returns a list of variable names available on the units in this layer
type PCoreLayer ¶ added in v1.5.1
type PCoreLayer interface { // PCoreNeuronByIdx returns neuron at given index PCoreNeuronByIdx(idx int) *PCoreNeuron // PhasicMaxAvgByPool returns the average PhasicMax value by given pool index PhasicMaxAvgByPool(pli int) float32 // PhasicMaxMaxByPool returns the max PhasicMax value by given pool index PhasicMaxMaxByPool(pli int) float32 // PhasicMaxMax returns the max PhasicMax value across layer PhasicMaxMax() float32 }
PCoreLayer exposes PCoreNeuron access and PhasicMax values
type PCoreNeuron ¶ added in v1.5.1
type PCoreNeuron struct { ActLrn float32 `desc:"learning activity value -- based on PhasicMax activation plus other potential factors depending on layer type."` PhasicMax float32 `desc:"maximum phasic activation value during a gating window."` }
PCoreNeuron holds the extra neuron (unit) level variables for pcore computation.
func (*PCoreNeuron) VarByIndex ¶ added in v1.5.1
func (nrn *PCoreNeuron) VarByIndex(idx int) float32
VarByIndex returns variable using index (0 = first variable in PCoreNeuronVars list)
func (*PCoreNeuron) VarByName ¶ added in v1.5.1
func (nrn *PCoreNeuron) VarByName(varNm string) (float32, error)
VarByName returns variable by name, or error
func (*PCoreNeuron) VarNames ¶ added in v1.5.1
func (nrn *PCoreNeuron) VarNames() []string
type STNLayer ¶
type STNLayer struct { Layer Ca CaParams `` /* 186-byte string literal not displayed */ STNNeurs []STNNeuron `` /* 149-byte string literal not displayed */ }
STNLayer represents STN neurons, with two subtypes: STNp are more strongly driven and get over bursting threshold, driving strong, rapid activation of the KCa channels, causing a long pause in firing, which creates a window during which GPe dynamics resolve Go vs. No balance. STNs are more weakly driven and thus more slowly activate KCa, resulting in a longer period of activation, during which the GPi is inhibited to prevent premature gating based only on MtxGo inhibition -- gating only occurs when GPeIn signal has had a chance to integrate its MtxNo inputs.
func AddSTNLayer ¶
AddSTNLayer adds a subthalamic nucleus Layer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*STNLayer) Build ¶
Build constructs the layer state, including calling Build on the projections.
func (*STNLayer) UnitVal1D ¶
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*STNLayer) UnitVarIdx ¶
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*STNLayer) UnitVarNum ¶
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
func (*STNLayer) UpdateParams ¶ added in v1.5.1
func (ly *STNLayer) UpdateParams()
type STNNeuron ¶
type STNNeuron struct { SKCai float32 `` /* 158-byte string literal not displayed */ SKCaM float32 `desc:"Calcium-gated potassium channel gating factor, driven by SKCai via a Hill equation as in chans.SKPCaParams."` Gsk float32 `desc:"Calcium-gated potassium channel conductance as a function of Gbar * SKCaM."` }
STNNeuron holds the extra neuron (unit) level variables for STN computation.
func (*STNNeuron) VarByIndex ¶
VarByIndex returns variable using index (0 = first variable in STNNeuronVars list)
type TraceSyn ¶
type TraceSyn struct {
NTr float32 `desc:"new trace = send * recv -- drives updates to trace value: sn.ActLrn * rn.ActLrn (subject to derivative too)"`
}
TraceSyn holds extra synaptic state for trace projections
func (*TraceSyn) VarByIndex ¶
VarByIndex returns synapse variable by index
type VThalLayer ¶
type VThalLayer struct {
Layer
}
VThalLayer represents the Ventral thalamus: VA / VM / VL, which receives BG gating in the form of inhibitory projection from GPi.
func AddVThalLayer ¶
func AddVThalLayer(nt *axon.Network, name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *VThalLayer
AddVThalLayer adds a ventral thalamus (VA/VL/VM) Layer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*VThalLayer) Defaults ¶
func (ly *VThalLayer) Defaults()