Documentation ¶
Index ¶
- Constants
- Variables
- func AddBG(nt *axon.Network, prefix string, ...) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
- func AddBG4D(nt *axon.Network, prefix string, ...) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
- func AddCINLayer(nt *axon.Network, name, mtxGo, mtxNo string, space float32) *rl.RSalienceLayer
- func AddPTThalForSuper(nt *axon.Network, super, ct emer.Layer, suffix string, ...) (pt, thal emer.Layer)
- func BoolToFloat32(b bool) float32
- func ConnectPTSelf(nt *axon.Network, ly emer.Layer, pat prjn.Pattern) emer.Prjn
- func ConnectToMatrix(nt *axon.Network, send, recv emer.Layer, pat prjn.Pattern) emer.Prjn
- func STNNeuronVarIdxByName(varNm string) (int, error)
- type CaParams
- type DaModParams
- type DaReceptors
- type GPLayer
- type GPLays
- type GPiLayer
- type LayerType
- type MatrixLayer
- func (ly *MatrixLayer) AnyGated() bool
- func (ly *MatrixLayer) Build() error
- func (ly *MatrixLayer) Class() string
- func (ly *MatrixLayer) DAActLrn()
- func (ly *MatrixLayer) DecayState(decay, glong float32)
- func (ly *MatrixLayer) Defaults()
- func (ly *MatrixLayer) GInteg(ni int, nrn *axon.Neuron, ctime *axon.Time)
- func (ly *MatrixLayer) GatedFmAvgSpk()
- func (ly *MatrixLayer) GetACh() float32
- func (ly *MatrixLayer) GiFmACh(ctime *axon.Time)
- func (ly *MatrixLayer) GiFmSpikes(ctime *axon.Time)
- func (ly *MatrixLayer) InitActs()
- func (ly *MatrixLayer) InitMods()
- func (ly *MatrixLayer) PlusPhase(ctime *axon.Time)
- func (ly *MatrixLayer) SetACh(ach float32)
- func (ly *MatrixLayer) SpikeFmG(ni int, nrn *axon.Neuron, ctime *axon.Time)
- func (ly *MatrixLayer) USActiveFmUS(ctime *axon.Time)
- func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *MatrixLayer) UnitVarNum() int
- type MatrixParams
- type MatrixPrjn
- func (pj *MatrixPrjn) Build() error
- func (pj *MatrixPrjn) ClearTrace()
- func (pj *MatrixPrjn) DWt(ctime *axon.Time)
- func (pj *MatrixPrjn) DWtNoUS(ctime *axon.Time)
- func (pj *MatrixPrjn) DWtUS(ctime *axon.Time)
- func (pj *MatrixPrjn) Defaults()
- func (pj *MatrixPrjn) InitWts()
- func (pj *MatrixPrjn) SynVal1D(varIdx int, synIdx int) float32
- func (pj *MatrixPrjn) SynVarIdx(varNm string) (int, error)
- func (pj *MatrixPrjn) SynVarNames() []string
- func (pj *MatrixPrjn) SynVarNum() int
- type MatrixTraceParams
- type Network
- func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, ...) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
- func (nt *Network) AddBG4D(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, ...) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
- func (nt *Network) AddCINLayer(name, mtxGo, mtxNo string, space float32) *rl.RSalienceLayer
- func (nt *Network) AddPTThalForSuper(super, ct emer.Layer, suffix string, superToPT, ptSelf, ctToThal prjn.Pattern, ...) (pt, thal emer.Layer)
- func (nt *Network) AddThalLayer2D(name string, nNeurY, nNeurX int) *ThalLayer
- func (nt *Network) AddThalLayer4D(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *ThalLayer
- func (nt *Network) ConnectToMatrix(send, recv emer.Layer, pat prjn.Pattern) emer.Prjn
- func (nt *Network) SynVarNames() []string
- func (nt *Network) UnitVarNames() []string
- type PTLayer
- func (ly *PTLayer) Class() string
- func (ly *PTLayer) Defaults()
- func (ly *PTLayer) GFmRawSyn(ni int, nrn *axon.Neuron, ctime *axon.Time, thalGeRaw, thalGeSyn float32)
- func (ly *PTLayer) GFmSpikeRaw(ni int, nrn *axon.Neuron, ctime *axon.Time) (thalGeRaw, thalGeSyn float32)
- func (ly *PTLayer) GInteg(ni int, nrn *axon.Neuron, ctime *axon.Time)
- func (ly *PTLayer) UpdateParams()
- type STNLayer
- func (ly *STNLayer) Build() error
- func (ly *STNLayer) Class() string
- func (ly *STNLayer) Defaults()
- func (ly *STNLayer) GInteg(ni int, nrn *axon.Neuron, ctime *axon.Time)
- func (ly *STNLayer) InitActs()
- func (ly *STNLayer) NewState()
- func (ly *STNLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *STNLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *STNLayer) UnitVarNum() int
- func (ly *STNLayer) UpdateParams()
- type STNNeuron
- type ThalLayer
- func (ly *ThalLayer) AnyGated() bool
- func (ly *ThalLayer) Build() error
- func (ly *ThalLayer) Class() string
- func (ly *ThalLayer) DecayState(decay, glong float32)
- func (ly *ThalLayer) Defaults()
- func (ly *ThalLayer) GatedFmAvgSpk(thr float32) bool
- func (ly *ThalLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *ThalLayer) UnitVarIdx(varNm string) (int, error)
- func (ly *ThalLayer) UnitVarNum() int
- type TraceSyn
Constants ¶
const ( // Matrix are the matrisome medium spiny neurons (MSNs) that are the main // Go / NoGo gating units in BG. Matrix emer.LayerType = emer.LayerType(pvlv.LayerTypeN) + iota // STN is a subthalamic nucleus layer: STNp or STNs STN // GP is a globus pallidus layer: GPe or GPi GP // Thal is a thalamic layer, used for MD mediodorsal thalamus and // VM / VL / VA ventral thalamic nuclei. Thal // PT are layer 5IB intrinsic bursting pyramidal tract neocortical neurons. // These are bidirectionally interconnected with BG-gated thalamus in PFC. PT )
Variables ¶
var ( // NeuronVars are extra neuron variables for pcore -- union across all types NeuronVars = []string{"Burst", "BurstPrv", "CtxtGe", "DA", "DALrn", "ACh", "Gated", "SKCai", "SKCaM", "Gsk"} // NeuronVarsAll is the pcore collection of all neuron-level vars NeuronVarsAll []string // SynVarsAll is the pcore collection of all synapse-level vars (includes TraceSynVars) SynVarsAll []string )
var ( STNNeuronVars = []string{"SKCai", "SKCaM", "Gsk"} STNNeuronVarsMap map[string]int )
var KiT_DaReceptors = kit.Enums.AddEnum(DaReceptorsN, kit.NotBitFlag, nil)
var KiT_GPLayer = kit.Types.AddType(&GPLayer{}, LayerProps)
var KiT_GPLays = kit.Enums.AddEnum(GPLaysN, kit.NotBitFlag, nil)
var KiT_GPiLayer = kit.Types.AddType(&GPiLayer{}, LayerProps)
var KiT_LayerType = kit.Enums.AddEnumExt(pvlv.KiT_LayerType, LayerTypeN, kit.NotBitFlag, nil)
var KiT_MatrixLayer = kit.Types.AddType(&MatrixLayer{}, LayerProps)
var KiT_MatrixPrjn = kit.Types.AddType(&MatrixPrjn{}, axon.PrjnProps)
var KiT_Network = kit.Types.AddType(&Network{}, NetworkProps)
var KiT_PTLayer = kit.Types.AddType(&PTLayer{}, LayerProps)
var KiT_STNLayer = kit.Types.AddType(&STNLayer{}, LayerProps)
var KiT_ThalLayer = kit.Types.AddType(&ThalLayer{}, LayerProps)
var LayerProps = ki.Props{ "EnumType:Typ": KiT_LayerType, "ToolBar": ki.PropSlice{ {"Defaults", ki.Props{ "icon": "reset", "desc": "return all parameters to their initial default values", }}, {"InitWts", ki.Props{ "icon": "update", "desc": "initialize the layer's weight values according to prjn parameters, for all *sending* projections out of this layer", }}, {"InitActs", ki.Props{ "icon": "update", "desc": "initialize the layer's activation values", }}, {"sep-act", ki.BlankProp{}}, {"LesionNeurons", ki.Props{ "icon": "close", "desc": "Lesion (set the Off flag) for given proportion of neurons in the layer (number must be 0 -- 1, NOT percent!)", "Args": ki.PropSlice{ {"Proportion", ki.Props{ "desc": "proportion (0 -- 1) of neurons to lesion", }}, }, }}, {"UnLesionNeurons", ki.Props{ "icon": "reset", "desc": "Un-Lesion (reset the Off flag) for all neurons in the layer", }}, }, }
LayerProps are required to get the extended EnumType
var NetworkProps = axon.NetworkProps
var TraceSynVars = []string{"DTr"}
Functions ¶
func AddBG ¶
func AddBG(nt *axon.Network, prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
AddBG adds MtxGo, MtxNo, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi layers, with given optional prefix. Only the Matrix has pool-based 4D shape by default -- use pool for "role" like elements where matches need to be detected. All GP / STN layers have gpNeur neurons. Appropriate connections are made between layers, using standard styles. space is the spacing between layers (2 typical). A CIN or more widely used rl.RSalienceLayer should be added and project ACh to the MtxGo, No layers.
func AddBG4D ¶ added in v1.5.10
func AddBG4D(nt *axon.Network, prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
AddBG4D adds MtxGo, MtxNo, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi layers, with given optional prefix. This version makes 4D pools throughout the GP layers, with Pools representing separable gating domains. All GP / STN layers have gpNeur neurons. Appropriate PoolOneToOne connections are made between layers, using standard styles. space is the spacing between layers (2 typical). A CIN or more widely used rl.RSalienceLayer should be added and project ACh to the MtxGo, No layers.
func AddCINLayer ¶
AddCINLayer adds a rl.RSalienceLayer unsigned reward salience coding ACh layer which sends ACh to given Matrix Go and No layers (names), and is default located to the right of the MtxNo layer with given spacing. CIN is a cholinergic interneuron interspersed in the striatum that shows these response properties and modulates learning in the striatum around US and CS events. If other ACh modulation is needed, a global RSalienceLayer can be used.
func AddPTThalForSuper ¶ added in v1.5.10
func AddPTThalForSuper(nt *axon.Network, super, ct emer.Layer, suffix string, superToPT, ptSelf, ctToThal prjn.Pattern, space float32) (pt, thal emer.Layer)
AddPTThalForSuper adds a PT pyramidal tract layer and a Thalamus layer for given superficial layer (deep.SuperLayer) and associated CT with given suffix (e.g., MD, VM). PT and Thal have SetClass(super.Name()) called to allow shared params. Projections are made with given classes: SuperToPT, PTSelfMaint, CTtoThal. The PT and Thal layers are positioned behind the CT layer.
func BoolToFloat32 ¶ added in v1.5.10
todo: replace with ki/bools.ToFloat32 BoolToFloat32 -- the lack of ternary conditional expressions is the *only* Go decision I disagree about
func ConnectPTSelf ¶ added in v1.5.10
ConnectPTSelf adds a Self (Lateral) projection within a PT layer, which supports active maintenance, with a class of PTSelfMaint
func ConnectToMatrix ¶
ConnectToMatrix adds a MatrixPrjn from given sending layer to a matrix layer
func STNNeuronVarIdxByName ¶
STNNeuronVarIdxByName returns the index of the variable in the STNNeuron, or error
Types ¶
type CaParams ¶
type CaParams struct { SKCa chans.SKCaParams `view:"inline" desc:"small-conductance calcium-activated potassium channel"` CaD bool `desc:"use CaD timescale (delayed) calcium signal -- for STNs -- else use CaP (faster) for STNp"` CaScale float32 `desc:"scaling factor applied to input Ca to bring into proper range of these dynamics"` ThetaInit bool `desc:"initialize Ca, KCa values at start of every ThetaCycle (i.e., behavioral trial)"` }
CaParams control the calcium dynamics in STN neurons. The SKCa small-conductance calcium-gated potassium channel produces the pausing function as a consequence of rapid bursting.
type DaModParams ¶
type DaModParams struct { On bool `desc:"whether to use dopamine modulation"` ModGain bool `viewif:"On" desc:"modulate gain instead of Ge excitatory synaptic input"` Minus float32 `` /* 145-byte string literal not displayed */ Plus float32 `` /* 144-byte string literal not displayed */ NegGain float32 `` /* 208-byte string literal not displayed */ PosGain float32 `` /* 208-byte string literal not displayed */ }
Params for effects of dopamine (Da) based modulation, typically adding a Da-based term to the Ge excitatory synaptic input. Plus-phase = learning effects relative to minus-phase "performance" dopamine effects
func (*DaModParams) Defaults ¶
func (dm *DaModParams) Defaults()
func (*DaModParams) Gain ¶
func (dm *DaModParams) Gain(da, gain float32, plusPhase bool) float32
Gain returns da-modulated gain value
func (*DaModParams) GainModOn ¶
func (dm *DaModParams) GainModOn() bool
GainModOn returns true if modulating Gain
func (*DaModParams) Ge ¶
func (dm *DaModParams) Ge(da, ge float32, plusPhase bool) float32
Ge returns da-modulated ge value
func (*DaModParams) GeModOn ¶
func (dm *DaModParams) GeModOn() bool
GeModOn returns true if modulating Ge
type DaReceptors ¶
type DaReceptors int
DaReceptors for D1R and D2R dopamine receptors
const ( // D1R primarily expresses Dopamine D1 Receptors -- dopamine is excitatory and bursts of dopamine lead to increases in synaptic weight, while dips lead to decreases -- direct pathway in dorsal striatum D1R DaReceptors = iota // D2R primarily expresses Dopamine D2 Receptors -- dopamine is inhibitory and bursts of dopamine lead to decreases in synaptic weight, while dips lead to increases -- indirect pathway in dorsal striatum D2R DaReceptorsN )
func (*DaReceptors) FromString ¶
func (i *DaReceptors) FromString(s string) error
func (DaReceptors) MarshalJSON ¶
func (ev DaReceptors) MarshalJSON() ([]byte, error)
func (DaReceptors) String ¶
func (i DaReceptors) String() string
func (*DaReceptors) UnmarshalJSON ¶
func (ev *DaReceptors) UnmarshalJSON(b []byte) error
type GPLayer ¶
GPLayer represents a globus pallidus layer, including: GPeOut, GPeIn, GPeTA (arkypallidal), and GPi (see GPLay for type). Typically just a single unit per Pool representing a given stripe.
func AddGPeLayer2D ¶ added in v1.5.10
AddGPeLayer2D adds a GPLayer of given size, with given name.
func AddGPeLayer4D ¶ added in v1.5.10
AddGPeLayer4D adds a GPLayer of given size, with given name. Makes a 4D structure with Pools representing separable gating domains.
type GPLays ¶
type GPLays int
GPLays for GPLayer type
const ( // GPeOut is Outer layer of GPe neurons, receiving inhibition from MtxGo GPeOut GPLays = iota // GPeIn is Inner layer of GPe neurons, receiving inhibition from GPeOut and MtxNo GPeIn // GPeTA is arkypallidal layer of GPe neurons, receiving inhibition from GPeIn // and projecting inhibition to Mtx GPeTA // GPi is the inner globus pallidus, functionally equivalent to SNr, // receiving from MtxGo and GPeIn, and sending inhibition to VThal GPi GPLaysN )
func (*GPLays) FromString ¶
func (GPLays) MarshalJSON ¶
func (*GPLays) UnmarshalJSON ¶
type GPiLayer ¶
type GPiLayer struct {
GPLayer
}
GPiLayer represents the GPi / SNr output nucleus of the BG. It gets inhibited by the MtxGo and GPeIn layers, and its minimum activation during this inhibition is recorded in ActLrn, for learning. Typically just a single unit per Pool representing a given stripe.
func AddGPiLayer2D ¶ added in v1.5.10
AddGPiLayer2D adds a GPiLayer of given size, with given name.
func AddGPiLayer4D ¶ added in v1.5.10
AddGPiLayer4D adds a GPiLayer of given size, with given name. Makes a 4D structure with Pools representing separable gating domains.
type LayerType ¶ added in v1.5.10
LayerType has the DeepAxon extensions to the emer.LayerType types, for gui
const ( Matrix_ LayerType = LayerType(pvlv.LayerTypeN) + iota STN_ GP_ Thal_ PT_ LayerTypeN )
gui versions
func StringToLayerType ¶ added in v1.5.10
type MatrixLayer ¶
type MatrixLayer struct { rl.Layer DaR DaReceptors `desc:"dominant type of dopamine receptor -- D1R for Go pathway, D2R for NoGo"` Matrix MatrixParams `view:"inline" desc:"matrix parameters"` MtxThals emer.LayNames `` /* 134-byte string literal not displayed */ USLayers emer.LayNames `` /* 345-byte string literal not displayed */ HasMod bool `inactive:"+" desc:"has modulatory projections, flagged with Modulator setting -- automatically detected"` Gated []bool `` /* 160-byte string literal not displayed */ DALrn float32 `inactive:"+" desc:"effective learning dopamine value for this layer: reflects DaR and Gains"` ACh float32 `` /* 479-byte string literal not displayed */ USActive bool `` /* 126-byte string literal not displayed */ Mods []float32 `desc:"modulatory values from Modulator projection(s) for each neuron"` }
MatrixLayer represents the matrisome medium spiny neurons (MSNs) that are the main Go / NoGo gating units in BG. D1R = Go, D2R = NoGo. The Gated value for each pool is updated in the PlusPhase and can be called separately too.
func AddMatrixLayer ¶
func AddMatrixLayer(nt *axon.Network, name string, nPoolsY, nPoolsX, nNeurY, nNeurX int, da DaReceptors) *MatrixLayer
AddMatrixLayer adds a MatrixLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. da gives the DaReceptor type (D1R = Go, D2R = NoGo)
func (*MatrixLayer) AnyGated ¶ added in v1.5.10
func (ly *MatrixLayer) AnyGated() bool
AnyGated returns true if any of the pools gated
func (*MatrixLayer) Build ¶ added in v1.5.10
func (ly *MatrixLayer) Build() error
func (*MatrixLayer) Class ¶ added in v1.5.10
func (ly *MatrixLayer) Class() string
func (*MatrixLayer) DAActLrn ¶
func (ly *MatrixLayer) DAActLrn()
DAActLrn sets effective learning dopamine value from given raw DA value, applying Burst and Dip Gain factors, and then reversing sign for D2R and also for InvertNoGate -- must have done GatedFmThal before this.
func (*MatrixLayer) DecayState ¶ added in v1.5.12
func (ly *MatrixLayer) DecayState(decay, glong float32)
func (*MatrixLayer) Defaults ¶
func (ly *MatrixLayer) Defaults()
func (*MatrixLayer) GatedFmAvgSpk ¶ added in v1.5.10
func (ly *MatrixLayer) GatedFmAvgSpk()
GatedFmAvgSpk updates Gated based on Avg SpkMax activity in Go Matrix and ThalLayers listed in MtxThals
func (*MatrixLayer) GetACh ¶
func (ly *MatrixLayer) GetACh() float32
func (*MatrixLayer) GiFmACh ¶ added in v1.5.12
func (ly *MatrixLayer) GiFmACh(ctime *axon.Time)
GiFmACh sets inhibitory conductance from ACh value, where ACh is 0 at baseline and goes up to 1 at US or CS -- effect is disinhibitory on MSNs
func (*MatrixLayer) GiFmSpikes ¶ added in v1.6.0
func (ly *MatrixLayer) GiFmSpikes(ctime *axon.Time)
func (*MatrixLayer) InitActs ¶
func (ly *MatrixLayer) InitActs()
func (*MatrixLayer) InitMods ¶ added in v1.5.12
func (ly *MatrixLayer) InitMods()
InitMods initializes the Mods modulator values
func (*MatrixLayer) PlusPhase ¶ added in v1.5.10
func (ly *MatrixLayer) PlusPhase(ctime *axon.Time)
PlusPhase does updating at end of the plus phase; calls DAActLrn.
func (*MatrixLayer) SetACh ¶
func (ly *MatrixLayer) SetACh(ach float32)
func (*MatrixLayer) USActiveFmUS ¶ added in v1.5.12
func (ly *MatrixLayer) USActiveFmUS(ctime *axon.Time)
USActiveFmUS updates the USActive flag based on USLayers state
func (*MatrixLayer) UnitVal1D ¶
func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*MatrixLayer) UnitVarIdx ¶
func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*MatrixLayer) UnitVarNum ¶ added in v1.5.1
func (ly *MatrixLayer) UnitVarNum() int
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
type MatrixParams ¶
type MatrixParams struct { GPHasPools bool `` /* 189-byte string literal not displayed */ InvertNoGate bool `` /* 163-byte string literal not displayed */ GateThr float32 `desc:"threshold on layer Avg SpkMax for Matrix Go and Thal layers to count as having gated"` BurstGain float32 `` /* 237-byte string literal not displayed */ DipGain float32 `` /* 237-byte string literal not displayed */ ModGain float32 `desc:"gain factor multiplying the modulator input GeSyn conductances -- total modulation has a maximum of 1"` AChInhib float32 `desc:"strength of extra Gi current multiplied by MaxACh-ACh (ACh > Max = 0) -- ACh is disinhibitory on striatal firing"` MaxACh float32 `desc:"level of ACh at or above which AChInhib goes to 0 -- ACh typically ranges between 0-1"` }
MatrixParams has parameters for Dorsal Striatum Matrix computation These are the main Go / NoGo gating units in BG.
func (*MatrixParams) Defaults ¶
func (mp *MatrixParams) Defaults()
func (*MatrixParams) GiFmACh ¶ added in v1.5.12
func (mp *MatrixParams) GiFmACh(ach float32) float32
GiFmACh returns inhibitory conductance from ach value, where ACh is 0 at baseline and goes up to 1 at US or CS -- effect is disinhibitory on MSNs
type MatrixPrjn ¶
type MatrixPrjn struct { axon.Prjn Trace MatrixTraceParams `view:"inline" desc:"special parameters for matrix trace learning"` TrSyns []TraceSyn `desc:"trace synaptic state values, ordered by the sending layer units which owns them -- one-to-one with SConIdx array"` }
MatrixPrjn does dopamine-modulated, gated trace learning, for Matrix learning in PBWM context
func (*MatrixPrjn) Build ¶
func (pj *MatrixPrjn) Build() error
func (*MatrixPrjn) ClearTrace ¶
func (pj *MatrixPrjn) ClearTrace()
func (*MatrixPrjn) DWt ¶
func (pj *MatrixPrjn) DWt(ctime *axon.Time)
func (*MatrixPrjn) DWtNoUS ¶ added in v1.5.12
func (pj *MatrixPrjn) DWtNoUS(ctime *axon.Time)
DWtNoUS computes the weight change (learning) -- on sending projections. for non-USActive special case
func (*MatrixPrjn) DWtUS ¶ added in v1.5.12
func (pj *MatrixPrjn) DWtUS(ctime *axon.Time)
DWtUS computes the weight change (learning) -- on sending projections. case with USActive flag available to condition learning on US.
func (*MatrixPrjn) Defaults ¶
func (pj *MatrixPrjn) Defaults()
func (*MatrixPrjn) InitWts ¶
func (pj *MatrixPrjn) InitWts()
func (*MatrixPrjn) SynVal1D ¶
func (pj *MatrixPrjn) SynVal1D(varIdx int, synIdx int) float32
SynVal1D returns value of given variable index (from SynVarIdx) on given SynIdx. Returns NaN on invalid index. This is the core synapse var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*MatrixPrjn) SynVarIdx ¶
func (pj *MatrixPrjn) SynVarIdx(varNm string) (int, error)
SynVarIdx returns the index of given variable within the synapse, according to *this prjn's* SynVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*MatrixPrjn) SynVarNames ¶ added in v1.5.1
func (pj *MatrixPrjn) SynVarNames() []string
func (*MatrixPrjn) SynVarNum ¶
func (pj *MatrixPrjn) SynVarNum() int
SynVarNum returns the number of synapse-level variables for this prjn. This is needed for extending indexes in derived types.
type MatrixTraceParams ¶
type MatrixTraceParams struct { CurTrlDA bool `` /* 277-byte string literal not displayed */ Decay float32 `` /* 168-byte string literal not displayed */ NoACh bool `desc:"ignore ACh for learning modulation -- only used for reset if so -- otherwise ACh directly multiplies dWt"` Modulator bool `` /* 136-byte string literal not displayed */ }
MatrixTraceParams for trace-based learning in the MatrixPrjn. A trace of synaptic co-activity is formed, and then modulated by dopamine whenever it occurs. This bridges the temporal gap between gating activity and subsequent activity, and is based biologically on synaptic tags. Trace is reset at time of reward based on ACh level from CINs.
func (*MatrixTraceParams) Defaults ¶
func (tp *MatrixTraceParams) Defaults()
type Network ¶
pcore.Network has methods for configuring specialized PCore network components PCore = Pallidal Core mode of BG
func (*Network) AddBG ¶
func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
AddBG adds MtxGo, MtxNo, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi layers, with given optional prefix. Only the Matrix has pool-based 4D shape by default -- use pool for "role" like elements where matches need to be detected. All GP / STN layers have gpNeur neurons. Appropriate connections are made between layers, using standard styles. space is the spacing between layers (2 typical). A CIN or more widely used rl.RSalienceLayer should be added and project ACh to the MtxGo, No layers.
func (*Network) AddBG4D ¶ added in v1.5.10
func (nt *Network) AddBG4D(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX, gpNeurY, gpNeurX int, space float32) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi axon.AxonLayer)
AddBG4D adds MtxGo, MtxNo, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi layers, with given optional prefix. This version makes 4D pools throughout the GP layers, with Pools representing separable gating domains. All GP / STN layers have gpNeur neurons. Appropriate PoolOneToOne connections are made between layers, using standard styles. space is the spacing between layers (2 typical). A CIN or more widely used rl.RSalienceLayer should be added and project ACh to the MtxGo, No layers.
func (*Network) AddCINLayer ¶ added in v1.5.12
func (nt *Network) AddCINLayer(name, mtxGo, mtxNo string, space float32) *rl.RSalienceLayer
AddCINLayer adds a rl.RSalienceLayer unsigned reward salience coding ACh layer which sends ACh to given Matrix Go and No layers (names), and is default located to the right of the MtxNo layer with given spacing. CIN is a cholinergic interneuron interspersed in the striatum that shows these response properties and modulates learning in the striatum around US and CS events. If other ACh modulation is needed, a global RSalienceLayer can be used.
func (*Network) AddPTThalForSuper ¶ added in v1.5.10
func (nt *Network) AddPTThalForSuper(super, ct emer.Layer, suffix string, superToPT, ptSelf, ctToThal prjn.Pattern, space float32) (pt, thal emer.Layer)
AddPTThalForSuper adds a PT pyramidal tract layer and a Thalamus layer for given superficial layer (SuperLayer) with given suffix (e.g., MD, VM). Projections are made with given classes: SuperToPT, PTSelfMaint, CTtoThal. The PT and Thal layers are positioned behind the CT layer.
func (*Network) AddThalLayer2D ¶ added in v1.5.10
AddThalLayer2D adds a BG gated thalamus (e.g., VA/VL/VM, MD) Layer of given size, with given name. This version has a 2D structure
func (*Network) AddThalLayer4D ¶ added in v1.5.10
AddThalLayer4D adds a BG gated thalamus (e.g., VA/VL/VM, MD) Layer of given size, with given name. This version has a 4D structure, with Pools representing separable gating domains.
func (*Network) ConnectToMatrix ¶
ConnectToMatrix adds a MatrixPrjn from given sending layer to a matrix layer
func (*Network) SynVarNames ¶
SynVarNames returns the names of all the variables on the synapses in this network.
func (*Network) UnitVarNames ¶
UnitVarNames returns a list of variable names available on the units in this layer
type PTLayer ¶ added in v1.5.10
type PTLayer struct { rl.Layer // access as .Layer ThalNMDAGain float32 `` /* 146-byte string literal not displayed */ }
PTLayer implements the pyramidal tract layer 5 intrinsic bursting deep neurons.
func AddPTLayer2D ¶ added in v1.5.10
AddPTLayer2D adds a PTLayer of given size, with given name.
func AddPTLayer4D ¶ added in v1.5.10
AddPTLayer4D adds a PTLayer of given size, with given name.
func (*PTLayer) GFmSpikeRaw ¶ added in v1.6.0
func (*PTLayer) UpdateParams ¶ added in v1.5.10
func (ly *PTLayer) UpdateParams()
type STNLayer ¶
type STNLayer struct { rl.Layer Ca CaParams `` /* 186-byte string literal not displayed */ STNNeurs []STNNeuron `` /* 149-byte string literal not displayed */ }
STNLayer represents STN neurons, with two subtypes: STNp are more strongly driven and get over bursting threshold, driving strong, rapid activation of the KCa channels, causing a long pause in firing, which creates a window during which GPe dynamics resolve Go vs. No balance. STNs are more weakly driven and thus more slowly activate KCa, resulting in a longer period of activation, during which the GPi is inhibited to prevent premature gating based only on MtxGo inhibition -- gating only occurs when GPeIn signal has had a chance to integrate its MtxNo inputs.
func AddSTNLayer2D ¶ added in v1.5.10
AddSTNLayer2D adds a subthalamic nucleus Layer of given size, with given name.
func AddSTNLayer4D ¶ added in v1.5.10
AddSTNLayer4D adds a subthalamic nucleus Layer of given size, with given name. Makes a 4D structure with Pools representing separable gating domains.
func (*STNLayer) Build ¶
Build constructs the layer state, including calling Build on the projections.
func (*STNLayer) UnitVal1D ¶
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*STNLayer) UnitVarIdx ¶
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*STNLayer) UnitVarNum ¶
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
func (*STNLayer) UpdateParams ¶ added in v1.5.1
func (ly *STNLayer) UpdateParams()
type STNNeuron ¶
type STNNeuron struct { SKCai float32 `` /* 158-byte string literal not displayed */ SKCaM float32 `desc:"Calcium-gated potassium channel gating factor, driven by SKCai via a Hill equation as in chans.SKPCaParams."` Gsk float32 `desc:"Calcium-gated potassium channel conductance as a function of Gbar * SKCaM."` }
STNNeuron holds the extra neuron (unit) level variables for STN computation.
func (*STNNeuron) VarByIndex ¶
VarByIndex returns variable using index (0 = first variable in STNNeuronVars list)
type ThalLayer ¶ added in v1.5.10
ThalLayer represents a BG gated thalamic layer, e.g., the Ventral thalamus: VA / VM / VL or MD mediodorsal thalamus, which receives BG gating in the form of an inhibitory projection from GPi.
func AddThalLayer2D ¶ added in v1.5.10
AddThalLayer2D adds a BG gated thalamus (e.g., VA/VL/VM, MD) Layer of given size, with given name. This version has a 2D structure
func AddThalLayer4D ¶ added in v1.5.10
AddThalLayer4D adds a BG gated thalamus (e.g., VA/VL/VM, MD) Layer of given size, with given name. This version has a 4D structure, with Pools representing separable gating domains.
func (*ThalLayer) DecayState ¶ added in v1.5.12
func (*ThalLayer) GatedFmAvgSpk ¶ added in v1.5.10
GatedFmAvgSpk updates the Gated values based on Avg SpkMax using given threshold. Called by Go Matrix layer. returns true if any gated.
func (*ThalLayer) UnitVal1D ¶ added in v1.5.10
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*ThalLayer) UnitVarIdx ¶ added in v1.5.10
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
func (*ThalLayer) UnitVarNum ¶ added in v1.5.10
UnitVarNum returns the number of Neuron-level variables for this layer. This is needed for extending indexes in derived types.
type TraceSyn ¶
type TraceSyn struct {
DTr float32 `desc:"delta trace = send * recv -- increments to Tr when a gating event happens."`
}
TraceSyn holds extra synaptic state for trace projections
func (*TraceSyn) VarByIndex ¶
VarByIndex returns synapse variable by index