Documentation ¶
Index ¶
- Variables
- func STNNeuronVarByName(varNm string) (int, error)
- type CINLayer
- func (ly *CINLayer) ActFmG(ltime *leabra.Time)
- func (ly *CINLayer) Build() error
- func (ly *CINLayer) CyclePost(ltime *leabra.Time)
- func (ly *CINLayer) Defaults()
- func (ly *CINLayer) GetACh() float32
- func (ly *CINLayer) RewLayer() (*leabra.Layer, error)
- func (ly *CINLayer) SetACh(ach float32)
- func (ly *CINLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *CINLayer) UnitVarIdx(varNm string) (int, error)
- type CaParams
- type DaModParams
- type DaReceptors
- type GPLayer
- func (ly *GPLayer) ActFmG(ltime *leabra.Time)
- func (ly *GPLayer) AlphaCycInit()
- func (ly *GPLayer) Build() error
- func (ly *GPLayer) Defaults()
- func (ly *GPLayer) GetACh() float32
- func (ly *GPLayer) InitActs()
- func (ly *GPLayer) InitMinAct()
- func (ly *GPLayer) MinActFmAct(ltime *leabra.Time)
- func (ly *GPLayer) SetACh(ach float32)
- func (ly *GPLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *GPLayer) UnitVarIdx(varNm string) (int, error)
- type GPeInPrjn
- type GPiLayer
- type GPiPrjn
- type GPiTraceParams
- type Layer
- type MatrixLayer
- func (ly *MatrixLayer) ActFmG(ltime *leabra.Time)
- func (ly *MatrixLayer) AlphaCycInit()
- func (ly *MatrixLayer) Build() error
- func (ly *MatrixLayer) DALrnFmDA(da float32) float32
- func (ly *MatrixLayer) Defaults()
- func (ly *MatrixLayer) GetACh() float32
- func (ly *MatrixLayer) InitActs()
- func (ly *MatrixLayer) InitMaxAct()
- func (ly *MatrixLayer) SetACh(ach float32)
- func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
- type MatrixParams
- type MatrixPrjn
- type MatrixTraceParams
- type Network
- func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (...)
- func (nt *Network) AddCINLayer(name string) *CINLayer
- func (nt *Network) AddGPeLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *GPLayer
- func (nt *Network) AddGPiLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *GPiLayer
- func (nt *Network) AddMatrixLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int, da DaReceptors) *MatrixLayer
- func (nt *Network) AddSTNLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *STNLayer
- func (nt *Network) AddVThalLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *VThalLayer
- func (nt *Network) ConnectToMatrix(send, recv emer.Layer, pat prjn.Pattern) emer.Prjn
- func (nt *Network) Defaults()
- func (nt *Network) NewLayer() emer.Layer
- func (nt *Network) NewPrjn() emer.Prjn
- func (nt *Network) SynVarNames() []string
- func (nt *Network) UnitVarNames() []string
- func (nt *Network) UpdateParams()
- type STNLayer
- func (ly *STNLayer) ActFmG(ltime *leabra.Time)
- func (ly *STNLayer) AlphaCycInit()
- func (ly *STNLayer) Build() error
- func (ly *STNLayer) Defaults()
- func (ly *STNLayer) GetDA() float32
- func (ly *STNLayer) InitActs()
- func (ly *STNLayer) SetDA(da float32)
- func (ly *STNLayer) UnitVal1D(varIdx int, idx int) float32
- func (ly *STNLayer) UnitVarIdx(varNm string) (int, error)
- type STNNeuron
- type TraceSyn
- type VThalLayer
Constants ¶
This section is empty.
Variables ¶
var ( // NeuronVars are extra neuron variables for bgate NeuronVars = []string{"DA", "DALrn", "ACh", "Ca", "KCa"} // NeuronVarsAll is the bgate collection of all neuron-level vars NeuronVarsAll []string // SynVarsAll is the bgate collection of all synapse-level vars (includes TraceSynVars) SynVarsAll []string )
var ( STNNeuronVars = []string{"Ca", "KCa"} STNNeuronVarsMap map[string]int )
var KiT_CINLayer = kit.Types.AddType(&CINLayer{}, leabra.LayerProps)
var KiT_DaReceptors = kit.Enums.AddEnum(DaReceptorsN, kit.NotBitFlag, nil)
var KiT_GPLayer = kit.Types.AddType(&GPLayer{}, leabra.LayerProps)
var KiT_GPiLayer = kit.Types.AddType(&GPiLayer{}, leabra.LayerProps)
var KiT_Layer = kit.Types.AddType(&Layer{}, leabra.LayerProps)
var KiT_MatrixLayer = kit.Types.AddType(&MatrixLayer{}, leabra.LayerProps)
var KiT_MatrixPrjn = kit.Types.AddType(&MatrixPrjn{}, leabra.PrjnProps)
var KiT_Network = kit.Types.AddType(&Network{}, NetworkProps)
var KiT_STNLayer = kit.Types.AddType(&STNLayer{}, leabra.LayerProps)
var KiT_VThalLayer = kit.Types.AddType(&VThalLayer{}, leabra.LayerProps)
var NetworkProps = deep.NetworkProps
var TraceSynVars = []string{"NTr", "Tr"}
Functions ¶
func STNNeuronVarByName ¶
STNNeuronVarByName returns the index of the variable in the STNNeuron, or error
Types ¶
type CINLayer ¶
type CINLayer struct { leabra.Layer RewLay string `desc:"name of Reward-representing layer from which this computes ACh as absolute value"` SendACh rl.SendACh `desc:"list of layers to send acetylcholine to"` ACh float32 `desc:"acetylcholine value for this layer"` }
CINLayer (cholinergic interneuron) reads reward signals from a named source layer and sends the absolute value of that activity as the positively-rectified non-prediction-discounted reward signal computed by CINs, and sent as an acetylcholine (ACh) signal.
func (*CINLayer) Build ¶
Build constructs the layer state, including calling Build on the projections.
func (*CINLayer) CyclePost ¶
CyclePost is called at end of Cycle We use it to send ACh, which will then be active for the next cycle of processing.
type CaParams ¶
type CaParams struct { BurstThr float32 `` /* 244-byte string literal not displayed */ ActThr float32 `def:"0.7" desc:"activation threshold for increment in activation above baseline that drives lower influx of Ca"` BurstCa float32 `def:"0.1" desc:"Ca level for burst level activation"` ActCa float32 `` /* 187-byte string literal not displayed */ GbarKCa float32 `def:"20" desc:"maximal KCa conductance (actual conductance is applied to KNa channels)"` KCaTau float32 `def:"20" desc:"KCa conductance time constant -- 40 from Gillies & Willshaw, 2006"` CaTau float32 `def:"185.7" desc:"Ca time constant of decay to baseline -- 185.7 from Gillies & Willshaw, 2006"` AlphaInit bool `desc:"initialize Ca, KCa values at start of every AlphaCycle"` }
CaParams control the calcium dynamics in STN neurons. Gillies & Willshaw, 2006 provide a biophysically detailed simulation, and we use their logistic function for computing KCa conductance based on Ca, but we use a simpler approximation with burst and act threshold. KCa are Calcium-gated potassium channels that drive the long afterhyperpolarization of STN neurons. Auto reset at each AlphaCycle. The conductance is applied to KNa channels to take advantage of the existing infrastructure.
type DaModParams ¶
type DaModParams struct { On bool `desc:"whether to use dopamine modulation"` ModGain bool `viewif:"On" desc:"modulate gain instead of Ge excitatory synaptic input"` Minus float32 `` /* 145-byte string literal not displayed */ Plus float32 `` /* 144-byte string literal not displayed */ NegGain float32 `` /* 208-byte string literal not displayed */ PosGain float32 `` /* 208-byte string literal not displayed */ }
DaModParams are parameters for the effects of dopamine (Da) based modulation, typically adding a Da-based term to the Ge excitatory synaptic input. Plus-phase = learning effects relative to minus-phase "performance" dopamine effects.
func (*DaModParams) Defaults ¶
func (dm *DaModParams) Defaults()
func (*DaModParams) Gain ¶
func (dm *DaModParams) Gain(da, gain float32, plusPhase bool) float32
Gain returns da-modulated gain value
func (*DaModParams) GainModOn ¶
func (dm *DaModParams) GainModOn() bool
GainModOn returns true if modulating Gain
func (*DaModParams) Ge ¶
func (dm *DaModParams) Ge(da, ge float32, plusPhase bool) float32
Ge returns da-modulated ge value
func (*DaModParams) GeModOn ¶
func (dm *DaModParams) GeModOn() bool
GeModOn returns true if modulating Ge
type DaReceptors ¶
type DaReceptors int
DaReceptors for D1R and D2R dopamine receptors
const ( // D1R primarily expresses Dopamine D1 Receptors -- dopamine is excitatory and bursts of dopamine lead to increases in synaptic weight, while dips lead to decreases -- direct pathway in dorsal striatum D1R DaReceptors = iota // D2R primarily expresses Dopamine D2 Receptors -- dopamine is inhibitory and bursts of dopamine lead to decreases in synaptic weight, while dips lead to increases -- indirect pathway in dorsal striatum D2R DaReceptorsN )
func (*DaReceptors) FromString ¶
func (i *DaReceptors) FromString(s string) error
func (DaReceptors) MarshalJSON ¶
func (ev DaReceptors) MarshalJSON() ([]byte, error)
func (DaReceptors) String ¶
func (i DaReceptors) String() string
func (*DaReceptors) UnmarshalJSON ¶
func (ev *DaReceptors) UnmarshalJSON(b []byte) error
type GPLayer ¶
type GPLayer struct { Layer MinActCyc int `` /* 219-byte string literal not displayed */ AlphaMinAct []float32 `desc:"per-neuron minimum activation value during alpha cycle, after MinActCyc"` ACh float32 `` /* 190-byte string literal not displayed */ }
GPLayer represents a globus pallidus layer of the BG (used for the GPe variants; GPiLayer embeds it for GPi / SNr). Minimum activation during gating period drives ActLrn value used for learning. Typically just a single unit per Pool representing a given stripe.
func (*GPLayer) ActFmG ¶
ActFmG computes rate-code activation from Ge, Gi, Gl conductances and updates learning running-average activations from that Act. GP extends to compute AlphaMinAct
func (*GPLayer) AlphaCycInit ¶
func (ly *GPLayer) AlphaCycInit()
AlphaCycInit handles all initialization at start of new input pattern, including computing input scaling from running average activation etc. should already have presented the external input to the network at this point.
func (*GPLayer) Build ¶
Build constructs the layer state, including calling Build on the projections you MUST have properly configured the Inhib.Pool.On setting by this point to properly allocate Pools for the unit groups if necessary.
func (*GPLayer) InitMinAct ¶
func (ly *GPLayer) InitMinAct()
InitMinAct initializes AlphaMinAct to 0
func (*GPLayer) MinActFmAct ¶
MinActFmAct computes the AlphaMinAct values from current activations, and updates ActLrn
type GPeInPrjn ¶
GPeInPrjn must be used with GPLayer. Learns from DA and gating status.
type GPiLayer ¶
type GPiLayer struct {
GPLayer
}
GPiLayer represents the GPi / SNr output nucleus of the BG. It gets inhibited by the MtxGo and GPeIn layers, and its minimum activation during this inhibition is recorded in ActLrn, for learning. Typically just a single unit per Pool representing a given stripe.
type GPiPrjn ¶
type GPiPrjn struct { leabra.Prjn Trace GPiTraceParams `view:"inline" desc:"parameters for GPi trace learning"` TrSyns []TraceSyn `desc:"trace synaptic state values, ordered by the sending layer units which owns them -- one-to-one with SConIdx array"` }
GPiPrjn must be used with GPi recv layer, from MtxGo, GPeIn senders. Learns from DA and ActLrn on GPi neuron.
func (*GPiPrjn) ClearTrace ¶
func (pj *GPiPrjn) ClearTrace()
func (*GPiPrjn) DWt ¶
func (pj *GPiPrjn) DWt()
DWt computes the weight change (learning) -- on sending projections.
type GPiTraceParams ¶
type GPiTraceParams struct { CurTrlDA bool `` /* 278-byte string literal not displayed */ Decay float32 `` /* 168-byte string literal not displayed */ GateAct float32 `` /* 172-byte string literal not displayed */ }
GPiTraceParams are parameters for trace-based learning in the GPiPrjn. A trace of synaptic co-activity is formed, and then modulated by dopamine whenever it occurs. This bridges the temporal gap between gating activity and subsequent activity, and is based biologically on synaptic tags. Trace is reset at time of reward based on ACh level from CINs.
func (*GPiTraceParams) Defaults ¶
func (tp *GPiTraceParams) Defaults()
func (*GPiTraceParams) LrnFactor ¶
func (tp *GPiTraceParams) LrnFactor(act float32) float32
LrnFactor returns multiplicative factor for GPi activation, centered on GateAct param. If act < GateAct, returns (GateAct - act) / GateAct. If act > GateAct, returns (GateAct - act) / (1 - GateAct)
type Layer ¶
Layer is the base layer type for BGate framework. Adds a dopamine variable to base Leabra layer type.
type MatrixLayer ¶
type MatrixLayer struct { Layer DaR DaReceptors `desc:"dominant type of dopamine receptor -- D1R for Go pathway, D2R for NoGo"` Matrix MatrixParams `view:"inline" desc:"matrix parameters"` DALrn float32 `inactive:"+" desc:"effective learning dopamine value for this layer: reflects DaR and Gains"` ACh float32 `` /* 190-byte string literal not displayed */ AlphaMaxAct []float32 `desc:"per-neuron maximum activation value during alpha cycle"` }
MatrixLayer represents the dorsal matrisome MSN's that are the main Go / NoGo gating units in BG. D1R = Go, D2R = NoGo.
func (*MatrixLayer) ActFmG ¶
func (ly *MatrixLayer) ActFmG(ltime *leabra.Time)
ActFmG computes rate-code activation from Ge, Gi, Gl conductances and updates learning running-average activations from that Act. Matrix extends to call DALrnFmDA and updates AlphaMaxAct -> ActLrn
func (*MatrixLayer) AlphaCycInit ¶
func (ly *MatrixLayer) AlphaCycInit()
AlphaCycInit handles all initialization at start of new input pattern, including computing input scaling from running average activation etc. should already have presented the external input to the network at this point.
func (*MatrixLayer) Build ¶
func (ly *MatrixLayer) Build() error
Build constructs the layer state, including calling Build on the projections you MUST have properly configured the Inhib.Pool.On setting by this point to properly allocate Pools for the unit groups if necessary.
func (*MatrixLayer) DALrnFmDA ¶
func (ly *MatrixLayer) DALrnFmDA(da float32) float32
DALrnFmDA returns effective learning dopamine value from given raw DA value applying Burst and Dip Gain factors, and then reversing sign for D2R.
func (*MatrixLayer) Defaults ¶
func (ly *MatrixLayer) Defaults()
func (*MatrixLayer) GetACh ¶
func (ly *MatrixLayer) GetACh() float32
func (*MatrixLayer) InitActs ¶
func (ly *MatrixLayer) InitActs()
func (*MatrixLayer) InitMaxAct ¶
func (ly *MatrixLayer) InitMaxAct()
InitMaxAct initializes the AlphaMaxAct to 0
func (*MatrixLayer) SetACh ¶
func (ly *MatrixLayer) SetACh(ach float32)
func (*MatrixLayer) UnitVal1D ¶
func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32
UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. returns NaN on invalid index. This is the core unit var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
func (*MatrixLayer) UnitVarIdx ¶
func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error)
UnitVarIdx returns the index of given variable within the Neuron, according to UnitVarNames() list (using a map to lookup index), or -1 and error message if not found.
type MatrixParams ¶
type MatrixParams struct { BurstGain float32 `` /* 237-byte string literal not displayed */ DipGain float32 `` /* 237-byte string literal not displayed */ }
MatrixParams has parameters for Dorsal Striatum Matrix computation These are the main Go / NoGo gating units in BG driving updating of PFC WM in PBWM
func (*MatrixParams) Defaults ¶
func (mp *MatrixParams) Defaults()
type MatrixPrjn ¶
type MatrixPrjn struct { leabra.Prjn Trace MatrixTraceParams `view:"inline" desc:"special parameters for matrix trace learning"` TrSyns []TraceSyn `desc:"trace synaptic state values, ordered by the sending layer units which owns them -- one-to-one with SConIdx array"` }
MatrixPrjn does dopamine-modulated, gated trace learning, for Matrix learning in PBWM context
func (*MatrixPrjn) Build ¶
func (pj *MatrixPrjn) Build() error
func (*MatrixPrjn) ClearTrace ¶
func (pj *MatrixPrjn) ClearTrace()
func (*MatrixPrjn) DWt ¶
func (pj *MatrixPrjn) DWt()
DWt computes the weight change (learning) -- on sending projections.
func (*MatrixPrjn) Defaults ¶
func (pj *MatrixPrjn) Defaults()
func (*MatrixPrjn) InitWts ¶
func (pj *MatrixPrjn) InitWts()
func (*MatrixPrjn) SynVal1D ¶
func (pj *MatrixPrjn) SynVal1D(varIdx int, synIdx int) float32
SynVal1D returns value of given variable index (from SynVarIdx) on given SynIdx. Returns NaN on invalid index. This is the core synapse var access method used by other methods, so it is the only one that needs to be updated for derived layer types.
type MatrixTraceParams ¶
type MatrixTraceParams struct { CurTrlDA bool `` /* 278-byte string literal not displayed */ Decay float32 `` /* 168-byte string literal not displayed */ Deriv bool `` /* 328-byte string literal not displayed */ }
MatrixTraceParams are parameters for trace-based learning in the MatrixPrjn. A trace of synaptic co-activity is formed, and then modulated by dopamine whenever it occurs. This bridges the temporal gap between gating activity and subsequent activity, and is based biologically on synaptic tags. Trace is reset at time of reward based on ACh level from CINs.
func (*MatrixTraceParams) Defaults ¶
func (tp *MatrixTraceParams) Defaults()
func (*MatrixTraceParams) LrnFactor ¶
func (tp *MatrixTraceParams) LrnFactor(act float32) float32
LrnFactor returns the multiplicative factor for the level of MSN activation. If Deriv is true, the factor is 2 * act * (1-act) -- the factor of 2 compensates for the otherwise reduced learning from these factors. Otherwise the factor is just act.
type Network ¶
bgate.Network has methods for configuring specialized BGATE network components
func (*Network) AddBG ¶
func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (mtxGo, mtxNo, cin, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal leabra.LeabraLayer)
AddBG adds MtxGo, No, CIN, GPeOut, GPeIn, GPeTA, STNp, STNs, GPi, and VThal layers, with given optional prefix. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Only Matrix has more than 1 unit per Pool by default. Appropriate PoolOneToOne connections are made between layers, using standard styles
func (*Network) AddCINLayer ¶
AddCINLayer adds a CINLayer, with a single neuron.
func (*Network) AddGPeLayer ¶
AddGPeLayer adds a GPLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*Network) AddGPiLayer ¶
AddGPiLayer adds a GPiLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*Network) AddMatrixLayer ¶
func (nt *Network) AddMatrixLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int, da DaReceptors) *MatrixLayer
AddMatrixLayer adds a MatrixLayer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. da gives the DaReceptor type (D1R = Go, D2R = NoGo)
func (*Network) AddSTNLayer ¶
AddSTNLayer adds a subthalamic nucleus Layer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*Network) AddVThalLayer ¶
func (nt *Network) AddVThalLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX int) *VThalLayer
AddVThalLayer adds a ventral thalamus (VA/VL/VM) Layer of given size, with given name. Assumes that a 4D structure will be used, with Pools representing separable gating domains. Typically nNeurY, nNeurX will both be 1, but could have more for noise etc.
func (*Network) ConnectToMatrix ¶
ConnectToMatrix adds a MatrixTracePrjn from given sending layer to a matrix layer
func (*Network) Defaults ¶
func (nt *Network) Defaults()
Defaults sets all the default parameters for all layers and projections
func (*Network) SynVarNames ¶
SynVarNames returns the names of all the variables on the synapses in this network.
func (*Network) UnitVarNames ¶
UnitVarNames returns a list of variable names available on the units in this layer
func (*Network) UpdateParams ¶
func (nt *Network) UpdateParams()
UpdateParams updates all the derived parameters if any have changed, for all layers and projections
type STNLayer ¶
type STNLayer struct { Layer Ca CaParams `` /* 186-byte string literal not displayed */ STNNeurs []STNNeuron `` /* 149-byte string literal not displayed */ }
STNLayer represents the pausing subtype of STN neurons. These open the gating window.
func (*STNLayer) AlphaCycInit ¶
func (ly *STNLayer) AlphaCycInit()
AlphaCycInit handles all initialization at start of new input pattern, including computing input scaling from running average activation etc. should already have presented the external input to the network at this point.
func (*STNLayer) Build ¶
Build constructs the layer state, including calling Build on the projections.
type STNNeuron ¶
type STNNeuron struct { Ca float32 `` /* 169-byte string literal not displayed */ KCa float32 `` /* 129-byte string literal not displayed */ }
STNNeuron holds the extra neuron (unit) level variables for STN computation.
func (*STNNeuron) VarByIndex ¶
VarByIndex returns variable using index (0 = first variable in STNNeuronVars list)
type TraceSyn ¶
type TraceSyn struct { NTr float32 `desc:"new trace = send * recv -- drives updates to trace value: sn.ActLrn * rn.ActLrn (subject to derivative too)"` Tr float32 `` /* 136-byte string literal not displayed */ }
TraceSyn holds extra synaptic state for trace projections
func (*TraceSyn) VarByIndex ¶
VarByIndex returns synapse variable by index
type VThalLayer ¶
type VThalLayer struct { leabra.Layer DA float32 `inactive:"+" desc:"dopamine value for this layer"` }
VThalLayer represents the Ventral thalamus: VA / VM / VL which receives BG gating.
func (*VThalLayer) Defaults ¶
func (ly *VThalLayer) Defaults()
func (*VThalLayer) GetDA ¶
func (ly *VThalLayer) GetDA() float32
func (*VThalLayer) InitActs ¶
func (ly *VThalLayer) InitActs()
func (*VThalLayer) SetDA ¶
func (ly *VThalLayer) SetDA(da float32)