From fafdc0c7f0cb4221fdb84b4316adb200fbec26d3 Mon Sep 17 00:00:00 2001
From: "Randall C. O'Reilly"
Date: Wed, 30 Oct 2024 02:53:15 -0700
Subject: [PATCH] better sir2 params

---
 examples/sir2/sir2.go | 21 +++++++++++----------
 leabra/layer.go       |  9 +++++++++
 2 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/examples/sir2/sir2.go b/examples/sir2/sir2.go
index 9c513f6..f7c7041 100644
--- a/examples/sir2/sir2.go
+++ b/examples/sir2/sir2.go
@@ -48,7 +48,7 @@ var ParamSets = params.Sets{
 	"Base": {
 		{Sel: "Path", Desc: "no extra learning factors",
 			Params: params.Params{
-				"Path.Learn.Lrate":       "0.02", // slower overall is key
+				"Path.Learn.Lrate":       "0.01", // slower overall is key
 				"Path.Learn.Norm.On":     "false",
 				"Path.Learn.Momentum.On": "false",
 				"Path.Learn.WtBal.On":    "false",
@@ -179,7 +179,7 @@ var ParamSets = params.Sets{
 			}},
 		{Sel: "#RWPred", Desc: "keep it guessing",
 			Params: params.Params{
-				"Layer.RW.PredRange.Min": "0.05", // single most important param! was .01 -- need penalty..
+				"Layer.RW.PredRange.Min": "0.02", // single most important param! was .01 -- need penalty..
 				"Layer.RW.PredRange.Max": "0.95",
 			}},
 	},
@@ -191,7 +191,7 @@ type Config struct {
 	NRuns int `default:"10" min:"1"`
 
 	// total number of epochs per run
-	NEpochs int `default:"100"`
+	NEpochs int `default:"200"`
 
 	// total number of trials per epochs per run
 	NTrials int `default:"100"`
@@ -333,13 +333,6 @@ func (ss *Sim) ConfigNet(net *leabra.Network) {
 
 	cin.CIN.RewLays.Add(rew.Name, rp.Name)
 
-	inp.PlaceAbove(rew)
-	out.PlaceRightOf(inp, 2)
-	ctrl.PlaceBehind(inp, 2)
-	hid.PlaceBehind(ctrl, 2)
-	mtxGo.PlaceRightOf(rew, 2)
-	pfcMnt.PlaceRightOf(out, 2)
-
 	full := paths.NewFull()
 	fmin := paths.NewRect()
 	fmin.Size.Set(1, 1)
@@ -356,6 +349,7 @@ func (ss *Sim) ConfigNet(net *leabra.Network) {
 	pt.AddClass("PFCFixed")
 
 	net.ConnectLayers(inp, hid, full, leabra.ForwardPath)
+	net.ConnectLayers(ctrl, hid, full, leabra.ForwardPath)
 	net.BidirConnectLayers(hid, out, full)
 	pt = net.ConnectLayers(pfcOutD, hid, full, leabra.ForwardPath)
 	pt.AddClass("FmPFCOutD")
@@ -363,6 +357,13 @@ func (ss *Sim) ConfigNet(net *leabra.Network) {
 	pt.AddClass("FmPFCOutD")
 	net.ConnectLayers(inp, out, full, leabra.ForwardPath)
 
+	inp.PlaceAbove(rew)
+	out.PlaceRightOf(inp, 2)
+	ctrl.PlaceBehind(inp, 2)
+	hid.PlaceBehind(ctrl, 2)
+	mtxGo.PlaceRightOf(rew, 2)
+	pfcMnt.PlaceRightOf(out, 2)
+
 	net.Build()
 	net.Defaults()
 
diff --git a/leabra/layer.go b/leabra/layer.go
index b7736ca..a38b44d 100644
--- a/leabra/layer.go
+++ b/leabra/layer.go
@@ -64,6 +64,15 @@ func (ly *Layer) InitActs() {
 	ly.NeuroMod.Init()
 }
 
+// UpdateActAvgEff updates the effective ActAvg.ActPAvgEff value used in netinput
+// scaling, from the current ActAvg.ActPAvg and fixed Init values.
+func (ly *Layer) UpdateActAvgEff() {
+	for pi := range ly.Pools {
+		pl := &ly.Pools[pi]
+		ly.Inhib.ActAvg.EffFromAvg(&pl.ActAvg.ActPAvgEff, pl.ActAvg.ActPAvg)
+	}
+}
+
 // InitWeightsSym initializes the weight symmetry.
 // higher layers copy weights from lower layers.
 func (ly *Layer) InitWtSym() {
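
Note (not part of the patch): per its doc comment, the new UpdateActAvgEff recomputes the effective ActPAvgEff value that netinput scaling reads, from the current running-average ActPAvg of each pool. A minimal sketch of a hypothetical call site follows, assuming the v2 module path and a layer whose Pools are already built; refreshAvgAct, the avg parameter, and the "override" scenario are illustrative assumptions, not something this patch adds:

	import "github.com/emer/leabra/v2/leabra"

	// refreshAvgAct is a hypothetical helper: it overrides a layer's
	// running-average activity (e.g., after loading saved state outside
	// the normal update bookkeeping) and then refreshes the effective
	// value used in netinput scaling via the new method.
	func refreshAvgAct(ly *leabra.Layer, avg float32) {
		for pi := range ly.Pools {
			ly.Pools[pi].ActAvg.ActPAvg = avg // assumed external override
		}
		ly.UpdateActAvgEff() // recompute ActPAvgEff from ActPAvg + Init values
	}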