start on pfcmaint test for pfc active maintenance
rcoreilly committed Apr 1, 2024
1 parent b753efa commit a1b5d36
Showing 9 changed files with 1,074 additions and 1 deletion.
7 changes: 7 additions & 0 deletions examples/pfcmaint/README.md
@@ -0,0 +1,7 @@
# VSPatch

This is a test for representing graded numerical values in terms of the overall population activity, which is essential for the striatum neurons used in the [pcore](../../PCORE_BG.md) and [pvlv](../../PVLV.md) models. The specific test case here is the `VSPatch` neurons that predict and discount graded rewards.

The model only has a `VSPatch` layer which is trained to predict the amount of reward for N different "conditions", each of which has T time steps of neural activity, culminating in the reward. It needs to _not_ respond for the first T-1 time steps, and then accurately predict the graded reward value at T.
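
For concreteness, here is a minimal, self-contained Go sketch of the trial structure this README describes: the prediction target is zero for the first T-1 steps of each condition and a graded, condition-specific value on the final step. The function name and the reward scaling below are hypothetical -- the actual environment code defines its own types and magnitudes.

package main

import "fmt"

// targetReward is the value the model should predict at time step t of a
// condition with T steps: zero before the final step, then a graded reward
// that differs by condition (the scaling here is purely illustrative).
func targetReward(cond, t, T int) float32 {
    if t < T-1 {
        return 0 // must not respond on the first T-1 steps
    }
    return 0.1 * float32(cond+1)
}

func main() {
    nConds, T := 4, 3
    for cond := 0; cond < nConds; cond++ {
        for t := 0; t < T; t++ {
            fmt.Printf("cond %d  step %d  target %g\n", cond, t, targetReward(cond, t, T))
        }
    }
}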


131 changes: 131 additions & 0 deletions examples/pfcmaint/config.go
@@ -0,0 +1,131 @@
// Copyright (c) 2023, The Emergent Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package main

// EnvConfig has config params for environment
// note: only adding fields for key Env params that matter for both Network and Env
// other params are set via the Env map data mechanism.
type EnvConfig struct {

    // env parameters -- can set any field/subfield on Env struct, using standard TOML formatting
    Env map[string]any
}

// ParamConfig has config parameters related to sim params
type ParamConfig struct {

    // number of units per dimension in the PFC
    NUnits int `default:"7"`

    // if true, perform automated parameter tweaking for parameters marked with Hypers Tweak = log, incr, or [vals]
    Tweak bool

    // for Tweak, if true, first run a baseline with current default params
    Baseline bool

    // for Tweak, if true, only print what would be done, don't run
    DryRun bool

    // network parameters
    Network map[string]any

    // Extra Param Sheet name(s) to use (space separated if multiple) -- must be valid name as listed in compiled-in params or loaded params
    Sheet string

    // extra tag to add to file names and logs saved from this run
    Tag string

    // user note -- describe the run params etc -- like a git commit message for the run
    Note string

    // Name of the JSON file to input saved parameters from.
    File string `nest:"+"`

    // Save a snapshot of all current param and config settings in a directory named params_<datestamp> (or params_good if Good is true), then quit -- useful for comparing to later changes and seeing multiple views of current params
    SaveAll bool `nest:"+"`

    // for SaveAll, save to params_good for a known good params state. This can be done prior to making a new release after all tests are passing -- add results to git to provide a full diff record of all params over time.
    Good bool `nest:"+"`
}

// RunConfig has config parameters related to running the sim
type RunConfig struct {

    // use the GPU for computation -- generally faster even for small models if NData ~16
    GPU bool `default:"false"`

    // number of data-parallel items to process in parallel per trial -- works (and is significantly faster) for both CPU and GPU. Results in an effective mini-batch of learning.
    NData int `default:"1" min:"1"`

    // number of parallel threads for CPU computation -- 0 = use default
    NThreads int `default:"0"`

    // starting run number -- determines the random seed -- runs count up from there -- can do all runs in parallel by launching separate jobs for each run, with NRuns = 1
    Run int `default:"0"`

    // total number of runs to do when running Train
    NRuns int `default:"1" min:"1"`

    // total number of epochs per run
    NEpochs int `default:"30"`

    // total number of trials per epoch. Should be an even multiple of NData.
    NTrials int `default:"128"`
}

// LogConfig has config parameters related to logging data
type LogConfig struct {

    // if true, save final weights after each run
    SaveWts bool

    // if true, save train epoch log to file, as .epc.tsv typically
    Epoch bool `default:"true" nest:"+"`

    // if true, save run log to file, as .run.tsv typically
    Run bool `default:"true" nest:"+"`

    // if true, save train trial log to file, as .trl.tsv typically. May be large.
    Trial bool `default:"false" nest:"+"`

    // if true, save testing epoch log to file, as .tst_epc.tsv typically. In general it is better to copy testing items over to the training epoch log and record there.
    TestEpoch bool `default:"false" nest:"+"`

    // if true, save testing trial log to file, as .tst_trl.tsv typically. May be large.
    TestTrial bool `default:"false" nest:"+"`

    // if true, save network activation etc data from testing trials, for later viewing in netview
    NetData bool

    // activates testing mode -- records detailed data for Go CI tests (not the same as running test mode on network, via Looper)
    Testing bool
}

// Config is a standard Sim config -- use as a starting point.
type Config struct {

// specify include files here, and after configuration, it contains list of include files added
Includes []string

// open the GUI -- does not automatically run -- if false, then runs automatically and quits
GUI bool `default:"true"`

// log debugging information
Debug bool

// environment configuration options
Env EnvConfig `view:"add-fields"`

// parameter related configuration options
Params ParamConfig `view:"add-fields"`

// sim running related configuration options
Run RunConfig `view:"add-fields"`

// data logging related configuration options
Log LogConfig `view:"add-fields"`
}

func (cfg *Config) IncludesPtr() *[]string { return &cfg.Includes }
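
The `default:"..."` struct tags above (NData 1, NEpochs 30, NTrials 128, etc.) are seed values that the config loading step fills in before any TOML file or command-line overrides. As a rough standalone sketch of just that tag mechanism -- not the actual econfig implementation; the small mirror struct below is hypothetical:

package main

import (
    "fmt"
    "reflect"
    "strconv"
)

// runDefaults mirrors a few RunConfig fields purely for illustration.
type runDefaults struct {
    NData   int  `default:"1"`
    NEpochs int  `default:"30"`
    NTrials int  `default:"128"`
    GPU     bool `default:"false"`
}

// applyDefaults fills int and bool fields from their `default:"..."` tags.
func applyDefaults(cfg any) {
    v := reflect.ValueOf(cfg).Elem()
    t := v.Type()
    for i := 0; i < t.NumField(); i++ {
        def, ok := t.Field(i).Tag.Lookup("default")
        if !ok {
            continue
        }
        f := v.Field(i)
        switch f.Kind() {
        case reflect.Int:
            if n, err := strconv.Atoi(def); err == nil {
                f.SetInt(int64(n))
            }
        case reflect.Bool:
            if b, err := strconv.ParseBool(def); err == nil {
                f.SetBool(b)
            }
        }
    }
}

func main() {
    rc := &runDefaults{}
    applyDefaults(rc)
    fmt.Printf("%+v\n", *rc) // {NData:1 NEpochs:30 NTrials:128 GPU:false}
}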
52 changes: 52 additions & 0 deletions examples/pfcmaint/params.go
@@ -0,0 +1,52 @@
// Copyright (c) 2022, The Emergent Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package main

import (
    "github.com/emer/emergent/v2/netparams"
    "github.com/emer/emergent/v2/params"
)

// ParamSets is the default set of parameters -- Base is always applied,
// and others can be optionally selected to apply on top of that
var ParamSets = netparams.Sets{
    "Base": {
        {Sel: "Layer", Desc: "",
            Params: params.Params{
                "Layer.Acts.Clamp.Ge": "1.0", // 1.5 is def, was 0.6 (too low)
                // "Layer.Inhib.ActAvg.Nominal": "0.2",
            }},
        {Sel: ".Time", Desc: "",
            Params: params.Params{
                "Layer.Inhib.ActAvg.Nominal": "0.05",
            }},
        {Sel: ".PFCPrjn", Desc: "",
            Params: params.Params{
                "Prjn.PrjnScale.Abs": "2.0",
            }},
        {Sel: "#GPiToPFCThal", Desc: "",
            Params: params.Params{
                "Prjn.PrjnScale.Abs": "4.0",
            }},
        {Sel: ".PTMaintLayer", Desc: "time integration params",
            Params: params.Params{
                "Layer.Acts.Dend.ModGain": "1.5",
                "Layer.Inhib.Layer.Gi":    "2.6",
                "Layer.Inhib.Pool.Gi":     "3.6",
            }},
        {Sel: ".BGThalLayer", Desc: "",
            Params: params.Params{
                "Layer.Learn.NeuroMod.AChDisInhib": "0",
            }},
        {Sel: ".PTSelfMaint", Desc: "",
            Params: params.Params{
                "Prjn.PrjnScale.Abs": "5.0", // note: too much! need a better strat
            }},
        {Sel: ".SuperToThal", Desc: "",
            Params: params.Params{
                "Prjn.PrjnScale.Abs": "4.0", // 4 > 2 for gating sooner
            }},
    },
}
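
The Sel strings follow the CSS-like selector convention used by these params sheets: `#Name` selects a specific layer or projection by name, `.Class` selects everything carrying that class label, and a bare type name like `Layer` or `Prjn` selects by type. A minimal sketch of that matching logic (not the actual params package code; the example layer name and classes below are hypothetical):

package main

import (
    "fmt"
    "strings"
)

// matches reports whether a params selector applies to an item:
// "#Name" matches by exact name, ".Class" matches any of the item's
// classes, and a bare word matches the item's type.
func matches(sel, name, typ string, classes []string) bool {
    switch {
    case strings.HasPrefix(sel, "#"):
        return name == sel[1:]
    case strings.HasPrefix(sel, "."):
        for _, c := range classes {
            if c == sel[1:] {
                return true
            }
        }
        return false
    default:
        return typ == sel
    }
}

func main() {
    // A hypothetical PT maintenance layer in this model:
    name, typ, classes := "PFCPT", "Layer", []string{"PTMaintLayer", "PFC"}
    for _, sel := range []string{"Layer", ".PTMaintLayer", ".Time", "#GPiToPFCThal"} {
        fmt.Printf("%-15s -> %v\n", sel, matches(sel, name, typ, classes))
    }
}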
91 changes: 91 additions & 0 deletions examples/pfcmaint/paramtweak.go
@@ -0,0 +1,91 @@
// Copyright (c) 2019, The Emergent Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package main

import (
    "fmt"
    "time"

    "cogentcore.org/core/laser"
    "github.com/emer/emergent/v2/elog"
    "github.com/emer/emergent/v2/emer"
    "github.com/emer/emergent/v2/etime"
    "github.com/emer/emergent/v2/params"
    "github.com/emer/empi/v2/mpi"
)

// RunParamTweak runs the automated parameter tweaking search: for each
// parameter marked with Tweak hypers, it runs the training loop at each
// candidate value, logging results under a per-value tag, and restores the
// original value afterward.
func (ss *Sim) RunParamTweak() {
    ss.Config.Run.NRuns = 25 // 10
    ss.Config.Log.Run = true
    ss.Config.Log.Epoch = true

    tstamp := time.Now().Format("2006-01-02-15-04")

    ctag := ss.Params.Tag
    ss.Params.Tag = tstamp
    if ctag != "" {
        ss.Params.Tag += "_" + ctag
    }
    runName := ss.Params.RunName(ss.Config.Run.Run)
    ss.Stats.SetString("RunName", runName) // used for naming logs, stats, etc
    netName := ss.Net.Name()

    if !ss.Config.Params.DryRun {
        elog.SetLogFile(&ss.Logs, ss.Config.Log.Epoch, etime.Train, etime.Epoch, "epc", netName, runName)
        elog.SetLogFile(&ss.Logs, ss.Config.Log.Run, etime.Train, etime.Run, "run", netName, runName)
        elog.SetLogFile(&ss.Logs, true, etime.Train, etime.Expt, "expt", netName, runName)
    }

    ss.Init()

    srch := params.TweaksFromHypers(ss.Params.NetHypers)
    if len(srch) == 0 {
        fmt.Println("no tweak items to search!")
        return
    }

    if ss.Config.Params.DryRun {
        fmt.Println("Searching:", laser.StringJSON(srch))
    }

    ss.Loops.GetLoop(etime.Train, etime.Run).Counter.SetCurMaxPlusN(ss.Config.Run.Run, ss.Config.Run.NRuns)
    if ss.Config.Run.GPU {
        ss.Net.ConfigGPUnoGUI(&ss.Context)
    }
    mpi.Printf("Set NThreads to: %d\n", ss.Net.NThreads)

    if !ss.Config.Params.DryRun && ss.Config.Params.Baseline {
        fmt.Println("Running baseline")
        ss.Loops.Run(etime.Train)
        ss.Init() // start fresh next time
    }

    for _, twk := range srch {
        sv0 := twk.Search[0]
        for i, val := range sv0.Values {
            tag := fmt.Sprintf("%s_%s_%g", twk.Sel.Sel, twk.Param, val)
            for _, sv := range twk.Search {
                val := sv.Values[i] // should be the same across search items
                emer.SetFloatParam(ss.Net, sv.Name, sv.Type, sv.Path, val)
            }
            ss.Params.Tag = tag
            runName := ss.Params.RunName(ss.Config.Run.Run)
            ss.Stats.SetString("RunName", runName) // used for naming logs, stats, etc
            fmt.Println("Running:", tag)
            if !ss.Config.Params.DryRun {
                ss.Net.UpdateParams()
                ss.Net.InitGScale(&ss.Net.Ctx)
                ss.Net.GPU.SyncParamsToGPU() // critical!
                ss.Loops.Run(etime.Train)
                ss.Init() // start fresh next time -- params will be applied on top of this
            }
        }
        for _, sv := range twk.Search {
            emer.SetFloatParam(ss.Net, sv.Name, sv.Type, sv.Path, sv.Start) // restore
        }
    }

    ss.Net.GPU.Destroy() // safe even if no GPU
}
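
params.TweaksFromHypers expands each parameter marked for tweaking into the list of candidate values that the loop above runs and then restores. Purely as an illustration of the kind of values an "incr"-style tweak might produce around a starting value -- this is an assumption-based sketch, not the library's actual algorithm:

package main

import (
    "fmt"
    "math"
)

// incrTweak is an illustrative stand-in for an "incr"-style tweak: it steps
// a value down and up by one unit of the digit below its leading digit.
// The real candidate values are defined by params.TweaksFromHypers.
func incrTweak(v float64) (lo, hi float64) {
    if v == 0 {
        return -0.1, 0.1
    }
    exp := math.Floor(math.Log10(math.Abs(v)))
    step := math.Pow(10, exp-1) // e.g. 2.0 -> step 0.1, 0.05 -> step 0.001
    return v - step, v + step
}

func main() {
    for _, v := range []float64{2.0, 0.05, 1.5} {
        lo, hi := incrTweak(v)
        fmt.Printf("%g -> %.4g, %.4g\n", v, lo, hi)
    }
}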
(Diffs for the remaining changed files are not shown here.)
