Skip to content

Commit

Permalink
progress on gpu
Browse files Browse the repository at this point in the history
  • Loading branch information
rcoreilly committed Nov 22, 2024
1 parent fc10f4e commit 6645131
Show file tree
Hide file tree
Showing 10 changed files with 105 additions and 39 deletions.
4 changes: 4 additions & 0 deletions axon/act-net.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 4 additions & 0 deletions axon/act-net.goal
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ func (nt *Network) ApplyExts() {
ctx := nt.Context()
nd := int(nix.NNeurons * ctx.NData)
RunApplyExtsNeuron(nd)
// note: not completed
}

// MinusPhase does updating after end of minus phase.
Expand All @@ -91,6 +92,7 @@ func (nt *Network) MinusPhase() {
pd := int(nix.NPools * ctx.NData)
RunMinusPhasePool(pd)
RunMinusPhaseNeuron(nd)
RunDoneLayersNeurons()
nt.MinusPhasePost()
ToGPULayersNeurons()
// todo:
Expand All @@ -115,6 +117,7 @@ func (nt *Network) PlusPhaseStart() {
ctx := nt.Context()
nd := int(nix.NNeurons * ctx.NData)
RunPlusPhaseStartNeuron(nd)
RunDone()
}

// PlusPhase does updating after end of plus phase
Expand All @@ -125,6 +128,7 @@ func (nt *Network) PlusPhase() {
pd := int(nix.NPools * ctx.NData)
RunPlusPhasePool(pd)
RunPlusPhaseNeuron(nd)
RunDoneLayersNeurons()
nt.PlusPhasePost()
ToGPULayersNeurons()
// todo:
Expand Down
18 changes: 5 additions & 13 deletions axon/basic_test.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

18 changes: 5 additions & 13 deletions axon/basic_test.goal
Original file line number Diff line number Diff line change
Expand Up @@ -395,9 +395,9 @@ func TestNetActShort(t *testing.T) {
}

// TestGPUAct runs the short activation test on the GPU.
// GPU tests are opt-in: set the TEST_GPU environment variable
// to "true" to run them; otherwise the test is skipped.
func TestGPUAct(t *testing.T) {
	if os.Getenv("TEST_GPU") != "true" {
		t.Skip("Set TEST_GPU env var to run GPU tests")
	}
	NetActTestShort(t, Tol6, true)
}

Expand Down Expand Up @@ -522,7 +522,7 @@ func NetActTest(t *testing.T, tol float32, gpu bool) {
testNet.PlusPhase()
}

// testNet.GPU.Destroy()
GPURelease()
}

// NetActTestShort runs an activation test on the network and checks
Expand Down Expand Up @@ -936,21 +936,13 @@ func NetTestLearn(t *testing.T, tol float32, gpu bool) {
}

testNet.DWt()
if gpu {
// testNet.GPU.SyncSynapsesFromGPU()
// testNet.GPU.SyncSynCaFromGPU()
}

didx := pi

hiddwt[didx] = hidLay.RecvPaths[0].SynValue("DWt", pi, pi)
outdwt[didx] = outLay.RecvPaths[0].SynValue("DWt", pi, pi)

testNet.WtFromDWt()
if gpu {
// testNet.GPU.SyncSynapsesFromGPU()
// testNet.GPU.SyncSynCaFromGPU()
}

hidwt[didx] = hidLay.RecvPaths[0].SynValue("Wt", pi, pi)
outwt[didx] = outLay.RecvPaths[0].SynValue("Wt", pi, pi)
Expand All @@ -966,7 +958,7 @@ func NetTestLearn(t *testing.T, tol float32, gpu bool) {
CompareFloats(tol, hidwt, hidWts, "hidWts", t)
CompareFloats(tol, outwt, outWts, "outWts", t)

// testNet.GPU.Destroy()
GPURelease()
}

func TestNetRLRate(t *testing.T) {
Expand Down
28 changes: 26 additions & 2 deletions axon/learn-net.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

29 changes: 27 additions & 2 deletions axon/learn-net.goal
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,52 @@

package axon

// DWt computes the weight change (learning) based on current running-average activation values
// DWt computes the weight change (learning) based on current
// running-average activation values. Copies synapses back from GPU,
// for case where viewing the synapses.
func (nt *Network) DWt() {
	nix := nt.NetIxs()
	ctx := nt.Context()
	// One work item per synapse per data-parallel index.
	synData := int(nix.NSyns * ctx.NData)
	RunDWtSyn(synData)
	// Integrate the per-data DWt values into one per synapse.
	RunDWtFromDiSyn(int(nix.NSyns))
	// Finish and copy Synapses and SynapseTraces back, for viewing.
	RunDoneSynapsesTrace()
}

// WtFromDWt updates the weights from delta-weight changes.
// WtFromDWt updates the weights from delta-weight changes,
// after having done DWt previously.
// Also does ctx.SlowInc() and calls SlowAdapt at SlowInterval
func (nt *Network) WtFromDWt() {
nix := nt.NetIxs()
ctx := nt.Context()
RunDWtSubMeanPath(int(nix.NPaths))
RunWtFromDWtSyn(int(nix.NSyns))
RunDoneSynapses()
if ctx.SlowInc() {
nt.SlowAdapt()
}
}


// DWtToWt computes the weight change (learning) based on current
// running-average activation values, and then WtFromDWt, syncing
// back only the synapses (not SynapseTraces).
// This should be used when not viewing the weights.
func (nt *Network) DWtToWt() {
	nix := nt.NetIxs()
	ctx := nt.Context()
	nSyns := int(nix.NSyns)
	// One work item per synapse per data-parallel index.
	RunDWtSyn(int(nix.NSyns * ctx.NData))
	// Integrate per-data DWts, then apply them to the weights.
	RunDWtFromDiSyn(nSyns)
	RunDWtSubMeanPath(int(nix.NPaths))
	RunWtFromDWtSyn(nSyns)
	// Finish and copy Synapses back (not SynapseTraces).
	RunDoneSynapses()
	if ctx.SlowInc() {
		nt.SlowAdapt()
		// NOTE(review): SlowAdapt presumably updates synapses on the
		// CPU side, so they are pushed back to the GPU here — confirm.
		ToGPUSynapses()
	}
}

// SlowAdapt is the layer-level slow adaptation functions: Synaptic scaling,
// and adapting inhibition
func (nt *Network) SlowAdapt() {
Expand Down
13 changes: 6 additions & 7 deletions axon/looper.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ func LooperStandard(ls *looper.Stacks, net *Network, viewFunc func(mode enums.En
for mode, st := range ls.Stacks {
cycLoop := st.Loops[cycle]
cycLoop.OnStart.Add("Cycle", func() {
nCycles := 10
getNeurons := false
nCycles := fastNCycles
getNeurons := true // todo: need back for phases..
if ls.ModeStack().StepLevel.Int64() == cycle.Int64() {
nCycles = 1
getNeurons = true
Expand All @@ -71,14 +71,13 @@ func LooperStandard(ls *looper.Stacks, net *Network, viewFunc func(mode enums.En
})
if mode.Int64() == trainMode.Int64() {
trlLoop.OnEnd.Add("UpdateWeights", func() {
net.DWt() // todo: need to get synapses here, not after
if view := viewFunc(mode); view != nil && view.IsViewingSynapse() {
//TODO:
// net.GPU.SyncSynapsesFromGPU()
// net.GPU.SyncSynCaFromGPU() // note: only time we call this
net.DWt() // todo: need to get synapses here, not after
view.RecordSyns() // note: critical to update weights here so DWt is visible
net.WtFromDWt()
} else {
net.DWtToWt()
}
net.WtFromDWt()
})
}
}
Expand Down
11 changes: 11 additions & 0 deletions axon/network.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 11 additions & 0 deletions axon/network.goal
Original file line number Diff line number Diff line change
Expand Up @@ -970,6 +970,17 @@ func RunDoneLayersNeurons() {
RunDone(CtxVar, GlobalScalarsVar, GlobalVectorsVar, LayerStatesVar, PoolsVar, PoolsIntVar, NeuronsVar, NeuronAvgsVar)
}

// RunDoneSynapses finishes running and copies the Synapse state back.
// Only SynapsesVar is copied — not SynapseTraces; use
// RunDoneSynapsesTrace when traces are needed (e.g., for visualization).
func RunDoneSynapses() {
	RunDone(SynapsesVar)
}

// RunDoneSynapsesTrace finishes running and copies the Synapse state back,
// including SynapseTraces, for visualization.
func RunDoneSynapsesTrace() {
	RunDone(SynapsesVar, SynapseTracesVar)
}

// BuildPathGBuf builds the PathGBuf, PathGSyns,
// based on the MaxDelay values in the PathParams,
// which should have been configured by this point.
Expand Down
8 changes: 6 additions & 2 deletions examples/ra25/ra25.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ type RunConfig struct {
GPU bool `default:"false"`

// number of data-parallel items to process in parallel per trial -- works (and is significantly faster) for both CPU and GPU. Results in an effective mini-batch of learning.
NData int `default:"2" min:"1"`
NData int `default:"16" min:"1"`

// number of parallel threads for CPU computation -- 0 = use default
NThreads int `default:"0"`
Expand Down Expand Up @@ -253,6 +253,10 @@ func (ss *Sim) New() {

// ConfigAll configures all the elements using the standard functions
func (ss *Sim) ConfigAll() {
if ss.Config.Run.GPU {
axon.GPUInit()
axon.UseGPU = true
}
// ss.ConfigPats()
ss.OpenPats()
ss.ConfigEnv()
Expand Down Expand Up @@ -399,7 +403,7 @@ func (ss *Sim) ConfigLoops() {
AddLevelIncr(Trial, trls, ss.Config.Run.NData).
AddLevel(Cycle, 200)

axon.LooperStandard(ls, ss.Net, ss.NetViewUpdater, 10, 150, 199, Cycle, Trial, Train)
axon.LooperStandard(ls, ss.Net, ss.NetViewUpdater, 50, 150, 199, Cycle, Trial, Train)

ls.Stacks[Train].OnInit.Add("Init", func() { ss.Init() })

Expand Down

0 comments on commit 6645131

Please sign in to comment.