From 210dc22bb4486c499f5baea98a00e44b335ddb29 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Tue, 8 Oct 2024 13:45:21 -0700 Subject: [PATCH 01/24] update paths to new tensor --- egui/grids.go | 3 ++- egui/gui.go | 2 +- egui/plots.go | 10 ++++++---- estats/funcs.go | 20 ++++++++++++-------- estats/plots.go | 6 +++--- estats/stats.go | 9 +-------- looper/stack.go | 13 ++++++------- paths/circle.go | 4 ++-- paths/full.go | 4 ++-- paths/onetoone.go | 4 ++-- paths/pattern.go | 14 +++++++------- paths/poolonetoone.go | 22 +++++++++++----------- paths/poolrect.go | 4 ++-- paths/poolsameunit.go | 18 +++++++++--------- paths/pooltile.go | 28 ++++++++++++---------------- paths/pooltilesub.go | 28 ++++++++++++---------------- paths/poolunifrnd.go | 10 +++++----- paths/rect.go | 8 ++++---- paths/uniformrand.go | 12 ++++++------ 19 files changed, 105 insertions(+), 114 deletions(-) diff --git a/egui/grids.go b/egui/grids.go index 4f3383c6..c458e1a0 100644 --- a/egui/grids.go +++ b/egui/grids.go @@ -8,7 +8,6 @@ import ( "cogentcore.org/core/core" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/tensorcore" - "github.com/emer/emergent/v2/actrf" ) // Grid gets tensor grid view of given name, creating if not yet made @@ -60,6 +59,7 @@ func (gui *GUI) SaveActRFGrid(tg *tensorcore.TensorGrid, name string) { gui.SetGrid(name, tg) } +/* // AddActRFGridTabs adds tabs for each of the ActRFs. 
func (gui *GUI) AddActRFGridTabs(arfs *actrf.RFs) { for _, rf := range arfs.RFs { @@ -83,3 +83,4 @@ func (gui *GUI) ViewActRFs(atf *actrf.RFs) { } } } +*/ diff --git a/egui/gui.go b/egui/gui.go index d57a6c1c..99731210 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -9,7 +9,7 @@ package egui import ( "cogentcore.org/core/core" "cogentcore.org/core/events" - _ "cogentcore.org/core/gpu/gosl/slbool/slboolcore" // include to get gui views + _ "cogentcore.org/core/goal/gosl/slbool/slboolcore" // include to get gui views "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor/tensorcore" "github.com/emer/emergent/v2/etime" diff --git a/egui/plots.go b/egui/plots.go index 61f7333d..39048366 100644 --- a/egui/plots.go +++ b/egui/plots.go @@ -6,16 +6,13 @@ package egui import ( "fmt" - "log" - "cogentcore.org/core/base/errors" - "cogentcore.org/core/colors/gradient" "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor/tensorcore" - "github.com/emer/emergent/v2/elog" "github.com/emer/emergent/v2/etime" ) +/* // AddPlots adds plots based on the unique tables we have, // currently assumes they should always be plotted func (gui *GUI) AddPlots(title string, lg *elog.Logs) { @@ -39,6 +36,7 @@ func (gui *GUI) AddPlots(title string, lg *elog.Logs) { ConfigPlotFromLog(title, plt, lg, key) } } +*/ // AddMiscPlotTab adds a misc (non log-generated) plot with a new // tab and plot of given name. 
@@ -49,6 +47,7 @@ func (gui *GUI) AddMiscPlotTab(name string) *plotcore.PlotEditor { return plt } +/* func ConfigPlotFromLog(title string, plt *plotcore.PlotEditor, lg *elog.Logs, key etime.ScopeKey) { _, times := key.ModesAndTimes() time := times[0] @@ -79,6 +78,7 @@ func ConfigPlotFromLog(title string, plt *plotcore.PlotEditor, lg *elog.Logs, ke plt.ColumnsFromMetaMap(lt.Table.MetaData) plt.ColumnsFromMetaMap(lt.Meta) } +*/ // Plot returns plot for mode, time scope func (gui *GUI) Plot(mode etime.Modes, time etime.Times) *plotcore.PlotEditor { @@ -206,6 +206,7 @@ func (gui *GUI) NewPlotTab(key etime.ScopeKey, tabLabel string) *plotcore.PlotEd return plt } +/* // AddTableView adds a table view of given log, // typically particularly useful for Debug logs. func (gui *GUI) AddTableView(lg *elog.Logs, mode etime.Modes, time etime.Times) *tensorcore.Table { @@ -227,6 +228,7 @@ func (gui *GUI) AddTableView(lg *elog.Logs, mode etime.Modes, time etime.Times) tv.SetTable(lt.Table) return tv } +*/ // TableView returns TableView for mode, time scope func (gui *GUI) TableView(mode etime.Modes, time etime.Times) *tensorcore.Table { diff --git a/estats/funcs.go b/estats/funcs.go index 38ae50d1..2120e1f7 100644 --- a/estats/funcs.go +++ b/estats/funcs.go @@ -94,14 +94,18 @@ var PCAStrongThr = 0.01 // layer_PCA_Next5: average strength of next 5 eigenvalues // layer_PCA_Rest: average strength of remaining eigenvalues (if more than 10 total eigens) // Uses SVD to compute much more efficiently than official PCA. 
-func (st *Stats) PCAStats(ix *table.IndexView, varNm string, layers []string) { - svd := &st.SVD +func (st *Stats) PCAStats(ix *table.Table, varNm string, layers []string) { svd.Cond = PCAStrongThr + covar := tensor.NewFloat64() + evecs := tensor.NewFloat64() + evals := tensor.NewFloat64() for _, lnm := range layers { - svd.TableColumn(ix, lnm+"_"+varNm, metric.Covariance64) - ln := len(svd.Values) + col := ix.Column(lnm + "_" + varNm) + metric.CovarianceMatrixOut(metric.Covariance, col, covar) + matrix.SVDOut(covar, evecs, evals) + ln := len(evals) var nstr float64 // nstr := float64(svd.Rank) this didn't work.. - for i, v := range svd.Values { + for i, v := range evals { if v < PCAStrongThr { nstr = float64(i) break @@ -110,17 +114,17 @@ func (st *Stats) PCAStats(ix *table.IndexView, varNm string, layers []string) { var top5, next5 float64 for i := 0; i < 5; i++ { if ln >= 5 { - top5 += svd.Values[i] + top5 += evals[i] } if ln >= 10 { - next5 += svd.Values[i+5] + next5 += evals[i+5] } } st.SetFloat(lnm+"_PCA_NStrong", nstr) st.SetFloat(lnm+"_PCA_Top5", top5/5) st.SetFloat(lnm+"_PCA_Next5", next5/5) if ln > 10 { - sum := stats.Sum64(svd.Values) + sum := stats.Sum(evals) ravg := (sum - (top5 + next5)) / float64(ln-10) st.SetFloat(lnm+"_PCA_Rest", ravg) } else { diff --git a/estats/plots.go b/estats/plots.go index 1e2ecba7..75259e2f 100644 --- a/estats/plots.go +++ b/estats/plots.go @@ -6,7 +6,7 @@ package estats import ( "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor/stats/clust" + "cogentcore.org/core/tensor/stats/cluster" "cogentcore.org/core/tensor/stats/metric" "cogentcore.org/core/tensor/stats/simat" "cogentcore.org/core/tensor/table" @@ -27,12 +27,12 @@ func ConfigPCAPlot(plt *plotcore.PlotEditor, dt *table.Table, nm string) { // ClusterPlot does one cluster plot on given table column name // and label name -func ClusterPlot(plt *plotcore.PlotEditor, ix *table.IndexView, colNm, lblNm string, dfunc clust.DistFunc) { +func ClusterPlot(plt 
*plotcore.PlotEditor, ix *table.IndexView, colNm, lblNm string, dfunc cluster.DistFunc) { nm, _ := ix.Table.MetaData["name"] smat := &simat.SimMat{} smat.TableColumnStd(ix, colNm, lblNm, false, metric.Euclidean) pt := &table.Table{} - clust.Plot(pt, clust.Glom(smat, dfunc), smat) + cluster.Plot(pt, cluster.Glom(smat, dfunc), smat) plt.Name = colNm plt.Options.Title = "Cluster Plot of: " + nm + " " + colNm plt.Options.XAxis = "X" diff --git a/estats/stats.go b/estats/stats.go index 28440c20..ee2234be 100644 --- a/estats/stats.go +++ b/estats/stats.go @@ -12,7 +12,6 @@ import ( "cogentcore.org/core/base/timer" "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/pca" "cogentcore.org/core/tensor/stats/simat" "github.com/emer/emergent/v2/actrf" "github.com/emer/emergent/v2/confusion" @@ -39,17 +38,11 @@ type Stats struct { Confusion confusion.Matrix `display:"no-inline"` // similarity matrix for comparing pattern similarities - SimMats map[string]*simat.SimMat + SimMats map[string]*tensor.Float64 // analysis plots -- created by analysis routines Plots map[string]*plotcore.PlotEditor - // one PCA object can be reused for all PCA computations - PCA pca.PCA - - // one SVD object can be reused for all SVD computations - SVD pca.SVD - // activation-based receptive fields ActRFs actrf.RFs `display:"no-inline"` diff --git a/looper/stack.go b/looper/stack.go index 41f4c1d4..1af793c9 100644 --- a/looper/stack.go +++ b/looper/stack.go @@ -5,7 +5,6 @@ package looper import ( - "github.com/emer/emergent/v2/estats" "github.com/emer/emergent/v2/etime" ) @@ -137,12 +136,12 @@ func (stack *Stack) TimeBelow(time etime.Times) etime.Times { // Typically, a TrialName string is also expected to be set, // to describe the current trial (Step) contents in a useful way, // and other relevant info (e.g., group / category info) can also be set. 
-func (stack *Stack) CountersToStats(stats *estats.Stats) { - for _, tm := range stack.Order { - lp := stack.Loops[tm] - stats.SetInt(tm.String(), lp.Counter.Cur) - } -} +// func (stack *Stack) CountersToStats(stats *estats.Stats) { +// for _, tm := range stack.Order { +// lp := stack.Loops[tm] +// stats.SetInt(tm.String(), lp.Counter.Cur) +// } +// } // SetStep sets stepping to given level and iterations func (stack *Stack) SetStep(numSteps int, stopscale etime.Times) { diff --git a/paths/circle.go b/paths/circle.go index c337af17..2291e0ce 100644 --- a/paths/circle.go +++ b/paths/circle.go @@ -66,7 +66,7 @@ func (cr *Circle) Name() string { return "Circle" } -func (cr *Circle) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (cr *Circle) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNy, sNx, _, _ := tensor.Projection2DShape(send, false) rNy, rNx, _, _ := tensor.Projection2DShape(recv, false) @@ -104,7 +104,7 @@ func (cr *Circle) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *te if !cr.SelfCon && same && ri == si { continue } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } diff --git a/paths/full.go b/paths/full.go index d01de5a3..2201a3f8 100644 --- a/paths/full.go +++ b/paths/full.go @@ -21,7 +21,7 @@ func (fp *Full) Name() string { return "Full" } -func (fp *Full) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (fp *Full) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) cons.Values.SetAll(true) nsend := send.Len() @@ -29,7 +29,7 @@ func (fp *Full) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tens if same && !fp.SelfCon { for i := 0; i < nsend; i++ { // nsend = nrecv off := i*nsend + i - cons.Values.Set(off, false) + 
cons.Values.Set(false, off) } nsend-- nrecv-- diff --git a/paths/onetoone.go b/paths/onetoone.go index ca396e74..b40cc099 100644 --- a/paths/onetoone.go +++ b/paths/onetoone.go @@ -27,7 +27,7 @@ func (ot *OneToOne) Name() string { return "OneToOne" } -func (ot *OneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *OneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) nsend := send.Len() nrecv := recv.Len() @@ -44,7 +44,7 @@ func (ot *OneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn * break } off := ri*nsend + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = 1 snv[si] = 1 } diff --git a/paths/pattern.go b/paths/pattern.go index de324159..a41ce341 100644 --- a/paths/pattern.go +++ b/paths/pattern.go @@ -25,15 +25,15 @@ type Pattern interface { // recvn and send tensors, each the shape of send and recv respectively. // The same flag should be set to true if the send and recv layers are the same (i.e., a self-connection) // often there are some different options for such connections. - Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) + Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) } // NewTensors returns the tensors used for Connect method, based on layer sizes -func NewTensors(send, recv *tensor.Shape) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { - sendn = tensor.New[int32](send.Sizes).(*tensor.Int32) - recvn = tensor.New[int32](recv.Sizes).(*tensor.Int32) +func NewTensors(send, recv *tensor.Shape) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { + sendn = tensor.NewInt32(send.Sizes...) + recvn = tensor.NewInt32(recv.Sizes...) 
csh := tensor.AddShapes(recv, send) - cons = tensor.NewBitsShape(csh) + cons = tensor.NewBoolShape(csh) return } @@ -41,7 +41,7 @@ func NewTensors(send, recv *tensor.Shape) (sendn, recvn *tensor.Int32, cons *ten // if perRecv is true then it displays the sending connections // per each recv unit -- otherwise it shows the entire matrix // as a 2D matrix -func ConsStringFull(send, recv *tensor.Shape, cons *tensor.Bits) []byte { +func ConsStringFull(send, recv *tensor.Shape, cons *tensor.Bool) []byte { nsend := send.Len() nrecv := recv.Len() @@ -68,6 +68,6 @@ func ConsStringFull(send, recv *tensor.Shape, cons *tensor.Bits) []byte { // ConsStringPerRecv returns a []byte string showing the pattern of connectivity // organized by receiving unit, showing the sending connections per each -func ConsStringPerRecv(send, recv *tensor.Shape, cons *tensor.Bits) []byte { +func ConsStringPerRecv(send, recv *tensor.Shape, cons *tensor.Bool) []byte { return nil } diff --git a/paths/poolonetoone.go b/paths/poolonetoone.go index e372ed2e..29cc7ad9 100644 --- a/paths/poolonetoone.go +++ b/paths/poolonetoone.go @@ -32,7 +32,7 @@ func (ot *PoolOneToOne) Name() string { return "PoolOneToOne" } -func (ot *PoolOneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolOneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { switch { case send.NumDims() == 4 && recv.NumDims() == 4: return ot.ConnectPools(send, recv, same) @@ -47,7 +47,7 @@ func (ot *PoolOneToOne) Connect(send, recv *tensor.Shape, same bool) (sendn, rec } // ConnectPools is when both recv and send have pools -func (ot *PoolOneToOne) ConnectPools(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolOneToOne) ConnectPools(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() // 
rNtot := recv.Len() @@ -72,7 +72,7 @@ func (ot *PoolOneToOne) ConnectPools(send, recv *tensor.Shape, same bool) (sendn for sui := 0; sui < sNu; sui++ { si := spi*sNu + sui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = int32(sNu) snv[si] = int32(rNu) } @@ -82,7 +82,7 @@ func (ot *PoolOneToOne) ConnectPools(send, recv *tensor.Shape, same bool) (sendn } // ConnectRecvPool is when recv has pools but send doesn't -func (ot *PoolOneToOne) ConnectRecvPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolOneToOne) ConnectRecvPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() rNp := recv.DimSize(0) * recv.DimSize(1) @@ -104,7 +104,7 @@ func (ot *PoolOneToOne) ConnectRecvPool(send, recv *tensor.Shape, same bool) (se for rui := 0; rui < rNu; rui++ { ri := rpi*rNu + rui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = int32(1) snv[si] = int32(rNu) } @@ -119,7 +119,7 @@ func (ot *PoolOneToOne) ConnectRecvPool(send, recv *tensor.Shape, same bool) (se ri := rpi*rNu + rui for si := 0; si < sNtot; si++ { off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = int32(sNtot) snv[si] = int32(npl * rNu) } @@ -130,7 +130,7 @@ func (ot *PoolOneToOne) ConnectRecvPool(send, recv *tensor.Shape, same bool) (se } // ConnectSendPool is when send has pools but recv doesn't -func (ot *PoolOneToOne) ConnectSendPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolOneToOne) ConnectSendPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() rNtot := recv.Len() @@ -153,7 +153,7 @@ func (ot *PoolOneToOne) ConnectSendPool(send, recv *tensor.Shape, same bool) (se for sui := 0; sui < 
sNu; sui++ { si := spi*sNu + sui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = int32(sNu) snv[si] = int32(1) } @@ -168,7 +168,7 @@ func (ot *PoolOneToOne) ConnectSendPool(send, recv *tensor.Shape, same bool) (se for sui := 0; sui < sNu; sui++ { si := spi*sNu + sui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = int32(npl * sNu) snv[si] = int32(rNtot) } @@ -179,7 +179,7 @@ func (ot *PoolOneToOne) ConnectSendPool(send, recv *tensor.Shape, same bool) (se } // copy of OneToOne.Connect -func (ot *PoolOneToOne) ConnectOneToOne(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolOneToOne) ConnectOneToOne(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() rNtot := recv.Len() @@ -196,7 +196,7 @@ func (ot *PoolOneToOne) ConnectOneToOne(send, recv *tensor.Shape, same bool) (se break } off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = 1 snv[si] = 1 } diff --git a/paths/poolrect.go b/paths/poolrect.go index bac69a3c..7521b63d 100644 --- a/paths/poolrect.go +++ b/paths/poolrect.go @@ -61,7 +61,7 @@ func (cr *PoolRect) Name() string { return "PoolRect" } -func (cr *PoolRect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (cr *PoolRect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNy := send.DimSize(0) sNx := send.DimSize(1) @@ -139,7 +139,7 @@ func (cr *PoolRect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn * if !cr.SelfCon && same && ri == si { continue } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } diff --git a/paths/poolsameunit.go b/paths/poolsameunit.go index 147e32d5..418d00aa 100644 --- a/paths/poolsameunit.go 
+++ b/paths/poolsameunit.go @@ -28,7 +28,7 @@ func (ot *PoolSameUnit) Name() string { return "PoolSameUnit" } -func (ot *PoolSameUnit) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolSameUnit) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { switch { case send.NumDims() == 4 && recv.NumDims() == 4: return ot.ConnectPools(send, recv, same) @@ -43,7 +43,7 @@ func (ot *PoolSameUnit) Connect(send, recv *tensor.Shape, same bool) (sendn, rec } // ConnectPools is when both recv and send have pools -func (ot *PoolSameUnit) ConnectPools(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolSameUnit) ConnectPools(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() sNp := send.DimSize(0) * send.DimSize(1) @@ -64,7 +64,7 @@ func (ot *PoolSameUnit) ConnectPools(send, recv *tensor.Shape, same bool) (sendn } si := spi*sNu + rui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -74,7 +74,7 @@ func (ot *PoolSameUnit) ConnectPools(send, recv *tensor.Shape, same bool) (sendn } // ConnectRecvPool is when recv has pools but send doesn't -func (ot *PoolSameUnit) ConnectRecvPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolSameUnit) ConnectRecvPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() rNp := recv.DimSize(0) * recv.DimSize(1) @@ -90,7 +90,7 @@ func (ot *PoolSameUnit) ConnectRecvPool(send, recv *tensor.Shape, same bool) (se ri := rpi*rNu + rui si := rui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -99,7 +99,7 @@ func (ot *PoolSameUnit) ConnectRecvPool(send, 
recv *tensor.Shape, same bool) (se } // ConnectSendPool is when send has pools but recv doesn't -func (ot *PoolSameUnit) ConnectSendPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolSameUnit) ConnectSendPool(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() sNp := send.DimSize(0) * send.DimSize(1) @@ -115,7 +115,7 @@ func (ot *PoolSameUnit) ConnectSendPool(send, recv *tensor.Shape, same bool) (se for spi := 0; spi < sNp; spi++ { si := spi*sNu + rui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -124,7 +124,7 @@ func (ot *PoolSameUnit) ConnectSendPool(send, recv *tensor.Shape, same bool) (se } // copy of OneToOne.Connect -func (ot *PoolSameUnit) ConnectOneToOne(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ot *PoolSameUnit) ConnectOneToOne(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNtot := send.Len() sNu := send.DimSize(0) * send.DimSize(1) @@ -138,7 +138,7 @@ func (ot *PoolSameUnit) ConnectOneToOne(send, recv *tensor.Shape, same bool) (se ri := rui si := rui off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri] = 1 snv[si] = 1 } diff --git a/paths/pooltile.go b/paths/pooltile.go index 7a0bc712..1f238236 100644 --- a/paths/pooltile.go +++ b/paths/pooltile.go @@ -91,7 +91,7 @@ func (pt *PoolTile) Name() string { return "PoolTile" } -func (pt *PoolTile) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (pt *PoolTile) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if pt.Recip { return pt.ConnectRecip(send, recv, same) } @@ -145,7 +145,7 @@ func (pt *PoolTile) Connect(send, recv *tensor.Shape, 
same bool) (sendn, recvn * // if !pt.SelfCon && same && ri == si { // continue // } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -158,7 +158,7 @@ func (pt *PoolTile) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn * return } -func (pt *PoolTile) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (pt *PoolTile) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) // all these variables are swapped: s from recv, r from send rNtot := send.Len() @@ -207,7 +207,7 @@ func (pt *PoolTile) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, re ri := ris + rui off := si*rNtot + ri if off < cons.Len() && si < len(snv) && ri < len(rnv) { - cons.Values.Set(off, true) + cons.Values.Set(true, off) snv[si]++ rnv[ri]++ } @@ -321,8 +321,7 @@ func (pt *PoolTile) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor.Flo rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX") + wts.SetShapeSizes(rNuY, rNuX, sNuY, sNuX) fsz := math32.Vec2(float32(sNuX-1), float32(sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -382,7 +381,7 @@ func (pt *PoolTile) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor.Flo } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, suy, sux) } } } @@ -410,8 +409,7 @@ func (pt *PoolTile) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor.Flo rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX) fsz := math32.Vec2(float32(pt.Size.X*sNuX-1), float32(pt.Size.Y*sNuY-1)) // full rf size hfsz := 
fsz.MulScalar(0.5) // half rf @@ -472,7 +470,7 @@ func (pt *PoolTile) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor.Flo } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, fy, fx, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, fy, fx, suy, sux) } } } @@ -531,8 +529,7 @@ func (pt *PoolTile) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tensor.F rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, sNuY, sNuX) fsz := math32.Vec2(float32(sNuX-1), float32(sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -594,7 +591,7 @@ func (pt *PoolTile) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tensor.F } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, suy, sux) } } } @@ -622,8 +619,7 @@ func (pt *PoolTile) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tensor.F rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX) fsz := math32.Vec2(float32(pt.Size.X*sNuX-1), float32(pt.Size.Y*sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -687,7 +683,7 @@ func (pt *PoolTile) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tensor.F } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, fy, fx, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, fy, fx, suy, sux) } } } diff --git a/paths/pooltilesub.go b/paths/pooltilesub.go index dcbe54a6..446e0c3e 100644 --- a/paths/pooltilesub.go +++ b/paths/pooltilesub.go @@ -100,7 +100,7 @@ func (pt *PoolTileSub) Name() string { return "PoolTileSub" } -func (pt *PoolTileSub) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (pt 
*PoolTileSub) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if pt.Recip { return pt.ConnectRecip(send, recv, same) } @@ -164,7 +164,7 @@ func (pt *PoolTileSub) Connect(send, recv *tensor.Shape, same bool) (sendn, recv // if !pt.SelfCon && same && ri == si { // continue // } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -177,7 +177,7 @@ func (pt *PoolTileSub) Connect(send, recv *tensor.Shape, same bool) (sendn, recv return } -func (pt *PoolTileSub) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (pt *PoolTileSub) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) // all these variables are swapped: s from recv, r from send rNtot := send.Len() @@ -236,7 +236,7 @@ func (pt *PoolTileSub) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, ri := ris + rui off := si*rNtot + ri if off < cons.Len() && si < len(snv) && ri < len(rnv) { - cons.Values.Set(off, true) + cons.Values.Set(true, off) snv[si]++ rnv[ri]++ } @@ -304,8 +304,7 @@ func (pt *PoolTileSub) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor. rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX") + wts.SetShapeSizes(rNuY, rNuX, sNuY, sNuX) fsz := math32.Vec2(float32(sNuX-1), float32(sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -365,7 +364,7 @@ func (pt *PoolTileSub) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor. } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, suy, sux) } } } @@ -393,8 +392,7 @@ func (pt *PoolTileSub) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor. 
rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX) fsz := math32.Vec2(float32(pt.Size.X*sNuX-1), float32(pt.Size.Y*sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -455,7 +453,7 @@ func (pt *PoolTileSub) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor. } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, fy, fx, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, fy, fx, suy, sux) } } } @@ -487,8 +485,7 @@ func (pt *PoolTileSub) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tenso rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, sNuY, sNuX) fsz := math32.Vec2(float32(sNuX-1), float32(sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -550,7 +547,7 @@ func (pt *PoolTileSub) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tenso } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, suy, sux) } } } @@ -578,8 +575,7 @@ func (pt *PoolTileSub) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tenso rNuY = recv.DimSize(2) rNuX = recv.DimSize(3) } - wshp := []int{rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX} - wts.SetShape(wshp, "rNuY", "rNuX", "szY", "szX", "sNuY", "sNuX") + wts.SetShapeSizes(rNuY, rNuX, pt.Size.Y, pt.Size.X, sNuY, sNuX) fsz := math32.Vec2(float32(pt.Size.X*sNuX-1), float32(pt.Size.Y*sNuY-1)) // full rf size hfsz := fsz.MulScalar(0.5) // half rf @@ -643,7 +639,7 @@ func (pt *PoolTileSub) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tenso } wt := fwt * pwt rwt := pt.TopoRange.ProjValue(wt) - wts.Set([]int{ruy, rux, fy, fx, suy, sux}, rwt) + wts.Set(rwt, ruy, rux, fy, fx, suy, sux) } } } diff --git 
a/paths/poolunifrnd.go b/paths/poolunifrnd.go index e0efeb90..aeaa184f 100644 --- a/paths/poolunifrnd.go +++ b/paths/poolunifrnd.go @@ -31,7 +31,7 @@ func (ur *PoolUniformRand) Name() string { return "PoolUniformRand" } -func (ur *PoolUniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *PoolUniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if send.NumDims() == 4 && recv.NumDims() == 4 { return ur.ConnectPoolsRand(send, recv, same) } @@ -39,7 +39,7 @@ func (ur *PoolUniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, } // ConnectPoolsRand is when both recv and send have pools -func (ur *PoolUniformRand) ConnectPoolsRand(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *PoolUniformRand) ConnectPoolsRand(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if ur.PCon >= 1 { return ur.ConnectPools(send, recv, same) } @@ -100,7 +100,7 @@ func (ur *PoolUniformRand) ConnectPoolsRand(send, recv *tensor.Shape, same bool) for sui := 0; sui < nsend; sui++ { si := spi*sNu + slist[sui] off := ri*sNtot + si - cons.Values.Set(off, true) + cons.Values.Set(true, off) } randx.PermuteInts(sorder, ur.Rand) } @@ -121,7 +121,7 @@ func (ur *PoolUniformRand) ConnectPoolsRand(send, recv *tensor.Shape, same bool) } // ConnectRand is a copy of UniformRand.Connect with initial if statement modified -func (ur *PoolUniformRand) ConnectRand(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *PoolUniformRand) ConnectRand(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if ur.PCon >= 1 { switch { case send.NumDims() == 2 && recv.NumDims() == 4: @@ -188,7 +188,7 @@ func (ur *PoolUniformRand) ConnectRand(send, recv *tensor.Shape, same bool) (sen sort.Ints(slist) // keep list sorted for more efficient 
memory traversal etc for si := 0; si < nsend; si++ { off := ri*slen + slist[si] - cons.Values.Set(off, true) + cons.Values.Set(true, off) } randx.PermuteInts(sorder, ur.Rand) } diff --git a/paths/rect.go b/paths/rect.go index ba8dea8f..45fc4ec7 100644 --- a/paths/rect.go +++ b/paths/rect.go @@ -72,7 +72,7 @@ func (cr *Rect) Name() string { return "Rect" } -func (cr *Rect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (cr *Rect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if cr.Recip { return cr.ConnectRecip(send, recv, same) } @@ -133,7 +133,7 @@ func (cr *Rect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tens if !cr.SelfCon && same && ri == si { continue } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[ri]++ snv[si]++ } @@ -143,7 +143,7 @@ func (cr *Rect) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tens return } -func (cr *Rect) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (cr *Rect) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) sNy, sNx, _, _ := tensor.Projection2DShape(recv, false) // swapped! 
rNy, rNx, _, _ := tensor.Projection2DShape(send, false) @@ -201,7 +201,7 @@ func (cr *Rect) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn if !cr.SelfCon && same && ri == si { continue } - cons.Values.Set(off, true) + cons.Values.Set(true, off) rnv[si]++ snv[ri]++ } diff --git a/paths/uniformrand.go b/paths/uniformrand.go index d5f6d926..ee67c083 100644 --- a/paths/uniformrand.go +++ b/paths/uniformrand.go @@ -56,7 +56,7 @@ func (ur *UniformRand) InitRand() { ur.Rand = randx.NewSysRand(ur.RandSeed) } -func (ur *UniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *UniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { if ur.PCon >= 1 { return ur.ConnectFull(send, recv, same) } @@ -116,7 +116,7 @@ func (ur *UniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recv sort.Ints(slist) // keep list sorted for more efficient memory traversal etc for si := 0; si < nsend; si++ { off := ri*slen + slist[si] - cons.Values.Set(off, true) + cons.Values.Set(true, off) } randx.PermuteInts(sorder, ur.Rand) } @@ -137,7 +137,7 @@ func (ur *UniformRand) Connect(send, recv *tensor.Shape, same bool) (sendn, recv } // ConnectRecip does reciprocal connectvity -func (ur *UniformRand) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *UniformRand) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) slen := recv.Len() // swapped rlen := send.Len() @@ -181,7 +181,7 @@ func (ur *UniformRand) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, sort.Ints(slist) // keep list sorted for more efficient memory traversal etc for si := 0; si < nsend; si++ { off := slist[si]*slenR + ri - cons.Values.Set(off, true) + cons.Values.Set(true, off) } randx.PermuteInts(sorder, ur.Rand) } @@ -201,7 +201,7 @@ 
func (ur *UniformRand) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, return } -func (ur *UniformRand) ConnectFull(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bits) { +func (ur *UniformRand) ConnectFull(send, recv *tensor.Shape, same bool) (sendn, recvn *tensor.Int32, cons *tensor.Bool) { sendn, recvn, cons = NewTensors(send, recv) cons.Values.SetAll(true) nsend := send.Len() @@ -209,7 +209,7 @@ func (ur *UniformRand) ConnectFull(send, recv *tensor.Shape, same bool) (sendn, if same && !ur.SelfCon { for i := 0; i < nsend; i++ { // nsend = nrecv off := i*nsend + i - cons.Values.Set(off, false) + cons.Values.Set(false, off) } nsend-- nrecv-- From 212ff084797ed79a788bc848bbd0cf876ad1c812 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Tue, 8 Oct 2024 14:01:48 -0700 Subject: [PATCH 02/24] most packages updated to new tensor --- egui/grids.go | 2 +- emer/layer.go | 48 +++++++++++++++++----------------------------- netview/events.go | 6 +++--- netview/laymesh.go | 2 +- netview/netdata.go | 12 ++++++------ netview/netview.go | 6 +++--- 6 files changed, 32 insertions(+), 44 deletions(-) diff --git a/egui/grids.go b/egui/grids.go index c458e1a0..4e7b4046 100644 --- a/egui/grids.go +++ b/egui/grids.go @@ -48,7 +48,7 @@ func (gui *GUI) ConfigRasterGrid(lay *core.Frame, laynm string, rast *tensor.Flo core.NewText(lay).SetText(laynm + ":") lay.AddChild(tg) core.NewSpace(lay) - rast.SetMetaData("grid-fill", "1") + rast.Metadata().Set("grid-fill", float32(1)) tg.SetTensor(rast) return tg } diff --git a/emer/layer.go b/emer/layer.go index 93f2ba97..3413804c 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -257,7 +257,7 @@ func (ly *LayerBase) Index4DFrom2D(x, y int) ([]int, bool) { px := x / nux py := y / nuy idx := []int{py, px, uy, ux} - if !lshp.IndexIsValid(idx) { + if !lshp.IndexIsValid(idx...) 
{ return nil, false } return idx, true @@ -303,14 +303,8 @@ func (ly *LayerBase) DisplaySize() math32.Vector2 { } // SetShape sets the layer shape and also uses default dim names. -func (ly *LayerBase) SetShape(shape []int) { - var dnms []string - if len(shape) == 2 { - dnms = LayerDimNames2D - } else if len(shape) == 4 { - dnms = LayerDimNames4D - } - ly.Shape.SetShape(shape, dnms...) +func (ly *LayerBase) SetShape(shape ...int) { + ly.Shape.SetShapeSizes(shape...) } // SetSampleIndexesShape sets the SampleIndexes, @@ -318,15 +312,9 @@ func (ly *LayerBase) SetShape(shape []int) { // for a subset sample of units to represent the entire layer. // This is critical for large layers that are otherwise unwieldy // to visualize and for computationally-intensive statistics. -func (ly *LayerBase) SetSampleIndexesShape(idxs, shape []int) { +func (ly *LayerBase) SetSampleIndexesShape(idxs []int, shape ...int) { ly.SampleIndexes = idxs - var dnms []string - if len(shape) == 2 { - dnms = LayerDimNames2D - } else if len(shape) == 4 { - dnms = LayerDimNames4D - } - ly.SampleShape.SetShape(shape, dnms...) + ly.SampleShape.SetShapeSizes(shape...) } // GetSampleShape returns the shape to use for representative units. @@ -336,7 +324,7 @@ func (ly *LayerBase) GetSampleShape() *tensor.Shape { return &ly.Shape } if ly.SampleShape.Len() != sz { - ly.SampleShape.SetShape([]int{sz}) + ly.SampleShape.SetShapeSizes(sz) } return &ly.SampleShape } @@ -382,28 +370,28 @@ func (ly *LayerBase) UnitValues(vals *[]float32, varNm string, di int) error { // If tensor is not already big enough to hold the values, it is // set to the same shape as the layer. // Returns error on invalid var name. 
-func (ly *LayerBase) UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) error { +func (ly *LayerBase) UnitValuesTensor(tsr tensor.Values, varNm string, di int) error { if tsr == nil { err := fmt.Errorf("emer.UnitValuesTensor: Tensor is nil") log.Println(err) return err } nn := ly.NumUnits() - tsr.SetShape(ly.Shape.Sizes, ly.Shape.Names...) + tsr.SetShapeSizes(ly.Shape.Sizes...) vidx, err := ly.EmerLayer.UnitVarIndex(varNm) if err != nil { nan := math.NaN() for lni := 0; lni < nn; lni++ { - tsr.SetFloat1D(lni, nan) + tsr.SetFloat1D(nan, lni) } return err } for lni := 0; lni < nn; lni++ { v := ly.EmerLayer.UnitValue1D(vidx, lni, di) if math32.IsNaN(v) { - tsr.SetFloat1D(lni, math.NaN()) + tsr.SetFloat1D(math.NaN(), lni) } else { - tsr.SetFloat1D(lni, float64(v)) + tsr.SetFloat1D(float64(v), lni) } } return nil @@ -422,7 +410,7 @@ func (ly *LayerBase) UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) e // set to SampleShape to hold all the values if subset is defined, // otherwise it calls UnitValuesTensor and is identical to that. // Returns error on invalid var name. -func (ly *LayerBase) UnitValuesSampleTensor(tsr tensor.Tensor, varNm string, di int) error { +func (ly *LayerBase) UnitValuesSampleTensor(tsr tensor.Values, varNm string, di int) error { nu := len(ly.SampleIndexes) if nu == 0 { return ly.UnitValuesTensor(tsr, varNm, di) @@ -434,22 +422,22 @@ func (ly *LayerBase) UnitValuesSampleTensor(tsr tensor.Tensor, varNm string, di } if tsr.Len() != nu { rs := ly.GetSampleShape() - tsr.SetShape(rs.Sizes, rs.Names...) + tsr.SetShapeSizes(rs.Sizes...) 
} vidx, err := ly.EmerLayer.UnitVarIndex(varNm) if err != nil { nan := math.NaN() for i, _ := range ly.SampleIndexes { - tsr.SetFloat1D(i, nan) + tsr.SetFloat1D(nan, i) } return err } for i, ui := range ly.SampleIndexes { v := ly.EmerLayer.UnitValue1D(vidx, ui, di) if math32.IsNaN(v) { - tsr.SetFloat1D(i, math.NaN()) + tsr.SetFloat1D(math.NaN(), i) } else { - tsr.SetFloat1D(i, float64(v)) + tsr.SetFloat1D(float64(v), i) } } return nil @@ -465,7 +453,7 @@ func (ly *LayerBase) UnitValue(varNm string, idx []int, di int) float32 { if err != nil { return math32.NaN() } - fidx := ly.Shape.Offset(idx) + fidx := ly.Shape.IndexTo1D(idx...) return ly.EmerLayer.UnitValue1D(vidx, fidx, di) } @@ -517,7 +505,7 @@ func Layer2DSampleIndexes(ly Layer, maxSize int) (idxs, shape []int) { i := 0 for y := 0; y < my; y++ { for x := 0; x < mx; x++ { - idxs[i] = sh.Offset([]int{y, x}) + idxs[i] = sh.IndexTo1D(y, x) i++ } } diff --git a/netview/events.go b/netview/events.go index 155e87a4..b428a110 100644 --- a/netview/events.go +++ b/netview/events.go @@ -197,17 +197,17 @@ func (sw *Scene) LayerUnitAtPoint(pos image.Point) (lay emer.Layer, lx, ly, unIn lshp := lb.Shape if lb.Is2D() { idx := []int{ly, lx} - if !lshp.IndexIsValid(idx) { + if !lshp.IndexIsValid(idx...) { continue } - unIndex = lshp.Offset(idx) + unIndex = lshp.IndexTo1D(idx...) return } else if lb.Is4D() { idx, ok := lb.Index4DFrom2D(lx, ly) if !ok { continue } - unIndex = lshp.Offset(idx) + unIndex = lshp.IndexTo1D(idx...) 
return } else { continue // not supported diff --git a/netview/laymesh.go b/netview/laymesh.go index 2791c626..20592c9b 100644 --- a/netview/laymesh.go +++ b/netview/laymesh.go @@ -47,7 +47,7 @@ func (lm *LayMesh) MeshSize() (nVtx, nIndex int, hasColor bool) { return 0, 0, true } shp := &lm.Lay.AsEmer().Shape - lm.Shape.CopyShape(shp) + lm.Shape.CopyFrom(shp) if lm.View.Options.Raster.On { if shp.NumDims() == 4 { lm.NumVertex, lm.NumIndex = lm.RasterSize4D() diff --git a/netview/netdata.go b/netview/netdata.go index 77794a10..e36c46c2 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -14,13 +14,13 @@ import ( "math" "os" "path/filepath" - "strconv" "strings" "cogentcore.org/core/base/errors" "cogentcore.org/core/core" "cogentcore.org/core/math32" "cogentcore.org/core/plot/plotcore" + "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" "github.com/emer/emergent/v2/emer" "github.com/emer/emergent/v2/ringidx" @@ -680,9 +680,9 @@ func (nd *NetData) SelectedUnitTable(di int) *table.Table { selnm := nd.PathLay + fmt.Sprintf("[%d]", nd.PathUnIndex) dt := &table.Table{} - dt.SetMetaData("name", "NetView: "+selnm) - dt.SetMetaData("read-only", "true") - dt.SetMetaData("precision", strconv.Itoa(4)) + dt.Meta.SetName("NetView: " + selnm) + dt.Meta.Set("read-only", true) + tensor.SetPrecision(dt.Meta, 4) ln := nd.Ring.Len vlen := len(nd.UnVars) @@ -698,11 +698,11 @@ func (nd *NetData) SelectedUnitTable(di int) *table.Table { for ri := 0; ri < ln; ri++ { ridx := nd.RecIndex(ri) - dt.SetFloatIndex(0, ri, float64(ri)) + dt.Columns.Values[0].SetFloat(float64(ri), 0, ri) for vi := 0; vi < vlen; vi++ { idx := ridx*nvu + vi*nd.MaxData*nu + di*nu + uidx1d val := ld.Data[idx] - dt.SetFloatIndex(vi+1, ri, float64(val)) + dt.Columns.Values[0].SetFloat(float64(val), vi+1, ri) } } return dt diff --git a/netview/netview.go b/netview/netview.go index 26ad4a5a..b3e69cd2 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -594,11 +594,11 @@ func (nv 
*NetView) ReadUnlock() { nv.DataMu.RUnlock() } -// UnitVal returns the raw value, scaled value, and color representation +// UnitValue returns the raw value, scaled value, and color representation // for given unit of given layer. scaled is in range -1..1 func (nv *NetView) UnitValue(lay emer.Layer, idx []int) (raw, scaled float32, clr color.RGBA, hasval bool) { lb := lay.AsEmer() - idx1d := lb.Shape.Offset(idx) + idx1d := lb.Shape.IndexTo1D(idx...) if idx1d >= lb.Shape.Len() { raw, hasval = 0, false } else { @@ -613,7 +613,7 @@ func (nv *NetView) UnitValue(lay emer.Layer, idx []int) (raw, scaled float32, cl // scaled is in range -1..1 func (nv *NetView) UnitValRaster(lay emer.Layer, idx []int, rCtr int) (raw, scaled float32, clr color.RGBA, hasval bool) { lb := lay.AsEmer() - idx1d := lb.SampleShape.Offset(idx) + idx1d := lb.SampleShape.IndexTo1D(idx...) ridx := lb.SampleIndexes if len(ridx) == 0 { // no rep if idx1d >= lb.Shape.Len() { From 5fe07955d0f13abc14624047ca8f8776bbee1f0a Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Tue, 8 Oct 2024 14:37:50 -0700 Subject: [PATCH 03/24] more tensor updates --- decoder/linear.go | 6 +++--- decoder/softmax.go | 4 ++-- elog/stditems.go | 11 +++++------ estats/actrf.go | 10 ++-------- estats/funcs.go | 26 ++++++++++++++------------ estats/plots.go | 30 ++++++++++++++---------------- estats/rasters.go | 4 ++-- estats/stats.go | 17 ++++++++--------- popcode/popcode2d.go | 14 +++++++------- 9 files changed, 57 insertions(+), 65 deletions(-) diff --git a/decoder/linear.go b/decoder/linear.go index 35e112d3..61a451f3 100644 --- a/decoder/linear.go +++ b/decoder/linear.go @@ -111,7 +111,7 @@ func (dec *Linear) Init(nOutputs, nInputs int, poolIndex int, activationFn Activ dec.NOutputs = nOutputs dec.Units = make([]LinearUnit, dec.NOutputs) dec.Inputs = make([]float32, dec.NInputs) - dec.Weights.SetShape([]int{dec.NOutputs, dec.NInputs}, "Outputs", "Inputs") + dec.Weights.SetShapeSizes(dec.NOutputs, dec.NInputs) for i := range dec.Weights.Values { dec.Weights.Values[i] = 0.1 } @@ -207,7 +207,7 @@ func (dec *Linear) Input(varNm string, di int) { shape := ly.Shape() y := dec.PoolIndex / shape.DimSize(1) x := dec.PoolIndex % shape.DimSize(1) - tsr = tsr.SubSpace([]int{y, x}).(*tensor.Float32) + tsr = tsr.SubSpace(y, x).(*tensor.Float32) } for j, v := range tsr.Values { dec.Inputs[off+j] = v @@ -259,7 +259,7 @@ func (dec *Linear) Back() float32 { // Returns SSE (sum squared error) of difference between targets and outputs. 
func (dec *Linear) BackMPI() float32 { if dec.MPIDWts.Len() != dec.Weights.Len() { - dec.MPIDWts.CopyShapeFrom(&dec.Weights) + tensor.SetShapeFrom(&dec.MPIDWts, &dec.Weights) } var sse float32 for ui := range dec.Units { diff --git a/decoder/softmax.go b/decoder/softmax.go index 8c164bfd..5c289e22 100644 --- a/decoder/softmax.go +++ b/decoder/softmax.go @@ -93,7 +93,7 @@ func (sm *SoftMax) Init(ncats, ninputs int) { sm.Units = make([]SoftMaxUnit, ncats) sm.Sorted = make([]int, ncats) sm.Inputs = make([]float32, sm.NInputs) - sm.Weights.SetShape([]int{sm.NCats, sm.NInputs}, "Cats", "Inputs") + sm.Weights.SetShapeSizes(sm.NCats, sm.NInputs) for i := range sm.Weights.Values { sm.Weights.Values[i] = .1 } @@ -215,7 +215,7 @@ func (sm *SoftMax) Back() { // MPI version shares weight changes across nodes func (sm *SoftMax) BackMPI() { if sm.MPIDWts.Len() != sm.Weights.Len() { - sm.MPIDWts.CopyShapeFrom(&sm.Weights) + tensor.SetShapeFrom(&sm.MPIDWts, &sm.Weights) } lr := sm.Lrate for ui := range sm.Units { diff --git a/elog/stditems.go b/elog/stditems.go index 959a7a45..14ab81bc 100644 --- a/elog/stditems.go +++ b/elog/stditems.go @@ -11,7 +11,6 @@ import ( "cogentcore.org/core/base/errors" "cogentcore.org/core/math32/minmax" - "cogentcore.org/core/tensor/stats/split" "cogentcore.org/core/tensor/stats/stats" "cogentcore.org/core/tensor/table" "github.com/emer/emergent/v2/emer" @@ -280,11 +279,11 @@ func (lg *Logs) RunStats(stats ...string) { lt := lg.TableDetailsScope(sk) ix, _ := lt.NamedIndexView("RunStats") - spl := split.GroupBy(ix, "RunName") - for _, st := range stats { - split.DescColumn(spl, st) - } - lg.MiscTables["RunStats"] = spl.AggsToTable(table.AddAggName) + // spl := split.GroupBy(ix, "RunName") + // for _, st := range stats { + // split.DescColumn(spl, st) + // } + // lg.MiscTables["RunStats"] = spl.AggsToTable(table.AddAggName) } // AddLayerTensorItems adds tensor recording items for given variable, diff --git a/estats/actrf.go b/estats/actrf.go index 
75e2f6fd..a654e75e 100644 --- a/estats/actrf.go +++ b/estats/actrf.go @@ -4,14 +4,7 @@ package estats -import ( - "fmt" - "strings" - - "cogentcore.org/core/tensor" - "github.com/emer/emergent/v2/emer" -) - +/* // InitActRFs initializes a set of activation-based receptive field (ActRF) // statistics, which record activation-weighted averaging of other tensor // states, which can be activations in other layers, or external sensory @@ -89,3 +82,4 @@ func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32, di in func (st *Stats) ActRFsAvgNorm() { st.ActRFs.AvgNorm() } +*/ diff --git a/estats/funcs.go b/estats/funcs.go index 2120e1f7..a123f5d9 100644 --- a/estats/funcs.go +++ b/estats/funcs.go @@ -7,6 +7,7 @@ package estats import ( "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor" + "cogentcore.org/core/tensor/matrix" "cogentcore.org/core/tensor/stats/metric" "cogentcore.org/core/tensor/stats/stats" "cogentcore.org/core/tensor/table" @@ -43,7 +44,7 @@ func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB str ly.UnitValuesTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor ly.UnitValuesTensor(tsrB, unitVarB, di) - return metric.Correlation32(tsrA.Values, tsrB.Values) + return float32(metric.Correlation(tsrA, tsrB).Float1D(0)) } // LayerVarsCorrelRep returns the correlation between two variables on a given layer @@ -55,7 +56,7 @@ func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, unitVarB ly.UnitValuesSampleTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor ly.UnitValuesSampleTensor(tsrB, unitVarB, di) - return metric.Correlation32(tsrA.Values, tsrB.Values) + return float32(metric.Correlation(tsrA, tsrB).Float1D(0)) } // ClosestStat finds the closest pattern in given column of given table of possible patterns, @@ -65,13 +66,14 @@ func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, 
unitVarB // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) ClosestPat(net emer.Network, layNm, unitVar string, di int, pats *table.Table, colnm, namecol string) (int, float32, string) { tsr := st.SetLayerTensor(net, layNm, unitVar, di) - col := errors.Log1(pats.ColumnByName(colnm)) + col := pats.Column(colnm) // note: requires Increasing metric so using Inv - row, cor := metric.ClosestRow32(tsr, col.(*tensor.Float32), metric.InvCorrelation32) - cor = 1 - cor // convert back to correl + rc := metric.ClosestRow(metric.InvCorrelation, tsr, col) + row := rc.Int1D(0) + cor := 1 - float32(rc.Float1D(1)) // convert back to correl nm := "" if namecol != "" { - nm = pats.StringValue(namecol, row) + nm = pats.Column(namecol).String1D(row) } return row, cor, nm } @@ -95,7 +97,7 @@ var PCAStrongThr = 0.01 // layer_PCA_Rest: average strength of remaining eigenvalues (if more than 10 total eigens) // Uses SVD to compute much more efficiently than official PCA. func (st *Stats) PCAStats(ix *table.Table, varNm string, layers []string) { - svd.Cond = PCAStrongThr + // svd.Cond = PCAStrongThr covar := tensor.NewFloat64() evecs := tensor.NewFloat64() evals := tensor.NewFloat64() @@ -103,9 +105,9 @@ func (st *Stats) PCAStats(ix *table.Table, varNm string, layers []string) { col := ix.Column(lnm + "_" + varNm) metric.CovarianceMatrixOut(metric.Covariance, col, covar) matrix.SVDOut(covar, evecs, evals) - ln := len(evals) + ln := len(evals.Values) var nstr float64 // nstr := float64(svd.Rank) this didn't work.. 
- for i, v := range evals { + for i, v := range evals.Values { if v < PCAStrongThr { nstr = float64(i) break @@ -114,17 +116,17 @@ func (st *Stats) PCAStats(ix *table.Table, varNm string, layers []string) { var top5, next5 float64 for i := 0; i < 5; i++ { if ln >= 5 { - top5 += evals[i] + top5 += evals.Values[i] } if ln >= 10 { - next5 += evals[i+5] + next5 += evals.Values[i+5] } } st.SetFloat(lnm+"_PCA_NStrong", nstr) st.SetFloat(lnm+"_PCA_Top5", top5/5) st.SetFloat(lnm+"_PCA_Next5", next5/5) if ln > 10 { - sum := stats.Sum(evals) + sum := stats.Sum(evals).Float1D(0) ravg := (sum - (top5 + next5)) / float64(ln-10) st.SetFloat(lnm+"_PCA_Rest", ravg) } else { diff --git a/estats/plots.go b/estats/plots.go index 75259e2f..7efaf3cc 100644 --- a/estats/plots.go +++ b/estats/plots.go @@ -7,8 +7,6 @@ package estats import ( "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor/stats/cluster" - "cogentcore.org/core/tensor/stats/metric" - "cogentcore.org/core/tensor/stats/simat" "cogentcore.org/core/tensor/table" ) @@ -27,18 +25,18 @@ func ConfigPCAPlot(plt *plotcore.PlotEditor, dt *table.Table, nm string) { // ClusterPlot does one cluster plot on given table column name // and label name -func ClusterPlot(plt *plotcore.PlotEditor, ix *table.IndexView, colNm, lblNm string, dfunc cluster.DistFunc) { - nm, _ := ix.Table.MetaData["name"] - smat := &simat.SimMat{} - smat.TableColumnStd(ix, colNm, lblNm, false, metric.Euclidean) - pt := &table.Table{} - cluster.Plot(pt, cluster.Glom(smat, dfunc), smat) - plt.Name = colNm - plt.Options.Title = "Cluster Plot of: " + nm + " " + colNm - plt.Options.XAxis = "X" - plt.SetTable(pt) - // order of params: on, fixMin, min, fixMax, max - plt.SetColumnOptions("X", plotcore.Off, plotcore.FixMin, 0, plotcore.FloatMax, 0) - plt.SetColumnOptions("Y", plotcore.On, plotcore.FixMin, 0, plotcore.FloatMax, 0) - plt.SetColumnOptions("Label", plotcore.On, plotcore.FixMin, 0, plotcore.FloatMax, 0) +func ClusterPlot(plt *plotcore.PlotEditor, 
ix *table.Table, colNm, lblNm string, dfunc cluster.MetricFunc) { + // nm, _ := ix.Table.MetaData["name"] + // smat := &simat.SimMat{} + // smat.TableColumnStd(ix, colNm, lblNm, false, metric.Euclidean) + // pt := &table.Table{} + // cluster.Plot(pt, cluster.Glom(smat, dfunc), smat) + // plt.Name = colNm + // plt.Options.Title = "Cluster Plot of: " + nm + " " + colNm + // plt.Options.XAxis = "X" + // plt.SetTable(pt) + // // order of params: on, fixMin, min, fixMax, max + // plt.SetColumnOptions("X", plotcore.Off, plotcore.FixMin, 0, plotcore.FloatMax, 0) + // plt.SetColumnOptions("Y", plotcore.On, plotcore.FixMin, 0, plotcore.FloatMax, 0) + // plt.SetColumnOptions("Label", plotcore.On, plotcore.FixMin, 0, plotcore.FloatMax, 0) } diff --git a/estats/rasters.go b/estats/rasters.go index 94549b75..18ea1d89 100644 --- a/estats/rasters.go +++ b/estats/rasters.go @@ -21,14 +21,14 @@ func (st *Stats) ConfigRasters(net emer.Network, maxCyc int, layers []string) { if nu == 0 { nu = ly.Shape.Len() } - sr.SetShape([]int{nu, maxCyc}, "Nrn", "Cyc") + sr.SetShapeSizes(nu, maxCyc) } } // SetRasterCol sets column of given raster from data func (st *Stats) SetRasterCol(sr, tsr *tensor.Float32, col int) { for ni, v := range tsr.Values { - sr.Set([]int{ni, col}, v) + sr.Set(v, ni, col) } } diff --git a/estats/stats.go b/estats/stats.go index ee2234be..12bf1ce4 100644 --- a/estats/stats.go +++ b/estats/stats.go @@ -12,9 +12,6 @@ import ( "cogentcore.org/core/base/timer" "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/simat" - "github.com/emer/emergent/v2/actrf" - "github.com/emer/emergent/v2/confusion" "github.com/emer/emergent/v2/decoder" ) @@ -35,7 +32,7 @@ type Stats struct { IntTensors map[string]*tensor.Int // confusion matrix - Confusion confusion.Matrix `display:"no-inline"` + // Confusion confusion.Matrix `display:"no-inline"` // similarity matrix for comparing pattern similarities SimMats 
map[string]*tensor.Float64 @@ -44,7 +41,7 @@ type Stats struct { Plots map[string]*plotcore.PlotEditor // activation-based receptive fields - ActRFs actrf.RFs `display:"no-inline"` + // ActRFs actrf.RFs `display:"no-inline"` // list of layer names configured for recording raster plots Rasters []string @@ -67,14 +64,14 @@ func (st *Stats) Init() { st.F32Tensors = make(map[string]*tensor.Float32) st.F64Tensors = make(map[string]*tensor.Float64) st.IntTensors = make(map[string]*tensor.Int) - st.SimMats = make(map[string]*simat.SimMat) + // st.SimMats = make(map[string]*simat.SimMat) st.Plots = make(map[string]*plotcore.PlotEditor) st.LinDecoders = make(map[string]*decoder.Linear) st.SoftMaxDecoders = make(map[string]*decoder.SoftMax) st.Timers = make(map[string]*timer.Time) - st.PCA.Init() - st.SVD.Init() - st.SVD.Cond = PCAStrongThr + // st.PCA.Init() + // st.SVD.Init() + // st.SVD.Cond = PCAStrongThr } // Print returns a formatted Name: Value string of stat values, @@ -368,6 +365,7 @@ func (st *Stats) SetIntTensorDi(name string, di int, tsr *tensor.Int) { ///////////////////////////////////////// // Misc items +/* // SimMat returns a SimMat similarity matrix of given name, creating if not yet made func (st *Stats) SimMat(name string) *simat.SimMat { sm, has := st.SimMats[name] @@ -377,6 +375,7 @@ func (st *Stats) SimMat(name string) *simat.SimMat { } return sm } +*/ // Plot returns an plotcore.PlotEditor of given name, creating if not yet made func (st *Stats) Plot(name string) *plotcore.PlotEditor { diff --git a/popcode/popcode2d.go b/popcode/popcode2d.go index 5925f647..c8a14b5a 100644 --- a/popcode/popcode2d.go +++ b/popcode/popcode2d.go @@ -149,10 +149,10 @@ func (pc *TwoD) EncodeImpl(pat tensor.Tensor, val math32.Vector2, add bool) erro } idx := []int{yi, xi} if add { - val := float64(act) + pat.Float(idx) - pat.SetFloat(idx, val) + val := float64(act) + pat.Float(idx...) + pat.SetFloat(val, idx...) 
} else { - pat.SetFloat(idx, float64(act)) + pat.SetFloat(float64(act), idx...) } } } @@ -178,7 +178,7 @@ func (pc *TwoD) Decode(pat tensor.Tensor) (math32.Vector2, error) { for yi := 0; yi < ny; yi++ { for xi := 0; xi < nx; xi++ { idx := []int{yi, xi} - act := float32(pat.Float(idx)) + act := float32(pat.Float(idx...)) if act < pc.Thr { act = 0 } @@ -264,7 +264,7 @@ func (pc *TwoD) DecodeImpl(pat tensor.Tensor) (math32.Vector2, error) { for yi := 0; yi < ny; yi++ { for xi := 0; xi < nx; xi++ { idx := []int{yi, xi} - act := float32(pat.Float(idx)) + act := float32(pat.Float(idx...)) if act < pc.Thr { act = 0 } @@ -346,7 +346,7 @@ func (pc *TwoD) DecodeNPeaks(pat tensor.Tensor, nvals, width int) ([]math32.Vect continue } idx := []int{y, x} - act := float32(pat.Float(idx)) + act := float32(pat.Float(idx...)) sum += act ns++ } @@ -380,7 +380,7 @@ func (pc *TwoD) DecodeNPeaks(pat tensor.Tensor, nvals, width int) ([]math32.Vect continue } idx := []int{y, x} - act := float32(pat.Float(idx)) + act := float32(pat.Float(idx...)) if act < pc.Thr { act = 0 } From 78693e76ff0408e25d7dabff0c241c0f5ac9d6b9 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Mon, 14 Oct 2024 14:48:34 -0700 Subject: [PATCH 04/24] more goal updating --- ecmd/README.md | 9 --- ecmd/args.go | 150 ------------------------------------------------ ecmd/std.go | 96 ------------------------------- ecmd/typegen.go | 17 ------ ecmd/types.go | 113 ------------------------------------ emer/layer.go | 2 +- 6 files changed, 1 insertion(+), 386 deletions(-) delete mode 100644 ecmd/README.md delete mode 100644 ecmd/args.go delete mode 100644 ecmd/std.go delete mode 100644 ecmd/typegen.go delete mode 100644 ecmd/types.go diff --git a/ecmd/README.md b/ecmd/README.md deleted file mode 100644 index 244a4b32..00000000 --- a/ecmd/README.md +++ /dev/null @@ -1,9 +0,0 @@ -Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/ecmd) - -Note: this is now deprecated in favor of the [econfig](../econfig) system, which provides a single common Config object for all configuration settings, with TOML config files and command-line arg support. - -`ecmd.Args` provides maps for storing commandline arguments of basic types (bool, string, int, float64), along with associated defaults and descriptions, which then set the standard library `flags` for parsing command line arguments. - -It has functions for populating standard emergent simulation args. - - diff --git a/ecmd/args.go b/ecmd/args.go deleted file mode 100644 index e783593f..00000000 --- a/ecmd/args.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ecmd - -import ( - "flag" - "fmt" -) - -// Args provides maps for storing commandline args. 
-type Args struct { - Ints map[string]*Int - Bools map[string]*Bool - Strings map[string]*String - Floats map[string]*Float - - // true when all args have been set to flag package - Flagged bool `edit:"-"` -} - -// Init must be called before use to create all the maps -func (ar *Args) Init() { - ar.Ints = make(map[string]*Int) - ar.Bools = make(map[string]*Bool) - ar.Strings = make(map[string]*String) - ar.Floats = make(map[string]*Float) -} - -// AddInt adds a new Int arg -func (ar *Args) AddInt(name string, def int, desc string) { - ar.Ints[name] = NewInt(name, def, desc) -} - -// AddBool adds a new Bool arg -func (ar *Args) AddBool(name string, def bool, desc string) { - ar.Bools[name] = NewBool(name, def, desc) -} - -// AddString adds a new String arg -func (ar *Args) AddString(name string, def string, desc string) { - ar.Strings[name] = NewString(name, def, desc) -} - -// AddFloat adds a new Float arg -func (ar *Args) AddFloat(name string, def float64, desc string) { - ar.Floats[name] = NewFloat(name, def, desc) -} - -// Int returns int val by name -func (ar *Args) Int(name string) int { - val, has := ar.Ints[name] - if has { - return val.Val - } - fmt.Printf("Arg named: %s not found in Args\n", name) - return 0 -} - -// SetInt sets the default and current val -func (ar *Args) SetInt(name string, val int) { - ar.Ints[name].Set(val) -} - -// Bool returns bool val by name -func (ar *Args) Bool(name string) bool { - val, has := ar.Bools[name] - if has { - return val.Val - } - fmt.Printf("Arg named: %s not found in Args\n", name) - return false -} - -// SetBool sets the default and current val -func (ar *Args) SetBool(name string, val bool) { - ar.Bools[name].Set(val) -} - -// String returns string val by name -func (ar *Args) String(name string) string { - val, has := ar.Strings[name] - if has { - return val.Val - } - fmt.Printf("Arg named: %s not found in Args\n", name) - return "" -} - -// SetString sets the default and current val -func (ar *Args) 
SetString(name string, val string) { - ar.Strings[name].Set(val) -} - -// Float returns float val by name -func (ar *Args) Float(name string) float64 { - val, has := ar.Floats[name] - if has { - return val.Val - } - fmt.Printf("Arg named: %s not found in Args\n", name) - return 0 -} - -// SetFloat sets the default and current val -func (ar *Args) SetFloat(name string, val float64) { - ar.Floats[name].Set(val) -} - -// Flag sets all args to the system flag values, only if not already done. -func (ar *Args) Flag() { - if ar.Flagged { - return - } - for _, vl := range ar.Ints { - if flag.Lookup(vl.Name) == nil { - flag.IntVar(&vl.Val, vl.Name, vl.Def, vl.Desc) - } - } - for _, vl := range ar.Bools { - if flag.Lookup(vl.Name) == nil { - flag.BoolVar(&vl.Val, vl.Name, vl.Def, vl.Desc) - } - } - for _, vl := range ar.Strings { - if flag.Lookup(vl.Name) == nil { - flag.StringVar(&vl.Val, vl.Name, vl.Def, vl.Desc) - } - } - for _, vl := range ar.Floats { - if flag.Lookup(vl.Name) == nil { - flag.Float64Var(&vl.Val, vl.Name, vl.Def, vl.Desc) - } - } - ar.Flagged = true -} - -// Parse parses command line args, setting values from command line -// Any errors will cause the program to exit with error message. -func (ar *Args) Parse() { - ar.Flag() - flag.Parse() -} - -// Usage prints the set of command args. It is called by the help bool arg. -func (ar *Args) Usage() { - ar.Flag() - flag.Usage() -} diff --git a/ecmd/std.go b/ecmd/std.go deleted file mode 100644 index 724db092..00000000 --- a/ecmd/std.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ecmd - -//go:generate core generate -add-types - -import ( - "os" - - "cogentcore.org/core/base/mpi" - "github.com/emer/emergent/v2/elog" - "github.com/emer/emergent/v2/emer" - "github.com/emer/emergent/v2/etime" -) - -// AddStd adds the standard command line args used by most sims -func (ar *Args) AddStd() { - ar.AddBool("nogui", len(os.Args) > 1, "if not passing any other args and want to run nogui, use nogui") - ar.AddBool("help", false, "show all the command line args available, then exit") - ar.AddString("params", "", "ParamSet name to use -- must be valid name as listed in compiled-in params or loaded params") - ar.AddString("tag", "", "extra tag to add to file names and logs saved from this run") - ar.AddString("note", "", "user note -- describe the run params etc") - ar.AddInt("run", 0, "starting run number -- determines the random seed -- runs counts from there -- can do all runs in parallel by launching separate jobs with each run, runs = 1") - ar.AddInt("runs", 10, "number of runs to do (note that MaxEpcs is in paramset)") - ar.AddInt("epochs", 150, "number of epochs per run") - ar.AddBool("setparams", false, "if true, print a record of each parameter that is set") - ar.AddBool("randomize", false, "If true, randomize seed for every run") - ar.AddBool("wts", false, "if true, save final weights after each run") - ar.AddBool("epclog", true, "if true, save train epoch log to file") - ar.AddBool("triallog", false, "if true, save train trial log to file. May be large.") - ar.AddBool("runlog", true, "if true, save run log to file") - ar.AddBool("tstepclog", false, "if true, save testing epoch log to file") - ar.AddBool("tsttriallog", false, "if true, save testing trial log to file. May be large.") - ar.AddBool("netdata", false, "if true, save network activation etc data from testing trials, for later viewing in netview") - ar.AddString("hyperFile", "", "Name of the file to output hyperparameter data. 
If not empty string, program should write and then exit") - ar.AddString("paramsFile", "", "Name of the file to input parameters from.") - ar.AddBool("gpu", false, "Use the GPU to run the model -- typically faster for larger models.") -} - -// LogFilename returns a standard log file name as netName_runName_logName.tsv -func LogFilename(logName, netName, runName string) string { - return netName + "_" + runName + "_" + logName + ".tsv" -} - -// ProcStd processes the standard args, after Parse has been called -// for help, note, params, tag and wts -func (ar *Args) ProcStd(params *emer.NetParams) { - if ar.Bool("help") { - ar.Usage() - os.Exit(0) - } - if note := ar.String("note"); note != "" { - mpi.Printf("note: %s\n", note) - } - if pars := ar.String("params"); pars != "" { - // params.ExtraSets = pars // todo: - // mpi.Printf("Using ParamSet: %s\n", params.ExtraSets) - } - if tag := ar.String("tag"); tag != "" { - params.Tag = tag - } - if ar.Bool("wts") { - mpi.Printf("Saving final weights per run\n") - } - -} - -// ProcStdLogs processes the standard args for log files, -// setting the log files for standard log file names using netName -// and params.RunName to identify the network / sim and run params, tag, -// and starting run number -func (ar *Args) ProcStdLogs(logs *elog.Logs, params *emer.NetParams, netName string) { - runName := params.RunName(ar.Int("run")) // used for naming logs, stats, etc - if ar.Bool("epclog") { - fnm := LogFilename("epc", netName, runName) - logs.SetLogFile(etime.Train, etime.Epoch, fnm) - } - if ar.Bool("triallog") { - fnm := LogFilename("trl", netName, runName) - logs.SetLogFile(etime.Train, etime.Trial, fnm) - } - if ar.Bool("runlog") { - fnm := LogFilename("run", netName, runName) - logs.SetLogFile(etime.Train, etime.Run, fnm) - } - if ar.Bool("tstepclog") { - fnm := LogFilename("tst_epc", netName, runName) - logs.SetLogFile(etime.Test, etime.Epoch, fnm) - } - if ar.Bool("tsttriallog") { - fnm := LogFilename("tst_trl", netName, 
runName) - logs.SetLogFile(etime.Test, etime.Trial, fnm) - } -} diff --git a/ecmd/typegen.go b/ecmd/typegen.go deleted file mode 100644 index d4157255..00000000 --- a/ecmd/typegen.go +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. - -package ecmd - -import ( - "cogentcore.org/core/types" -) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/ecmd.Args", IDName: "args", Doc: "Args provides maps for storing commandline args.", Fields: []types.Field{{Name: "Ints"}, {Name: "Bools"}, {Name: "Strings"}, {Name: "Floats"}, {Name: "Flagged", Doc: "true when all args have been set to flag package"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/ecmd.Int", IDName: "int", Doc: "Int represents a int valued arg", Fields: []types.Field{{Name: "Name", Doc: "name of arg -- must be unique"}, {Name: "Desc", Doc: "description of arg"}, {Name: "Val", Doc: "value as parsed"}, {Name: "Def", Doc: "default initial value"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/ecmd.Bool", IDName: "bool", Doc: "Bool represents a bool valued arg", Fields: []types.Field{{Name: "Name", Doc: "name of arg -- must be unique"}, {Name: "Desc", Doc: "description of arg"}, {Name: "Val", Doc: "value as parsed"}, {Name: "Def", Doc: "default initial value"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/ecmd.String", IDName: "string", Doc: "String represents a string valued arg", Fields: []types.Field{{Name: "Name", Doc: "name of arg -- must be unique"}, {Name: "Desc", Doc: "description of arg"}, {Name: "Val", Doc: "value as parsed"}, {Name: "Def", Doc: "default initial value"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/ecmd.Float", IDName: "float", Doc: "Float represents a float64 valued arg", Fields: []types.Field{{Name: "Name", Doc: "name of arg -- must be unique"}, {Name: "Desc", Doc: "description of arg"}, {Name: 
"Val", Doc: "value as parsed"}, {Name: "Def", Doc: "default initial value"}}}) diff --git a/ecmd/types.go b/ecmd/types.go deleted file mode 100644 index 03ed17b6..00000000 --- a/ecmd/types.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ecmd - -// Int represents a int valued arg -type Int struct { - - // name of arg -- must be unique - Name string - - // description of arg - Desc string - - // value as parsed - Val int - - // default initial value - Def int -} - -// NewInt returns a new Int arg -func NewInt(name string, def int, desc string) *Int { - return &Int{Name: name, Desc: desc, Def: def} -} - -// Set sets default and current val -func (vl *Int) Set(val int) { - vl.Val = val - vl.Def = val -} - -// Bool represents a bool valued arg -type Bool struct { - - // name of arg -- must be unique - Name string - - // description of arg - Desc string - - // value as parsed - Val bool - - // default initial value - Def bool -} - -// NewBool returns a new Bool arg -func NewBool(name string, def bool, desc string) *Bool { - return &Bool{Name: name, Desc: desc, Val: def, Def: def} -} - -// Set sets default and current val -func (vl *Bool) Set(val bool) { - vl.Val = val - vl.Def = val -} - -// String represents a string valued arg -type String struct { - - // name of arg -- must be unique - Name string - - // description of arg - Desc string - - // value as parsed - Val string - - // default initial value - Def string -} - -// NewString returns a new String arg -func NewString(name string, def string, desc string) *String { - return &String{Name: name, Desc: desc, Val: def, Def: def} -} - -// Set sets default and current val -func (vl *String) Set(val string) { - vl.Val = val - vl.Def = val -} - -// Float represents a float64 valued arg -type Float struct { - - // name of arg -- must be unique - Name 
string - - // description of arg - Desc string - - // value as parsed - Val float64 - - // default initial value - Def float64 -} - -// NewFloat returns a new Float arg -func NewFloat(name string, def float64, desc string) *Float { - return &Float{Name: name, Desc: desc, Val: def, Def: def} -} - -// Set sets default and current val -func (vl *Float) Set(val float64) { - vl.Val = val - vl.Def = val -} diff --git a/emer/layer.go b/emer/layer.go index 3413804c..3afd675a 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -312,7 +312,7 @@ func (ly *LayerBase) SetShape(shape ...int) { // for a subset sample of units to represent the entire layer. // This is critical for large layers that are otherwise unwieldy // to visualize and for computationally-intensive statistics. -func (ly *LayerBase) SetSampleIndexesShape(idxs []int, shape ...int) { +func (ly *LayerBase) SetSampleIndexesShape(idxs, shape []int) { ly.SampleIndexes = idxs ly.SampleShape.SetShapeSizes(shape...) } From 5e912d51e684f70fc68ee73bd28c1890c5401c6a Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Thu, 31 Oct 2024 13:48:22 -0700 Subject: [PATCH 05/24] goal: fix actrf to use current tensor api. not going to be able to do git mod tidy until fully updated -- do this later. --- actrf/actrf.go | 38 +++++++++++++++++++------------------- actrf/running.go | 5 ++--- estats/typegen.go | 2 +- 3 files changed, 22 insertions(+), 23 deletions(-) diff --git a/actrf/actrf.go b/actrf/actrf.go index 8a272b1e..8f1e0805 100644 --- a/actrf/actrf.go +++ b/actrf/actrf.go @@ -7,8 +7,10 @@ package actrf //go:generate core generate -add-types import ( + "slices" + "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/norm" + "cogentcore.org/core/tensor/stats/stats" ) // RF is used for computing an activation-based receptive field. 
@@ -59,17 +61,15 @@ func (af *RF) InitShape(act, src tensor.Tensor) []int { aNy, aNx, _, _ := tensor.Projection2DShape(act.Shape(), false) sNy, sNx, _, _ := tensor.Projection2DShape(src.Shape(), false) oshp := []int{aNy, aNx, sNy, sNx} - if tensor.EqualInts(af.RF.Shp.Sizes, oshp) { + if slices.Equal(af.RF.Shape().Sizes, oshp) { return oshp } - snm := []string{"ActY", "ActX", "SrcY", "SrcX"} sshp := []int{sNy, sNx} - ssnm := []string{"SrcY", "SrcX"} - af.RF.SetShape(oshp, snm...) - af.NormRF.SetShape(oshp, snm...) - af.SumProd.SetShape(oshp, snm...) - af.NormSrc.SetShape(sshp, ssnm...) - af.SumSrc.SetShape(sshp, ssnm...) + af.RF.SetShapeSizes(oshp...) + af.NormRF.SetShapeSizes(oshp...) + af.SumProd.SetShapeSizes(oshp...) + af.NormSrc.SetShapeSizes(sshp...) + af.SumSrc.SetShapeSizes(sshp...) af.ConfigView(&af.RF) af.ConfigView(&af.NormRF) @@ -81,10 +81,11 @@ func (af *RF) InitShape(act, src tensor.Tensor) []int { // ConfigView configures the view params on the tensor func (af *RF) ConfigView(tsr *tensor.Float32) { - tsr.SetMetaData("colormap", "Viridis") - tsr.SetMetaData("grid-fill", "1") // remove extra lines - tsr.SetMetaData("fix-min", "true") - tsr.SetMetaData("min", "0") + // todo:meta + // tsr.SetMetaData("colormap", "Viridis") + // tsr.SetMetaData("grid-fill", "1") // remove extra lines + // tsr.SetMetaData("fix-min", "true") + // tsr.SetMetaData("min", "0") } // Reset reinitializes the Sum accumulators -- must have called Init first @@ -106,11 +107,11 @@ func (af *RF) Add(act, src tensor.Tensor, thr float32) { if tv < thr { continue } - af.SumSrc.AddScalar([]int{sy, sx}, float64(tv)) + af.SumSrc.SetAdd(tv, sy, sx) for ay := 0; ay < aNy; ay++ { for ax := 0; ax < aNx; ax++ { av := float32(tensor.Projection2DValue(act, false, ay, ax)) - af.SumProd.AddScalar([]int{ay, ax, sy, sx}, float64(av*tv)) + af.SumProd.SetAdd(av*tv, ay, ax, sy, sx) } } } @@ -126,7 +127,7 @@ func (af *RF) Avg() { var maxSrc float32 for sy := 0; sy < sNy; sy++ { for sx := 0; sx < sNx; sx++ 
{ - src := af.SumSrc.Value([]int{sy, sx}) + src := af.SumSrc.Value(sy, sx) if src == 0 { continue } @@ -135,7 +136,7 @@ func (af *RF) Avg() { } for ay := 0; ay < aNy; ay++ { for ax := 0; ax < aNx; ax++ { - oo := af.SumProd.Shape().Offset([]int{ay, ax, sy, sx}) + oo := af.SumProd.Shape().IndexTo1D(ay, ax, sy, sx) af.RF.Values[oo] = af.SumProd.Values[oo] / src } } @@ -151,8 +152,7 @@ func (af *RF) Avg() { // Norm computes unit norm of RF values -- must be called after Avg func (af *RF) Norm() { - af.NormRF.CopyFrom(&af.RF) - norm.TensorUnit(&af.NormRF, 2) // 2 = norm within outer 2 dims = norm each src within + stats.UnitNormOut(&af.RF, &af.NormRF) } // AvgNorm computes RF as SumProd / SumTarg and then does Norm. diff --git a/actrf/running.go b/actrf/running.go index 69f903c0..b67d2bdc 100644 --- a/actrf/running.go +++ b/actrf/running.go @@ -17,15 +17,14 @@ func RunningAvg(out *tensor.Float32, act, src tensor.Tensor, tau float32) { aNy, aNx, _, _ := tensor.Projection2DShape(act.Shape(), false) tNy, tNx, _, _ := tensor.Projection2DShape(src.Shape(), false) oshp := []int{aNy, aNx, tNy, tNx} - out.SetShape(oshp, "ActY", "ActX", "SrcY", "SrcX") + out.SetShapeSizes(oshp...) 
for ay := 0; ay < aNy; ay++ { for ax := 0; ax < aNx; ax++ { av := float32(tensor.Projection2DValue(act, false, ay, ax)) for ty := 0; ty < tNy; ty++ { for tx := 0; tx < tNx; tx++ { tv := float32(tensor.Projection2DValue(src, false, ty, tx)) - oi := []int{ay, ax, ty, tx} - oo := out.Shape().Offset(oi) + oo := out.Shape().IndexTo1D(ay, ax, ty, tx) ov := out.Values[oo] nv := cdt*ov + dt*tv*av out.Values[oo] = nv diff --git a/estats/typegen.go b/estats/typegen.go index ad099be2..34feee96 100644 --- a/estats/typegen.go +++ b/estats/typegen.go @@ -6,4 +6,4 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/estats.Stats", IDName: "stats", Doc: "Stats provides maps for storing statistics as named scalar and tensor values.\nThese stats are available in the elog.Context for use during logging.", Fields: []types.Field{{Name: "Floats"}, {Name: "Strings"}, {Name: "Ints"}, {Name: "F32Tensors", Doc: "float32 tensors used for grabbing values from layers"}, {Name: "F64Tensors", Doc: "float64 tensors as needed for other computations"}, {Name: "IntTensors", Doc: "int tensors as needed for other computations"}, {Name: "Confusion", Doc: "confusion matrix"}, {Name: "SimMats", Doc: "similarity matrix for comparing pattern similarities"}, {Name: "Plots", Doc: "analysis plots -- created by analysis routines"}, {Name: "PCA", Doc: "one PCA object can be reused for all PCA computations"}, {Name: "SVD", Doc: "one SVD object can be reused for all SVD computations"}, {Name: "ActRFs", Doc: "activation-based receptive fields"}, {Name: "Rasters", Doc: "list of layer names configured for recording raster plots"}, {Name: "LinDecoders", Doc: "linear decoders"}, {Name: "SoftMaxDecoders", Doc: "softmax decoders"}, {Name: "Timers", Doc: "named timers available for timing how long different computations take (wall-clock time)"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/estats.Stats", IDName: "stats", Doc: 
"Stats provides maps for storing statistics as named scalar and tensor values.\nThese stats are available in the elog.Context for use during logging.", Fields: []types.Field{{Name: "Floats"}, {Name: "Strings"}, {Name: "Ints"}, {Name: "F32Tensors", Doc: "float32 tensors used for grabbing values from layers"}, {Name: "F64Tensors", Doc: "float64 tensors as needed for other computations"}, {Name: "IntTensors", Doc: "int tensors as needed for other computations"}, {Name: "SimMats", Doc: "similarity matrix for comparing pattern similarities"}, {Name: "Plots", Doc: "analysis plots -- created by analysis routines"}, {Name: "Rasters", Doc: "list of layer names configured for recording raster plots"}, {Name: "LinDecoders", Doc: "linear decoders"}, {Name: "SoftMaxDecoders", Doc: "softmax decoders"}, {Name: "Timers", Doc: "named timers available for timing how long different computations take (wall-clock time)"}}}) From 309c4b47187355c90a74bb4dcad46fb4e9715932 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Fri, 15 Nov 2024 00:20:57 -0800 Subject: [PATCH 06/24] goal: first pass egui building on databrowser, env updated to new table etc --- egui/grids.go | 17 ++++++--------- egui/gui.go | 54 +++++++++++++++++++++------------------------- egui/netview.go | 33 ---------------------------- egui/plots.go | 28 +++++++++++------------- env/fixed.go | 30 ++++++++++++-------------- env/freq.go | 35 +++++++++++++++--------------- env/mpifixed.go | 31 +++++++++++++------------- estats/plots.go | 16 +++++++------- netview/netdata.go | 7 +++--- 9 files changed, 103 insertions(+), 148 deletions(-) diff --git a/egui/grids.go b/egui/grids.go index 4e7b4046..1b31aa8b 100644 --- a/egui/grids.go +++ b/egui/grids.go @@ -4,12 +4,7 @@ package egui -import ( - "cogentcore.org/core/core" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/tensorcore" -) - +/* // Grid gets tensor grid view of given name, creating if not yet made func (gui *GUI) Grid(name string) *tensorcore.TensorGrid { if gui.Grids == nil { @@ -34,10 +29,11 @@ func (gui *GUI) SetGrid(name string, tg *tensorcore.TensorGrid) { // AddGridTab adds TensorGrid with a new // tab and plot of given name. func (gui *GUI) AddGridTab(name string) *tensorcore.TensorGrid { - tab, _ := gui.Tabs.NewTab(name) - grid := tensorcore.NewTensorGrid(tab) - gui.SetGrid(name, grid) - return grid + // tab, _ := gui.Tabs.NewTab(name) + // grid := tensorcore.NewTensorGrid(tab) + // gui.SetGrid(name, grid) + // return grid + return nil } // ConfigRasterGrid configures a raster grid for given layer name. @@ -58,6 +54,7 @@ func (gui *GUI) ConfigRasterGrid(lay *core.Frame, laynm string, rast *tensor.Flo func (gui *GUI) SaveActRFGrid(tg *tensorcore.TensorGrid, name string) { gui.SetGrid(name, tg) } +*/ /* // AddActRFGridTabs adds tabs for each of the ActRFs. 
diff --git a/egui/gui.go b/egui/gui.go index bf964310..554fef10 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -10,14 +10,14 @@ import ( "cogentcore.org/core/core" "cogentcore.org/core/events" _ "cogentcore.org/core/goal/gosl/slbool/slboolcore" // include to get gui views - "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor/tensorcore" - "github.com/emer/emergent/v2/etime" + "cogentcore.org/core/styles" + "cogentcore.org/core/tensor/databrowser" "github.com/emer/emergent/v2/netview" ) // GUI manages all standard elements of a simulation Graphical User Interface type GUI struct { + databrowser.Browser // how many cycles between updates of cycle-level plots CycleUpdateInterval int @@ -31,32 +31,14 @@ type GUI struct { // flag to stop running StopNow bool `display:"-"` - // plots by scope - Plots map[etime.ScopeKey]*plotcore.PlotEditor - - // plots by scope - TableViews map[etime.ScopeKey]*tensorcore.Table - - // tensor grid views by name -- used e.g., for Rasters or ActRFs -- use Grid(name) to access - Grids map[string]*tensorcore.TensorGrid - // the view update for managing updates of netview ViewUpdate *netview.ViewUpdate `display:"-"` - // net data for recording in nogui mode, if !nil - NetData *netview.NetData `display:"-"` - // displays Sim fields on left SimForm *core.Form `display:"-"` - // tabs for different view elements: plots, rasters - Tabs *core.Tabs `display:"-"` - // Body is the content of the sim window Body *core.Body `display:"-"` - - // Toolbar is the overall sim toolbar - Toolbar *core.Toolbar `display:"-"` } // UpdateWindow triggers an update on window body, @@ -101,6 +83,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { // gui.Body.App().About = about split := core.NewSplits(gui.Body) split.Name = "split" + gui.Splits = split gui.SimForm = core.NewForm(split).SetStruct(sim) gui.SimForm.Name = "sim-form" if tb, ok := sim.(core.ToolbarMaker); ok { @@ -109,19 +92,30 @@ func (gui *GUI) MakeBody(sim any, 
appname, title, about string) { gui.Toolbar.Maker(tb.MakeToolbar) }) } - gui.Tabs = core.NewTabs(split) - gui.Tabs.Name = "tabs" - split.SetSplits(.2, .8) + fform := core.NewFrame(split) + fform.Styler(func(s *styles.Style) { + s.Direction = styles.Column + s.Overflow.Set(styles.OverflowAuto) + s.Grow.Set(1, 1) + }) + gui.Files = databrowser.NewDataTree(fform) + tabs := databrowser.NewTabs(split) + gui.Tabs = tabs + tabs.Name = "tabs" + split.SetTiles(core.TileSplit, core.TileSpan) + split.SetSplits(.2, .5, .8) } // AddNetView adds NetView in tab with given name func (gui *GUI) AddNetView(tabName string) *netview.NetView { - nvt, tb := gui.Tabs.NewTab(tabName) - nv := netview.NewNetView(nvt) - nv.Var = "Act" - tb.OnFinal(events.Click, func(e events.Event) { - nv.Current() - nv.Update() + nv := databrowser.NewTab(gui.Tabs, tabName, func(tab *core.Frame) *netview.NetView { + nv := netview.NewNetView(tab) + nv.Var = "Act" + // tb.OnFinal(events.Click, func(e events.Event) { + // nv.Current() + // nv.Update() + // }) + return nv }) return nv } diff --git a/egui/netview.go b/egui/netview.go index a9656003..6dfd9f79 100644 --- a/egui/netview.go +++ b/egui/netview.go @@ -4,12 +4,6 @@ package egui -import ( - "cogentcore.org/core/core" - "github.com/emer/emergent/v2/emer" - "github.com/emer/emergent/v2/netview" -) - // UpdateNetView updates the gui visualization of the network. func (gui *GUI) UpdateNetView() { if gui.ViewUpdate != nil { @@ -24,30 +18,3 @@ func (gui *GUI) UpdateNetViewWhenStopped() { gui.ViewUpdate.UpdateWhenStopped() } } - -// InitNetData initializes the NetData object to record NetView data -// when the GUI is not active -func (gui *GUI) InitNetData(net emer.Network, nrecs int) { - gui.NetData = &netview.NetData{} - gui.NetData.Init(net, nrecs, true, 1) // true = NoSynData, 1 = MaxData -} - -// NetDataRecord records current netview data -// if InitNetData has been called and NetData exists. 
-func (gui *GUI) NetDataRecord(netViewText string) { - if gui.NetData == nil { - return - } - gui.NetData.Record(netViewText, -1, 100) -} - -// SaveNetData saves NetData NetView data (if !nil) -// to a file named by the network name -// plus _extra name plus ".netdata.gz" -func (gui *GUI) SaveNetData(extra string) { - if gui.NetData == nil { - return - } - ndfn := gui.NetData.Net.AsEmer().Name + "_" + extra + ".netdata.gz" - gui.NetData.SaveJSON(core.Filename(ndfn)) -} diff --git a/egui/plots.go b/egui/plots.go index 39048366..181b2499 100644 --- a/egui/plots.go +++ b/egui/plots.go @@ -4,14 +4,6 @@ package egui -import ( - "fmt" - - "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor/tensorcore" - "github.com/emer/emergent/v2/etime" -) - /* // AddPlots adds plots based on the unique tables we have, // currently assumes they should always be plotted @@ -38,14 +30,14 @@ func (gui *GUI) AddPlots(title string, lg *elog.Logs) { } */ -// AddMiscPlotTab adds a misc (non log-generated) plot with a new -// tab and plot of given name. -func (gui *GUI) AddMiscPlotTab(name string) *plotcore.PlotEditor { - tab, _ := gui.Tabs.NewTab(name) - plt := plotcore.NewSubPlot(tab) - gui.SetPlotByName(name, plt) - return plt -} +// // AddMiscPlotTab adds a misc (non log-generated) plot with a new +// // tab and plot of given name. 
+// func (gui *GUI) AddMiscPlotTab(name string) *plotcore.PlotEditor { +// tab, _ := gui.Tabs.NewTab(name) +// plt := plotcore.NewSubPlot(tab) +// gui.SetPlotByName(name, plt) +// return plt +// } /* func ConfigPlotFromLog(title string, plt *plotcore.PlotEditor, lg *elog.Logs, key etime.ScopeKey) { @@ -80,6 +72,7 @@ func ConfigPlotFromLog(title string, plt *plotcore.PlotEditor, lg *elog.Logs, ke } */ +/* // Plot returns plot for mode, time scope func (gui *GUI) Plot(mode etime.Modes, time etime.Times) *plotcore.PlotEditor { return gui.PlotScope(etime.Scope(mode, time)) @@ -205,6 +198,7 @@ func (gui *GUI) NewPlotTab(key etime.ScopeKey, tabLabel string) *plotcore.PlotEd gui.Plots[key] = plt return plt } +*/ /* // AddTableView adds a table view of given log, @@ -230,6 +224,7 @@ func (gui *GUI) AddTableView(lg *elog.Logs, mode etime.Modes, time etime.Times) } */ +/* // TableView returns TableView for mode, time scope func (gui *GUI) TableView(mode etime.Modes, time etime.Times) *tensorcore.Table { if !gui.Active { @@ -252,3 +247,4 @@ func (gui *GUI) UpdateTableView(mode etime.Modes, time etime.Times) *tensorcore. } return tv } +*/ diff --git a/env/fixed.go b/env/fixed.go index 598ef27d..81df2f58 100644 --- a/env/fixed.go +++ b/env/fixed.go @@ -9,7 +9,6 @@ import ( "log" "math/rand" - "cogentcore.org/core/base/errors" "cogentcore.org/core/base/randx" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" @@ -25,11 +24,11 @@ type FixedTable struct { // name of this environment, usually Train vs. Test. Name string - // this is an indexed view of the table with the set of patterns to output. + // Table has the set of patterns to output. // The indexes are used for the *sequential* view so you can easily // sort / split / filter the patterns to be presented using this view. // This adds the random permuted Order on top of those if !sequential. 
- Table *table.IndexView + Table *table.Table // present items from the table in sequential order (i.e., according to // the indexed view on the Table)? otherwise permuted random order. @@ -57,10 +56,10 @@ type FixedTable struct { } func (ft *FixedTable) Validate() error { - if ft.Table == nil || ft.Table.Table == nil { + if ft.Table == nil { return fmt.Errorf("env.FixedTable: %v has no Table set", ft.Name) } - if ft.Table.Table.NumColumns() == 0 { + if ft.Table.NumColumns() == 0 { return fmt.Errorf("env.FixedTable: %v Table has no columns -- Outputs will be invalid", ft.Name) } return nil @@ -86,14 +85,14 @@ func (ft *FixedTable) Init(run int) { // then a Run counter is added, otherwise just Epoch and Trial. // NameCol and GroupCol are initialized to "Name" and "Group" // so set these to something else after this if needed. -func (ft *FixedTable) Config(tbl *table.IndexView) { +func (ft *FixedTable) Config(tbl *table.Table) { ft.Table = tbl ft.Init(0) } // NewOrder sets a new random Order based on number of rows in the table. func (ft *FixedTable) NewOrder() { - np := ft.Table.Len() + np := ft.Table.NumRows() ft.Order = rand.Perm(np) // always start with new one so random order is identical // and always maintain Order so random number usage is same regardless, and if // user switches between Sequential and random at any point, it all works.. @@ -106,29 +105,28 @@ func (ft *FixedTable) PermuteOrder() { randx.PermuteInts(ft.Order) } -// Row returns the current row number in table, based on Sequential / perumuted Order and -// already de-referenced through the IndexView's indexes to get the actual row in the table. +// Row returns the current row number in table, based on Sequential / perumuted Order. 
func (ft *FixedTable) Row() int { if ft.Sequential { - return ft.Table.Indexes[ft.Trial.Cur] + return ft.Trial.Cur } - return ft.Table.Indexes[ft.Order[ft.Trial.Cur]] + return ft.Order[ft.Trial.Cur] } func (ft *FixedTable) SetTrialName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.NameCol)); nms != nil { + if nms := ft.Table.Column(ft.NameCol); nms != nil { rw := ft.Row() if rw >= 0 && rw < nms.Len() { - ft.TrialName.Set(nms.String1D(rw)) + ft.TrialName.Set(nms.StringRow(rw, 0)) } } } func (ft *FixedTable) SetGroupName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.GroupCol)); nms != nil { + if nms := ft.Table.Column(ft.GroupCol); nms != nil { rw := ft.Row() if rw >= 0 && rw < nms.Len() { - ft.GroupName.Set(nms.String1D(rw)) + ft.GroupName.Set(nms.StringRow(rw, 0)) } } } @@ -143,7 +141,7 @@ func (ft *FixedTable) Step() bool { } func (ft *FixedTable) State(element string) tensor.Tensor { - et := ft.Table.Table.Tensor(element, ft.Row()) + et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("FixedTable.State -- could not find element:", element) } diff --git a/env/freq.go b/env/freq.go index b69e3941..e54fc0da 100644 --- a/env/freq.go +++ b/env/freq.go @@ -9,7 +9,6 @@ import ( "log" "math" - "cogentcore.org/core/base/errors" "cogentcore.org/core/base/randx" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" @@ -30,8 +29,11 @@ type FreqTable struct { // name of this environment Name string - // this is an indexed view of the table with the set of patterns to output -- the indexes are used for the *sequential* view so you can easily sort / split / filter the patterns to be presented using this view -- we then add the random permuted Order on top of those if !sequential - Table *table.IndexView + // Table has the set of patterns to output. + // The indexes are used for the *sequential* view so you can easily + // sort / split / filter the patterns to be presented using this view. 
+ // This adds the random permuted Order on top of those if !sequential. + Table *table.Table // number of samples to use in constructing the list of items to present according to frequency -- number per epoch ~ NSamples * Freq -- see RandSamp option NSamples float64 @@ -65,15 +67,15 @@ type FreqTable struct { } func (ft *FreqTable) Validate() error { - if ft.Table == nil || ft.Table.Table == nil { + if ft.Table == nil { return fmt.Errorf("env.FreqTable: %v has no Table set", ft.Name) } - if ft.Table.Table.NumColumns() == 0 { + if ft.Table.NumColumns() == 0 { return fmt.Errorf("env.FreqTable: %v Table has no columns -- Outputs will be invalid", ft.Name) } - _, err := ft.Table.Table.ColumnByName(ft.FreqCol) - if err != nil { - return err + fc := ft.Table.Column(ft.FreqCol) + if fc == nil { + return fmt.Errorf("env.FreqTable: %v Table has no FreqCol", ft.FreqCol) } return nil } @@ -102,17 +104,16 @@ func (ft *FreqTable) Sample() { if ft.NSamples <= 0 { ft.NSamples = 1 } - np := ft.Table.Len() + np := ft.Table.NumRows() if ft.Order == nil { ft.Order = make([]int, 0, int(math.Round(float64(np)*ft.NSamples))) } else { ft.Order = ft.Order[:0] } - frqs := errors.Log1(ft.Table.Table.ColumnByName(ft.FreqCol)) + frqs := ft.Table.Column(ft.FreqCol) for ri := 0; ri < np; ri++ { - ti := ft.Table.Indexes[ri] - frq := frqs.Float1D(ti) + frq := frqs.FloatRow(ri, 0) if ft.RandSamp { n := int(ft.NSamples) for i := 0; i < n; i++ { @@ -139,19 +140,19 @@ func (ft *FreqTable) Row() int { } func (ft *FreqTable) SetTrialName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.NameCol)); nms != nil { + if nms := ft.Table.Column(ft.NameCol); nms != nil { rw := ft.Row() if rw >= 0 && rw < nms.Len() { - ft.TrialName.Set(nms.String1D(rw)) + ft.TrialName.Set(nms.StringRow(rw, 0)) } } } func (ft *FreqTable) SetGroupName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.GroupCol)); nms != nil { + if nms := ft.Table.Column(ft.GroupCol); nms != nil { rw := ft.Row() if rw >= 0 
&& rw < nms.Len() { - ft.GroupName.Set(nms.String1D(rw)) + ft.GroupName.Set(nms.StringRow(rw, 0)) } } } @@ -167,7 +168,7 @@ func (ft *FreqTable) Step() bool { } func (ft *FreqTable) State(element string) tensor.Tensor { - et := ft.Table.Table.Tensor(element, ft.Row()) + et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("FreqTable.State -- could not find element:", element) } diff --git a/env/mpifixed.go b/env/mpifixed.go index aed39c0e..0f9a3b79 100644 --- a/env/mpifixed.go +++ b/env/mpifixed.go @@ -9,7 +9,6 @@ import ( "log" "math/rand" - "cogentcore.org/core/base/errors" "cogentcore.org/core/base/randx" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" @@ -32,8 +31,11 @@ type MPIFixedTable struct { // name of this environment Name string - // this is an indexed view of the table with the set of patterns to output -- the indexes are used for the *sequential* view so you can easily sort / split / filter the patterns to be presented using this view -- we then add the random permuted Order on top of those if !sequential - Table *table.IndexView + // Table has the set of patterns to output. + // The indexes are used for the *sequential* view so you can easily + // sort / split / filter the patterns to be presented using this view. + // This adds the random permuted Order on top of those if !sequential. + Table *table.Table // present items from the table in sequential order (i.e., according to the indexed view on the Table)? 
otherwise permuted random order Sequential bool @@ -64,10 +66,10 @@ type MPIFixedTable struct { } func (ft *MPIFixedTable) Validate() error { - if ft.Table == nil || ft.Table.Table == nil { + if ft.Table == nil { return fmt.Errorf("MPIFixedTable: %v has no Table set", ft.Name) } - if ft.Table.Table.NumColumns() == 0 { + if ft.Table.NumColumns() == 0 { return fmt.Errorf("MPIFixedTable: %v Table has no columns -- Outputs will be invalid", ft.Name) } return nil @@ -90,7 +92,7 @@ func (ft *MPIFixedTable) Init(run int) { // NewOrder sets a new random Order based on number of rows in the table. func (ft *MPIFixedTable) NewOrder() { - np := ft.Table.Len() + np := ft.Table.NumRows() ft.Order = rand.Perm(np) // always start with new one so random order is identical // and always maintain Order so random number usage is same regardless, and if // user switches between Sequential and random at any point, it all works.. @@ -104,29 +106,28 @@ func (ft *MPIFixedTable) PermuteOrder() { randx.PermuteInts(ft.Order) } -// Row returns the current row number in table, based on Sequential / perumuted Order and -// already de-referenced through the IndexView's indexes to get the actual row in the table. +// Row returns the current row number in table, based on Sequential / perumuted Order. 
func (ft *MPIFixedTable) Row() int { if ft.Sequential { - return ft.Table.Indexes[ft.Trial.Cur] + return ft.Trial.Cur } - return ft.Table.Indexes[ft.Order[ft.Trial.Cur]] + return ft.Order[ft.Trial.Cur] } func (ft *MPIFixedTable) SetTrialName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.NameCol)); nms != nil { + if nms := ft.Table.Column(ft.NameCol); nms != nil { rw := ft.Row() if rw >= 0 && rw < nms.Len() { - ft.TrialName.Set(nms.String1D(rw)) + ft.TrialName.Set(nms.StringRow(rw, 0)) } } } func (ft *MPIFixedTable) SetGroupName() { - if nms := errors.Ignore1(ft.Table.Table.ColumnByName(ft.GroupCol)); nms != nil { + if nms := ft.Table.Column(ft.GroupCol); nms != nil { rw := ft.Row() if rw >= 0 && rw < nms.Len() { - ft.GroupName.Set(nms.String1D(rw)) + ft.GroupName.Set(nms.StringRow(rw, 0)) } } } @@ -142,7 +143,7 @@ func (ft *MPIFixedTable) Step() bool { } func (ft *MPIFixedTable) State(element string) tensor.Tensor { - et := ft.Table.Table.Tensor(element, ft.Row()) + et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("MPIFixedTable.State -- could not find element:", element) } diff --git a/estats/plots.go b/estats/plots.go index 2fff4baa..ad5651f8 100644 --- a/estats/plots.go +++ b/estats/plots.go @@ -10,16 +10,16 @@ import ( ) func ConfigPCAPlot(plt *plotcore.PlotEditor, dt *table.Table, nm string) { - plt.Options.Title = nm - col1 := dt.ColumnName(1) - plt.Options.XAxis = col1 + // plt.Options.Title = nm + // col1 := dt.ColumnName(1) + // plt.Options.XAxis = col1 plt.SetTable(dt) - plt.Options.Lines = false - plt.Options.Points = true + // plt.Options.Lines = false + // plt.Options.Points = true // order of params: on, fixMin, min, fixMax, max - plt.SetColumnOptions(dt.ColumnName(0), plotcore.On, plotcore.FloatMin, 0, plotcore.FloatMax, 0) - plt.SetColumnOptions(col1, plotcore.Off, plotcore.FloatMin, -3, plotcore.FloatMax, 3) - plt.SetColumnOptions(dt.ColumnName(2), plotcore.On, plotcore.FloatMin, -3, plotcore.FloatMax, 
3) + // plt.SetColumnOptions(dt.ColumnName(0), plotcore.On, plotcore.FloatMin, 0, plotcore.FloatMax, 0) + // plt.SetColumnOptions(col1, plotcore.Off, plotcore.FloatMin, -3, plotcore.FloatMax, 3) + // plt.SetColumnOptions(dt.ColumnName(2), plotcore.On, plotcore.FloatMin, -3, plotcore.FloatMax, 3) } // ClusterPlot does one cluster plot on given table column name diff --git a/netview/netdata.go b/netview/netdata.go index e36c46c2..fff820c1 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -636,8 +636,8 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm) plt := plotcore.NewPlotEditor(b) - plt.Options.Title = "NetView " + selnm - plt.Options.XAxis = "Rec" + // plt.Options.Title = "NetView " + selnm + // plt.Options.XAxis = "Rec" b.AddTopBar(func(bar *core.Frame) { core.NewToolbar(bar).Maker(plt.MakeToolbar) @@ -652,11 +652,12 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t continue } disp := (vnm == nv.Var) + _ = disp min := vp.Range.Min if min < 0 && vp.Range.FixMin && vp.MinMax.Min >= 0 { min = 0 // netview uses -1..1 but not great for graphs unless needed } - plt.SetColumnOptions(vnm, disp, vp.Range.FixMin, min, vp.Range.FixMax, vp.Range.Max) + // plt.SetColumnOptions(vnm, disp, vp.Range.FixMin, min, vp.Range.FixMax, vp.Range.Max) } b.RunWindow() From 46b6fd793bf6fffea477b90505b697c1d79c9936 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Fri, 15 Nov 2024 02:59:41 -0800 Subject: [PATCH 07/24] goal: egui fix config --- egui/gui.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/egui/gui.go b/egui/gui.go index 554fef10..ae0f4e0f 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -89,6 +89,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { if tb, ok := sim.(core.ToolbarMaker); ok { gui.Body.AddTopBar(func(bar *core.Frame) { gui.Toolbar = core.NewToolbar(bar) + gui.Toolbar.Maker(gui.MakeToolbar) gui.Toolbar.Maker(tb.MakeToolbar) }) } @@ -102,6 +103,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { tabs := databrowser.NewTabs(split) gui.Tabs = tabs tabs.Name = "tabs" + gui.Files.Tabber = tabs split.SetTiles(core.TileSplit, core.TileSpan) split.SetSplits(.2, .5, .8) } From e50072656389b2119507b7c5c27beb03856d7717 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 15 Nov 2024 12:35:43 -0800 Subject: [PATCH 08/24] goal: looper doesn't include etime or stack directly (still uses etime for test) --- env/envs.go | 10 ++++----- looper/README.md | 4 ++-- looper/stack.go | 53 +++++++++++++++++++++++++-------------------- looper/stacks.go | 6 ++--- looper/step_test.go | 10 ++++++--- netview/netview.go | 12 ++++++++++ netview/viewupdt.go | 6 +++-- 7 files changed, 62 insertions(+), 39 deletions(-) diff --git a/env/envs.go b/env/envs.go index be2c0ee7..ca2fafad 100644 --- a/env/envs.go +++ b/env/envs.go @@ -7,7 +7,7 @@ package env import ( "fmt" - "github.com/emer/emergent/v2/etime" + "cogentcore.org/core/enums" ) // Envs is a map of environments organized according @@ -29,21 +29,21 @@ func (es *Envs) Add(evs ...Env) { } } -// ByMode returns env by etime.Modes evaluation mode as the map key. +// ByMode returns env by Modes evaluation mode as the map key. 
// returns nil if not found -func (es *Envs) ByMode(mode etime.Modes) Env { +func (es *Envs) ByMode(mode enums.Enum) Env { return (*es)[mode.String()] } // ModeDi returns the string of the given mode appended with // _di data index with leading zero. -func ModeDi(mode etime.Modes, di int) string { +func ModeDi(mode enums.Enum, di int) string { return fmt.Sprintf("%s_%02d", mode.String(), di) } // ByModeDi returns env by etime.Modes evaluation mode and // data parallel index as the map key, using ModeDi function. // returns nil if not found -func (es *Envs) ByModeDi(mode etime.Modes, di int) Env { +func (es *Envs) ByModeDi(mode enums.Enum, di int) Env { return (*es)[ModeDi(mode, di)] } diff --git a/looper/README.md b/looper/README.md index 189195c6..10eb395d 100644 --- a/looper/README.md +++ b/looper/README.md @@ -21,7 +21,7 @@ for { } ``` -The `Loop` object has the above function lists (`OnStart`, `OnEnd`, and `IsDone`), where function closures can be added to perform any relevant functionality. `Events` have the trigger `AtCounter` and a list of functions to call. If the Loop is the last one in the Stack, then, +The `Loop` object has the above function lists (`OnStart`, `OnEnd`, and `IsDone`), where function closures can be added to perform any relevant functionality. `Events` have the trigger `AtCounter` and a list of functions to call. Each level of loop holds a corresponding `Counter` value, which increments at each iteration, and its `Max` value determines when the loop iteration terminates. @@ -39,7 +39,7 @@ From `step_test.go` `ExampleStacks`: ```Go stacks := NewStacks() - stacks.AddStack(etime.Train). + stacks.AddStack(etime.Train, etime.Trial). AddTime(etime.Epoch, 3). 
AddTime(etime.Trial, 2) diff --git a/looper/stack.go b/looper/stack.go index 4dbfeb4c..54304098 100644 --- a/looper/stack.go +++ b/looper/stack.go @@ -5,11 +5,10 @@ package looper import ( + "fmt" "strings" "cogentcore.org/core/enums" - "github.com/emer/emergent/v2/estats" - "github.com/emer/emergent/v2/etime" ) // Stack contains a list of Loops to run, for a given Mode of processing. @@ -55,17 +54,17 @@ type Stack struct { StepCount int } -// NewStack returns a new Stack for given mode. -func NewStack(mode enums.Enum) *Stack { +// NewStack returns a new Stack for given mode and default step level. +func NewStack(mode, stepLevel enums.Enum) *Stack { st := &Stack{} - st.newInit(mode) + st.newInit(mode, stepLevel) return st } // newInit initializes new data structures for a newly created object. -func (st *Stack) newInit(mode enums.Enum) { +func (st *Stack) newInit(mode, stepLevel enums.Enum) { st.Mode = mode - st.StepLevel = etime.Trial + st.StepLevel = stepLevel st.StepCount = 1 st.Loops = map[enums.Enum]*Loop{} st.Order = []enums.Enum{} @@ -116,27 +115,27 @@ func (st *Stack) AddTimeIncr(time enums.Enum, ctrMax, ctrIncr int) *Stack { } // TimeAbove returns the time above the given time in the stack -// returning etime.NoTime if this is the highest level, +// returning false if this is the highest level, // or given time does not exist in order. -func (st *Stack) TimeAbove(time enums.Enum) enums.Enum { +func (st *Stack) TimeAbove(time enums.Enum) (enums.Enum, bool) { for i, tt := range st.Order { if tt == time && i > 0 { - return st.Order[i-1] + return st.Order[i-1], true } } - return etime.NoTime + return time, false } // TimeBelow returns the time below the given time in the stack -// returning etime.NoTime if this is the lowest level, +// returning false if this is the lowest level, // or given time does not exist in order. 
-func (st *Stack) TimeBelow(time enums.Enum) enums.Enum { +func (st *Stack) TimeBelow(time enums.Enum) (enums.Enum, bool) { for i, tt := range st.Order { if tt == time && i+1 < len(st.Order) { - return st.Order[i+1] + return st.Order[i+1], true } } - return etime.NoTime + return time, false } //////// Control @@ -165,17 +164,23 @@ func (st *Stack) ClearStep() { st.StopFlag = false } -// CountersToStats sets the current counter values to estats Int values -// by their time names only (no eval Mode). These values can then -// be read by elog LogItems to record the counters in logs. -// Typically, a TrialName string is also expected to be set, -// to describe the current trial (Step) contents in a useful way, -// and other relevant info (e.g., group / category info) can also be set. -func (st *Stack) CountersToStats(stats *estats.Stats) { +// Counters returns a slice of the current counter values +// for this stack, in Order. +func (st *Stack) Counters() []int { + ctrs := make([]int, len(st.Order)) + for i, tm := range st.Order { + ctrs[i] = st.Loops[tm].Counter.Cur + } + return ctrs +} + +// CountersString returns a string with loop time and counter values. +func (st *Stack) CountersString() string { + ctrs := "" for _, tm := range st.Order { - lp := st.Loops[tm] - stats.SetInt(tm.String(), lp.Counter.Cur) + ctrs += fmt.Sprintf("%s: %d ", tm.String(), st.Loops[tm].Counter.Cur) } + return ctrs } // DocString returns an indented summary of the loops and functions in the Stack. diff --git a/looper/stacks.go b/looper/stacks.go index 1c83d966..8f02097c 100644 --- a/looper/stacks.go +++ b/looper/stacks.go @@ -106,9 +106,9 @@ func (ls *Stacks) Stop(level enums.Enum) { //////// Config API -// AddStack adds a new Stack for given mode -func (ls *Stacks) AddStack(mode enums.Enum) *Stack { - st := NewStack(mode) +// AddStack adds a new Stack for given mode and default step level. 
+func (ls *Stacks) AddStack(mode, stepLevel enums.Enum) *Stack { + st := NewStack(mode, stepLevel) ls.Stacks[mode] = st return st } diff --git a/looper/step_test.go b/looper/step_test.go index cc5a40c4..b95093fb 100644 --- a/looper/step_test.go +++ b/looper/step_test.go @@ -15,7 +15,7 @@ var printTest = false func ExampleStacks() { stacks := NewStacks() - stacks.AddStack(etime.Train). + stacks.AddStack(etime.Train, etime.Trial). AddTime(etime.Epoch, 3). AddTime(etime.Trial, 2) @@ -55,7 +55,7 @@ func TestStep(t *testing.T) { trialCount := 0 stacks := NewStacks() - stacks.AddStack(etime.Train). + stacks.AddStack(etime.Train, etime.Trial). AddTime(etime.Run, 2). AddTime(etime.Epoch, 5). AddTime(etime.Trial, 4). @@ -134,7 +134,11 @@ func TestStepIncr(t *testing.T) { trialCount := 0 stacks := NewStacks() - stacks.AddStack(etime.Train).AddTime(etime.Run, 2).AddTime(etime.Epoch, 5).AddTimeIncr(etime.Trial, 10, 3).AddTime(etime.Cycle, 3) + stacks.AddStack(etime.Train, etime.Trial). + AddTime(etime.Run, 2). + AddTime(etime.Epoch, 5). + AddTimeIncr(etime.Trial, 10, 3). + AddTime(etime.Cycle, 3) stacks.Loop(etime.Train, etime.Trial).OnStart.Add("Count Trials", func() { trialCount += 1 }) stacks.Loop(etime.Train, etime.Run).OnEnd.Add("Counters Test", func() { run := stacks.Stacks[etime.Train].Loops[etime.Run].Counter.Cur diff --git a/netview/netview.go b/netview/netview.go index b703d0d7..336b4ea7 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -176,6 +176,18 @@ func (nv *NetView) HasLayers() bool { return true } +// IsViewingSynapse returns true if netview is viewing synapses. 
+func (nv *NetView) IsViewingSynapse() bool { + if !nv.IsVisible() { + return false + } + vvar := nv.Var + if strings.HasPrefix(vvar, "r.") || strings.HasPrefix(vvar, "s.") { + return true + } + return false +} + // RecordCounters saves the counters, so they are available for a Current update func (nv *NetView) RecordCounters(counters string) { nv.DataMu.Lock() diff --git a/netview/viewupdt.go b/netview/viewupdt.go index 118dfc78..da8d9390 100644 --- a/netview/viewupdt.go +++ b/netview/viewupdt.go @@ -13,7 +13,7 @@ import ( // ViewUpdate manages time scales for updating the NetView type ViewUpdate struct { - // the network view + // View is the network view. View *NetView `display:"-"` // whether in testing mode -- can be set in advance to drive appropriate updating @@ -25,7 +25,9 @@ type ViewUpdate struct { // toggles update of display on On bool - // if true, do not record network data when the NetView is invisible -- this speeds up running when not visible, but the NetView display will not show the current state when switching back to it + // SkipInvis means do not record network data when the NetView is invisible. + // This speeds up running when not visible, but the NetView display will + // not show the current state when switching back to it. SkipInvis bool // at what time scale to update the display during training? From dc33a5efb78d9661452986d2baec89125b21df79 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 15 Nov 2024 14:49:55 -0800 Subject: [PATCH 09/24] remove ViewUpdate from egui. add OnStop function. 
--- egui/gui.go | 20 ++++++++++++++++---- egui/netview.go | 20 -------------------- 2 files changed, 16 insertions(+), 24 deletions(-) delete mode 100644 egui/netview.go diff --git a/egui/gui.go b/egui/gui.go index ae0f4e0f..1446aa9b 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -31,14 +31,17 @@ type GUI struct { // flag to stop running StopNow bool `display:"-"` - // the view update for managing updates of netview - ViewUpdate *netview.ViewUpdate `display:"-"` + // NetViews are the created netviews. + NetViews []*netview.NetView // displays Sim fields on left SimForm *core.Form `display:"-"` // Body is the content of the sim window Body *core.Body `display:"-"` + + // OnStop is called when running stopped through the GUI. + OnStop func() } // UpdateWindow triggers an update on window body, @@ -69,8 +72,8 @@ func (gui *GUI) Stopped() { if gui.Body == nil { return } - if gui.ViewUpdate != nil { - gui.UpdateNetViewWhenStopped() + if gui.OnStop != nil { + gui.OnStop() } gui.GoUpdateWindow() } @@ -117,11 +120,20 @@ func (gui *GUI) AddNetView(tabName string) *netview.NetView { // nv.Current() // nv.Update() // }) + gui.NetViews = append(gui.NetViews, nv) return nv }) return nv } +// NetView returns the first created netview, or nil if none. +func (gui *GUI) NetView() *netview.NetView { + if len(gui.NetViews) == 0 { + return nil + } + return gui.NetViews[0] +} + // FinalizeGUI wraps the end functionality of the GUI func (gui *GUI) FinalizeGUI(closePrompt bool) { if closePrompt { diff --git a/egui/netview.go b/egui/netview.go deleted file mode 100644 index 6dfd9f79..00000000 --- a/egui/netview.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package egui - -// UpdateNetView updates the gui visualization of the network. 
-func (gui *GUI) UpdateNetView() { - if gui.ViewUpdate != nil { - gui.ViewUpdate.Update() - } -} - -// UpdateNetViewWhenStopped updates the gui visualization of the network. -// when stopped either via stepping or user hitting stop button. -func (gui *GUI) UpdateNetViewWhenStopped() { - if gui.ViewUpdate != nil { - gui.ViewUpdate.UpdateWhenStopped() - } -} From 43c06fd038e2ace0f04a3f1a82c797e7725cf379 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 16 Nov 2024 11:32:37 -0800 Subject: [PATCH 10/24] looper uses level instead of time to index loops -- much clearer and no conflict with time package --- looper/README.md | 82 ++++++++++++++++++------- looper/counter.go | 2 +- looper/levels/enumgen.go | 93 ++++++++++++++++++++++++++++ looper/levels/levels.go | 21 +++++++ looper/loop.go | 2 +- looper/run.go | 16 ++--- looper/scope.go | 12 ++-- looper/stack.go | 67 ++++++++++---------- looper/stacks.go | 44 +++++++++----- looper/step_test.go | 128 +++++++++++++++++++-------------------- 10 files changed, 316 insertions(+), 151 deletions(-) create mode 100644 looper/levels/enumgen.go create mode 100644 looper/levels/levels.go diff --git a/looper/README.md b/looper/README.md index 10eb395d..d423eeb7 100644 --- a/looper/README.md +++ b/looper/README.md @@ -2,18 +2,20 @@ Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/looper) -Looper implements a fully generic looping control system, with a `Stack` of `Loop` elements that iterate over different time scales of processing, where the processing performed is provided by function closures on the Loop elements. +Looper implements a fully generic looping control system, with a `Stack` of `Loop` elements that iterate over different levels or time scales of processing, where the processing performed is provided by function closures on the Loop elements. Each Stack is defined by a `Mode` enum, e.g., Train vs. Test. 
+ +Thus, the looper structure is defined by two "coordinate" variables: `Mode` stack and loop `Level`, which should be provided by end-user defined [enums](https://github.com/cogentcore/core/tree/enums) values (the looper code uses the `enums.Enum` interface). Critically, the loop logic supports _reentrant stepping_, such that you can iteratively `Step` through the loop processing and accomplish exactly the same outcomes as if you did a complete `Run` from the start. -Each loop implements the following logic, where it is key to understand that the time scale associated with the loop _runs the full iterations over that time scale_. For example, a `Trial` loop _iterates over Trials_ -- it is _not_ a single trial, but the whole sequence (loop) of trials. +Each loop implements the following logic, where it is key to understand that the level associated with the loop _runs the full iterations over that level_. For example, a `Trial` loop _iterates over Trials_ -- it is _not_ a single trial, but the whole sequence (loop) of trials. ```go for { - Events[Counter == AtCounter] // run events at counter + Events[Counter == AtCounter] // run events for current counter value OnStart() Run Sub-Loop to completion - OnEnd() + OnEnd() Counter += Inc if Counter >= Max || IsDone() { break @@ -25,13 +27,27 @@ The `Loop` object has the above function lists (`OnStart`, `OnEnd`, and `IsDone` Each level of loop holds a corresponding `Counter` value, which increments at each iteration, and its `Max` value determines when the loop iteration terminates. -Each `Stack` of loops has an associated `Mode` enum, e.g., `Train` or `Test`, and each `Loop` has an associated `Time` level, e.g., `Run`, `Epoch`, `Trial`. - -The collection of `Stacks` has a high-level API for configuring and controlling the set of `Stack` elements, and has the logic for running everything, in the form of `Run`, `Step`, `Stop` methods, etc. 
+The collection of `Stacks` has a high-level API for configuring and controlling the set of `Stack` elements, and has the logic for running everything, in the form of `Run`, `Step`, `Stop`, `Init` methods, etc. # Examples -The following examples use the [etime](../etime) `Modes` and `Times` enums. It is recommended that you define your own `Modes` enums if not using the basic `Train` and `Test` cases, to provide a better [egui](../egui) representation of the loop stack. +The following examples use the `Modes` and `Levels` enums defined in the [levels](levels) sub-package, which is intended for testing and example purposes: each use-case should define its own enums for better clarity and flexibility down the road. + +```Go +type Modes int32 //enums:enum +const ( + Train Modes = iota + Test +) + +type Levels int32 //enums:enum +const ( + Cycle Levels = iota + Trial + Epoch + Run +) +``` ## Configuration @@ -39,18 +55,18 @@ From `step_test.go` `ExampleStacks`: ```Go stacks := NewStacks() - stacks.AddStack(etime.Train, etime.Trial). - AddTime(etime.Epoch, 3). - AddTime(etime.Trial, 2) - - // add function closures: - stacks.Loop(etime.Train, etime.Epoch).OnStart.Add("Epoch Start", func() { fmt.Println("Epoch Start") }) - stacks.Loop(etime.Train, etime.Epoch).OnEnd.Add("Epoch End", func() { fmt.Println("Epoch End") }) - stacks.Loop(etime.Train, etime.Trial).OnStart.Add("Trial Run", func() { fmt.Println(" Trial Run") }) - - // add events: - stacks.Loop(etime.Train, etime.Epoch).AddEvent("EpochTwoEvent", 2, func() { fmt.Println("Epoch==2") }) - stacks.Loop(etime.Train, etime.Trial).AddEvent("TrialOneEvent", 1, func() { fmt.Println(" Trial==1") }) + stacks.AddStack(levels.Train, levels.Trial). + AddLevel(levels.Epoch, 3). 
+ AddLevel(levels.Trial, 2) + + // add function closures: + stacks.Loop(levels.Train, levels.Epoch).OnStart.Add("Epoch Start", func() { fmt.Println("Epoch Start") }) + stacks.Loop(levels.Train, levels.Epoch).OnEnd.Add("Epoch End", func() { fmt.Println("Epoch End") }) + stacks.Loop(levels.Train, levels.Trial).OnStart.Add("Trial Run", func() { fmt.Println(" Trial Run") }) + + // add events: + stacks.Loop(levels.Train, levels.Epoch).AddEvent("EpochTwoEvent", 2, func() { fmt.Println("Epoch==2") }) + stacks.Loop(levels.Train, levels.Trial).AddEvent("TrialOneEvent", 1, func() { fmt.Println(" Trial==1") }) ``` The `DocString` for this stack is: @@ -93,17 +109,37 @@ Epoch End Run a full stack: ```Go -stacks.Run(etime.Train) +stacks.Run(level.Train) ``` Reset first and Run, ensures that the full sequence is run even if it might have been stopped or stepped previously: ```Go -stacks.ResetAndRun(etime.Train) +stacks.ResetAndRun(level.Train) ``` Step by 1 Trial: ```Go -stacks.Step(etime.Train, 1, etime.Trial) +stacks.Step(level.Train, 1, level.Trial) ``` +## Stacks config API + +Most configuration can be handled by these helper functions defined on the `Stacks` type: + +```Go +// AddEventAllModes adds a new event for all modes at given loop level. +AddEventAllModes(level enums.Enum, name string, atCtr int, fun func()) + +// AddOnStartToAll adds given function taking mode and level args to OnStart in all stacks, loops +AddOnStartToAll(name string, fun func(mode, level enums.Enum)) + +// AddOnEndToAll adds given function taking mode and level args to OnEnd in all stacks, loops +AddOnEndToAll(name string, fun func(mode, level enums.Enum)) + +// AddOnStartToLoop adds given function taking mode arg to OnStart in all stacks for given loop. +AddOnStartToLoop(level enums.Enum, name string, fun func(mode enums.Enum)) + +// AddOnEndToLoop adds given function taking mode arg to OnEnd in all stacks for given loop. 
+AddOnEndToLoop(level enums.Enum, name string, fun func(mode enums.Enum)) +``` diff --git a/looper/counter.go b/looper/counter.go index 825ef850..abf7fbee 100644 --- a/looper/counter.go +++ b/looper/counter.go @@ -5,7 +5,7 @@ package looper // Counter combines an integer with a maximum value. -// It supports time tracking within looper. +// It supports iteration tracking within looper. type Counter struct { // Cur is the current counter value. diff --git a/looper/levels/enumgen.go b/looper/levels/enumgen.go new file mode 100644 index 00000000..1443c137 --- /dev/null +++ b/looper/levels/enumgen.go @@ -0,0 +1,93 @@ +// Code generated by "core generate"; DO NOT EDIT. + +package levels + +import ( + "cogentcore.org/core/enums" +) + +var _ModesValues = []Modes{0, 1} + +// ModesN is the highest valid value for type Modes, plus one. +// +//gosl:start +const ModesN Modes = 2 + +//gosl:end + +var _ModesValueMap = map[string]Modes{`Train`: 0, `Test`: 1} + +var _ModesDescMap = map[Modes]string{0: ``, 1: ``} + +var _ModesMap = map[Modes]string{0: `Train`, 1: `Test`} + +// String returns the string representation of this Modes value. +func (i Modes) String() string { return enums.String(i, _ModesMap) } + +// SetString sets the Modes value from its string representation, +// and returns an error if the string is invalid. +func (i *Modes) SetString(s string) error { return enums.SetString(i, s, _ModesValueMap, "Modes") } + +// Int64 returns the Modes value as an int64. +func (i Modes) Int64() int64 { return int64(i) } + +// SetInt64 sets the Modes value from an int64. +func (i *Modes) SetInt64(in int64) { *i = Modes(in) } + +// Desc returns the description of the Modes value. +func (i Modes) Desc() string { return enums.Desc(i, _ModesDescMap) } + +// ModesValues returns all possible values for the type Modes. +func ModesValues() []Modes { return _ModesValues } + +// Values returns all possible values for the type Modes. 
+func (i Modes) Values() []enums.Enum { return enums.Values(_ModesValues) } + +// MarshalText implements the [encoding.TextMarshaler] interface. +func (i Modes) MarshalText() ([]byte, error) { return []byte(i.String()), nil } + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (i *Modes) UnmarshalText(text []byte) error { return enums.UnmarshalText(i, text, "Modes") } + +var _LevelsValues = []Levels{0, 1, 2, 3} + +// LevelsN is the highest valid value for type Levels, plus one. +// +//gosl:start +const LevelsN Levels = 4 + +//gosl:end + +var _LevelsValueMap = map[string]Levels{`Cycle`: 0, `Trial`: 1, `Epoch`: 2, `Run`: 3} + +var _LevelsDescMap = map[Levels]string{0: ``, 1: ``, 2: ``, 3: ``} + +var _LevelsMap = map[Levels]string{0: `Cycle`, 1: `Trial`, 2: `Epoch`, 3: `Run`} + +// String returns the string representation of this Levels value. +func (i Levels) String() string { return enums.String(i, _LevelsMap) } + +// SetString sets the Levels value from its string representation, +// and returns an error if the string is invalid. +func (i *Levels) SetString(s string) error { return enums.SetString(i, s, _LevelsValueMap, "Levels") } + +// Int64 returns the Levels value as an int64. +func (i Levels) Int64() int64 { return int64(i) } + +// SetInt64 sets the Levels value from an int64. +func (i *Levels) SetInt64(in int64) { *i = Levels(in) } + +// Desc returns the description of the Levels value. +func (i Levels) Desc() string { return enums.Desc(i, _LevelsDescMap) } + +// LevelsValues returns all possible values for the type Levels. +func LevelsValues() []Levels { return _LevelsValues } + +// Values returns all possible values for the type Levels. +func (i Levels) Values() []enums.Enum { return enums.Values(_LevelsValues) } + +// MarshalText implements the [encoding.TextMarshaler] interface. 
+func (i Levels) MarshalText() ([]byte, error) { return []byte(i.String()), nil } + +// UnmarshalText implements the [encoding.TextUnmarshaler] interface. +func (i *Levels) UnmarshalText(text []byte) error { return enums.UnmarshalText(i, text, "Levels") } diff --git a/looper/levels/levels.go b/looper/levels/levels.go new file mode 100644 index 00000000..bf394d66 --- /dev/null +++ b/looper/levels/levels.go @@ -0,0 +1,21 @@ +// Copyright (c) 2024, The Emergent Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package levels + +//go:generate core generate + +type Modes int32 //enums:enum +const ( + Train Modes = iota + Test +) + +type Levels int32 //enums:enum +const ( + Cycle Levels = iota + Trial + Epoch + Run +) diff --git a/looper/loop.go b/looper/loop.go index e5b862c9..68c569d8 100644 --- a/looper/loop.go +++ b/looper/loop.go @@ -29,7 +29,7 @@ import ( // } type Loop struct { - // Counter increments every time through the loop, up to [Counter.Max]. + // Counter increments every iteration through the loop, up to [Counter.Max]. Counter Counter // Events occur when Counter.Cur is at their AtCounter. diff --git a/looper/run.go b/looper/run.go index 0e0da223..0640851a 100644 --- a/looper/run.go +++ b/looper/run.go @@ -20,8 +20,8 @@ func (ss *Stacks) runLevel(currentLevel int) bool { if currentLevel >= len(st.Order) { return true // Stack overflow, expected at bottom of stack. } - time := st.Order[currentLevel] - loop := st.Loops[time] + level := st.Order[currentLevel] + loop := st.Loops[level] ctr := &loop.Counter for ctr.Cur < ctr.Max || ctr.Max <= 0 { // Loop forever for non-maxes @@ -47,11 +47,11 @@ func (ss *Stacks) runLevel(currentLevel int) bool { } // Don't ever Start the same iteration of the same level twice. 
- lastCounter, ok := ss.lastStartedCounter[ToScope(ss.Mode, time)] + lastCounter, ok := ss.lastStartedCounter[ToScope(ss.Mode, level)] if !ok || ctr.Cur > lastCounter { - ss.lastStartedCounter[ToScope(ss.Mode, time)] = ctr.Cur + ss.lastStartedCounter[ToScope(ss.Mode, level)] = ctr.Cur if PrintControlFlow { - fmt.Printf("%s%s: Start: %d\n", indent(currentLevel), time.String(), ctr.Cur) + fmt.Printf("%s%s: Start: %d\n", indent(currentLevel), level.String(), ctr.Cur) } for _, ev := range loop.Events { if ctr.Cur == ev.AtCounter { @@ -60,7 +60,7 @@ func (ss *Stacks) runLevel(currentLevel int) bool { } loop.OnStart.Run() } else if PrintControlFlow { - fmt.Printf("%s%s: Skipping Start: %d\n", indent(currentLevel), time.String(), ctr.Cur) + fmt.Printf("%s%s: Skipping Start: %d\n", indent(currentLevel), level.String(), ctr.Cur) } // Recursion! @@ -68,7 +68,7 @@ func (ss *Stacks) runLevel(currentLevel int) bool { if runComplete { if PrintControlFlow { - fmt.Printf("%s%s: End: %d\n", indent(currentLevel), time.String(), ctr.Cur) + fmt.Printf("%s%s: End: %d\n", indent(currentLevel), level.String(), ctr.Cur) } loop.OnEnd.Run() ctr.Incr() @@ -82,7 +82,7 @@ func (ss *Stacks) runLevel(currentLevel int) bool { for _, fun := range loop.IsDone { if fun.Func() { if PrintControlFlow { - fmt.Printf("%s%s: IsDone Stop at: %d from: %s\n", indent(currentLevel), time.String(), ctr.Cur, fun.Name) + fmt.Printf("%s%s: IsDone Stop at: %d from: %s\n", indent(currentLevel), level.String(), ctr.Cur, fun.Name) } goto exitLoop // Exit IsDone and Counter for-loops without flag variable. } diff --git a/looper/scope.go b/looper/scope.go index 180ed984..75373f89 100644 --- a/looper/scope.go +++ b/looper/scope.go @@ -6,16 +6,16 @@ package looper import "cogentcore.org/core/enums" -// Scope is a combined Mode + Time value. -// Mode is encoded by multiples of 1000 and Time is added to that. +// Scope is a combined Mode + Level value. +// Mode is encoded by multiples of 1000 and Level is added to that. 
type Scope int -func (sc Scope) ModeTime() (mode, time int64) { +func (sc Scope) ModeLevel() (mode, level int64) { mode = int64(sc / 1000) - time = int64(sc % 1000) + level = int64(sc % 1000) return } -func ToScope(mode, time enums.Enum) Scope { - return Scope(mode.Int64()*1000 + time.Int64()) +func ToScope(mode, level enums.Enum) Scope { + return Scope(mode.Int64()*1000 + level.Int64()) } diff --git a/looper/stack.go b/looper/stack.go index 54304098..60496021 100644 --- a/looper/stack.go +++ b/looper/stack.go @@ -11,23 +11,24 @@ import ( "cogentcore.org/core/enums" ) -// Stack contains a list of Loops to run, for a given Mode of processing. -// The order of Loop stacks is determined by the Order list. +// Stack contains a list of Loops to run, for a given Mode of processing, +// which distinguishes this stack, and is its key in the map of Stacks. +// The order of Loop stacks is determined by the Order list of loop levels. type Stack struct { // Mode identifies the mode of processing this stack performs, e.g., Train or Test. Mode enums.Enum - // Loops is the set of Loops for this Stack, keyed by the timescale. + // Loops is the set of Loops for this Stack, keyed by the level enum value. // Order is determined by the Order list. Loops map[enums.Enum]*Loop - // Order is the list and order of time scales looped over by this stack of loops. - // The ordered is from top to bottom, so longer timescales like Run should be at - // the beginning and shorter timescales like Trial should be and the end. + // Order is the list and order of levels looped over by this stack of loops. + // The order is from top to bottom, so longer timescales like Run should be at + // the start and shorter level timescales like Trial should be at the end. Order []enums.Enum - // OnInit are functions to run for Init function of this stack, + // OnInit are functions to run when Init is called, to restart processing, // which also resets the counters for this stack. 
OnInit NamedFuncs @@ -37,7 +38,7 @@ type Stack struct { // StopFlag will stop running ASAP if set. StopFlag bool - // StopLevel sets the Time level to stop at the end of. + // StopLevel sets the level to stop at the end of. // This is the current active Step level, which will be reset when done. StopLevel enums.Enum @@ -76,18 +77,18 @@ func (st *Stack) Level(i int) *Loop { return st.Loops[st.Order[i]] } -// AddTime adds a new timescale to this Stack with a given ctrMax number of iterations. +// AddLevel adds a new level to this Stack with a given counterMax number of iterations. // The order in which this method is invoked is important, // as it adds loops in order from top to bottom. -// Sets a default increment of 1 for the counter -- see AddTimeIncr for different increment. -func (st *Stack) AddTime(time enums.Enum, ctrMax int) *Stack { - st.Loops[time] = NewLoop(ctrMax, 1) - st.Order = append(st.Order, time) +// Sets a default increment of 1 for the counter -- see AddLevelIncr for different increment. +func (st *Stack) AddLevel(level enums.Enum, counterMax int) *Stack { + st.Loops[level] = NewLoop(counterMax, 1) + st.Order = append(st.Order, level) return st } -// AddOnStartToAll adds given function taking mode and time args to OnStart in all loops. -func (st *Stack) AddOnStartToAll(name string, fun func(mode, time enums.Enum)) { +// AddOnStartToAll adds given function taking mode and level args to OnStart in all loops. +func (st *Stack) AddOnStartToAll(name string, fun func(mode, level enums.Enum)) { for tt, lp := range st.Loops { lp.OnStart.Add(name, func() { fun(st.Mode, tt) @@ -95,8 +96,8 @@ func (st *Stack) AddOnStartToAll(name string, fun func(mode, time enums.Enum)) { } } -// AddOnEndToAll adds given function taking mode and time args to OnEnd in all loops. -func (st *Stack) AddOnEndToAll(name string, fun func(mode, time enums.Enum)) { +// AddOnEndToAll adds given function taking mode and level args to OnEnd in all loops. 
+func (st *Stack) AddOnEndToAll(name string, fun func(mode, level enums.Enum)) { for tt, lp := range st.Loops { lp.OnEnd.Add(name, func() { fun(st.Mode, tt) @@ -104,38 +105,38 @@ func (st *Stack) AddOnEndToAll(name string, fun func(mode, time enums.Enum)) { } } -// AddTimeIncr adds a new timescale to this Stack with a given ctrMax number of iterations, -// and increment per step. +// AddLevelIncr adds a new level to this Stack with a given counterMax +// number of iterations, and increment per step. // The order in which this method is invoked is important, // as it adds loops in order from top to bottom. -func (st *Stack) AddTimeIncr(time enums.Enum, ctrMax, ctrIncr int) *Stack { - st.Loops[time] = NewLoop(ctrMax, ctrIncr) - st.Order = append(st.Order, time) +func (st *Stack) AddLevelIncr(level enums.Enum, counterMax, counterIncr int) *Stack { + st.Loops[level] = NewLoop(counterMax, counterIncr) + st.Order = append(st.Order, level) return st } -// TimeAbove returns the time above the given time in the stack +// LevelAbove returns the level above the given level in the stack // returning false if this is the highest level, -// or given time does not exist in order. -func (st *Stack) TimeAbove(time enums.Enum) (enums.Enum, bool) { +// or given level does not exist in order. +func (st *Stack) LevelAbove(level enums.Enum) (enums.Enum, bool) { for i, tt := range st.Order { - if tt == time && i > 0 { + if tt == level && i > 0 { return st.Order[i-1], true } } - return time, false + return level, false } -// TimeBelow returns the time below the given time in the stack +// LevelBelow returns the level below the given level in the stack // returning false if this is the lowest level, -// or given time does not exist in order. -func (st *Stack) TimeBelow(time enums.Enum) (enums.Enum, bool) { +// or given level does not exist in order. 
+func (st *Stack) LevelBelow(level enums.Enum) (enums.Enum, bool) { for i, tt := range st.Order { - if tt == time && i+1 < len(st.Order) { + if tt == level && i+1 < len(st.Order) { return st.Order[i+1], true } } - return time, false + return level, false } //////// Control @@ -174,7 +175,7 @@ func (st *Stack) Counters() []int { return ctrs } -// CountersString returns a string with loop time and counter values. +// CountersString returns a string with loop level and counter values. func (st *Stack) CountersString() string { ctrs := "" for _, tm := range st.Order { diff --git a/looper/stacks.go b/looper/stacks.go index 8f02097c..878e285d 100644 --- a/looper/stacks.go +++ b/looper/stacks.go @@ -13,7 +13,7 @@ import ( ) var ( - // If you want to debug the flow of time, set this to true. + // If you want to debug the flow of processing, set this to true. PrintControlFlow = false ) @@ -96,7 +96,7 @@ func (ls *Stacks) ClearStep(mode enums.Enum) { st.ClearStep() } -// Stop stops currently running stack of loops at given run time level +// Stop stops currently running stack of loops at given run level. func (ls *Stacks) Stop(level enums.Enum) { st := ls.Stacks[ls.Mode] st.StopLevel = level @@ -113,13 +113,13 @@ func (ls *Stacks) AddStack(mode, stepLevel enums.Enum) *Stack { return st } -// Loop returns the Loop associated with given mode and timescale. -func (ls *Stacks) Loop(mode, time enums.Enum) *Loop { +// Loop returns the Loop associated with given mode and loop level. +func (ls *Stacks) Loop(mode, level enums.Enum) *Loop { st := ls.Stacks[mode] if st == nil { return nil } - return st.Loops[time] + return st.Loops[level] } // ModeStack returns the Stack for the current Mode @@ -127,27 +127,41 @@ func (ls *Stacks) ModeStack() *Stack { return ls.Stacks[ls.Mode] } -// AddEventAllModes adds a new event for all modes at given timescale. 
-func (ls *Stacks) AddEventAllModes(time enums.Enum, name string, atCtr int, fun func()) { +// AddEventAllModes adds a new event for all modes at given loop level. +func (ls *Stacks) AddEventAllModes(level enums.Enum, name string, atCtr int, fun func()) { for _, st := range ls.Stacks { - st.Loops[time].AddEvent(name, atCtr, fun) + st.Loops[level].AddEvent(name, atCtr, fun) } } -// AddOnStartToAll adds given function taking mode and time args to OnStart in all stacks, loops -func (ls *Stacks) AddOnStartToAll(name string, fun func(mode, time enums.Enum)) { +// AddOnStartToAll adds given function taking mode and level args to OnStart in all stacks, loops +func (ls *Stacks) AddOnStartToAll(name string, fun func(mode, level enums.Enum)) { for _, st := range ls.Stacks { st.AddOnStartToAll(name, fun) } } -// AddOnEndToAll adds given function taking mode and time args to OnEnd in all stacks, loops -func (ls *Stacks) AddOnEndToAll(name string, fun func(mode, time enums.Enum)) { +// AddOnEndToAll adds given function taking mode and level args to OnEnd in all stacks, loops +func (ls *Stacks) AddOnEndToAll(name string, fun func(mode, level enums.Enum)) { for _, st := range ls.Stacks { st.AddOnEndToAll(name, fun) } } +// AddOnStartToLoop adds given function taking mode arg to OnStart in all stacks for given loop. +func (ls *Stacks) AddOnStartToLoop(level enums.Enum, name string, fun func(mode enums.Enum)) { + for m, st := range ls.Stacks { + st.Loops[level].OnStart.Add(name, func() { fun(m) }) + } +} + +// AddOnEndToLoop adds given function taking mode arg to OnEnd in all stacks for given loop. +func (ls *Stacks) AddOnEndToLoop(level enums.Enum, name string, fun func(mode enums.Enum)) { + for m, st := range ls.Stacks { + st.Loops[level].OnEnd.Add(name, func() { fun(m) }) + } +} + //////// More detailed control API // IsRunning is True if running. @@ -166,7 +180,7 @@ func (ls *Stacks) InitMode(mode enums.Enum) { // ResetCountersByMode resets counters for given mode. 
func (ls *Stacks) ResetCountersByMode(mode enums.Enum) { for sk, _ := range ls.lastStartedCounter { - skm, _ := sk.ModeTime() + skm, _ := sk.ModeLevel() if skm == mode.Int64() { delete(ls.lastStartedCounter, sk) } @@ -201,13 +215,13 @@ func (ls *Stacks) ResetCounters() { // ResetCountersBelow resets the Cur on all loop Counters below given level // (inclusive), and resets the Stacks's place in the loops. -func (ls *Stacks) ResetCountersBelow(mode enums.Enum, time enums.Enum) { +func (ls *Stacks) ResetCountersBelow(mode enums.Enum, level enums.Enum) { for _, st := range ls.Stacks { if st.Mode != mode { continue } for lt, loop := range st.Loops { - if lt.Int64() > time.Int64() { + if lt.Int64() > level.Int64() { continue } loop.Counter.Cur = 0 diff --git a/looper/step_test.go b/looper/step_test.go index b95093fb..f01962e7 100644 --- a/looper/step_test.go +++ b/looper/step_test.go @@ -8,29 +8,29 @@ import ( "fmt" "testing" - "github.com/emer/emergent/v2/etime" + "github.com/emer/emergent/v2/looper/levels" ) var printTest = false func ExampleStacks() { stacks := NewStacks() - stacks.AddStack(etime.Train, etime.Trial). - AddTime(etime.Epoch, 3). - AddTime(etime.Trial, 2) + stacks.AddStack(levels.Train, levels.Trial). + AddLevel(levels.Epoch, 3). 
+ AddLevel(levels.Trial, 2) // add function closures: - stacks.Loop(etime.Train, etime.Epoch).OnStart.Add("Epoch Start", func() { fmt.Println("Epoch Start") }) - stacks.Loop(etime.Train, etime.Epoch).OnEnd.Add("Epoch End", func() { fmt.Println("Epoch End") }) - stacks.Loop(etime.Train, etime.Trial).OnStart.Add("Trial Run", func() { fmt.Println(" Trial Run") }) + stacks.Loop(levels.Train, levels.Epoch).OnStart.Add("Epoch Start", func() { fmt.Println("Epoch Start") }) + stacks.Loop(levels.Train, levels.Epoch).OnEnd.Add("Epoch End", func() { fmt.Println("Epoch End") }) + stacks.Loop(levels.Train, levels.Trial).OnStart.Add("Trial Run", func() { fmt.Println(" Trial Run") }) // add events: - stacks.Loop(etime.Train, etime.Epoch).AddEvent("EpochTwoEvent", 2, func() { fmt.Println("Epoch==2") }) - stacks.Loop(etime.Train, etime.Trial).AddEvent("TrialOneEvent", 1, func() { fmt.Println(" Trial==1") }) + stacks.Loop(levels.Train, levels.Epoch).AddEvent("EpochTwoEvent", 2, func() { fmt.Println("Epoch==2") }) + stacks.Loop(levels.Train, levels.Trial).AddEvent("TrialOneEvent", 1, func() { fmt.Println(" Trial==1") }) // fmt.Println(stacks.DocString()) - stacks.Run(etime.Train) + stacks.Run(levels.Train) // Output: // Epoch Start @@ -55,44 +55,44 @@ func TestStep(t *testing.T) { trialCount := 0 stacks := NewStacks() - stacks.AddStack(etime.Train, etime.Trial). - AddTime(etime.Run, 2). - AddTime(etime.Epoch, 5). - AddTime(etime.Trial, 4). - AddTime(etime.Cycle, 3) - stacks.Loop(etime.Train, etime.Trial).OnStart.Add("Count Trials", func() { trialCount += 1 }) - stacks.Loop(etime.Train, etime.Run).OnEnd.Add("Counters Test", func() { - run := stacks.Stacks[etime.Train].Loops[etime.Run].Counter.Cur - epc := stacks.Stacks[etime.Train].Loops[etime.Epoch].Counter.Cur + stacks.AddStack(levels.Train, levels.Trial). + AddLevel(levels.Run, 2). + AddLevel(levels.Epoch, 5). + AddLevel(levels.Trial, 4). 
+ AddLevel(levels.Cycle, 3) + stacks.Loop(levels.Train, levels.Trial).OnStart.Add("Count Trials", func() { trialCount += 1 }) + stacks.Loop(levels.Train, levels.Run).OnEnd.Add("Counters Test", func() { + run := stacks.Stacks[levels.Train].Loops[levels.Run].Counter.Cur + epc := stacks.Stacks[levels.Train].Loops[levels.Epoch].Counter.Cur if epc != 5 { t.Errorf("Run %d OnEnd epoch counter should be 5, not: %d", run, epc) } }) - run := stacks.Stacks[etime.Train].Loops[etime.Run] - epc := stacks.Stacks[etime.Train].Loops[etime.Epoch] + run := stacks.Stacks[levels.Train].Loops[levels.Run] + epc := stacks.Stacks[levels.Train].Loops[levels.Epoch] if printTest { // print version for human checking PrintControlFlow = true fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 2:") - stacks.Step(etime.Train, 2, etime.Cycle) + stacks.Step(levels.Train, 2, levels.Cycle) fmt.Println("#### Step Run 1:") - stacks.Step(etime.Train, 1, etime.Run) + stacks.Step(levels.Train, 1, levels.Run) if run.Counter.Cur != 1 { t.Errorf("Incorrect step run") } fmt.Println("#### Step Epoch 3:") - stacks.Step(etime.Train, 3, etime.Epoch) + stacks.Step(levels.Train, 3, levels.Epoch) if run.Counter.Cur != 1 || epc.Counter.Cur != 3 { t.Errorf("Incorrect step epoch") } @@ -101,29 +101,29 @@ func TestStep(t *testing.T) { } fmt.Println("#### Step Trial 2:") - stacks.Step(etime.Train, 2, etime.Trial) + stacks.Step(levels.Train, 2, levels.Trial) if trialCount != 34 { // 34 = 1*5*4+3*4+2 t.Errorf("Cycles not counted correctly: %d != 34", trialCount) } } else { 
PrintControlFlow = false - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 2, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Run) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 2, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Run) if run.Counter.Cur != 1 { t.Errorf("Incorrect step run") } - stacks.Step(etime.Train, 3, etime.Epoch) + stacks.Step(levels.Train, 3, levels.Epoch) if run.Counter.Cur != 1 || epc.Counter.Cur != 3 { t.Errorf("Incorrect step epoch") } if trialCount != 32 { // 32 = 1*5*4+3*4 t.Errorf("Cycles not counted correctly: %d != 32", trialCount) } - stacks.Step(etime.Train, 2, etime.Trial) + stacks.Step(levels.Train, 2, levels.Trial) if trialCount != 34 { // 34 = 1*5*4+3*4+2 t.Errorf("Cycles not counted correctly: %d != 34", trialCount) } @@ -134,44 +134,44 @@ func TestStepIncr(t *testing.T) { trialCount := 0 stacks := NewStacks() - stacks.AddStack(etime.Train, etime.Trial). - AddTime(etime.Run, 2). - AddTime(etime.Epoch, 5). - AddTimeIncr(etime.Trial, 10, 3). - AddTime(etime.Cycle, 3) - stacks.Loop(etime.Train, etime.Trial).OnStart.Add("Count Trials", func() { trialCount += 1 }) - stacks.Loop(etime.Train, etime.Run).OnEnd.Add("Counters Test", func() { - run := stacks.Stacks[etime.Train].Loops[etime.Run].Counter.Cur - epc := stacks.Stacks[etime.Train].Loops[etime.Epoch].Counter.Cur + stacks.AddStack(levels.Train, levels.Trial). + AddLevel(levels.Run, 2). + AddLevel(levels.Epoch, 5). + AddLevelIncr(levels.Trial, 10, 3). 
+ AddLevel(levels.Cycle, 3) + stacks.Loop(levels.Train, levels.Trial).OnStart.Add("Count Trials", func() { trialCount += 1 }) + stacks.Loop(levels.Train, levels.Run).OnEnd.Add("Counters Test", func() { + run := stacks.Stacks[levels.Train].Loops[levels.Run].Counter.Cur + epc := stacks.Stacks[levels.Train].Loops[levels.Epoch].Counter.Cur if epc != 5 { t.Errorf("Run %d OnEnd epoch counter should be 5, not: %d", run, epc) } }) - run := stacks.Stacks[etime.Train].Loops[etime.Run] - epc := stacks.Stacks[etime.Train].Loops[etime.Epoch] + run := stacks.Stacks[levels.Train].Loops[levels.Run] + epc := stacks.Stacks[levels.Train].Loops[levels.Epoch] if printTest { // print version for human checking PrintControlFlow = true fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 1:") - stacks.Step(etime.Train, 1, etime.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) fmt.Println("#### Step Cyc 2:") - stacks.Step(etime.Train, 2, etime.Cycle) + stacks.Step(levels.Train, 2, levels.Cycle) fmt.Println("#### Step Run 1:") - stacks.Step(etime.Train, 1, etime.Run) + stacks.Step(levels.Train, 1, levels.Run) if run.Counter.Cur != 1 { t.Errorf("Incorrect step run") } fmt.Println("#### Step Epoch 3:") - stacks.Step(etime.Train, 3, etime.Epoch) + stacks.Step(levels.Train, 3, levels.Epoch) if run.Counter.Cur != 1 || epc.Counter.Cur != 3 { t.Errorf("Incorrect step epoch") } @@ -180,29 +180,29 @@ func TestStepIncr(t *testing.T) { } fmt.Println("#### Step Trial 2:") - stacks.Step(etime.Train, 2, etime.Trial) + stacks.Step(levels.Train, 2, levels.Trial) if trialCount != 34 { // 34 = 1*5*4+3*4+2 t.Errorf("Cycles not counted correctly: %d != 34", trialCount) } } else { 
PrintControlFlow = false - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Cycle) - stacks.Step(etime.Train, 2, etime.Cycle) - stacks.Step(etime.Train, 1, etime.Run) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Cycle) + stacks.Step(levels.Train, 2, levels.Cycle) + stacks.Step(levels.Train, 1, levels.Run) if run.Counter.Cur != 1 { t.Errorf("Incorrect step run") } - stacks.Step(etime.Train, 3, etime.Epoch) + stacks.Step(levels.Train, 3, levels.Epoch) if run.Counter.Cur != 1 || epc.Counter.Cur != 3 { t.Errorf("Incorrect step epoch") } if trialCount != 32 { // 32 = 1*5*4+3*4 t.Errorf("Cycles not counted correctly: %d != 32", trialCount) } - stacks.Step(etime.Train, 2, etime.Trial) + stacks.Step(levels.Train, 2, levels.Trial) if trialCount != 34 { // 34 = 1*5*4+3*4+2 t.Errorf("Cycles not counted correctly: %d != 34", trialCount) } From e131ff4d42d4c67530d5c0c7eac9b9c989ce7c2b Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Mon, 18 Nov 2024 17:49:18 -0800 Subject: [PATCH 11/24] major simplification and improvement for params -- use closures, so much simpler & better --- emer/layer.go | 89 +--------- emer/netparams.go | 216 ------------------------ emer/netsize.go | 107 ------------ emer/network.go | 96 +---------- emer/path.go | 76 +++------ looper/stacks.go | 12 ++ netview/data.go | 2 +- netview/events.go | 8 +- netview/laymesh.go | 2 +- netview/netdata.go | 15 +- netview/netview.go | 2 +- netview/options.go | 7 +- netview/render.go | 30 ++-- params/README.md | 48 +++--- params/apply.go | 313 +++-------------------------------- params/applymap.go | 77 --------- params/diff.go | 123 -------------- params/flex.go | 163 ------------------ params/history.go | 52 ------ params/hypers.go | 90 ---------- params/io.go | 375 ------------------------------------------ params/params.go | 179 +++++--------------- params/params_test.go | 302 +++++----------------------------- params/styler.go | 36 +--- params/tweak.go | 267 +++++++++++++++--------------- params/tweak_test.go | 33 ++-- 26 files changed, 359 insertions(+), 2361 deletions(-) delete mode 100644 emer/netparams.go delete mode 100644 emer/netsize.go delete mode 100644 params/applymap.go delete mode 100644 params/diff.go delete mode 100644 params/flex.go delete mode 100644 params/history.go delete mode 100644 params/hypers.go delete mode 100644 params/io.go diff --git a/emer/layer.go b/emer/layer.go index 3afd675a..559eb1b3 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -10,7 +10,6 @@ import ( "log" "math" - "cogentcore.org/core/base/errors" "cogentcore.org/core/base/slicesx" "cogentcore.org/core/math32" "cogentcore.org/core/tensor" @@ -35,15 +34,13 @@ var ( // LayerBase struct, and this interface only has methods that must be // implemented specifically for a given algorithmic implementation. type Layer interface { - // StyleType, StyleClass, and StyleName methods for parameter styling. 
- params.StylerObject // AsEmer returns the layer as an *emer.LayerBase, // to access base functionality. AsEmer() *LayerBase // Label satisfies the core.Labeler interface for getting - // the name of objects generically. + // the name of objects generically. Use to access Name via interface. Label() string // TypeName is the type or category of layer, defined @@ -109,17 +106,8 @@ type Layer interface { // Returns error on invalid var name or lack of recv path (vals always set to nan on path err). SendPathValues(vals *[]float32, varNm string, recvLay Layer, recvIndex1D int, pathType string) error - // UpdateParams() updates parameter values for all Layer - // and recv pathway parameters, - // based on any other params that might have changed. - UpdateParams() - - // SetParam sets parameter at given path to given value. - // returns error if path not found or value cannot be set. - SetParam(path, val string) error - // NonDefaultParams returns a listing of all parameters in the Layer that - // are not at their default values -- useful for setting param styles etc. + // are not at their default values; useful for setting param styles etc. NonDefaultParams() string // AllParams returns a listing of all parameters in the Layer @@ -204,9 +192,6 @@ type LayerBase struct { // units in the central pools of a 4D layer. SampleShape tensor.Shape `table:"-"` - // provides a history of parameters applied to the layer - ParamsHistory params.HistoryImpl `table:"-"` - // optional metadata that is saved in network weights files, // e.g., can indicate number of epochs that were trained, // or any other information about this network that would be useful to save. 
@@ -222,11 +207,7 @@ func InitLayer(l Layer, name string) { } func (ly *LayerBase) AsEmer() *LayerBase { return ly } - -// params.Styler: -func (ly *LayerBase) StyleType() string { return "Layer" } -func (ly *LayerBase) StyleClass() string { return ly.EmerLayer.TypeName() + " " + ly.Class } -func (ly *LayerBase) StyleName() string { return ly.Name } +func (ly *LayerBase) Label() string { return ly.Name } // AddClass adds a CSS-style class name(s) for this layer, // ensuring that it is not a duplicate, and properly space separated. @@ -236,8 +217,6 @@ func (ly *LayerBase) AddClass(cls ...string) *LayerBase { return ly } -func (ly *LayerBase) Label() string { return ly.Name } - // Is2D() returns true if this is a 2D layer (no Pools) func (ly *LayerBase) Is2D() bool { return ly.Shape.NumDims() == 2 } @@ -518,7 +497,7 @@ func (ly *LayerBase) RecvPathBySendName(sender string) (Path, error) { el := ly.EmerLayer for pi := range el.NumRecvPaths() { pt := el.RecvPath(pi) - if pt.SendLayer().StyleName() == sender { + if pt.SendLayer().Label() == sender { return pt, nil } } @@ -531,7 +510,7 @@ func (ly *LayerBase) SendPathByRecvName(recv string) (Path, error) { el := ly.EmerLayer for pi := range el.NumSendPaths() { pt := el.SendPath(pi) - if pt.RecvLayer().StyleName() == recv { + if pt.RecvLayer().Label() == recv { return pt, nil } } @@ -545,7 +524,7 @@ func (ly *LayerBase) RecvPathBySendNameType(sender, typeName string) (Path, erro el := ly.EmerLayer for pi := range el.NumRecvPaths() { pt := el.RecvPath(pi) - if pt.SendLayer().StyleName() == sender && pt.TypeName() == typeName { + if pt.SendLayer().Label() == sender && pt.TypeName() == typeName { return pt, nil } } @@ -559,66 +538,14 @@ func (ly *LayerBase) SendPathByRecvNameType(recv, typeName string) (Path, error) el := ly.EmerLayer for pi := range el.NumSendPaths() { pt := el.SendPath(pi) - if pt.RecvLayer().StyleName() == recv && pt.TypeName() == typeName { + if pt.RecvLayer().Label() == recv && pt.TypeName() == typeName 
{ return pt, nil } } return nil, fmt.Errorf("receiving layer named: %s, type: %s not found in list of sending pathways", recv, typeName) } -//////////////////////////////////////////////////////////////////// -// Params - -// ParamsHistoryReset resets parameter application history -func (ly *LayerBase) ParamsHistoryReset() { - ly.ParamsHistory.ParamsHistoryReset() - el := ly.EmerLayer - for pi := range el.NumRecvPaths() { - pt := el.RecvPath(pi) - pt.AsEmer().ParamsHistoryReset() - } -} - -// ParamsApplied is just to satisfy History interface so reset can be applied -func (ly *LayerBase) ParamsApplied(sel *params.Sel) { - ly.ParamsHistory.ParamsApplied(sel) -} - -// SetParam sets parameter at given path to given value. -// returns error if path not found or value cannot be set. -func (ly *LayerBase) SetParam(path, val string) error { - return params.SetParam(ly.EmerLayer.StyleObject(), path, val) -} - -// ApplyParams applies given parameter style Sheet to this layer and its recv pathways. -// Calls UpdateParams on anything set to ensure derived parameters are all updated. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// it always prints a message if a parameter fails to be set. -// returns true if any params were set, and error if there were any errors. -func (ly *LayerBase) ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) { - applied := false - var errs []error - app, err := pars.Apply(ly.EmerLayer, setMsg) // essential to go through AxonLay - if app { - ly.EmerLayer.UpdateParams() - applied = true - } - if err != nil { - errs = append(errs, err) - } - el := ly.EmerLayer - for pi := range el.NumRecvPaths() { - pt := el.RecvPath(pi).AsEmer() - app, err = pt.ApplyParams(pars, setMsg) - if app { - applied = true - } - if err != nil { - errs = append(errs, err) - } - } - return applied, errors.Join(errs...) 
-} +//////// Params // NonDefaultParams returns a listing of all parameters in the Layer that // are not at their default values -- useful for setting param styles etc. diff --git a/emer/netparams.go b/emer/netparams.go deleted file mode 100644 index a24f2153..00000000 --- a/emer/netparams.go +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package emer - -import ( - "fmt" - "log" - "log/slog" - "strings" - - "cogentcore.org/core/base/mpi" - "github.com/emer/emergent/v2/params" -) - -// NetParams handles standard parameters for a Network -// (use econfig and a Config struct for other configuration params). -// Assumes a Set named "Base" has the base-level parameters, which are -// always applied first, followed optionally by additional Set(s) -// that can have different parameters to try. -type NetParams struct { - - // full collection of param sets to use - Params params.Sets `new-window:"+" display:"no-inline"` - - // optional additional sheets of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Sheet names!) 
- ExtraSheets string - - // optional additional tag to add to file names, logs to identify params / run config - Tag string - - // the network to apply parameters to - Network Network `display:"-"` - - // list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used - NetHypers params.Flex `display:"-"` - - // print out messages for each parameter that is set - SetMsg bool `display:"-"` -} - -// Config configures the ExtraSheets, Tag, and Network fields -func (pr *NetParams) Config(pars params.Sets, extraSheets, tag string, net Network) { - pr.Params = pars - report := "" - if extraSheets != "" { - pr.ExtraSheets = extraSheets - report += " ExtraSheets: " + extraSheets - } - if tag != "" { - pr.Tag = tag - report += " Tag: " + tag - } - pr.Network = net - if report != "" { - mpi.Printf("NetParams Set: %s", report) - } -} - -// Name returns name of current set of parameters, including Tag. -// if ExtraSheets is empty then it returns "Base", otherwise returns ExtraSheets -func (pr *NetParams) Name() string { - rn := "" - if pr.Tag != "" { - rn += pr.Tag + "_" - } - if pr.ExtraSheets == "" { - rn += "Base" - } else { - rn += pr.ExtraSheets - } - return rn -} - -// RunName returns standard name simulation run based on params Name() -// and starting run number. -func (pr *NetParams) RunName(startRun int) string { - return fmt.Sprintf("%s_%03d", pr.Name(), startRun) -} - -// Validate checks that the Network has been set -func (pr *NetParams) Validate() error { - if pr.Network == nil { - err := fmt.Errorf("emer.NetParams: Network is not set -- params will not be applied!") - log.Println(err) - return err - } - return nil -} - -// SetAll sets all parameters, using "Base" Set then any ExtraSheets, -// Does a Validate call first. 
-func (pr *NetParams) SetAll() error { - err := pr.Validate() - if err != nil { - return err - } - if hist, ok := pr.Network.(params.History); ok { - hist.ParamsHistoryReset() - } - err = pr.SetAllSheet("Base") - if pr.ExtraSheets != "" && pr.ExtraSheets != "Base" { - sps := strings.Fields(pr.ExtraSheets) - for _, ps := range sps { - err = pr.SetAllSheet(ps) - } - } - return err -} - -// SetAllSheet sets parameters for given Sheet name to the Network -func (pr *NetParams) SetAllSheet(sheetName string) error { - err := pr.Validate() - if err != nil { - return err - } - psheet, err := pr.Params.SheetByName(sheetName) - if err != nil { - return err - } - psheet.SelMatchReset(sheetName) - pr.SetNetworkSheet(pr.Network, psheet, sheetName) - err = psheet.SelNoMatchWarn(sheetName, "Network") - return err -} - -// SetNetworkMap applies params from given map of values -// The map keys are Selector:Path and the value is the value to apply, as a string. -func (pr *NetParams) SetNetworkMap(net Network, vals map[string]any) error { - sh, err := params.MapToSheet(vals) - if err != nil { - log.Println(err) - return err - } - pr.SetNetworkSheet(net, sh, "ApplyMap") - return nil -} - -// SetNetworkSheet applies params from given sheet -func (pr *NetParams) SetNetworkSheet(net Network, sh *params.Sheet, setName string) { - net.AsEmer().ApplyParams(sh, pr.SetMsg) - hypers := NetworkHyperParams(net, sh) - if setName == "Base" { - pr.NetHypers = hypers - } else { - pr.NetHypers.CopyFrom(hypers) - } -} - -// NetworkHyperParams returns the compiled hyper parameters from given Sheet -// for each layer and pathway in the network -- applies the standard css -// styling logic for the hyper parameters. 
-func NetworkHyperParams(net Network, sheet *params.Sheet) params.Flex { - hypers := params.Flex{} - nl := net.NumLayers() - for li := range nl { - ly := net.EmerLayer(li) - nm := ly.StyleName() - hypers[nm] = ¶ms.FlexVal{Name: nm, Type: "Layer", Class: ly.StyleClass(), Object: params.Hypers{}} - } - // separate pathways - for li := range nl { - ly := net.EmerLayer(li) - np := ly.NumRecvPaths() - for pi := range np { - pj := ly.RecvPath(pi) - nm := pj.StyleName() - hypers[nm] = ¶ms.FlexVal{Name: nm, Type: "Path", Class: pj.StyleClass(), Object: params.Hypers{}} - } - } - for nm, vl := range hypers { - sheet.Apply(vl, false) - hv := vl.Object.(params.Hypers) - hv.DeleteValOnly() - if len(hv) == 0 { - delete(hypers, nm) - } - } - return hypers -} - -// SetFloatParam sets given float32 param value to layer or pathway -// (typ = Layer or Path) of given name, at given path (which can start -// with the typ name). -// Returns an error (and logs it automatically) for any failure. -func SetFloatParam(net Network, name, typ, path string, val float32) error { - rpath := params.PathAfterType(path) - prs := fmt.Sprintf("%g", val) - en := net.AsEmer() - switch typ { - case "Layer": - ly, err := en.EmerLayerByName(name) - if err != nil { - slog.Error(err.Error()) - return err - } - err = ly.SetParam(rpath, prs) - if err != nil { - slog.Error(err.Error()) - return err - } - case "Path": - pj, err := en.EmerPathByName(name) - if err != nil { - slog.Error(err.Error()) - return err - } - err = pj.SetParam(rpath, prs) - if err != nil { - slog.Error(err.Error()) - return err - } - } - return nil -} diff --git a/emer/netsize.go b/emer/netsize.go deleted file mode 100644 index 82eb8d03..00000000 --- a/emer/netsize.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package emer - -import ( - "fmt" - - "github.com/emer/emergent/v2/params" -) - -// LaySize contains parameters for size of layers -type LaySize struct { - - // Y (vertical) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer - Y int - - // X (horizontal) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer - X int - - // Y (vertical) size of each pool in units, only for 4D layers (inner dimension) - PoolY int - - // Y (horizontal) size of each pool in units, only for 4D layers (inner dimension) - PoolX int -} - -// NetSize is a network schema for holding a params for layer sizes. -// Values can be queried for getting sizes when configuring the network. -// Uses params.Flex to support flexible parameter specification -type NetSize params.Flex - -func (ns *NetSize) JSONString() string { - return ((*params.Flex)(ns)).JSONString() -} - -// ApplySheet applies given sheet of parameters to each layer -func (ns *NetSize) ApplySheet(sheet *params.Sheet, setMsg bool) { - ((*params.Flex)(ns)).ApplySheet(sheet, setMsg) -} - -// AddLayers adds layer(s) of given class -- most efficient -// to add each class separately en-mass. -func (ns *NetSize) AddLayers(names []string, class string) { - if *ns == nil { - *ns = make(NetSize) - } - for _, nm := range names { - (*ns)[nm] = ¶ms.FlexVal{Name: nm, Type: "Layer", Class: class, Object: &LaySize{}} - } -} - -// Layer returns the layer size for given layer name -- nil if not found -// and an error is emitted and returned -func (ns *NetSize) Layer(name string) (*LaySize, error) { - fv, has := (*ns)[name] - if !has { - err := fmt.Errorf("emer.NetSize: layer named: %s not found", name) - return nil, err - } - return fv.Object.(*LaySize), nil -} - -// LayX returns the X value = horizontal size of 2D layer or number of pools -// (outer dimension) for 4D layer, for given layer from size, if it set there. 
-// Otherwise returns the provided default value -func (ns *NetSize) LayX(name string, def int) int { - ls, err := ns.Layer(name) - if err != nil || ls.X == 0 { - return def - } - return ls.X -} - -// LayY returns the Y value = vertical size of 2D layer or number of pools -// (outer dimension) for 4D layer, for given layer from size, if it set there. -// Otherwise returns the provided default value -func (ns *NetSize) LayY(name string, def int) int { - ls, err := ns.Layer(name) - if err != nil || ls.Y == 0 { - return def - } - return ls.Y -} - -// PoolX returns the Pool X value (4D inner dim) = size of pool in units -// for given layer from size if it set there. -// Otherwise returns the provided default value -func (ns *NetSize) PoolX(name string, def int) int { - ls, err := ns.Layer(name) - if err != nil || ls.PoolX == 0 { - return def - } - return ls.PoolX -} - -// PoolY returns the Pool X value (4D inner dim) = size of pool in units -// for given layer from size if it set there. -// Otherwise returns the provided default value -func (ns *NetSize) PoolY(name string, def int) int { - ls, err := ns.Layer(name) - if err != nil || ls.PoolY == 0 { - return def - } - return ls.PoolY -} diff --git a/emer/network.go b/emer/network.go index 8c19908e..4ceb612f 100644 --- a/emer/network.go +++ b/emer/network.go @@ -16,7 +16,6 @@ import ( "cogentcore.org/core/base/randx" "cogentcore.org/core/core" "cogentcore.org/core/math32" - "github.com/emer/emergent/v2/params" "github.com/emer/emergent/v2/relpos" ) @@ -147,9 +146,6 @@ type NetworkBase struct { // map of name to layers, for EmerLayerByName methods LayerNameMap map[string]Layer `display:"-"` - // map from class name to layer names. 
- LayerClassMap map[string][]string `display:"-"` - // minimum display position in network MinPos math32.Vector3 `display:"-"` @@ -184,31 +180,16 @@ func (nt *NetworkBase) AsEmer() *NetworkBase { return nt } func (nt *NetworkBase) Label() string { return nt.Name } -// MakeLayerMaps creates new LayerNameMap and LayerClassMap. -// Call this when the network is built. -func (nt *NetworkBase) MakeLayerMaps() { - nt.LayerNameMap = make(map[string]Layer) - nt.LayerClassMap = make(map[string][]string) - nt.UpdateLayerMaps() -} - -// UpdateLayerMaps updates the LayerNameMap and LayerClassMap. -func (nt *NetworkBase) UpdateLayerMaps() { +// UpdateLayerNameMap updates the LayerNameMap. +func (nt *NetworkBase) UpdateLayerNameMap() { if nt.LayerNameMap == nil { - nt.MakeLayerMaps() - return + nt.LayerNameMap = make(map[string]Layer) } nl := nt.EmerNetwork.NumLayers() for li := range nl { ly := nt.EmerNetwork.EmerLayer(li) - lnm := ly.StyleName() + lnm := ly.Label() nt.LayerNameMap[lnm] = ly - cls := strings.Split(ly.StyleClass(), " ") - for _, cl := range cls { - ll := nt.LayerClassMap[cl] - ll = append(ll, lnm) - nt.LayerClassMap[cl] = ll - } } } @@ -216,7 +197,7 @@ func (nt *NetworkBase) UpdateLayerMaps() { // returns error message if layer is not found. func (nt *NetworkBase) EmerLayerByName(name string) (Layer, error) { if nt.LayerNameMap == nil || len(nt.LayerNameMap) != nt.EmerNetwork.NumLayers() { - nt.UpdateLayerMaps() + nt.UpdateLayerNameMap() } if ly, ok := nt.LayerNameMap[name]; ok { return ly, nil @@ -250,47 +231,6 @@ func (nt *NetworkBase) EmerPathByName(name string) (Path, error) { return path, nil } -// LayersByClass returns a list of layer names by given class(es). -// Lists are compiled when network Build() function called, -// or now if not yet present. -// The layer Type is always included as a Class, along with any other -// space-separated strings specified in Class for parameter styling, etc. 
-// If no classes are passed, all layer names in order are returned. -func (nt *NetworkBase) LayersByClass(classes ...string) []string { - if nt.LayerClassMap == nil { - nt.UpdateLayerMaps() - } - var nms []string - nl := nt.EmerNetwork.NumLayers() - if len(classes) == 0 { - for li := range nl { - ly := nt.EmerNetwork.EmerLayer(li).AsEmer() - if ly.Off { - continue - } - nms = append(nms, ly.Name) - } - return nms - } - for _, lc := range classes { - nms = append(nms, nt.LayerClassMap[lc]...) - } - // only get unique layers - layers := []string{} - has := map[string]bool{} - for _, nm := range nms { - if has[nm] { - continue - } - layers = append(layers, nm) - has[nm] = true - } - if len(layers) == 0 { - panic(fmt.Sprintf("No Layers found for query: %#v.", classes)) - } - return layers -} - // LayoutLayers computes the 3D layout of layers based on their relative // position settings. func (nt *NetworkBase) LayoutLayers() { @@ -393,31 +333,7 @@ func (nt *NetworkBase) VarRange(varNm string) (min, max float32, err error) { return } -/////////////////////////////////////////////////////////////////////// -// Params - -// ApplyParams applies given parameter style Sheet to layers and paths in this network. -// Calls UpdateParams to ensure derived parameters are all updated. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// it always prints a message if a parameter fails to be set. -// returns true if any params were set, and error if there were any errors. -func (nt *NetworkBase) ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) { - applied := false - var errs []error - en := nt.EmerNetwork - nlay := en.NumLayers() - for li := range nlay { - ly := en.EmerLayer(li).AsEmer() - app, err := ly.ApplyParams(pars, setMsg) - if app { - applied = true - } - if err != nil { - errs = append(errs, err) - } - } - return applied, errors.Join(errs...) 
-} +//////// Params // NonDefaultParams returns a listing of all parameters in the Network that // are not at their default values -- useful for setting param styles etc. diff --git a/emer/path.go b/emer/path.go index eed35b66..4dc8d7b0 100644 --- a/emer/path.go +++ b/emer/path.go @@ -6,6 +6,7 @@ package emer import ( "io" + "strings" "cogentcore.org/core/math32" "github.com/emer/emergent/v2/params" @@ -22,15 +23,13 @@ import ( // PathBase struct, and this interface only has methods that must be // implemented specifically for a given algorithmic implementation, type Path interface { - // StyleType, StyleClass, and StyleName methods for parameter styling. - params.StylerObject // AsEmer returns the path as an *emer.PathBase, // to access base functionality. AsEmer() *PathBase // Label satisfies the core.Labeler interface for getting - // the name of objects generically. + // the name of objects generically. Use to access Name via interface. Label() string // TypeName is the type or category of path, defined @@ -87,14 +86,6 @@ type Path interface { // so it is the only one that needs to be updated for derived types. SynValue1D(varIndex int, synIndex int) float32 - // UpdateParams() updates parameter values for all Path parameters, - // based on any other params that might have changed. - UpdateParams() - - // SetParam sets parameter at given path to given value. - // returns error if path not found or value cannot be set. - SetParam(path, val string) error - // AllParams returns a listing of all parameters in the Pathway. AllParams() string @@ -125,7 +116,7 @@ type PathBase struct { Name string // Class is for applying parameter styles across multiple paths - // that all get the same parameters. This can be space separated + // that all get the same parameters. This can be space separated // with multple classes. Class string @@ -142,9 +133,6 @@ type PathBase struct { // Off inactivates this pathway, allowing for easy experimentation. 
Off bool - - // provides a history of parameters applied to the layer - ParamsHistory params.HistoryImpl `table:"-"` } // InitPath initializes the path, setting the EmerPath interface @@ -155,12 +143,7 @@ func InitPath(pt Path) { } func (pt *PathBase) AsEmer() *PathBase { return pt } - -// params.Styler: -func (pt *PathBase) StyleType() string { return "Path" } -func (pt *PathBase) StyleClass() string { return pt.EmerPath.TypeName() + " " + pt.Class } -func (pt *PathBase) StyleName() string { return pt.Name } -func (pt *PathBase) Label() string { return pt.Name } +func (pt *PathBase) Label() string { return pt.Name } // AddClass adds a CSS-style class name(s) for this path, // ensuring that it is not a duplicate, and properly space separated. @@ -170,6 +153,23 @@ func (pt *PathBase) AddClass(cls ...string) *PathBase { return pt } +// IsTypeOrClass returns true if the TypeName or parameter Class for this +// pathway matches the space separated list of values given, using +// case-insensitive, "contains" logic for each match. +func (pt *PathBase) IsTypeOrClass(types string) bool { + cls := strings.Fields(strings.ToLower(pt.Class)) + cls = append([]string{strings.ToLower(pt.EmerPath.TypeName())}, cls...) + fs := strings.Fields(strings.ToLower(types)) + for _, pt := range fs { + for _, cl := range cls { + if strings.Contains(cl, pt) { + return true + } + } + } + return false +} + // SynValue returns value of given variable name on the synapse // between given send, recv unit indexes (1D, flat indexes). // Returns math32.NaN() for access errors. 
@@ -182,40 +182,6 @@ func (pt *PathBase) SynValue(varNm string, sidx, ridx int) float32 { return pt.EmerPath.SynValue1D(vidx, syi) } -//////////////////////////////////////////////////////////////////// -// Params - -// ParamsHistoryReset resets parameter application history -func (pt *PathBase) ParamsHistoryReset() { - pt.ParamsHistory.ParamsHistoryReset() -} - -// ParamsApplied is just to satisfy History interface so reset can be applied -func (pt *PathBase) ParamsApplied(sel *params.Sel) { - pt.ParamsHistory.ParamsApplied(sel) -} - -// SetParam sets parameter at given path to given value. -// returns error if path not found or value cannot be set. -func (pt *PathBase) SetParam(path, val string) error { - return params.SetParam(pt.EmerPath.StyleObject(), path, val) -} - -// ApplyParams applies given parameter style Sheet to this pathway. -// Calls UpdateParams if anything set to ensure derived parameters are all updated. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// it always prints a message if a parameter fails to be set. -// returns true if any params were set, and error if there were any errors. -func (pt *PathBase) ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) { - app, err := pars.Apply(pt.EmerPath, setMsg) - // note: must use EmerPath to get to actual Path, which then uses Styler interface - // to return the Params struct. - if app { - pt.EmerPath.UpdateParams() - } - return app, err -} - // NonDefaultParams returns a listing of all parameters in the Layer that // are not at their default values -- useful for setting param styles etc. 
func (pt *PathBase) NonDefaultParams() string { diff --git a/looper/stacks.go b/looper/stacks.go index 878e285d..ec7727df 100644 --- a/looper/stacks.go +++ b/looper/stacks.go @@ -7,9 +7,12 @@ package looper //go:generate core generate -add-types import ( + "cmp" + "slices" "strings" "cogentcore.org/core/enums" + "golang.org/x/exp/maps" ) var ( @@ -162,6 +165,15 @@ func (ls *Stacks) AddOnEndToLoop(level enums.Enum, name string, fun func(mode en } } +// Modes returns a sorted list of stack modes, for iterating in Mode enum value order. +func (ls *Stacks) Modes() []enums.Enum { + mds := maps.Keys(ls.Stacks) + slices.SortFunc(mds, func(a, b enums.Enum) int { + return cmp.Compare(a.Int64(), b.Int64()) + }) + return mds +} + //////// More detailed control API // IsRunning is True if running. diff --git a/netview/data.go b/netview/data.go index ba51cfe8..b1270c4b 100644 --- a/netview/data.go +++ b/netview/data.go @@ -43,7 +43,7 @@ func (ld *LayData) AllocSendPaths(ly emer.Layer) { ld.SendPaths = make([]*PathData, nsp) for si := range ly.NumSendPaths() { pt := ly.SendPath(si) - pd := &PathData{Send: pt.SendLayer().StyleName(), Recv: pt.RecvLayer().StyleName(), Path: pt} + pd := &PathData{Send: pt.SendLayer().Label(), Recv: pt.RecvLayer().Label(), Path: pt} ld.SendPaths[si] = pd pd.Alloc() } diff --git a/netview/events.go b/netview/events.go index b428a110..a9036206 100644 --- a/netview/events.go +++ b/netview/events.go @@ -46,13 +46,13 @@ func (sw *Scene) MouseDownEvent(e events.Event) { pos := e.Pos().Sub(sw.Geom.ContentBBox.Min) pt := sw.PathAtPoint(pos) if pt != nil { - FormDialog(sw, pt, "Path: "+pt.StyleName()) + FormDialog(sw, pt, "Path: "+pt.Label()) e.SetHandled() return } lay := sw.LayerLabelAtPoint(pos) if lay != nil { - FormDialog(sw, lay, "Layer: "+lay.StyleName()) + FormDialog(sw, lay, "Layer: "+lay.Label()) e.SetHandled() return } @@ -62,7 +62,7 @@ func (sw *Scene) MouseDownEvent(e events.Event) { } nv := sw.NetView nv.Data.PathUnIndex = unIndex - 
nv.Data.PathLay = lay.StyleName() + nv.Data.PathLay = lay.Label() nv.UpdateView() e.SetHandled() } @@ -77,7 +77,7 @@ func (sw *Scene) WidgetTooltip(pos image.Point) (string, image.Point) { pt := sw.PathAtPoint(lpos) if pt != nil { pe := pt.AsEmer() - tt := "[Click to edit] " + pe.Name + " " + pt.StyleClass() + tt := "[Click to edit] " + pe.Name if pe.Doc != "" { tt += ": " + pe.Doc } diff --git a/netview/laymesh.go b/netview/laymesh.go index 20592c9b..f5690551 100644 --- a/netview/laymesh.go +++ b/netview/laymesh.go @@ -35,7 +35,7 @@ func NewLayMesh(sc *xyz.Scene, nv *NetView, lay emer.Layer) *LayMesh { lm := &LayMesh{} lm.View = nv lm.Lay = lay - lm.Name = lay.StyleName() + lm.Name = lay.Label() sc.SetMesh(lm) return lm } diff --git a/netview/netdata.go b/netview/netdata.go index fff820c1..ae5fc143 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -17,6 +17,7 @@ import ( "strings" "cogentcore.org/core/base/errors" + "cogentcore.org/core/base/metadata" "cogentcore.org/core/core" "cogentcore.org/core/math32" "cogentcore.org/core/plot/plotcore" @@ -158,12 +159,12 @@ makeData: if !nd.NoSynData { for li := range nlay { rlay := nd.Net.EmerLayer(li) - rld := nd.LayData[rlay.StyleName()] + rld := nd.LayData[rlay.Label()] rld.RecvPaths = make([]*PathData, rlay.NumRecvPaths()) for ri := 0; ri < rlay.NumRecvPaths(); ri++ { rpj := rlay.RecvPath(ri) slay := rpj.SendLayer() - sld := nd.LayData[slay.StyleName()] + sld := nd.LayData[slay.Label()] for _, spj := range sld.SendPaths { if spj.Path == rpj { rld.RecvPaths[ri] = spj // link @@ -175,7 +176,7 @@ makeData: } else { for li := range nlay { lay := nd.Net.EmerLayer(li) - ld := nd.LayData[lay.StyleName()] + ld := nd.LayData[lay.Label()] if nd.NoSynData { ld.FreePaths() } else { @@ -346,7 +347,7 @@ func (nd *NetData) RecordSyns() { } for li := range nlay { lay := nd.Net.EmerLayer(li) - laynm := lay.StyleName() + laynm := lay.Label() ld := nd.LayData[laynm] for si := 0; si < lay.NumSendPaths(); si++ { spd := 
ld.SendPaths[si] @@ -681,9 +682,9 @@ func (nd *NetData) SelectedUnitTable(di int) *table.Table { selnm := nd.PathLay + fmt.Sprintf("[%d]", nd.PathUnIndex) dt := &table.Table{} - dt.Meta.SetName("NetView: " + selnm) - dt.Meta.Set("read-only", true) - tensor.SetPrecision(dt.Meta, 4) + metadata.SetName(dt, "NetView: "+selnm) + metadata.SetTo(dt, "read-only", true) + tensor.SetPrecision(dt, 4) ln := nd.Ring.Len vlen := len(nd.UnVars) diff --git a/netview/netview.go b/netview/netview.go index 336b4ea7..7e485102 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -659,7 +659,7 @@ func (nv *NetView) UnitValColor(lay emer.Layer, idx1d int, raw float32, hasval b } if !hasval { scaled = 0 - if lay.StyleName() == nv.Data.PathLay && idx1d == nv.Data.PathUnIndex { + if lay.Label() == nv.Data.PathLay && idx1d == nv.Data.PathUnIndex { clr = color.RGBA{0x20, 0x80, 0x20, 0x80} } else { clr = NilColor diff --git a/netview/options.go b/netview/options.go index 5d9fde2c..d82ae74e 100644 --- a/netview/options.go +++ b/netview/options.go @@ -53,11 +53,10 @@ type Options struct { //types:add // whether to display the pathways between layers as arrows Paths bool - // path type name(s) to display (space separated), for path arrows, + // PathType has name(s) to display (space separated), for path arrows, // and when there are multiple pathways from the same layer. - // For arrows, uses the style class names to match, which includes type name - // and other factors. - // Uses case insensitive contains logic for each name. + // Uses the parameter Class names in addition to type, + // and case insensitive "contains" logic for each name. 
PathType string // width of the path arrows, in normalized units diff --git a/netview/render.go b/netview/render.go index 9ab24ee5..29aa040e 100644 --- a/netview/render.go +++ b/netview/render.go @@ -9,7 +9,6 @@ import ( "fmt" "math" "slices" - "strings" "cogentcore.org/core/base/errors" "cogentcore.org/core/colors" @@ -42,13 +41,13 @@ func (nv *NetView) UpdateLayers() { layConfig := tree.TypePlan{} for li := range nlay { ly := nv.Net.EmerLayer(li) - layConfig.Add(types.For[xyz.Group](), ly.StyleName()) + layConfig.Add(types.For[xyz.Group](), ly.Label()) } if !tree.Update(laysGp, layConfig) && nv.layerNameSizeShown == nv.Options.LayerNameSize { for li := range laysGp.Children { ly := nv.Net.EmerLayer(li) - lmesh := errors.Log1(se.MeshByName(ly.StyleName())) + lmesh := errors.Log1(se.MeshByName(ly.Label())) se.SetMesh(lmesh) // does update } if nv.hasPaths != nv.Options.Paths || nv.pathTypeShown != nv.Options.PathType || @@ -72,14 +71,14 @@ func (nv *NetView) UpdateLayers() { for li, lgi := range laysGp.Children { ly := nv.Net.EmerLayer(li) lb := ly.AsEmer() - lmesh, _ := se.MeshByName(ly.StyleName()) + lmesh, _ := se.MeshByName(ly.Label()) if lmesh == nil { NewLayMesh(se, nv, ly) } else { lmesh.(*LayMesh).Lay = ly // make sure } lg := lgi.(*xyz.Group) - gpConfig[1].Name = ly.StyleName() // text2d textures use obj name, so must be unique + gpConfig[1].Name = ly.Label() // text2d textures use obj name, so must be unique tree.Update(lg, gpConfig) lp := lb.Pos.Pos lp.Y = -lp.Y // reverse direction @@ -89,9 +88,9 @@ func (nv *NetView) UpdateLayers() { lo := lg.Child(0).(*LayObj) lo.Defaults() - lo.LayName = ly.StyleName() + lo.LayName = ly.Label() lo.NetView = nv - lo.SetMeshName(ly.StyleName()) + lo.SetMeshName(ly.Label()) lo.Material.Color = colors.FromRGB(255, 100, 255) lo.Material.Reflective = 8 lo.Material.Bright = 8 @@ -102,7 +101,7 @@ func (nv *NetView) UpdateLayers() { txt := lg.Child(1).(*LayName) txt.Defaults() txt.NetView = nv - txt.SetText(ly.StyleName()) + 
txt.SetText(ly.Label()) txt.Pose.Scale = math32.Vector3Scalar(nv.Options.LayerNameSize).Div(lg.Pose.Scale) txt.Styles.Background = colors.Uniform(colors.Transparent) txt.Styles.Text.Align = styles.Start @@ -230,7 +229,7 @@ func (nv *NetView) UpdatePaths() { npt := sl.NumSendPaths() for pi := range npt { pt := sl.SendPath(pi) - if !nv.pathTypeNameMatch(pt.StyleClass()) { + if !nv.pathTypeNameMatch(pt) { continue } rb := pt.RecvLayer().AsEmer() @@ -407,20 +406,11 @@ func (nv *NetView) UpdatePaths() { nv.pathWidthShown = nv.Options.PathWidth } -func (nv *NetView) pathTypeNameMatch(pcls string) bool { +func (nv *NetView) pathTypeNameMatch(pt emer.Path) bool { if len(nv.Options.PathType) == 0 { return true } - cls := strings.Fields(strings.ToLower(pcls)) - fs := strings.Fields(strings.ToLower(nv.Options.PathType)) - for _, pt := range fs { - for _, cl := range cls { - if strings.Contains(cl, pt) { - return true - } - } - } - return false + return pt.AsEmer().IsTypeOrClass(nv.Options.PathType) } // returns the self projection mesh, either left = 1 or right = 2 diff --git a/params/README.md b/params/README.md index 2f912f12..2b423f4b 100644 --- a/params/README.md +++ b/params/README.md @@ -2,12 +2,22 @@ Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/params) See [Wiki Params](https://github.com/emer/emergent/wiki/Params) page for detailed docs. -Package `params` provides general-purpose parameter management functionality for organizing multiple sets of parameters efficiently, and basic IO for saving / loading from JSON files and generating Go code to embed into applications, and a basic GUI for viewing and editing. +Package `params` applies parameters to struct fields using [css selectors](https://www.w3schools.com/cssref/css_selectors.php) to select which objects a given set of parameters applies to. 
The struct type must implement the `Styler` interface, with `StyleName() string` and `StyleClass() string` methods, which provide the name and class values against which the selectors test. -The main overall unit that is generally operated upon at run-time is the `params.Sheet`, (similar to CSS style sheets) that constitute a coherent set of parameters. Here's the structure: +Parameters are set using a closure function that runs on matching objects, so that any type value can be set using full editor completion for accessing the struct fields, and any additional logic can be applied within the closure function, including parameter search functions. + +Three levels of organization are supported: + +* `Sheets` is a `map` of named `Sheet`s, typically with a "Base" Sheet that is applied first, and contains all the base-level defaults, and then different optional parameter `Sheet`s for different configurations or test cases being explored. + +* `Sheet` is an ordered list (slice) of `Sel` elements, applied in order. The ordering is critical for organizing parameters into broad defaults that apply more generally, which are put at the start, followed by progressively more specific parameters that override those defaults for specific cases as needed. + +* `Sel` is an individual selector with an expression that matches on Name, Class or Type (Type can be left blank as the entire stack applies only to a specific type of object), and the `Set` function that sets the parameter values on matching objects. + +TODO: replace with actual example from axon: ``` -Sets { +Sheets { "Base" { Sel: "Layer" { Params: { @@ -28,37 +38,27 @@ Sets { } ``` +In summary, the overall logic is all about the order of application, going from broad defaults to more specific overrides, with the following overall ordering: +* A `Defaults()` method defined on the struct type, which establishes hard-coded default parameters. 
+* The "Base" `Sheet` applies default parameters for a specific simulation, relative to hard-coded defaults.
+* Other `Sheet` cases defined in the map of `Sheets` can then optionally be applied with various experiments, parameter searches, or other specific cases.
+* Order of `Sel`s within a given Sheet is also critical, with the most general Type params first, then .Class, then the most specific #Name cases. For example, an overall learning rate that applies across all pathways with a Type sel, but then a slower one is needed for a .Class or specific #Name'd pathway.
 
-A good strategy is to have a "Base" Sheet that has all the best parameters so far, and then other sets can modify specific params relative to that one. Order of application is critical, as subsequent params applications overwrite earlier ones, and the typical order is:
-
-* `Defaults()` method called that establishes the hard-coded default parameters.
-* Then apply "Base" sheet for any changes relative to those.
-* Then optionally apply one or more additional sheets with current experimental parameters.
+## Selectors
 
-Critically, all of this is entirely up to the particular model program(s) to determine and control -- this package just provides the basic data structures for holding all of the parameters, and the IO / and Apply infrastructure.
-
-Each `params.Sheet` consists of a collection of `params.Sel` elements which actually finally contain the parameters. The `Sel` field specifies a CSS-style selector determining over what scope the parameters should be applied:
-
-* `Type` (no prefix) = name of a type -- anything having this type name will get these params.
+The `Sel` field of the `Sel` specifies a CSS-style selector determining over what scope the parameters should be applied:
 
 * `.Class` = anything with a given class label (each object can have multiple Class labels and thus receive multiple parameter settings, but again, order matters!)
 
 * `#Name` = a specific named object.
-The order of application within a given Sheet is also critical -- typically put the most general Type params first, then .Class, then the most specific #Name cases, to achieve within a given Sheet the same logic of establishing Base params for all types and then more specific overrides for special cases (e.g., an overall learning rate that appplies across all pathways, but maybe a faster or slower one for a .Class or specific #Name'd pathway). - -There is a `params.Styler` interface with methods that any Go type can implement to provide these different labels. The emer.Network, .Layer, and .Path interfaces each implement this interface. - -Otherwise, the Apply method will just directly apply params to a given struct type if it does not implement the Styler interface. - -Parameter values are stored as strings, which can represent any value. +* `Type` (no prefix) = name of a type -- because parameters only apply to a specific type of object, this can typically just be left blank. -Finally, there are methods to show where params.Set's set the same parameter differently, and to compare with the default settings on a given object type using go struct field tags of the form def:"val1[,val2...]". +There is a `params.Styler` interface with methods that any Go type can implement to provide these different labels. -# Providing direct access to specific params -The best way to provide the user direct access to specific parameter values through the Params mechanisms is to put the relevant params in the `Sim` object, where they will be editable fields, and then call `SetFloat` or `SetString` as appropriate with the path to the parameter in question, followed by a call to apply the params. +## Parameter Searching -The current value can be obtained by the `ParamVal` methods. 
+TODO diff --git a/params/apply.go b/params/apply.go index deb59f63..cf0c86cd 100644 --- a/params/apply.go +++ b/params/apply.go @@ -8,202 +8,32 @@ import ( "errors" "fmt" "log" - "log/slog" - "reflect" "strings" - - "cogentcore.org/core/base/labels" - "cogentcore.org/core/base/reflectx" ) -// PathAfterType returns the portion of a path string after the initial -// type, e.g., Layer.Acts.Kir.Gbar -> Acts.Kir.Gbar -func PathAfterType(path string) string { - return strings.Join(strings.Split(path, ".")[1:], ".") -} - -// TargetType returns the first part of the path, indicating what type of -// object the params apply to. Uses the first item in the map (which is random) -// everything in the map must have the same target. -func (pr *Params) TargetType() string { - for pt := range *pr { - return strings.Split(pt, ".")[0] - } - return "" -} - -// Path returns the second part of the path after the target type, -// indicating the path to the specific parameter being set. -func (pr *Params) Path(path string) string { - return PathAfterType(path) -} - -// Apply applies all parameter values to given object. -// Object must already be the appropriate target type based on -// the first element of the path (see TargetType method). -// If setMsg is true, then it will log a confirmation that the parameter -// was set (it always prints an error message if it fails to set the -// parameter at given path, and returns error if so). 
-func (pr *Params) Apply(obj any, setMsg bool) error { - objNm := "" - if styler, has := obj.(Styler); has { - objNm = styler.StyleName() - if styob, has := obj.(StylerObject); has { - obj = styob.StyleObject() - } - } else if lblr, has := obj.(labels.Labeler); has { - objNm = lblr.Label() - } - var errs []error - for pt, v := range *pr { - path := pr.Path(pt) - if hv, ok := obj.(Hypers); ok { - if cv, has := hv[pt]; has { // full path - cv["Val"] = v - } else { - hv[pt] = HyperValues{"Val": v} - } - continue - } - err := SetParam(obj, path, v) - if err == nil { - if setMsg { - log.Printf("%v Set param path: %v to value: %v\n", objNm, pt, v) - } - } else { - errs = append(errs, err) - } - } - return errors.Join(errs...) -} - -/////////////////////////////////////////////////////////////////////// -// Hypers - -// TargetType returns the first part of the path, indicating what type of -// object the params apply to. Uses the first item in the map (which is random) -// everything in the map must have the same target. -func (pr *Hypers) TargetType() string { - for pt := range *pr { - return strings.Split(pt, ".")[0] - } - return "" -} - -// Path returns the second part of the path after the target type, -// indicating the path to the specific parameter being set. -func (pr *Hypers) Path(path string) string { - return strings.Join(strings.Split(path, ".")[1:], ".") -} - -// Apply applies all parameter values to given object. -// Object must already be the appropriate target type based on -// the first element of the path (see TargetType method). -// If setMsg is true, then it will log a confirmation that the parameter -// was set (it always prints an error message if it fails to set the -// parameter at given path, and returns error if so). 
-func (pr *Hypers) Apply(obj any, setMsg bool) error { - objNm := "" - if styler, has := obj.(Styler); has { - objNm = styler.StyleName() - if styob, has := obj.(StylerObject); has { - obj = styob.StyleObject() - } - } else if lblr, has := obj.(labels.Labeler); has { - objNm = lblr.Label() - } - if hv, ok := obj.(Hypers); ok { - hv.CopyFrom(*pr) - return nil - } - var errs []error - for pt, v := range *pr { - path := pr.Path(pt) - val, ok := v["Val"] - if !ok { - continue - } - err := SetParam(obj, path, val) - if err == nil { - if setMsg { - log.Printf("%v Set hypers path: %v to value: %v\n", objNm, pt, v) - } - } else { - errs = append(errs, err) - } - } - return errors.Join(errs...) -} - -/////////////////////////////////////////////////////////////////////// -// Sel - // Apply checks if Sel selector applies to this object according to (.Class, #Name, Type) -// using the params.Styler interface, and returns false if it does not. -// The TargetType of the Params is always tested against the obj's type name first. -// If it does apply, or is not a Styler, then the Params values are set. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// It always prints a message if a parameter fails to be set, and returns an error. -func (ps *Sel) Apply(obj any, setMsg bool) (bool, error) { - if !ps.TargetTypeMatch(obj) { - return false, nil - } +// using the Styler interface, and returns false if it does not. If it does apply, +// then the Set function is called on the object. 
+func (ps *Sel[T]) Apply(obj T) bool { if !ps.SelMatch(obj) { - return false, nil - } - errp := ps.Params.Apply(obj, setMsg) - errh := ps.Hypers.Apply(obj, setMsg) - if errp != nil { - return true, errp - } - return true, errh -} - -// TargetTypeMatch return true if target type applies to object -func (ps *Sel) TargetTypeMatch(obj any) bool { - trg := ps.Params.TargetType() - if styler, has := obj.(Styler); has { - tnm := styler.StyleType() - if tnm == trg { - return true - } - } - trgh := ps.Hypers.TargetType() - if styler, has := obj.(Styler); has { - tnm := styler.StyleType() - if tnm == trgh { - return true - } - } - tnm := reflectx.NonPointerType(reflect.TypeOf(obj)).Name() - return tnm == trg || tnm == trgh -} - -// SelMatch returns true if Sel selector matches the target object properties -func (ps *Sel) SelMatch(obj any) bool { - styler, has := obj.(Styler) - if !has { - return true // default match if no styler.. - } - if styob, has := obj.(StylerObject); has { - obj = styob.StyleObject() + return false } - gotyp := reflectx.NonPointerType(reflect.TypeOf(obj)).Name() - return SelMatch(ps.Sel, styler.StyleName(), styler.StyleClass(), styler.StyleType(), gotyp) + ps.Set(obj) + return true } -// SelMatch returns true if Sel selector matches the target object properties -func SelMatch(sel string, name, cls, styp, gotyp string) bool { - if sel == "" { - return false +// SelMatch returns true if Sel selector matches the target object properties. +func (ps *Sel[T]) SelMatch(obj T) bool { + if ps.Sel == "" { + return true } - if sel[0] == '.' { // class - return ClassMatch(sel[1:], cls) + if ps.Sel[0] == '.' { // class + return ClassMatch(ps.Sel[1:], obj.StyleClass()) } - if sel[0] == '#' { // name - return name == sel[1:] + if ps.Sel[0] == '#' { // name + return obj.StyleName() == ps.Sel[1:] } - return styp == sel || gotyp == sel // type + return true // type always matches } // ClassMatch returns true if given class names. 
@@ -218,42 +48,30 @@ func ClassMatch(sel, cls string) bool { return false } -/////////////////////////////////////////////////////////////////////// -// Sheet +//////// Sheet -// Apply applies entire sheet to given object, using param.Sel's in order -// see param.Sel.Apply() for details. +// Apply applies entire sheet to given object, using Sel's in order. // returns true if any Sel's applied, and error if any errors. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// It always prints a message if a parameter fails to be set, and returns an error. -func (ps *Sheet) Apply(obj any, setMsg bool) (bool, error) { +func (ps *Sheet[T]) Apply(obj T) bool { applied := false - var errs []error for _, sl := range *ps { - app, err := sl.Apply(obj, setMsg) + app := sl.Apply(obj) if app { applied = true sl.NMatch++ - if hist, ok := obj.(History); ok { - hist.ParamsApplied(sl) - } - } - if err != nil { - errs = append(errs, err) } } - return applied, errors.Join(errs...) + return applied } // SelMatchReset resets the Sel.NMatch counter used to find cases where no Sel -// matched any target objects. Call at start of application process, which +// matched any target objects. Call at start of application process, which // may be at an outer-loop of Apply calls (e.g., for a Network, Apply is called // for each Layer and Path), so this must be called separately. // See SelNoMatchWarn for warning call at end. -func (ps *Sheet) SelMatchReset(setName string) { +func (ps *Sheet[T]) SelMatchReset() { for _, sl := range *ps { sl.NMatch = 0 - sl.SetName = setName } } @@ -261,7 +79,7 @@ func (ps *Sheet) SelMatchReset(setName string) { // matches during the last Apply process -- see SelMatchReset. // The setName and objName provide info about the Set and obj being applied. // Returns an error message with the non-matching sets if any, else nil. 
-func (ps *Sheet) SelNoMatchWarn(setName, objName string) error { +func (ps *Sheet[T]) SelNoMatchWarn(setName, objName string) error { msg := "" for _, sl := range *ps { if sl.NMatch == 0 { @@ -275,88 +93,3 @@ func (ps *Sheet) SelNoMatchWarn(setName, objName string) error { } return nil } - -/////////////////////////////////////////////////////////////////////// -// Core Find / Set / Get Param - -// FindParam parses the path and recursively tries to find the parameter pointed to -// by the path (dot-delimited field names). -// Returns error if not found, and always also emits error messages -- -// the target type should already have been identified and this should only -// be called when there is an expectation of the path working. -func FindParam(val reflect.Value, path string) (reflect.Value, error) { - npv := reflectx.NonPointerValue(val) - if npv.Kind() != reflect.Struct { - if !npv.IsValid() { - err := fmt.Errorf("params.FindParam: object is nil -- must Build *before* applying params! path: %v\n", path) - slog.Error(err.Error()) - return npv, err - } - err := fmt.Errorf("params.FindParam: object is not a struct: %v kind: %v -- params must be on structs, path: %v\n", npv.String(), npv.Kind(), path) - slog.Error(err.Error()) - return npv, err - } - paths := strings.Split(path, ".") - fnm := paths[0] - fld := npv.FieldByName(fnm) - if !fld.IsValid() { - err := fmt.Errorf("params.FindParam: could not find Field named: %v in struct: %v kind: %v, path: %v\n", fnm, npv.String(), npv.Kind(), path) - slog.Error(err.Error()) - return fld, err - } - if len(paths) == 1 { - return fld.Addr(), nil - } - return FindParam(fld.Addr(), strings.Join(paths[1:], ".")) // need addr -} - -// SetParam sets parameter at given path on given object to given value -// converts the string param val as appropriate for target type. -// returns error if path not found or cannot set (always logged). 
-func SetParam(obj any, path string, val string) error { - npv := reflectx.NonPointerValue(reflect.ValueOf(obj)) - if npv.Kind() == reflect.Map { // only for string maps - npv.SetMapIndex(reflect.ValueOf(path), reflect.ValueOf(val)) - return nil - } - - fld, err := FindParam(reflect.ValueOf(obj), path) - if err != nil { - return err - } - err = reflectx.SetRobust(fld.Interface(), val) - if err != nil { - slog.Error("params.SetParam: field could not be set", "path", path, "value", val, "err", err) - return err - } - return nil -} - -// GetParam gets parameter value at given path on given object. -// converts target type to float64. -// returns error if path not found or target is not a numeric type (always logged). -func GetParam(obj any, path string) (float64, error) { - fld, err := FindParam(reflect.ValueOf(obj), path) - if err != nil { - return 0, err - } - npf := reflectx.NonPointerValue(fld) - switch npf.Kind() { - case reflect.Float64, reflect.Float32: - return npf.Float(), nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return float64(npf.Int()), nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return float64(npf.Uint()), nil - case reflect.Bool: - if npf.Bool() { - return 1, nil - } else { - return 0, nil - } - default: - err := fmt.Errorf("params.GetParam: field is not of a numeric type -- only numeric types supported. value: %v, kind: %v, path: %v\n", npf.String(), npf.Kind(), path) - slog.Error(err.Error()) - return 0, err - } -} diff --git a/params/applymap.go b/params/applymap.go deleted file mode 100644 index 8f31c40e..00000000 --- a/params/applymap.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package params - -import ( - "errors" - "fmt" - "log" - "reflect" - "strings" - - "cogentcore.org/core/base/reflectx" -) - -// ApplyMap applies given map[string]any values, where the map keys -// are a Path and the value is the value to apply (any appropriate type). -// This is not for Network params, which should use MapToSheet -- see emer.Params wrapper. -// If setMsg is true, then a message is printed to confirm each parameter that is set. -// It always prints a message if a parameter fails to be set, and returns an error. -func ApplyMap(obj any, vals map[string]any, setMsg bool) error { - objv := reflect.ValueOf(obj) - npv := reflectx.NonPointerValue(objv) - if npv.Kind() == reflect.Map { - err := reflectx.CopyMapRobust(obj, vals) - if err != nil { - log.Println(err) - return err - } - if setMsg { - log.Printf("ApplyMap: set map object to %#v\n", obj) - } - } - var errs []error - for k, v := range vals { - fld, err := FindParam(objv, k) - if err != nil { - errs = append(errs, err) - } - err = reflectx.SetRobust(fld.Interface(), v) - if err != nil { - err = fmt.Errorf("ApplyMap: was not able to apply value: %v to field: %s", v, k) - log.Println(err) - errs = append(errs, err) - } - if setMsg { - log.Printf("ApplyMap: set field: %s = %#v\n", k, reflectx.NonPointerValue(fld).Interface()) - } - } - return errors.Join(errs...) -} - -// MapToSheet returns a Sheet from given map[string]any values, -// so the Sheet can be applied as such -- e.g., for the network -// ApplyParams method. -// The map keys are Selector:Path and the value is the value to apply. 
-func MapToSheet(vals map[string]any) (*Sheet, error) { - sh := NewSheet() - var errs []error - for k, v := range vals { - fld := strings.Split(k, ":") - if len(fld) != 2 { - err := fmt.Errorf("ApplyMap: map key value must be colon-separated Selector:Path, not: %s", k) - log.Println(err) - errs = append(errs, err) - continue - } - vstr := reflectx.ToString(v) - - sl := &Sel{Sel: fld[0], SetName: "ApplyMap"} - sl.Params = make(Params) - sl.Params[fld[1]] = vstr - *sh = append(*sh, sl) - } - return sh, errors.Join(errs...) -} diff --git a/params/diff.go b/params/diff.go deleted file mode 100644 index 6d0690e9..00000000 --- a/params/diff.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package params - -import ( - "fmt" - "sort" - - "golang.org/x/exp/maps" -) - -// DiffsAll reports all the cases where the same param path is being set -// to different values across different sets -func (ps *Sets) DiffsAll() string { - pd := "" - sz := len(*ps) - keys := maps.Keys(*ps) - sort.Strings(keys) - for i, sNm := range keys { - set := (*ps)[sNm] - for j := i + 1; j < sz; j++ { - osNm := keys[j] - oset := (*ps)[osNm] - spd := set.Diffs(oset, sNm, osNm) - if spd != "" { - pd += "//////////////////////////////////////\n" - pd += spd - } - } - } - return pd -} - -// DiffsFirst reports all the cases where the same param path is being set -// to different values between the "Base" set and all other sets. -// Only works if there is a set named "Base". 
-func (ps *Sets) DiffsFirst() string { - pd := "" - sz := len(*ps) - if sz < 2 { - return "" - } - set, ok := (*ps)["Base"] - if !ok { - return "params.DiffsFirst: Set named 'Base' not found\n" - } - keys := maps.Keys(*ps) - sort.Strings(keys) - for _, sNm := range keys { - if sNm == "Base" { - continue - } - oset := (*ps)[sNm] - spd := set.Diffs(oset, "Base", sNm) - if spd != "" { - pd += "//////////////////////////////////////\n" - pd += spd - } - } - return pd -} - -// DiffsWithin reports all the cases where the same param path is being set -// to different values within given sheet. -func (ps *Sets) DiffsWithin(sheetName string) string { - sht, err := ps.SheetByName(sheetName) - if err != nil { - return err.Error() - } - return sht.DiffsWithin(sheetName) -} - -///////////////////////////////////////////////////////// -// Sheet - -// Diffs reports all the cases where the same param path is being set -// to different values between this sheeet and the other sheeet. -func (ps *Sheet) Diffs(ops *Sheet, setNm1, setNm2 string) string { - pd := "" - for _, sel := range *ps { - for _, osel := range *ops { - spd := sel.Params.Diffs(&sel.Params, setNm1+":"+sel.Sel, setNm2+":"+osel.Sel) - pd += spd - } - } - return pd -} - -// DiffsWithin reports all the cases where the same param path is being set -// to different values within different Sel's in this Sheet. -func (ps *Sheet) DiffsWithin(sheetName string) string { - pd := "" - sz := len(*ps) - for i, sel := range *ps { - for j := i + 1; j < sz; j++ { - osel := (*ps)[j] - spd := sel.Params.Diffs(&sel.Params, sheetName+":"+sel.Sel, sheetName+":"+osel.Sel) - pd += spd - } - } - return pd -} - -///////////////////////////////////////////////////////// -// Params - -// Diffs returns comparison between all params in this params -// versus the other params, where the path is the same but the -// parameter value is different. Nm1 is the name / id of the -// 'this' Params, and nm2 is for the other params. 
-func (pr *Params) Diffs(op *Params, nm1, nm2 string) string { - pd := "" - for pt, pv := range *pr { - for opt, opv := range *op { - if pt == opt && pv != opv { - pd += fmt.Sprintf("%s:\t\t %s = %v \t|\t %s = %v,\n", pt, nm1, pv, nm2, opv) - } - } - } - return pd -} diff --git a/params/flex.go b/params/flex.go deleted file mode 100644 index 4ae20327..00000000 --- a/params/flex.go +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package params - -import ( - "bytes" - "encoding/json" - "io" - "io/ioutil" - "log" - - "cogentcore.org/core/core" -) - -// FlexVal is a specific flexible value for the Flex parameter map -// that implements the StylerObject interface for CSS-style selection logic. -// The field names are abbreviated because full names are used in StylerObject. -type FlexVal struct { - // name of this specific object, matches #Name selections - Name string - - // type name of this object, matches plain TypeName selections - Type string - - // space-separated list of class name(s), match the .Class selections - Class string - - // actual object with data that is set by the parameters - Object any - - // History of params applied - History HistoryImpl `table:"-"` -} - -func (fv *FlexVal) StyleType() string { - return fv.Type -} - -func (fv *FlexVal) StyleClass() string { - return fv.Class -} - -func (fv *FlexVal) StyleName() string { - return fv.Name -} - -func (fv *FlexVal) StyleObject() any { - return fv.Object -} - -func (fv *FlexVal) CopyFrom(cp *FlexVal) { - fv.Name = cp.Name // these should be the same, but copy anyway - fv.Type = cp.Type - fv.Class = cp.Class - if hyp, ok := fv.Object.(Hypers); ok { // this is the main use-case - if cph, ok := cp.Object.(Hypers); ok { - hyp.CopyFrom(cph) - } - } -} - -// ParamsHistoryReset resets parameter application history -func (fv *FlexVal) 
ParamsHistoryReset() { - fv.History.ParamsHistoryReset() -} - -// ParamsApplied is just to satisfy History interface so reset can be applied -func (fv *FlexVal) ParamsApplied(sel *Sel) { - fv.History.ParamsApplied(sel) -} - -// Flex supports arbitrary named parameter values that can be set -// by a Set of parameters, as a map of any objects. -// First initialize the map with set of names and a type to create -// blank values, then apply the Set to it. -type Flex map[string]*FlexVal - -// Make makes the map if it is nil (otherwise does nothing) -func (fl *Flex) Make() { - if *fl != nil { - return - } - *fl = make(Flex) -} - -func (fl *Flex) StyleType() string { // note: assuming all same type for this purpose - for _, fv := range *fl { - return fv.StyleType() - } - return "Flex" -} - -func (fl *Flex) StyleClass() string { - return "" -} - -func (fl *Flex) Name() string { - return "" -} - -// Init initializes the Flex map with given set of flex values. -func (fl *Flex) Init(vals []FlexVal) { - *fl = make(Flex, len(vals)) - for _, vl := range vals { - inst := vl - (*fl)[vl.Name] = &inst - } -} - -// ApplySheet applies given sheet of parameters to each element in Flex -func (fl *Flex) ApplySheet(sheet *Sheet, setMsg bool) { - for _, vl := range *fl { - sheet.Apply(vl, setMsg) - } -} - -// CopyFrom copies hyper vals from source -func (fl *Flex) CopyFrom(cp Flex) { - fl.Make() - for nm, fv := range cp { - if sfv, has := (*fl)[nm]; has { - sfv.CopyFrom(fv) - } else { - sfv := &FlexVal{} - sfv.CopyFrom(fv) - (*fl)[nm] = sfv - } - } -} - -// WriteJSON saves hypers to a JSON-formatted file. 
-func (fl *Flex) WriteJSON(w io.Writer) error { - b, err := json.MarshalIndent(fl, "", " ") - if err != nil { - log.Println(err) // unlikely - return err - } - w.Write(b) - return err -} - -// JSONString returns a string representation of Flex params -func (fl *Flex) JSONString() string { - var buf bytes.Buffer - fl.WriteJSON(&buf) - return string(buf.Bytes()) -} - -// SaveJSON saves hypers to a JSON-formatted file. -func (fl *Flex) SaveJSON(filename core.Filename) error { - b, err := json.MarshalIndent(fl, "", " ") - if err != nil { - log.Println(err) // unlikely - return err - } - err = ioutil.WriteFile(string(filename), b, 0644) - if err != nil { - log.Println(err) - } - return err -} diff --git a/params/history.go b/params/history.go deleted file mode 100644 index c6cad7b7..00000000 --- a/params/history.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package params - -// The params.History interface records history of parameters applied -// to a given object. -type History interface { - // ParamsHistoryReset resets parameter application history - ParamsHistoryReset() - - // ParamsApplied is called when a parameter is successfully applied for given selector - ParamsApplied(sel *Sel) -} - -// HistoryImpl implements the History interface. Implementing object can -// just pass calls to a HistoryImpl field. 
-type HistoryImpl []*Sel - -// ParamsHistoryReset resets parameter application history -func (hi *HistoryImpl) ParamsHistoryReset() { - *hi = nil -} - -// ParamsApplied is called when a parameter is successfully applied for given selector -func (hi *HistoryImpl) ParamsApplied(sel *Sel) { - *hi = append(*hi, sel) -} - -// ParamsHistory returns the sequence of params applied for each parameter -// from all Sel's applied, in reverse order -func (hi *HistoryImpl) ParamsHistory() Params { - pr := make(Params) - lastSet := "" - for _, sl := range *hi { - for pt, v := range sl.Params { - nmv := sl.Sel + ": " + v - if sl.SetName != lastSet { - nmv = sl.SetName + ":" + nmv - lastSet = sl.SetName - } - ev, has := pr[pt] - if has { - pr[pt] = nmv + " | " + ev - } else { - pr[pt] = nmv - } - } - } - return pr -} diff --git a/params/hypers.go b/params/hypers.go deleted file mode 100644 index a3b9c868..00000000 --- a/params/hypers.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package params - -import ( - "bytes" - "encoding/json" - "fmt" - - "cogentcore.org/core/base/errors" -) - -// HyperValues is a string-value map for storing hyperparameter values -type HyperValues map[string]string //types:add - -// JSONString returns hyper values as a JSON formatted string -func (hv *HyperValues) JSONString() string { - var buf bytes.Buffer - b, _ := json.Marshal(hv) - buf.Write(b) - return buf.String() -} - -// SetJSONString sets from a JSON_formatted string -func (hv *HyperValues) SetJSONString(str string) error { - return json.Unmarshal([]byte(str), hv) -} - -// CopyFrom copies from another HyperValues -func (hv *HyperValues) CopyFrom(cp HyperValues) { - if *hv == nil { - *hv = make(HyperValues, len(cp)) - } - for k, v := range cp { - (*hv)[k] = v - } -} - -// Hypers is a parallel structure to Params which stores information relevant -// to hyperparameter search as well as the values. -// Use the key "Val" for the default value. This is equivalant to the value in -// Params. "Min" and "Max" guid the range, and "Sigma" describes a Gaussian. -type Hypers map[string]HyperValues //types:add - -// ParamByName returns given parameter, by name. -// Returns and logs error if not found. -func (pr *Hypers) ParamByName(name string) (map[string]string, error) { - vl, ok := (*pr)[name] - if !ok { - return vl, errors.Log(fmt.Errorf("params.Params: parameter named %v not found", name)) - } - return vl, nil -} - -// SetByName sets given parameter by name to given value. 
-// (just a wrapper around map set function) -func (pr *Hypers) SetByName(name string, value map[string]string) { - (*pr)[name] = value -} - -// CopyFrom copies hyper vals from source -func (pr *Hypers) CopyFrom(cp Hypers) { - if *pr == nil { - *pr = make(Hypers, len(cp)) - } - for path, hv := range cp { - if shv, has := (*pr)[path]; has { - shv.CopyFrom(hv) - } else { - shv := HyperValues{} - shv.CopyFrom(hv) - (*pr)[path] = shv - } - } -} - -// DeleteValOnly deletes entries that only have a "Val" entry. -// This happens when applying a param Sheet using Flex params -// to compile values using styling logic -func (pr *Hypers) DeleteValOnly() { - for path, hv := range *pr { - if len(hv) == 1 { - if _, has := (hv)["Val"]; has { - delete(*pr, path) - } - } - } -} diff --git a/params/io.go b/params/io.go deleted file mode 100644 index 65b0f034..00000000 --- a/params/io.go +++ /dev/null @@ -1,375 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package params - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "io/ioutil" - "log" - "os" - "sort" - - "cogentcore.org/core/base/indent" - "cogentcore.org/core/base/iox" - "cogentcore.org/core/base/iox/jsonx" - "cogentcore.org/core/base/iox/tomlx" - "cogentcore.org/core/core" - "github.com/BurntSushi/toml" - "golang.org/x/exp/maps" -) - -// WriteGoPrelude writes the start of a go file in package main that starts a -// variable assignment to given variable -- for start of SaveGoCode methods. -func WriteGoPrelude(w io.Writer, varNm string) { - w.Write([]byte("// File generated by params.SaveGoCode\n\n")) - w.Write([]byte("package main\n\n")) - w.Write([]byte(`import "github.com/emer/emergent/v2/params"`)) - w.Write([]byte("\n\nvar " + varNm + " = ")) -} - -// OpenJSON opens params from a JSON-formatted file. 
-func (pr *Params) OpenJSON(filename core.Filename) error { - *pr = make(Params) // reset - return jsonx.Open(pr, string(filename)) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Params) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Params) OpenTOML(filename core.Filename) error { - *pr = make(Params) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Params) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) // pelletier/go-toml produces bad output on maps - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Params) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("params.Params{\n")) - depth++ - paths := make([]string, len(*pr)) // alpha-sort paths for consistent output - ctr := 0 - for pt := range *pr { - paths[ctr] = pt - ctr++ - } - sort.StringSlice(paths).Sort() - for _, pt := range paths { - pv := (*pr)[pt] - w.Write(indent.TabBytes(depth)) - w.Write([]byte(fmt.Sprintf("%q: %q,\n", pt, pv))) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("}")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Params) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. -func (pr *Params) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedParams") - pr.WriteGoCode(fp, 0) - return nil -} - -///////////////////////////////////////////////////////// -// Hypers - -// OpenJSON opens hypers from a JSON-formatted file. 
-func (pr *Hypers) OpenJSON(filename core.Filename) error { - *pr = make(Hypers) // reset - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves hypers to a JSON-formatted file. -func (pr *Hypers) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Hypers) OpenTOML(filename core.Filename) error { - *pr = make(Hypers) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Hypers) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes hypers to corresponding Go initializer code. -func (pr *Hypers) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("params.Hypers{\n")) - depth++ - paths := maps.Keys(*pr) - sort.StringSlice(paths).Sort() - for _, pt := range paths { - pv := (*pr)[pt] - w.Write(indent.TabBytes(depth)) - w.Write([]byte(fmt.Sprintf("%q: {", pt))) - ks := maps.Keys(pv) - sort.StringSlice(ks).Sort() - for _, k := range ks { - v := pv[k] - w.Write([]byte(fmt.Sprintf("%q: %q,", k, v))) - } - w.Write([]byte("},\n")) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("}")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Hypers) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves hypers to corresponding Go initializer code. 
-func (pr *Hypers) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedHypers") - pr.WriteGoCode(fp, 0) - return nil -} - -///////////////////////////////////////////////////////// -// Sel - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Sel) OpenJSON(filename core.Filename) error { - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Sel) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Sel) OpenTOML(filename core.Filename) error { - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Sel) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Sel) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte(fmt.Sprintf("Sel: %q, Desc: %q,\n", pr.Sel, pr.Desc))) - depth++ - w.Write(indent.TabBytes(depth)) - w.Write([]byte("Params: ")) - pr.Params.WriteGoCode(w, depth) - if len(pr.Hypers) > 0 { - w.Write([]byte(", Hypers: ")) - pr.Hypers.WriteGoCode(w, depth) - } -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Sel) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. 
-func (pr *Sel) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedParamsSel") - pr.WriteGoCode(fp, 0) - return nil -} - -///////////////////////////////////////////////////////// -// Sheet - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Sheet) OpenJSON(filename core.Filename) error { - *pr = make(Sheet, 0) // reset - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Sheet) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Sheet) OpenTOML(filename core.Filename) error { - *pr = make(Sheet, 0) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Sheet) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Sheet) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("{\n")) - depth++ - for _, pv := range *pr { - w.Write(indent.TabBytes(depth)) - w.Write([]byte("{")) - pv.WriteGoCode(w, depth) - w.Write([]byte("},\n")) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("},\n")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Sheet) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. 
-func (pr *Sheet) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedParamsSheet") - pr.WriteGoCode(fp, 0) - return nil -} - -///////////////////////////////////////////////////////// -// Sets - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Sets) OpenJSON(filename core.Filename) error { - *pr = make(Sets) // reset - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Sets) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Sets) OpenTOML(filename core.Filename) error { - *pr = make(Sets) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Sets) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Sets) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("params.Sets{\n")) - depth++ - for _, st := range *pr { - w.Write(indent.TabBytes(depth)) - w.Write([]byte("{")) - st.WriteGoCode(w, depth) - w.Write([]byte("},\n")) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("}\n")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Sets) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. 
-func (pr *Sets) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedParamsSets") - pr.WriteGoCode(fp, 0) - return nil -} diff --git a/params/params.go b/params/params.go index bb1d5aa9..8e08d2aa 100644 --- a/params/params.go +++ b/params/params.go @@ -12,77 +12,34 @@ import ( "cogentcore.org/core/base/errors" ) -// Params is a name-value map for parameter values that can be applied -// to any numeric type in any object. -// The name must be a dot-separated path to a specific parameter, e.g., Path.Learn.Lrate -// The first part of the path is the overall target object type, e.g., "Path" or "Layer", -// which is used for determining if the parameter applies to a given object type. -// -// All of the params in one map must apply to the same target type because -// only the first item in the map (which could be any due to order randomization) -// is used for checking the type of the target. Also, they all fall within the same -// Sel selector scope which is used to determine what specific objects to apply the -// parameters to. -type Params map[string]string //types:add - -// ParamByName returns given parameter, by name. -// Returns and logs error if not found. -func (pr *Params) ParamByName(name string) (string, error) { - vl, ok := (*pr)[name] - if !ok { - return "", errors.Log(fmt.Errorf("params.Params: parameter named %v not found", name)) - } - return vl, nil -} - -// SetByName sets given parameter by name to given value. -// (just a wrapper around map set function) -func (pr *Params) SetByName(name, value string) { - (*pr)[name] = value -} - -/////////////////////////////////////////////////////////////////////// - -// params.Sel specifies a selector for the scope of application of a set of +// Sel specifies a selector for the scope of application of a set of // parameters, using standard css selector syntax (. 
prefix = class, # prefix = name, -// and no prefix = type) -type Sel struct { //types:add - - // selector for what to apply the parameters to, using standard css selector syntax: .Example applies to anything with a Class tag of 'Example', #Example applies to anything with a Name of 'Example', and Example with no prefix applies to anything of type 'Example' +// and no prefix = type). Type always matches, and generally should come first as an +// initial set of defaults. +type Sel[T Styler] struct { + + // Sel is the selector for what to apply the parameters to, + // using standard css selector syntax: + // - .Example applies to anything with a Class tag of 'Example' + // - #Example applies to anything with a Name of 'Example' + // - Example with no prefix or blank selector always applies. Sel string `width:"30"` - // description of these parameter values -- what effect do they have? what range was explored? it is valuable to record this information as you explore the params. - Desc string `width:"60"` + // Doc is documentation of these parameter values: what effect + // do they have? what range was explored? It is valuable to record + // this information as you explore the params. + Doc string `width:"60"` - // parameter values to apply to whatever matches the selector - Params Params `display:"no-inline"` + // Set function applies parameter values to the given object of the target type. + Set func(v T) `display:"-"` - // Put your hyperparams here - Hypers Hypers - - // number of times this selector matched a target during the last Apply process -- a warning is issued for any that remain at 0 -- see Sheet SelMatchReset and SelNoMatchWarn methods + // NMatch is the number of times this selector matched a target + // during the last Apply process. A warning is issued for any + // that remain at 0: See Sheet SelMatchReset and SelNoMatchWarn methods. 
NMatch int `table:"-" toml:"-" json:"-" xml:"-" edit:"-"` - - // name of current Set being applied - SetName string `table:"-" toml:"-" json:"-" xml:"-" edit:"-"` } -// SetFloat sets the value of given parameter -func (sl *Sel) SetFloat(param string, val float64) { - sl.Params.SetByName(param, fmt.Sprintf("%g", val)) -} - -// SetString sets the value of given parameter -func (sl *Sel) SetString(param string, val string) { - sl.Params.SetByName(param, val) -} - -// ParamVal returns the value of given parameter -func (sl *Sel) ParamValue(param string) (string, error) { - return sl.Params.ParamByName(param) -} - -/////////////////////////////////////////////////////////////////////// +//////// // Sheet is a CSS-like style-sheet of params.Sel values, each of which represents // a different set of specific parameter values applied according to the Sel selector: @@ -90,27 +47,24 @@ func (sl *Sel) ParamValue(param string) (string, error) { // // The order of elements in the Sheet list is critical, as they are applied // in the order given by the list (slice), and thus later Sel's can override -// those applied earlier. Thus, you generally want to have more general Type-level -// parameters listed first, and then subsequently more specific ones (.Class and #Name) -// -// This is the highest level of params that has an Apply method -- above this level -// application must be done under explicit program control. -type Sheet []*Sel //types:add +// those applied earlier. Generally put more general Type-level parameters first, +// and then subsequently more specific ones (.Class and #Name). +type Sheet[T Styler] []*Sel[T] -// NewSheet returns a new Sheet -func NewSheet() *Sheet { - sh := make(Sheet, 0) +// NewSheet returns a new Sheet for given type. 
+func NewSheet[T Styler]() *Sheet[T] { + sh := make(Sheet[T], 0) return &sh } -// ElemLabel satisfies the core.SliceLabeler interface to provide labels for slice elements -func (sh *Sheet) ElemLabel(idx int) string { +// ElemLabel satisfies the core.SliceLabeler interface to provide labels for slice elements. +func (sh *Sheet[T]) ElemLabel(idx int) string { return (*sh)[idx].Sel } // SelByName returns given selector within the Sheet, by Name. // Returns and logs error if not found. -func (sh *Sheet) SelByName(sel string) (*Sel, error) { +func (sh *Sheet[T]) SelByName(sel string) (*Sel[T], error) { for _, sl := range *sh { if sl.Sel == sel { return sl, nil @@ -119,79 +73,20 @@ func (sh *Sheet) SelByName(sel string) (*Sel, error) { return nil, errors.Log(fmt.Errorf("params.Sheet: Sel named %v not found", sel)) } -// SetFloat sets the value of given parameter, in selection sel -func (sh *Sheet) SetFloat(sel, param string, val float64) error { - sp, err := sh.SelByName(sel) - if err != nil { - return err - } - sp.SetFloat(param, val) - return nil -} - -// SetString sets the value of given parameter, in selection sel -func (sh *Sheet) SetString(sel, param string, val string) error { - sp, err := sh.SelByName(sel) - if err != nil { - return err - } - sp.SetString(param, val) - return nil -} - -// ParamVal returns the value of given parameter, in selection sel -func (sh *Sheet) ParamValue(sel, param string) (string, error) { - sp, err := sh.SelByName(sel) - if err != nil { - return "", err - } - return sp.ParamValue(param) -} - -/////////////////////////////////////////////////////////////////////// +//////// -// Sets is a collection of Sheets that can be chosen among -// depending on different desired configurations etc. Thus, each Set -// represents a collection of different possible specific configurations, -// and different such configurations can be chosen by name to apply as desired. 
-type Sets map[string]*Sheet //git:add +// Sheets are named collections of Sheet elements that can be chosen among +// depending on different desired configurations. +// Conventionally, there is always a Base configuration with basic-level +// defaults, and then any number of more specific sets to apply after that. +type Sheets[T Styler] map[string]*Sheet[T] // SheetByName tries to find given set by name. // Returns and logs error if not found. -func (ps *Sets) SheetByName(name string) (*Sheet, error) { +func (ps *Sheets[T]) SheetByName(name string) (*Sheet[T], error) { st, ok := (*ps)[name] if ok { return st, nil } - return nil, errors.Log(fmt.Errorf("params.Sets: Param Sheet named %s not found", name)) -} - -// SetFloat sets the value of given parameter, in selection sel, -// in sheet and set. -func (ps *Sets) SetFloat(sheet, sel, param string, val float64) error { - sp, err := ps.SheetByName(sheet) - if err != nil { - return err - } - return sp.SetFloat(sel, param, val) -} - -// SetString sets the value of given parameter, in selection sel, -// in sheet and set. Returns error if anything is not found. -func (ps *Sets) SetString(sheet, sel, param string, val string) error { - sp, err := ps.SheetByName(sheet) - if err != nil { - return err - } - return sp.SetString(sel, param, val) -} - -// ParamVal returns the value of given parameter, in selection sel, -// in sheet and set. Returns error if anything is not found. 
-func (ps *Sets) ParamValue(sheet, sel, param string) (string, error) { - sp, err := ps.SheetByName(sheet) - if err != nil { - return "", err - } - return sp.ParamValue(sel, param) + return nil, errors.Log(fmt.Errorf("params.Sheets: Param Sheet named %q not found", name)) } diff --git a/params/params_test.go b/params/params_test.go index e226d0ef..774614ad 100644 --- a/params/params_test.go +++ b/params/params_test.go @@ -5,286 +5,72 @@ package params import ( - "bytes" "testing" "github.com/stretchr/testify/assert" ) -var paramSets = Sets{ - "Base": { - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: Params{ - "Path.Learn.Norm.On": "true", - "Path.Learn.Momentum.On": "true", - "Path.Learn.WtBal.On": "false", - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, - }, - }, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "DefaultInhib": { - {Sel: "#Output", Desc: "go back to default", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }}, - }, - "NoMomentum": { - {Sel: "Path", Desc: "no norm or momentum", - Params: Params{ - "Path.Learn.Norm.On": "false", - "Path.Learn.Momentum.On": "false", - }}, - }, - "WtBalOn": { - {Sel: "Path", Desc: "weight bal on", - Params: Params{ - "Path.Learn.WtBal.On": "true", - }}, - }, +type test struct { + Name string + Class string + Norm bool + Momentum bool + WtBal bool + WtScale float32 } -var trgCode = `params.Sets{ +func (t *test) StyleName() string { return t.Name } +func (t *test) StyleClass() string { return 
t.Class } + +var paramSets = Sheets[*test]{ "Base": { - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: params.Params{ - "Path.Learn.Norm.On": "true", - "Path.Learn.Momentum.On": "true", - "Path.Learn.WtBal.On": "false", + {Sel: "", Doc: "norm and momentum on works better, but wt bal is not better for smaller nets", + Set: func(t *test) { + t.Norm = true + t.Momentum = true + t.WtBal = false }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: params.Hypers{ - "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, - }, - }, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.4", + {Sel: ".Back", Doc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + Set: func(t *test) { + t.WtScale = 0.2 }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: params.Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "DefaultInhib": { - {Sel: "#Output", Desc: "go back to default", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", + {Sel: "#ToOutput", Doc: "to output must be stronger", + Set: func(t *test) { + t.WtScale = 2.0 }}, }, "NoMomentum": { - {Sel: "Path", Desc: "no norm or momentum", - Params: params.Params{ - "Path.Learn.Norm.On": "false", - "Path.Learn.Momentum.On": "false", + {Sel: "", Doc: "no norm or momentum", + Set: func(t *test) { + t.Norm = false + t.Momentum = false }}, }, "WtBalOn": { - {Sel: "Path", Desc: "weight bal on", - Params: params.Params{ - "Path.Learn.WtBal.On": "true", + {Sel: "", Doc: "weight bal on", + Set: func(t *test) { + t.WtBal = true }}, }, } -` - -func TestParamSetsWriteGo(t *testing.T) { - t.Skip("todo: need to sort the map for this to work 
now") - var buf bytes.Buffer - paramSets.WriteGoCode(&buf, 0) - dfb := buf.Bytes() - dfs := string(dfb) - // fmt.Printf("%v", dfs) - assert.Equal(t, trgCode, dfs) -} +func TestSet(t *testing.T) { + tf := &test{} + tf.Name = "Forward" + tb := &test{} + tb.Class = "Back" + to := &test{} + to.Name = "ToOutput" -func TestParamSetsSet(t *testing.T) { - cval, err := paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - if err != nil { - t.Error(err) - } - // fmt.Printf("current value: %s\n", cval) - if cval != "false" { - t.Errorf("value should have been false: %s\n", cval) - } - err = paramSets.SetString("Base", "Path", "Path.Learn.WtBal.On", "true") - if err != nil { - t.Error(err) - } - cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - // fmt.Printf("new value: %s\n", cval) - if cval != "true" { - t.Errorf("value should have been true: %s\n", cval) - } - err = paramSets.SetFloat("Base", "Path", "Path.Learn.WtBal.On", 5.1) - if err != nil { - t.Error(err) - } - cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - // fmt.Printf("new value: %s\n", cval) - if cval != "5.1" { - t.Errorf("value should have been 5.1: %s\n", cval) - } - cval, err = paramSets.ParamValue("Basre", "Path", "Path.Learn.WtBal.On") - if err == nil { - t.Errorf("Should have had an error") - } - // fmt.Printf("error: %s\n", err) - cval, err = paramSets.ParamValue("Base", "Paths", "Path.Learn.WtBal.On") - if err == nil { - t.Errorf("Should have had an error") - } - // fmt.Printf("error: %s\n", err) -} + paramSets["Base"].Apply(tf) + assert.Equal(t, true, tf.Norm) -var trgHypers = `{ - "Hidden1": { - "Name": "Hidden1", - "Type": "Layer", - "Class": "Hidden", - "Object": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - }, - "History": [ - { - "Sel": "Layer", - "Desc": "using default 1.8 inhib for all of network -- can explore", - "Params": { - "Layer.Inhib.Layer.Gi": "1.8" - }, - "Hypers": { - 
"Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1" - } - } - } - ] - }, - "Hidden2": { - "Name": "Hidden2", - "Type": "Layer", - "Class": "Hidden", - "Object": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - }, - "History": [ - { - "Sel": "Layer", - "Desc": "using default 1.8 inhib for all of network -- can explore", - "Params": { - "Layer.Inhib.Layer.Gi": "1.8" - }, - "Hypers": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1" - } - } - } - ] - }, - "Input": { - "Name": "Input", - "Type": "Layer", - "Class": "Input", - "Object": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - }, - "History": [ - { - "Sel": "Layer", - "Desc": "using default 1.8 inhib for all of network -- can explore", - "Params": { - "Layer.Inhib.Layer.Gi": "1.8" - }, - "Hypers": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1" - } - } - } - ] - }, - "Output": { - "Name": "Output", - "Type": "Layer", - "Class": "Target", - "Object": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.4" - } - }, - "History": [ - { - "Sel": "Layer", - "Desc": "using default 1.8 inhib for all of network -- can explore", - "Params": { - "Layer.Inhib.Layer.Gi": "1.8" - }, - "Hypers": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1" - } - } - }, - { - "Sel": "#Output", - "Desc": "output definitely needs lower inhib -- true for smaller layers in general", - "Params": { - "Layer.Inhib.Layer.Gi": "1.4" - }, - "Hypers": null - } - ] - } -}` + paramSets["Base"].Apply(tb) + assert.Equal(t, float32(0.2), tb.WtScale) -func TestFlexHypers(t *testing.T) { - hypers := Flex{} - hypers.Init([]FlexVal{ - FlexVal{Name: "Input", Type: "Layer", Class: "Input", Object: Hypers{}}, - FlexVal{Name: "Hidden1", Type: "Layer", Class: "Hidden", Object: Hypers{}}, - FlexVal{Name: "Hidden2", Type: "Layer", Class: "Hidden", Object: Hypers{}}, - FlexVal{Name: "Output", Type: "Layer", Class: 
"Target", Object: Hypers{}}, - }) - basenet := paramSets["Base"] - hypers.ApplySheet(basenet, false) + paramSets["Base"].Apply(to) + assert.Equal(t, float32(2.0), to.WtScale) - dfs := hypers.JSONString() - // fmt.Printf("%s", dfs) - assert.Equal(t, trgHypers, dfs) + paramSets["NoMomentum"].Apply(tf) + assert.Equal(t, false, tf.Norm) } diff --git a/params/styler.go b/params/styler.go index 56f52196..fdc745e6 100644 --- a/params/styler.go +++ b/params/styler.go @@ -6,45 +6,25 @@ package params import "strings" -// The params.Styler interface exposes TypeName, Class, and Name methods -// that allow the params.Sel CSS-style selection specifier to determine -// whether a given parameter applies. -// Adding Set versions of Name and Class methods is a good idea but not -// needed for this interface, so they are not included here. +// Styler must be implemented by any object that parameters are +// applied to, to provide the .Class and #Name selector functionality. type Styler interface { - // StyleType returns the name of this type for CSS-style matching. - // This is used for CSS Sel selector with no prefix. - // This type is used *in addition* to the actual Go type name - // of the object, and is a kind of type-category (e.g., Layer - // or Path in emergent network objects). - StyleType() string - // StyleClass returns the space-separated list of class selectors (tags). // Parameters with a . prefix target class tags. // Do NOT include the . in the Class tags on Styler objects; - // The . is only used in the Sel selector on the params.Sel. + // The . is only used in the Sel selector on the [Sel]. StyleClass() string // StyleName returns the name of this object. // Parameters with a # prefix target object names, which are typically - // unique. Note, do not include the # prefix in the actual object name, - // only in the Sel selector on params.Sel. + // unique. Do NOT include the # prefix in the actual object name, + // which is only present in the Sel selector on [Sel]. 
StyleName() string } -// The params.StylerObject interface extends Styler to include an arbitary -// function to access the underlying object type. -type StylerObject interface { - Styler - - // StyleObject returns the object that will have its field values set by - // the params specifications. - StyleObject() any -} - -// AddClass adds given class(es) to current class string, -// ensuring it is not a duplicate of existing, and properly -// adding spaces +// AddClass is a helper function that adds given class(es) to current +// class string, ensuring it is not a duplicate of existing, and properly +// adding spaces. func AddClass(cur string, class ...string) string { cls := strings.Join(class, " ") if ClassMatch(cur, cls) { diff --git a/params/tweak.go b/params/tweak.go index ffe8121d..4f7e27c1 100644 --- a/params/tweak.go +++ b/params/tweak.go @@ -2,17 +2,12 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build not + package params import ( - "fmt" - "slices" - "strconv" - "strings" - - "cogentcore.org/core/base/reflectx" "cogentcore.org/core/math32" - "golang.org/x/exp/maps" ) func tweakValue(msd, fact, exp10 float32, isRmdr bool) float32 { @@ -102,132 +97,132 @@ type Tweaks struct { // * list of comma-delimited set of values in square brackets, e.g.: "[1.5, 1.2, 1.8]" // The resulting search values are organized by the most specific selector that generated // the final parameter value, upon which the param tweak was based. 
-func TweaksFromHypers(hypers Flex) []*Tweaks { - var tweaks []*Tweaks - sels := make(map[*Sel]Flex) - - fkeys := maps.Keys(hypers) - slices.Sort(fkeys) - for _, fnm := range fkeys { - flx := hypers[fnm] - hyps := flx.Object.(Hypers) - hkeys := maps.Keys(hyps) - slices.Sort(hkeys) - for _, ppath := range hkeys { - hv := hyps[ppath] - vl := hv["Val"] - for _, sel := range flx.History { - pv, has := sel.Params[ppath] - if !has { - continue - } - if vl != pv { - continue - } - fm, ok := sels[sel] - if !ok { - fm = make(Flex) - } - sflex, ok := fm[fnm] - if !ok { - sflex = &FlexVal{} - sflex.CopyFrom(flx) - sflex.Object = make(Hypers) - fm[fnm] = sflex - } - shyps := sflex.Object.(Hypers) - _, ok = shyps[ppath] - if !ok { - shyps[ppath] = hv - } - sflex.Object = shyps - sels[sel] = fm - } - } - } - - slnms := make(map[string]*Sel) - for sel := range sels { - slnms[sel.Sel] = sel - } - slsort := maps.Keys(slnms) - slices.Sort(slsort) - - for _, slnm := range slsort { - sel := slnms[slnm] - flx := sels[sel] - // fmt.Println(reflectx.StringJSON(sel), "\n", reflectx.StringJSON(flx)) - var f0 *FlexVal - for _, fv := range flx { - if f0 == nil { - f0 = fv - break - } - } - hyps := f0.Object.(Hypers) - hkeys := maps.Keys(hyps) - slices.Sort(hkeys) - for _, ppath := range hkeys { - twk := &Tweaks{Param: ppath, Sel: sel} - var svals []SearchValues - - fkeys := maps.Keys(flx) - slices.Sort(fkeys) - for _, fk := range fkeys { - fv := flx[fk] - hyp := fv.Object.(Hypers) - vals := hyp[ppath] - tweak, ok := vals["Tweak"] - tweak = strings.ToLower(strings.TrimSpace(tweak)) - if !ok || tweak == "" || tweak == "false" || tweak == "-" { - continue - } - - val, ok := vals["Val"] - if !ok { - continue - } - f64, err := strconv.ParseFloat(val, 32) - if err != nil { - fmt.Printf("TweakFromHypers float parse error: only works for float type params. 
Obj: %s Param: %s val: %s parse error: %v\n", fv.Name, ppath, val, err) - continue - } - start := float32(f64) - - sval := SearchValues{Name: fv.Name, Type: fv.Type, Path: ppath, Start: start} - - var pars []float32 // param vals to search - if tweak[0] == '[' { - err := reflectx.SetRobust(&pars, tweak) - if err != nil { - fmt.Println("Error processing tweak value list:", tweak, "error:", err) - continue - } - } else { - log := false - incr := false - if strings.Contains(tweak, "log") { - log = true - } - if strings.Contains(tweak, "incr") { - incr = true - } - if !log && !incr { - fmt.Printf("Tweak value not recognized: %q\n", tweak) - continue - } - pars = Tweak(start, log, incr) - } - if len(pars) > 0 { - sval.Values = pars - svals = append(svals, sval) - } - } - if len(svals) > 0 { - twk.Search = svals - tweaks = append(tweaks, twk) - } - } - } - return tweaks -} +// func TweaksFromHypers(hypers Flex) []*Tweaks { +// var tweaks []*Tweaks +// sels := make(map[*Sel]Flex) +// +// fkeys := maps.Keys(hypers) +// slices.Sort(fkeys) +// for _, fnm := range fkeys { +// flx := hypers[fnm] +// hyps := flx.Object.(Hypers) +// hkeys := maps.Keys(hyps) +// slices.Sort(hkeys) +// for _, ppath := range hkeys { +// hv := hyps[ppath] +// vl := hv["Val"] +// for _, sel := range flx.History { +// pv, has := sel.Params[ppath] +// if !has { +// continue +// } +// if vl != pv { +// continue +// } +// fm, ok := sels[sel] +// if !ok { +// fm = make(Flex) +// } +// sflex, ok := fm[fnm] +// if !ok { +// sflex = &FlexVal{} +// sflex.CopyFrom(flx) +// sflex.Object = make(Hypers) +// fm[fnm] = sflex +// } +// shyps := sflex.Object.(Hypers) +// _, ok = shyps[ppath] +// if !ok { +// shyps[ppath] = hv +// } +// sflex.Object = shyps +// sels[sel] = fm +// } +// } +// } +// +// slnms := make(map[string]*Sel) +// for sel := range sels { +// slnms[sel.Sel] = sel +// } +// slsort := maps.Keys(slnms) +// slices.Sort(slsort) +// +// for _, slnm := range slsort { +// sel := slnms[slnm] +// flx := 
sels[sel] +// // fmt.Println(reflectx.StringJSON(sel), "\n", reflectx.StringJSON(flx)) +// var f0 *FlexVal +// for _, fv := range flx { +// if f0 == nil { +// f0 = fv +// break +// } +// } +// hyps := f0.Object.(Hypers) +// hkeys := maps.Keys(hyps) +// slices.Sort(hkeys) +// for _, ppath := range hkeys { +// twk := &Tweaks{Param: ppath, Sel: sel} +// var svals []SearchValues +// +// fkeys := maps.Keys(flx) +// slices.Sort(fkeys) +// for _, fk := range fkeys { +// fv := flx[fk] +// hyp := fv.Object.(Hypers) +// vals := hyp[ppath] +// tweak, ok := vals["Tweak"] +// tweak = strings.ToLower(strings.TrimSpace(tweak)) +// if !ok || tweak == "" || tweak == "false" || tweak == "-" { +// continue +// } +// +// val, ok := vals["Val"] +// if !ok { +// continue +// } +// f64, err := strconv.ParseFloat(val, 32) +// if err != nil { +// fmt.Printf("TweakFromHypers float parse error: only works for float type params. Obj: %s Param: %s val: %s parse error: %v\n", fv.Name, ppath, val, err) +// continue +// } +// start := float32(f64) +// +// sval := SearchValues{Name: fv.Name, Type: fv.Type, Path: ppath, Start: start} +// +// var pars []float32 // param vals to search +// if tweak[0] == '[' { +// err := reflectx.SetRobust(&pars, tweak) +// if err != nil { +// fmt.Println("Error processing tweak value list:", tweak, "error:", err) +// continue +// } +// } else { +// log := false +// incr := false +// if strings.Contains(tweak, "log") { +// log = true +// } +// if strings.Contains(tweak, "incr") { +// incr = true +// } +// if !log && !incr { +// fmt.Printf("Tweak value not recognized: %q\n", tweak) +// continue +// } +// pars = Tweak(start, log, incr) +// } +// if len(pars) > 0 { +// sval.Values = pars +// svals = append(svals, sval) +// } +// } +// if len(svals) > 0 { +// twk.Search = svals +// tweaks = append(tweaks, twk) +// } +// } +// } +// return tweaks +// } diff --git a/params/tweak_test.go b/params/tweak_test.go index e5e9d21c..de25d727 100644 --- a/params/tweak_test.go +++ 
b/params/tweak_test.go @@ -4,16 +4,10 @@ package params -import ( - "testing" - - "cogentcore.org/core/base/reflectx" - "github.com/stretchr/testify/assert" -) - +/* var tweakSets = Sets{ "Base": { - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", + {Sel: "Path", Doc: "norm and momentum on works better, but wt bal is not better for smaller nets", Params: Params{ "Path.Learn.LRate": "0.02", "Path.Learn.Momentum": "0.9", @@ -22,21 +16,21 @@ var tweakSets = Sets{ "Path.Learn.LRate": {"Tweak": "log"}, "Path.Learn.Momentum": {"Tweak": "incr"}, }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", + {Sel: "Layer", Doc: "using default 1.8 inhib for all of network -- can explore", Params: Params{ "Layer.Inhib.Layer.Gi": "1.8", }, Hypers: Hypers{ "Layer.Inhib.Layer.Gi": {"Tweak": "[1.75, 1.85]"}, }}, - {Sel: "#Hidden", Desc: "output definitely needs lower inhib -- true for smaller layers in general", + {Sel: "#Hidden", Doc: "output definitely needs lower inhib -- true for smaller layers in general", Params: Params{ "Layer.Inhib.Layer.Gi": "1.4", }, Hypers: Hypers{ "Layer.Inhib.Layer.Gi": {"Tweak": "incr"}, }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + {Sel: ".Back", Doc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", Params: Params{ "Path.WtScale.Rel": "0.2", }, @@ -46,6 +40,9 @@ var tweakSets = Sets{ }, } +*/ + +/* func TestTweak(t *testing.T) { logvals := []float32{.1, .2, .5, 1, 1.5, 12, .015} logtargs := []float32{.05, .2, .1, .5, .2, 1, .5, 2, 1.2, 2, 11, 15, .012, .02} @@ -70,13 +67,15 @@ func TestTweak(t *testing.T) { } } } +*/ +/* var trgSearch = `[ { "Param": "Layer.Inhib.Layer.Gi", "Sel": { "Sel": "#Hidden", - "Desc": "output definitely needs lower inhib -- true for smaller layers in general", + "Doc": "output definitely needs lower inhib -- true for 
smaller layers in general", "Params": { "Layer.Inhib.Layer.Gi": "1.4" }, @@ -103,7 +102,7 @@ var trgSearch = `[ "Param": "Path.WtScale.Rel", "Sel": { "Sel": ".Back", - "Desc": "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + "Doc": "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", "Params": { "Path.WtScale.Rel": "0.2" }, @@ -130,7 +129,7 @@ var trgSearch = `[ "Param": "Layer.Inhib.Layer.Gi", "Sel": { "Sel": "Layer", - "Desc": "using default 1.8 inhib for all of network -- can explore", + "Doc": "using default 1.8 inhib for all of network -- can explore", "Params": { "Layer.Inhib.Layer.Gi": "1.8" }, @@ -157,7 +156,7 @@ var trgSearch = `[ "Param": "Path.Learn.LRate", "Sel": { "Sel": "Path", - "Desc": "norm and momentum on works better, but wt bal is not better for smaller nets", + "Doc": "norm and momentum on works better, but wt bal is not better for smaller nets", "Params": { "Path.Learn.LRate": "0.02", "Path.Learn.Momentum": "0.9" @@ -198,7 +197,7 @@ var trgSearch = `[ "Param": "Path.Learn.Momentum", "Sel": { "Sel": "Path", - "Desc": "norm and momentum on works better, but wt bal is not better for smaller nets", + "Doc": "norm and momentum on works better, but wt bal is not better for smaller nets", "Params": { "Path.Learn.LRate": "0.02", "Path.Learn.Momentum": "0.9" @@ -256,3 +255,5 @@ func TestTweakHypers(t *testing.T) { // fmt.Println("\n\n##########\n", ss) assert.Equal(t, trgSearch, ss) } + +*/ From 7e5664ed3385856eaedfb5387762340d69323a15 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Fri, 22 Nov 2024 03:48:15 -0800 Subject: [PATCH 12/24] patgen first pass fix -- tests need work --- params/README.md | 31 ++++++-------- params/tweak_test.go | 74 +++++++++++++++++----------------- patgen/configpats.go | 46 ++++++++++----------- patgen/configvocab.go | 53 ++++++++++++------------ patgen/configvocabpats_test.go | 11 +++-- patgen/flip.go | 10 ++--- patgen/permuted.go | 26 ++++++------ patgen/reshape.go | 13 +++--- patgen/shuffle.go | 5 +-- 9 files changed, 130 insertions(+), 139 deletions(-) diff --git a/params/README.md b/params/README.md index 2b423f4b..cdbab5e0 100644 --- a/params/README.md +++ b/params/README.md @@ -17,25 +17,18 @@ Three levels of organization are supported: TODO: replace with actual example from axon: ``` -Sheets { - "Base" { - Sel: "Layer" { - Params: { - "Layer.Inhib.Layer.Gi": "1.1", - ... - } - }, - Sel: ".Back" { - Params: { - "Path.PathScale.Rel": "0.2", - ... - } - } - }, - "Option1" { - ... - } -} +var LayerParams = axon.LayerSheets{ + "Base": { + {Sel: "Layer", Doc: "all defaults", + Set: func(ly *axon.LayerParams) { + ly.Inhib.Layer.Gi = 1.05 // 1.05 > 1.1 for short-term; 1.1 better long-run stability + ly.Inhib.Layer.FB = 0.5 // 0.5 > 0.2 > 0.1 > 1.0 -- usu 1.0 + ly.Inhib.ActAvg.Nominal = 0.06 // 0.6 > 0.5 + ly.Acts.NMDA.MgC = 1.2 // 1.2 > 1.4 here, still.. 
+ ly.Learn.RLRate.SigmoidLinear.SetBool(false) // false > true here + }}, + }, +} ``` In summary, the overall logic is all about the order of application, going from broad defaults to more specific overrides, with the following overall ordering: diff --git a/params/tweak_test.go b/params/tweak_test.go index de25d727..53af8fe7 100644 --- a/params/tweak_test.go +++ b/params/tweak_test.go @@ -6,36 +6,36 @@ package params /* var tweakSets = Sets{ - "Base": { + "Base": = { {Sel: "Path", Doc: "norm and momentum on works better, but wt bal is not better for smaller nets", Params: Params{ - "Path.Learn.LRate": "0.02", - "Path.Learn.Momentum": "0.9", + pt.Learn.LRate = "0.02", + pt.Learn.Momentum = "0.9", }, Hypers: Hypers{ - "Path.Learn.LRate": {"Tweak": "log"}, - "Path.Learn.Momentum": {"Tweak": "incr"}, + pt.Learn.LRate = {"Tweak = "log"}, + pt.Learn.Momentum = {"Tweak = "incr"}, }}, {Sel: "Layer", Doc: "using default 1.8 inhib for all of network -- can explore", Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", + ly.Inhib.Layer.Gi = "1.8", }, Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Tweak": "[1.75, 1.85]"}, + ly.Inhib.Layer.Gi = {"Tweak = "[1.75, 1.85]"}, }}, {Sel: "#Hidden", Doc: "output definitely needs lower inhib -- true for smaller layers in general", Params: Params{ - "Layer.Inhib.Layer.Gi": "1.4", + ly.Inhib.Layer.Gi = "1.4", }, Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Tweak": "incr"}, + ly.Inhib.Layer.Gi = {"Tweak = "incr"}, }}, {Sel: ".Back", Doc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", Params: Params{ - "Path.WtScale.Rel": "0.2", + pt.WtScale.Rel = "0.2", }, Hypers: Hypers{ - "Path.WtScale.Rel": {"Tweak": "log"}, + pt.WtScale.Rel = {"Tweak = "log"}, }}, }, } @@ -72,15 +72,15 @@ func TestTweak(t *testing.T) { /* var trgSearch = `[ { - "Param": "Layer.Inhib.Layer.Gi", + "Param": ly.Inhib.Layer.Gi", "Sel": { "Sel": "#Hidden", "Doc": "output definitely needs lower inhib -- true for smaller layers in 
general", "Params": { - "Layer.Inhib.Layer.Gi": "1.4" + ly.Inhib.Layer.Gi": "1.4" }, "Hypers": { - "Layer.Inhib.Layer.Gi": { + ly.Inhib.Layer.Gi": { "Tweak": "incr" } } @@ -89,7 +89,7 @@ var trgSearch = `[ { "Name": "Hidden", "Type": "Layer", - "Path": "Layer.Inhib.Layer.Gi", + "Path": ly.Inhib.Layer.Gi", "Start": 1.4, "Values": [ 1.3, @@ -99,15 +99,15 @@ var trgSearch = `[ ] }, { - "Param": "Path.WtScale.Rel", + "Param": pt.WtScale.Rel", "Sel": { "Sel": ".Back", "Doc": "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", "Params": { - "Path.WtScale.Rel": "0.2" + pt.WtScale.Rel": "0.2" }, "Hypers": { - "Path.WtScale.Rel": { + pt.WtScale.Rel": { "Tweak": "log" } } @@ -116,7 +116,7 @@ var trgSearch = `[ { "Name": "HiddenToInput", "Type": "Path", - "Path": "Path.WtScale.Rel", + "Path": pt.WtScale.Rel", "Start": 0.2, "Values": [ 0.1, @@ -126,15 +126,15 @@ var trgSearch = `[ ] }, { - "Param": "Layer.Inhib.Layer.Gi", + "Param": ly.Inhib.Layer.Gi", "Sel": { "Sel": "Layer", "Doc": "using default 1.8 inhib for all of network -- can explore", "Params": { - "Layer.Inhib.Layer.Gi": "1.8" + ly.Inhib.Layer.Gi": "1.8" }, "Hypers": { - "Layer.Inhib.Layer.Gi": { + ly.Inhib.Layer.Gi": { "Tweak": "[1.75, 1.85]" } } @@ -143,7 +143,7 @@ var trgSearch = `[ { "Name": "Input", "Type": "Layer", - "Path": "Layer.Inhib.Layer.Gi", + "Path": ly.Inhib.Layer.Gi", "Start": 1.8, "Values": [ 1.75, @@ -153,19 +153,19 @@ var trgSearch = `[ ] }, { - "Param": "Path.Learn.LRate", + "Param": pt.Learn.LRate", "Sel": { "Sel": "Path", "Doc": "norm and momentum on works better, but wt bal is not better for smaller nets", "Params": { - "Path.Learn.LRate": "0.02", - "Path.Learn.Momentum": "0.9" + pt.Learn.LRate": "0.02", + pt.Learn.Momentum": "0.9" }, "Hypers": { - "Path.Learn.LRate": { + pt.Learn.LRate": { "Tweak": "log" }, - "Path.Learn.Momentum": { + pt.Learn.Momentum": { "Tweak": "incr" } } @@ -174,7 +174,7 @@ var trgSearch = `[ { "Name": "HiddenToInput", "Type": 
"Path", - "Path": "Path.Learn.LRate", + "Path": pt.Learn.LRate", "Start": 0.02, "Values": [ 0.01, @@ -184,7 +184,7 @@ var trgSearch = `[ { "Name": "InputToHidden", "Type": "Path", - "Path": "Path.Learn.LRate", + "Path": pt.Learn.LRate", "Start": 0.02, "Values": [ 0.01, @@ -194,19 +194,19 @@ var trgSearch = `[ ] }, { - "Param": "Path.Learn.Momentum", + "Param": pt.Learn.Momentum", "Sel": { "Sel": "Path", "Doc": "norm and momentum on works better, but wt bal is not better for smaller nets", "Params": { - "Path.Learn.LRate": "0.02", - "Path.Learn.Momentum": "0.9" + pt.Learn.LRate": "0.02", + pt.Learn.Momentum": "0.9" }, "Hypers": { - "Path.Learn.LRate": { + pt.Learn.LRate": { "Tweak": "log" }, - "Path.Learn.Momentum": { + pt.Learn.Momentum": { "Tweak": "incr" } } @@ -215,7 +215,7 @@ var trgSearch = `[ { "Name": "HiddenToInput", "Type": "Path", - "Path": "Path.Learn.Momentum", + "Path": pt.Learn.Momentum", "Start": 0.9, "Values": [ 0.8, @@ -225,7 +225,7 @@ var trgSearch = `[ { "Name": "InputToHidden", "Type": "Path", - "Path": "Path.Learn.Momentum", + "Path": pt.Learn.Momentum", "Start": 0.9, "Values": [ 0.8, diff --git a/patgen/configpats.go b/patgen/configpats.go index 6a4c868f..254efca8 100644 --- a/patgen/configpats.go +++ b/patgen/configpats.go @@ -7,36 +7,36 @@ package patgen import ( "fmt" "log" - "reflect" + "slices" - "cogentcore.org/core/base/errors" + "cogentcore.org/core/base/metadata" "cogentcore.org/core/tensor/table" ) // InitPats initiates patterns to be used in MixPats -func InitPats(dt *table.Table, name, desc, inputName, outputName string, listSize, ySize, xSize, poolY, poolX int) { +func InitPats(dt *table.Table, name, doc, inputName, outputName string, listSize, ySize, xSize, poolY, poolX int) { dt.DeleteAll() - dt.SetMetaData("name", name) - dt.SetMetaData("desc", desc) + metadata.SetName(dt, name) + metadata.SetDoc(dt, doc) dt.AddStringColumn("Name") - dt.AddFloat32TensorColumn(inputName, []int{ySize, xSize, poolY, poolX}, "ySize", "xSize", 
"poolY", "poolX") - dt.AddFloat32TensorColumn(outputName, []int{ySize, xSize, poolY, poolX}, "ySize", "xSize", "poolY", "poolX") + dt.AddFloat32Column(inputName, ySize, xSize, poolY, poolX) + dt.AddFloat32Column(outputName, ySize, xSize, poolY, poolX) dt.SetNumRows(listSize) } // MixPats mixes patterns using first listSize rows in the vocabulary map // poolSource order: left right, bottom up func MixPats(dt *table.Table, mp Vocab, colName string, poolSource []string) error { - name := dt.MetaData["name"] - listSize := errors.Log1(dt.ColumnByName(colName)).Shape().Sizes[0] - ySize := errors.Log1(dt.ColumnByName(colName)).Shape().Sizes[1] - xSize := errors.Log1(dt.ColumnByName(colName)).Shape().Sizes[2] + name := metadata.Name(dt) + listSize := dt.Column(colName).DimSize(0) + ySize := dt.Column(colName).DimSize(1) + xSize := dt.Column(colName).DimSize(2) for row := 0; row < listSize; row++ { - dt.SetString("Name", row, fmt.Sprint(name, row)) + dt.Column("Name").SetString1D(fmt.Sprint(name, row), row) npool := 0 for iY := 0; iY < ySize; iY++ { for iX := 0; iX < xSize; iX++ { - trgPool := dt.Tensor(colName, row).SubSpace([]int{iY, iX}) + trgPool := dt.Column(colName).SubSpace(row, iY, iX) vocNm := poolSource[npool] voc, ok := mp[vocNm] if !ok { @@ -44,10 +44,10 @@ func MixPats(dt *table.Table, mp Vocab, colName string, poolSource []string) err log.Println(err.Error()) return err } - vocSize := voc.Shape().Sizes[0] + vocSize := voc.DimSize(0) effIndex := row % vocSize // be safe and wrap-around to re-use patterns - frmPool := voc.SubSpace([]int{effIndex}) - if !reflect.DeepEqual(trgPool.Shape().Sizes, frmPool.Shape().Sizes) { + frmPool := voc.SubSpace(effIndex) + if !slices.Equal(trgPool.Shape().Sizes, frmPool.Shape().Sizes) { err := fmt.Errorf("Vocab and pools in the table should have the same shape") log.Println(err.Error()) return err @@ -64,18 +64,18 @@ func MixPats(dt *table.Table, mp Vocab, colName string, poolSource []string) err // of vocabulary patterns, 
inserting starting at specified targRow in table. // poolSource order: left right, bottom up func MixPatsN(dt *table.Table, mp Vocab, colName string, poolSource []string, targRow, vocabStart, vocabN int) error { - name := dt.MetaData["name"] + name := metadata.Name(dt) _ = name - ySize := errors.Log1(dt.ColumnByName(colName)).Shape().Sizes[1] - xSize := errors.Log1(dt.ColumnByName(colName)).Shape().Sizes[2] + ySize := dt.Column(colName).DimSize(1) + xSize := dt.Column(colName).DimSize(2) for ri := 0; ri < vocabN; ri++ { row := targRow + ri vocIndex := vocabStart + ri - dt.SetString("Name", row, fmt.Sprint(name, row)) + dt.Column("Name").SetString1D(fmt.Sprint(name, row), row) npool := 0 for iY := 0; iY < ySize; iY++ { for iX := 0; iX < xSize; iX++ { - trgPool := dt.Tensor(colName, row).SubSpace([]int{iY, iX}) + trgPool := dt.Column(colName).SubSpace(row, iY, iX) vocNm := poolSource[npool] voc, ok := mp[vocNm] if !ok { @@ -85,8 +85,8 @@ func MixPatsN(dt *table.Table, mp Vocab, colName string, poolSource []string, ta } vocSize := voc.Shape().Sizes[0] effIndex := vocIndex % vocSize // be safe and wrap-around to re-use patterns - frmPool := voc.SubSpace([]int{effIndex}) - if !reflect.DeepEqual(trgPool.Shape().Sizes, frmPool.Shape().Sizes) { + frmPool := voc.SubSpace(effIndex) + if !slices.Equal(trgPool.Shape().Sizes, frmPool.Shape().Sizes) { err := fmt.Errorf("Vocab and pools in the table should have the same shape") log.Println(err.Error()) return err diff --git a/patgen/configvocab.go b/patgen/configvocab.go index 92ea6978..7b382747 100644 --- a/patgen/configvocab.go +++ b/patgen/configvocab.go @@ -10,6 +10,7 @@ import ( "fmt" "log" "math" + "slices" "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor" @@ -38,7 +39,7 @@ func (vc Vocab) ByName(name string) (*tensor.Float32, error) { // NOnInTensor returns the number of bits active in given tensor func NOnInTensor(trow *tensor.Float32) int { - return int(stats.SumTensor(trow)) + return 
stats.Sum(trow).Int1D(0) } // PctActInTensor returns the percent activity in given tensor (NOn / size) @@ -55,7 +56,7 @@ func NFromPct(pct float32, n int) int { // AddVocabEmpty adds an empty pool to the vocabulary. // This can be used to make test cases with missing pools. func AddVocabEmpty(mp Vocab, name string, rows, poolY, poolX int) (*tensor.Float32, error) { - tsr := tensor.NewFloat32([]int{rows, poolY, poolX}, "row", "Y", "X") + tsr := tensor.NewFloat32(rows, poolY, poolX) mp[name] = tsr return tsr, nil } @@ -72,7 +73,7 @@ func AddVocabEmpty(mp Vocab, name string, rows, poolY, poolX int) (*tensor.Float func AddVocabPermutedBinary(mp Vocab, name string, rows, poolY, poolX int, pctAct, minPctDiff float32) (*tensor.Float32, error) { nOn := NFromPct(pctAct, poolY*poolX) minDiff := NFromPct(minPctDiff, nOn) - tsr := tensor.NewFloat32([]int{rows, poolY, poolX}, "row", "Y", "X") + tsr := tensor.NewFloat32(rows, poolY, poolX) err := PermutedBinaryMinDiff(tsr, nOn, 1, 0, minDiff) mp[name] = tsr return tsr, err @@ -100,11 +101,11 @@ func AddVocabRepeat(mp Vocab, name string, rows int, copyFrom string, copyRow in tsr := &tensor.Float32{} cpshp := cp.Shape().Sizes cpshp[0] = rows - tsr.SetShape(cpshp, cp.Shape().Names...) + tsr.SetShapeSizes(cpshp...) mp[name] = tsr - cprow := cp.SubSpace([]int{copyRow}) + cprow := cp.SubSpace(copyRow) for i := 0; i < rows; i++ { - trow := tsr.SubSpace([]int{i}) + trow := tsr.SubSpace(i) trow.CopyFrom(cprow) } return tsr, nil @@ -123,17 +124,17 @@ func AddVocabDrift(mp Vocab, name string, rows int, pctDrift float32, copyFrom s tsr := &tensor.Float32{} cpshp := cp.Shape().Sizes cpshp[0] = rows - tsr.SetShape(cpshp, cp.Shape().Names...) + tsr.SetShapeSizes(cpshp...) 
mp[name] = tsr - cprow := cp.SubSpace([]int{copyRow}).(*tensor.Float32) - trow := tsr.SubSpace([]int{0}) + cprow := cp.SubSpace(copyRow).(*tensor.Float32) + trow := tsr.SubSpace(0) trow.CopyFrom(cprow) nOn := NOnInTensor(cprow) rmdr := 0.0 // remainder carryover in drift drift := float64(nOn) * float64(pctDrift) // precise fractional amount of drift for i := 1; i < rows; i++ { - srow := tsr.SubSpace([]int{i - 1}) - trow := tsr.SubSpace([]int{i}) + srow := tsr.SubSpace(i - 1) + trow := tsr.SubSpace(i) trow.CopyFrom(srow) curDrift := math.Round(drift + rmdr) // integer amount nDrift := int(curDrift) @@ -149,13 +150,13 @@ func AddVocabDrift(mp Vocab, name string, rows int, pctDrift float32, copyFrom s func VocabShuffle(mp Vocab, shufflePools []string) { for _, key := range shufflePools { tsr := mp[key] - rows := tsr.Shape().Sizes[0] - poolY := tsr.Shape().Sizes[1] - poolX := tsr.Shape().Sizes[2] + rows := tsr.DimSize(0) + poolY := tsr.DimSize(1) + poolX := tsr.DimSize(2) sRows := RandSource.Perm(rows) - sTsr := tensor.NewFloat32([]int{rows, poolY, poolX}, "row", "Y", "X") + sTsr := tensor.NewFloat32(rows, poolY, poolX) for iRow, sRow := range sRows { - sTsr.SubSpace([]int{iRow}).CopyFrom(tsr.SubSpace([]int{sRow})) + sTsr.SubSpace(iRow).CopyFrom(tsr.SubSpace(sRow)) } mp[key] = sTsr } @@ -167,18 +168,18 @@ func VocabConcat(mp Vocab, newPool string, frmPools []string) error { for i, key := range frmPools { if i > 0 { // check pool shape - if !(tsr.SubSpace([]int{0}).(*tensor.Float32).Shp.IsEqual(&mp[key].SubSpace([]int{0}).(*tensor.Float32).Shp)) { + if !slices.Equal(tsr.SubSpace(0).Shape().Sizes, mp[key].SubSpace(0).Shape().Sizes) { err := fmt.Errorf("shapes of input pools must be the same") // how do I stop the program? 
log.Println(err.Error()) return err } - currows := tsr.Shape().Sizes[0] - approws := mp[key].Shape().Sizes[0] - tsr.SetShape([]int{currows + approws, tsr.Shape().Sizes[1], tsr.Shape().Sizes[2]}, "row", "Y", "X") + currows := tsr.DimSize(0) + approws := mp[key].DimSize(0) + tsr.SetShapeSizes(currows+approws, tsr.DimSize(1), tsr.DimSize(2)) for iRow := 0; iRow < approws; iRow++ { - subtsr := tsr.SubSpace([]int{iRow + currows}) - subtsr.CopyFrom(mp[key].SubSpace([]int{iRow})) + subtsr := tsr.SubSpace(iRow + currows) + subtsr.CopyFrom(mp[key].SubSpace(iRow)) } } } @@ -190,8 +191,8 @@ func VocabConcat(mp Vocab, newPool string, frmPools []string) error { // SliceOffs is the cutoff points in the original pool, should have one more element than newPools. func VocabSlice(mp Vocab, frmPool string, newPools []string, sliceOffs []int) error { oriTsr := mp[frmPool] - poolY := oriTsr.Shape().Sizes[1] - poolX := oriTsr.Shape().Sizes[2] + poolY := oriTsr.DimSize(1) + poolX := oriTsr.DimSize(2) // check newPools and sliceOffs have same length if len(newPools)+1 != len(sliceOffs) { @@ -219,9 +220,9 @@ func VocabSlice(mp Vocab, frmPool string, newPools []string, sliceOffs []int) er for i := range newPools { toOff := sliceOffs[i+1] newPool := newPools[i] - newTsr := tensor.NewFloat32([]int{toOff - frmOff, poolY, poolX}, "row", "Y", "X") + newTsr := tensor.NewFloat32(toOff-frmOff, poolY, poolX) for off := frmOff; off < toOff; off++ { - newTsr.SubSpace([]int{off - frmOff}).CopyFrom(oriTsr.SubSpace([]int{off})) + newTsr.SubSpace(off - frmOff).CopyFrom(oriTsr.SubSpace(off)) } mp[newPool] = newTsr frmOff = toOff diff --git a/patgen/configvocabpats_test.go b/patgen/configvocabpats_test.go index 86eba68d..5de594cd 100644 --- a/patgen/configvocabpats_test.go +++ b/patgen/configvocabpats_test.go @@ -5,7 +5,6 @@ import ( "slices" "testing" - "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor/table" "github.com/stretchr/testify/assert" "golang.org/x/exp/maps" @@ -247,7 +246,7 @@ 
func TestVocab(t *testing.T) { assert.Equal(t, exbpp, m["B''"].String()) // config pats - dt := table.NewTable("TrainAB") + dt := table.New("TrainAB") InitPats(dt, "TrainAB", "describe", "Input", "ECout", 6, 3, 2, 3, 3) MixPats(dt, m, "Input", []string{"A", "B", "ctxt1", "ctxt1", "empty", "B'"}) MixPats(dt, m, "ECout", []string{"A", "B", "ctxt1", "ctxt1", "empty", "B'"}) @@ -314,8 +313,8 @@ func TestVocab(t *testing.T) { // fmt.Println("Input Pats") // fmt.Println(dt.ColumnByName("Input").Shape.Sizes) // fmt.Println(dt.ColumnByName("Input").String()) - assert.Equal(t, []int{6, 3, 2, 3, 3}, errors.Log1(dt.ColumnByName("Input")).Shape().Sizes) - assert.Equal(t, exip, errors.Log1(dt.ColumnByName("Input")).String()) + assert.Equal(t, []int{6, 3, 2, 3, 3}, dt.Column("Input").Shape().Sizes) + assert.Equal(t, exip, dt.Column("Input").String()) exop := `Tensor: [Row: 6, ySize: 3, xSize: 2, poolY: 3, poolX: 3] [0 0 0]: 0 0 0 0 1 0 @@ -378,6 +377,6 @@ func TestVocab(t *testing.T) { // fmt.Println(dt.ColumnByName("ECout").Shape.Sizes) // fmt.Println(dt.ColumnByName("ECout").String()) - assert.Equal(t, []int{6, 3, 2, 3, 3}, errors.Log1(dt.ColumnByName("ECout")).Shape().Sizes) - assert.Equal(t, exop, errors.Log1(dt.ColumnByName("ECout")).String()) + assert.Equal(t, []int{6, 3, 2, 3, 3}, dt.Column("ECout").Shape().Sizes) + assert.Equal(t, exop, dt.Column("ECout").String()) } diff --git a/patgen/flip.go b/patgen/flip.go index dfaf4614..46ea45f0 100644 --- a/patgen/flip.go +++ b/patgen/flip.go @@ -34,20 +34,20 @@ func FlipBits(tsr tensor.Tensor, nOff, nOn int, onVal, offVal float64) { nOn = len(offs) } for i := 0; i < nOff; i++ { - tsr.SetFloat1D(ons[i], offVal) + tsr.SetFloat1D(offVal, ons[i]) } for i := 0; i < nOn; i++ { - tsr.SetFloat1D(offs[i], onVal) + tsr.SetFloat1D(onVal, offs[i]) } } // FlipBitsRows turns nOff bits that are currently On to Off and // nOn bits that are currently Off to On, using permuted lists. // Iterates over the outer-most tensor dimension as rows. 
-func FlipBitsRows(tsr tensor.Tensor, nOff, nOn int, onVal, offVal float64) { - rows, _ := tsr.RowCellSize() +func FlipBitsRows(tsr tensor.Values, nOff, nOn int, onVal, offVal float64) { + rows, _ := tsr.Shape().RowCellSize() for i := 0; i < rows; i++ { - trow := tsr.SubSpace([]int{i}) + trow := tsr.SubSpace(i) FlipBits(trow, nOff, nOn, onVal, offVal) } } diff --git a/patgen/permuted.go b/patgen/permuted.go index 8b188584..acbd7a28 100644 --- a/patgen/permuted.go +++ b/patgen/permuted.go @@ -27,9 +27,9 @@ func PermutedBinary(tsr tensor.Tensor, nOn int, onVal, offVal float64) { pord := RandSource.Perm(ln) for i := 0; i < ln; i++ { if i < nOn { - tsr.SetFloat1D(pord[i], onVal) + tsr.SetFloat1D(onVal, pord[i]) } else { - tsr.SetFloat1D(pord[i], offVal) + tsr.SetFloat1D(offVal, pord[i]) } } } @@ -38,7 +38,7 @@ func PermutedBinary(tsr tensor.Tensor, nOn int, onVal, offVal float64) { // and sets each row to contain nOn onVal values and the remainder are offVal values, // using a permuted order of tensor elements (i.e., randomly shuffled or permuted). func PermutedBinaryRows(tsr tensor.Tensor, nOn int, onVal, offVal float64) { - rows, cells := tsr.RowCellSize() + rows, cells := tsr.Shape().RowCellSize() if rows == 0 || cells == 0 { return } @@ -47,9 +47,9 @@ func PermutedBinaryRows(tsr tensor.Tensor, nOn int, onVal, offVal float64) { stidx := rw * cells for i := 0; i < cells; i++ { if i < nOn { - tsr.SetFloat1D(stidx+pord[i], onVal) + tsr.SetFloat1D(onVal, stidx+pord[i]) } else { - tsr.SetFloat1D(stidx+pord[i], offVal) + tsr.SetFloat1D(offVal, stidx+pord[i]) } } randx.PermuteInts(pord, RandSource) @@ -68,7 +68,7 @@ var MinDiffPrintIters = false // If the mindiff constraint cannot be met within a reasonable number of iterations, // then an error is returned. 
func PermutedBinaryMinDiff(tsr *tensor.Float32, nOn int, onVal, offVal float32, minDiff int) error { - rows, cells := tsr.RowCellSize() + rows, cells := tsr.Shape().RowCellSize() if rows == 0 || cells == 0 { return errors.New("empty tensor") } @@ -97,10 +97,10 @@ func PermutedBinaryMinDiff(tsr *tensor.Float32, nOn int, onVal, offVal float32, nbad := 0 mxnun := 0 for r1 := 0; r1 < rows; r1++ { - r1v := tsr.SubSpace([]int{r1}).(*tensor.Float32) + r1v := tsr.SubSpace(r1).(*tensor.Float32) for r2 := r1 + 1; r2 < rows; r2++ { - r2v := tsr.SubSpace([]int{r2}).(*tensor.Float32) - dst := metric.Hamming32(r1v.Values, r2v.Values) + r2v := tsr.SubSpace(r2).(*tensor.Float32) + dst := metric.Hamming(r1v, r2v).Float1D(0) df := int(math.Round(float64(.5 * dst))) if df < minDiff { nunder[r1]++ @@ -130,16 +130,16 @@ func PermutedBinaryMinDiff(tsr *tensor.Float32, nOn int, onVal, offVal float32, // RowVsPrevDist32 returns the minimum and maximum distance between the given row // in tensor and all previous rows. Row must be >= 1 and < total rows. // (outer-most dimension is row, as in columns of table.Table). 
-func RowVsPrevDist32(tsr *tensor.Float32, row int, fun metric.Func32) (min, max float32) { +func RowVsPrevDist32(tsr *tensor.Float32, row int, fun metric.MetricFunc) (min, max float32) { if row < 1 { return } min = float32(math.MaxFloat32) max = float32(-math.MaxFloat32) - lrow := tsr.SubSpace([]int{row}).(*tensor.Float32) + lrow := tsr.SubSpace(row).(*tensor.Float32) for i := 0; i <= row-1; i++ { - crow := tsr.SubSpace([]int{i}).(*tensor.Float32) - dst := fun(lrow.Values, crow.Values) + crow := tsr.SubSpace(i).(*tensor.Float32) + dst := float32(fun(lrow, crow).Float1D(0)) min = math32.Min(min, dst) max = math32.Max(max, dst) } diff --git a/patgen/reshape.go b/patgen/reshape.go index f532d6c4..6ce8ab70 100644 --- a/patgen/reshape.go +++ b/patgen/reshape.go @@ -9,6 +9,7 @@ import ( "reflect" "cogentcore.org/core/core" + "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" ) @@ -17,17 +18,15 @@ import ( // that are float 2D or 4D columns -- assumes these are layer patterns // and names dimensions accordingly. func ReshapeCpp(dt *table.Table) { - for _, cl := range dt.Columns { + for _, cl := range dt.Columns.Values { shp := cl.Shape().Sizes if cl.NumDims() == 3 && (cl.DataType() == reflect.Float32 || cl.DataType() == reflect.Float64) { revshp := []int{shp[0], shp[2], shp[1]} // [0] = row - dnms := []string{"Row", "Y", "X"} - cl.SetShape(revshp, dnms...) + cl.SetShapeSizes(revshp...) } if cl.NumDims() == 5 && (cl.DataType() == reflect.Float32 || cl.DataType() == reflect.Float64) { revshp := []int{shp[0], shp[4], shp[3], shp[2], shp[1]} // [0] = row - dnms := []string{"Row", "PoolY", "PoolX", "NeurY", "NeurX"} - cl.SetShape(revshp, dnms...) + cl.SetShapeSizes(revshp...) } } } @@ -35,11 +34,11 @@ func ReshapeCpp(dt *table.Table) { // ReshapeCppFile fixes C++ emergent table shape which is reversed from Go. 
// It loads file from fname and saves to fixnm func ReshapeCppFile(dt *table.Table, fname, fixnm string) { - err := dt.OpenCSV(core.Filename(fname), table.Tab) + err := dt.OpenCSV(core.Filename(fname), tensor.Tab) if err != nil { log.Println(err) return } ReshapeCpp(dt) - dt.SaveCSV(core.Filename(fixnm), table.Tab, true) + dt.SaveCSV(core.Filename(fixnm), tensor.Tab, true) } diff --git a/patgen/shuffle.go b/patgen/shuffle.go index de771705..484696a3 100644 --- a/patgen/shuffle.go +++ b/patgen/shuffle.go @@ -18,7 +18,7 @@ func Shuffle(dt *table.Table, rows []int, colNames []string, colIndependent bool copy(sfrows, rows) randx.PermuteInts(sfrows, RandSource) for i, row := range rows { - dt.Tensor(colNm, row).CopyFrom(cl.Tensor(colNm, sfrows[i])) + dt.Column(colNm).RowTensor(row).CopyFrom(cl.Column(colNm).RowTensor(sfrows[i])) } } } else { // shuffle together @@ -27,9 +27,8 @@ func Shuffle(dt *table.Table, rows []int, colNames []string, colIndependent bool randx.PermuteInts(sfrows, RandSource) for _, colNm := range colNames { for i, row := range rows { - dt.Tensor(colNm, row).CopyFrom(cl.Tensor(colNm, sfrows[i])) + dt.Column(colNm).RowTensor(row).CopyFrom(cl.Column(colNm).RowTensor(sfrows[i])) } } } - } From 4b9e69757706a9398dd3d10d8cebc189ba72a5ac Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Fri, 22 Nov 2024 17:34:45 -0800 Subject: [PATCH 13/24] goal: patgen test outputs updated to new format --- patgen/configvocabpats_test.go | 580 +++++++++++++++++---------------- 1 file changed, 295 insertions(+), 285 deletions(-) diff --git a/patgen/configvocabpats_test.go b/patgen/configvocabpats_test.go index 5de594cd..a11bdbe7 100644 --- a/patgen/configvocabpats_test.go +++ b/patgen/configvocabpats_test.go @@ -14,7 +14,7 @@ func TestVocab(t *testing.T) { NewRand(10) m := make(Vocab) AddVocabEmpty(m, "empty", 6, 3, 3) - AddVocabPermutedBinary(m, "A", 6, 3, 3, 0.3, 0.5) + AddVocabPermutedBinary(m, "A", 6, 3, 3, 0.3, 0.4) AddVocabDrift(m, "B", 6, 0.2, "A", 0) // nOn=4*(3*3*0.3); nDrift=nOn*0.5 AddVocabRepeat(m, "ctxt1", 6, "A", 0) VocabConcat(m, "AB-C", []string{"A", "B"}) @@ -29,216 +29,224 @@ func TestVocab(t *testing.T) { // fmt.Println(reflect.ValueOf(m).MapKeys()) assert.Equal(t, exmap, fmt.Sprintf("%v", keys)) - exempty := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 0 0 -[0 1]: 0 0 0 -[0 2]: 0 0 0 -[1 0]: 0 0 0 -[1 1]: 0 0 0 -[1 2]: 0 0 0 -[2 0]: 0 0 0 -[2 1]: 0 0 0 -[2 2]: 0 0 0 -[3 0]: 0 0 0 -[3 1]: 0 0 0 -[3 2]: 0 0 0 -[4 0]: 0 0 0 -[4 1]: 0 0 0 -[4 2]: 0 0 0 -[5 0]: 0 0 0 -[5 1]: 0 0 0 -[5 2]: 0 0 0 + exempty := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 0 0 +[0 1] 0 0 0 +[0 2] 0 0 0 +[1 0] 0 0 0 +[1 1] 0 0 0 +[1 2] 0 0 0 +[2 0] 0 0 0 +[2 1] 0 0 0 +[2 2] 0 0 0 +[3 0] 0 0 0 +[3 1] 0 0 0 +[3 2] 0 0 0 +[4 0] 0 0 0 +[4 1] 0 0 0 +[4 2] 0 0 0 +[5 0] 0 0 0 +[5 1] 0 0 0 +[5 2] 0 0 0 ` // fmt.Println("empty") // fmt.Println(m["empty"].String()) assert.Equal(t, exempty, m["empty"].String()) - exa := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 0 0 -[1 1]: 1 0 1 -[1 2]: 0 1 0 -[2 0]: 1 0 1 -[2 1]: 0 0 0 -[2 2]: 0 1 0 -[3 0]: 0 1 0 -[3 1]: 0 1 0 -[3 2]: 0 0 1 -[4 0]: 0 0 0 -[4 1]: 0 1 0 -[4 2]: 1 1 0 -[5 0]: 0 0 0 -[5 1]: 1 0 0 -[5 2]: 1 0 1 + exa := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 
0] 1 0 1 +[1 1] 1 0 0 +[1 2] 0 0 0 +[2 0] 0 0 0 +[2 1] 1 0 0 +[2 2] 1 1 0 +[3 0] 0 1 0 +[3 1] 0 0 0 +[3 2] 1 0 1 +[4 0] 1 0 1 +[4 1] 0 1 0 +[4 2] 0 0 0 +[5 0] 1 0 0 +[5 1] 0 0 1 +[5 2] 1 0 0 ` // fmt.Println("A") // fmt.Println(m["A"].String()) assert.Equal(t, exa, m["A"].String()) - exb := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 0 1 -[1 1]: 0 1 0 -[1 2]: 1 0 0 -[2 0]: 0 0 1 -[2 1]: 0 1 0 -[2 2]: 1 0 0 -[3 0]: 0 1 0 -[3 1]: 0 1 0 -[3 2]: 1 0 0 -[4 0]: 0 1 0 -[4 1]: 0 1 0 -[4 2]: 1 0 0 -[5 0]: 0 1 1 -[5 1]: 0 0 0 -[5 2]: 1 0 0 + exb := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 0 1 0 +[1 1] 1 0 1 +[1 2] 0 0 0 +[2 0] 0 1 0 +[2 1] 1 0 1 +[2 2] 0 0 0 +[3 0] 0 1 1 +[3 1] 1 0 0 +[3 2] 0 0 0 +[4 0] 0 1 1 +[4 1] 1 0 0 +[4 2] 0 0 0 +[5 0] 0 1 1 +[5 1] 1 0 0 +[5 2] 0 0 0 ` // fmt.Println("B") // fmt.Println(m["B"].String()) assert.Equal(t, exb, m["B"].String()) - exctxt := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 1 1 -[1 1]: 0 0 0 -[1 2]: 1 0 0 -[2 0]: 0 1 1 -[2 1]: 0 0 0 -[2 2]: 1 0 0 -[3 0]: 0 1 1 -[3 1]: 0 0 0 -[3 2]: 1 0 0 -[4 0]: 0 1 1 -[4 1]: 0 0 0 -[4 2]: 1 0 0 -[5 0]: 0 1 1 -[5 1]: 0 0 0 -[5 2]: 1 0 0 + exctxt := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 0 1 0 +[1 1] 1 0 1 +[1 2] 0 0 0 +[2 0] 0 1 0 +[2 1] 1 0 1 +[2 2] 0 0 0 +[3 0] 0 1 0 +[3 1] 1 0 1 +[3 2] 0 0 0 +[4 0] 0 1 0 +[4 1] 1 0 1 +[4 2] 0 0 0 +[5 0] 0 1 0 +[5 1] 1 0 1 +[5 2] 0 0 0 ` // fmt.Println("ctxt1") // fmt.Println(m["ctxt1"].String()) assert.Equal(t, exctxt, m["ctxt1"].String()) - exabc := `Tensor: [row: 12, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 0 0 -[1 1]: 1 0 1 -[1 2]: 0 1 0 -[2 0]: 1 0 1 -[2 1]: 0 0 0 -[2 2]: 0 1 0 -[3 0]: 0 1 0 -[3 1]: 0 1 0 -[3 2]: 0 0 1 -[4 0]: 0 0 0 -[4 1]: 0 1 0 -[4 2]: 1 1 0 -[5 0]: 0 0 0 -[5 1]: 1 0 0 -[5 2]: 1 0 1 -[6 0]: 0 1 1 -[6 1]: 0 0 0 -[6 2]: 1 0 0 -[7 0]: 0 0 1 -[7 1]: 0 1 0 -[7 2]: 1 0 0 -[8 
0]: 0 0 1 -[8 1]: 0 1 0 -[8 2]: 1 0 0 -[9 0]: 0 1 0 -[9 1]: 0 1 0 -[9 2]: 1 0 0 -[10 0]: 0 1 0 -[10 1]: 0 1 0 -[10 2]: 1 0 0 -[11 0]: 0 1 1 -[11 1]: 0 0 0 -[11 2]: 1 0 0 + exabc := `[12 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 1 0 1 +[1 1] 1 0 0 +[1 2] 0 0 0 +[2 0] 0 0 0 +[2 1] 1 0 0 +[2 2] 1 1 0 +[3 0] 0 1 0 +[3 1] 0 0 0 +[3 2] 1 0 1 +[4 0] 1 0 1 +[4 1] 0 1 0 +[4 2] 0 0 0 +[5 0] 1 0 0 +[5 1] 0 0 1 +[5 2] 1 0 0 +[6 0] 0 1 0 +[6 1] 1 0 1 +[6 2] 0 0 0 +[7 0] 0 1 0 +[7 1] 1 0 1 +[7 2] 0 0 0 +[8 0] 0 1 0 +[8 1] 1 0 1 +[8 2] 0 0 0 +[9 0] 0 1 1 +[9 1] 1 0 0 +[9 2] 0 0 0 +[10 0] 0 1 1 +[10 1] 1 0 0 +[10 2] 0 0 0 +[11 0] 0 1 1 +[11 1] 1 0 0 +[11 2] 0 0 0 ` // fmt.Println("AB-C") // fmt.Println(m["AB-C"].String()) assert.Equal(t, exabc, m["AB-C"].String()) - exap := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 0 0 -[1 1]: 1 0 1 -[1 2]: 0 1 0 -[2 0]: 1 0 1 -[2 1]: 0 0 0 -[2 2]: 0 1 0 -[3 0]: 0 1 0 -[3 1]: 0 1 0 -[3 2]: 0 0 1 -[4 0]: 0 0 0 -[4 1]: 0 1 0 -[4 2]: 1 1 0 -[5 0]: 0 0 0 -[5 1]: 1 0 0 -[5 2]: 1 0 1 + exap := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 1 0 1 +[1 1] 1 0 0 +[1 2] 0 0 0 +[2 0] 0 0 0 +[2 1] 1 0 0 +[2 2] 1 1 0 +[3 0] 0 1 0 +[3 1] 0 0 0 +[3 2] 1 0 1 +[4 0] 1 0 1 +[4 1] 0 1 0 +[4 2] 0 0 0 +[5 0] 1 0 0 +[5 1] 0 0 1 +[5 2] 1 0 0 ` // fmt.Println("A'") // fmt.Println(m["A'"].String()) assert.Equal(t, exap, m["A'"].String()) - exbp := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 1 0 -[1 1]: 0 1 0 -[1 2]: 1 0 0 -[2 0]: 0 0 1 -[2 1]: 0 1 0 -[2 2]: 1 0 0 -[3 0]: 0 0 1 -[3 1]: 0 1 0 -[3 2]: 1 0 0 -[4 0]: 0 1 1 -[4 1]: 0 0 0 -[4 2]: 1 0 0 -[5 0]: 0 1 0 -[5 1]: 0 1 0 -[5 2]: 1 0 0 + exbp := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 0 1 1 +[1 1] 1 0 0 +[1 2] 0 0 0 +[2 0] 0 1 1 +[2 1] 1 0 0 +[2 2] 0 0 0 +[3 0] 0 1 0 +[3 1] 1 0 1 +[3 2] 0 0 0 +[4 0] 0 1 1 +[4 1] 1 0 0 +[4 2] 0 0 0 +[5 0] 0 1 0 +[5 1] 1 
0 1 +[5 2] 0 0 0 ` // fmt.Println("B'") // fmt.Println(m["B'"].String()) assert.Equal(t, exbp, m["B'"].String()) - exbpp := `Tensor: [row: 6, Y: 3, X: 3] -[0 0]: 0 1 1 -[0 1]: 0 0 0 -[0 2]: 1 0 0 -[1 0]: 0 1 0 -[1 1]: 0 1 0 -[1 2]: 1 0 0 -[2 0]: 0 0 1 -[2 1]: 0 1 0 -[2 2]: 1 0 0 -[3 0]: 0 0 1 -[3 1]: 0 1 0 -[3 2]: 1 0 0 -[4 0]: 0 1 1 -[4 1]: 0 0 0 -[4 2]: 1 0 0 -[5 0]: 0 1 0 -[5 1]: 0 1 0 -[5 2]: 1 0 0 + exbpp := `[6 3 3] +[r r c] [0] [1] [2] +[0 0] 0 1 0 +[0 1] 1 0 1 +[0 2] 0 0 0 +[1 0] 0 1 1 +[1 1] 1 0 0 +[1 2] 0 0 0 +[2 0] 0 1 1 +[2 1] 1 0 0 +[2 2] 0 0 0 +[3 0] 0 1 0 +[3 1] 1 0 1 +[3 2] 0 0 0 +[4 0] 0 1 1 +[4 1] 1 0 0 +[4 2] 0 0 0 +[5 0] 0 1 0 +[5 1] 1 0 1 +[5 2] 0 0 0 ` // fmt.Println("B''") @@ -254,128 +262,130 @@ func TestVocab(t *testing.T) { // try shuffle Shuffle(dt, []int{0, 1, 2, 3, 4, 5}, []string{"Input", "ECout"}, false) - exip := `Tensor: [Row: 6, ySize: 3, xSize: 2, poolY: 3, poolX: 3] -[0 0 0]: 0 0 0 0 1 0 -[0 0 1]: 0 1 0 0 1 0 -[0 0 2]: 1 1 0 1 0 0 -[0 1 0]: 0 1 1 0 1 1 -[0 1 1]: 0 0 0 0 0 0 -[0 1 2]: 1 0 0 1 0 0 -[0 2 0]: 0 0 0 0 1 1 -[0 2 1]: 0 0 0 0 0 0 -[0 2 2]: 0 0 0 1 0 0 -[1 0 0]: 0 1 0 0 1 0 -[1 0 1]: 0 1 0 0 1 0 -[1 0 2]: 0 0 1 1 0 0 -[1 1 0]: 0 1 1 0 1 1 -[1 1 1]: 0 0 0 0 0 0 -[1 1 2]: 1 0 0 1 0 0 -[1 2 0]: 0 0 0 0 0 1 -[1 2 1]: 0 0 0 0 1 0 -[1 2 2]: 0 0 0 1 0 0 -[2 0 0]: 1 0 1 0 0 1 -[2 0 1]: 0 0 0 0 1 0 -[2 0 2]: 0 1 0 1 0 0 -[2 1 0]: 0 1 1 0 1 1 -[2 1 1]: 0 0 0 0 0 0 -[2 1 2]: 1 0 0 1 0 0 -[2 2 0]: 0 0 0 0 0 1 -[2 2 1]: 0 0 0 0 1 0 -[2 2 2]: 0 0 0 1 0 0 -[3 0 0]: 0 0 0 0 0 1 -[3 0 1]: 1 0 1 0 1 0 -[3 0 2]: 0 1 0 1 0 0 -[3 1 0]: 0 1 1 0 1 1 -[3 1 1]: 0 0 0 0 0 0 -[3 1 2]: 1 0 0 1 0 0 -[3 2 0]: 0 0 0 0 1 0 -[3 2 1]: 0 0 0 0 1 0 -[3 2 2]: 0 0 0 1 0 0 -[4 0 0]: 0 0 0 0 1 1 -[4 0 1]: 1 0 0 0 0 0 -[4 0 2]: 1 0 1 1 0 0 -[4 1 0]: 0 1 1 0 1 1 -[4 1 1]: 0 0 0 0 0 0 -[4 1 2]: 1 0 0 1 0 0 -[4 2 0]: 0 0 0 0 1 0 -[4 2 1]: 0 0 0 0 1 0 -[4 2 2]: 0 0 0 1 0 0 -[5 0 0]: 0 1 1 0 1 1 -[5 0 1]: 0 0 0 0 0 0 -[5 0 2]: 1 0 0 1 0 0 -[5 1 0]: 0 1 1 0 1 1 -[5 1 
1]: 0 0 0 0 0 0 -[5 1 2]: 1 0 0 1 0 0 -[5 2 0]: 0 0 0 0 1 1 -[5 2 1]: 0 0 0 0 0 0 -[5 2 2]: 0 0 0 1 0 0 + exip := `Input [6 3 2 3 3] +[r r c r c] [0 0] [0 1] [0 2] [1 0] [1 1] [1 2] +[0 0 0] 0 0 0 0 1 0 +[0 0 1] 1 0 0 1 0 1 +[0 0 2] 1 1 0 0 0 0 +[0 1 0] 0 1 0 0 1 0 +[0 1 1] 1 0 1 1 0 1 +[0 1 2] 0 0 0 0 0 0 +[0 2 0] 0 0 0 0 1 1 +[0 2 1] 0 0 0 1 0 0 +[0 2 2] 0 0 0 0 0 0 +[1 0 0] 1 0 1 0 1 1 +[1 0 1] 0 1 0 1 0 0 +[1 0 2] 0 0 0 0 0 0 +[1 1 0] 0 1 0 0 1 0 +[1 1 1] 1 0 1 1 0 1 +[1 1 2] 0 0 0 0 0 0 +[1 2 0] 0 0 0 0 1 1 +[1 2 1] 0 0 0 1 0 0 +[1 2 2] 0 0 0 0 0 0 +[2 0 0] 1 0 1 0 1 1 +[2 0 1] 0 1 0 1 0 0 +[2 0 2] 0 0 0 0 0 0 +[2 1 0] 0 1 0 0 1 0 +[2 1 1] 1 0 1 1 0 1 +[2 1 2] 0 0 0 0 0 0 +[2 2 0] 0 0 0 0 1 1 +[2 2 1] 0 0 0 1 0 0 +[2 2 2] 0 0 0 0 0 0 +[3 0 0] 1 0 0 0 1 1 +[3 0 1] 0 0 1 1 0 0 +[3 0 2] 1 0 0 0 0 0 +[3 1 0] 0 1 0 0 1 0 +[3 1 1] 1 0 1 1 0 1 +[3 1 2] 0 0 0 0 0 0 +[3 2 0] 0 0 0 0 1 0 +[3 2 1] 0 0 0 1 0 1 +[3 2 2] 0 0 0 0 0 0 +[4 0 0] 0 0 0 0 1 0 +[4 0 1] 1 0 0 1 0 1 +[4 0 2] 1 1 0 0 0 0 +[4 1 0] 0 1 0 0 1 0 +[4 1 1] 1 0 1 1 0 1 +[4 1 2] 0 0 0 0 0 0 +[4 2 0] 0 0 0 0 1 1 +[4 2 1] 0 0 0 1 0 0 +[4 2 2] 0 0 0 0 0 0 +[5 0 0] 1 0 0 0 1 1 +[5 0 1] 0 0 1 1 0 0 +[5 0 2] 1 0 0 0 0 0 +[5 1 0] 0 1 0 0 1 0 +[5 1 1] 1 0 1 1 0 1 +[5 1 2] 0 0 0 0 0 0 +[5 2 0] 0 0 0 0 1 0 +[5 2 1] 0 0 0 1 0 1 +[5 2 2] 0 0 0 0 0 0 ` // fmt.Println("Input Pats") - // fmt.Println(dt.ColumnByName("Input").Shape.Sizes) - // fmt.Println(dt.ColumnByName("Input").String()) + // fmt.Println(dt.Column("Input").Shape().Sizes) + // fmt.Println(dt.Column("Input").String()) assert.Equal(t, []int{6, 3, 2, 3, 3}, dt.Column("Input").Shape().Sizes) assert.Equal(t, exip, dt.Column("Input").String()) - exop := `Tensor: [Row: 6, ySize: 3, xSize: 2, poolY: 3, poolX: 3] -[0 0 0]: 0 0 0 0 1 0 -[0 0 1]: 0 1 0 0 1 0 -[0 0 2]: 1 1 0 1 0 0 -[0 1 0]: 0 1 1 0 1 1 -[0 1 1]: 0 0 0 0 0 0 -[0 1 2]: 1 0 0 1 0 0 -[0 2 0]: 0 0 0 0 1 1 -[0 2 1]: 0 0 0 0 0 0 -[0 2 2]: 0 0 0 1 0 0 -[1 0 0]: 0 1 0 0 1 0 -[1 0 1]: 0 1 0 0 1 0 -[1 0 2]: 0 0 1 
1 0 0 -[1 1 0]: 0 1 1 0 1 1 -[1 1 1]: 0 0 0 0 0 0 -[1 1 2]: 1 0 0 1 0 0 -[1 2 0]: 0 0 0 0 0 1 -[1 2 1]: 0 0 0 0 1 0 -[1 2 2]: 0 0 0 1 0 0 -[2 0 0]: 1 0 1 0 0 1 -[2 0 1]: 0 0 0 0 1 0 -[2 0 2]: 0 1 0 1 0 0 -[2 1 0]: 0 1 1 0 1 1 -[2 1 1]: 0 0 0 0 0 0 -[2 1 2]: 1 0 0 1 0 0 -[2 2 0]: 0 0 0 0 0 1 -[2 2 1]: 0 0 0 0 1 0 -[2 2 2]: 0 0 0 1 0 0 -[3 0 0]: 0 0 0 0 0 1 -[3 0 1]: 1 0 1 0 1 0 -[3 0 2]: 0 1 0 1 0 0 -[3 1 0]: 0 1 1 0 1 1 -[3 1 1]: 0 0 0 0 0 0 -[3 1 2]: 1 0 0 1 0 0 -[3 2 0]: 0 0 0 0 1 0 -[3 2 1]: 0 0 0 0 1 0 -[3 2 2]: 0 0 0 1 0 0 -[4 0 0]: 0 0 0 0 1 1 -[4 0 1]: 1 0 0 0 0 0 -[4 0 2]: 1 0 1 1 0 0 -[4 1 0]: 0 1 1 0 1 1 -[4 1 1]: 0 0 0 0 0 0 -[4 1 2]: 1 0 0 1 0 0 -[4 2 0]: 0 0 0 0 1 0 -[4 2 1]: 0 0 0 0 1 0 -[4 2 2]: 0 0 0 1 0 0 -[5 0 0]: 0 1 1 0 1 1 -[5 0 1]: 0 0 0 0 0 0 -[5 0 2]: 1 0 0 1 0 0 -[5 1 0]: 0 1 1 0 1 1 -[5 1 1]: 0 0 0 0 0 0 -[5 1 2]: 1 0 0 1 0 0 -[5 2 0]: 0 0 0 0 1 1 -[5 2 1]: 0 0 0 0 0 0 -[5 2 2]: 0 0 0 1 0 0 + exop := `ECout [6 3 2 3 3] +[r r c r c] [0 0] [0 1] [0 2] [1 0] [1 1] [1 2] +[0 0 0] 0 0 0 0 1 0 +[0 0 1] 1 0 0 1 0 1 +[0 0 2] 1 1 0 0 0 0 +[0 1 0] 0 1 0 0 1 0 +[0 1 1] 1 0 1 1 0 1 +[0 1 2] 0 0 0 0 0 0 +[0 2 0] 0 0 0 0 1 1 +[0 2 1] 0 0 0 1 0 0 +[0 2 2] 0 0 0 0 0 0 +[1 0 0] 1 0 1 0 1 1 +[1 0 1] 0 1 0 1 0 0 +[1 0 2] 0 0 0 0 0 0 +[1 1 0] 0 1 0 0 1 0 +[1 1 1] 1 0 1 1 0 1 +[1 1 2] 0 0 0 0 0 0 +[1 2 0] 0 0 0 0 1 1 +[1 2 1] 0 0 0 1 0 0 +[1 2 2] 0 0 0 0 0 0 +[2 0 0] 1 0 1 0 1 1 +[2 0 1] 0 1 0 1 0 0 +[2 0 2] 0 0 0 0 0 0 +[2 1 0] 0 1 0 0 1 0 +[2 1 1] 1 0 1 1 0 1 +[2 1 2] 0 0 0 0 0 0 +[2 2 0] 0 0 0 0 1 1 +[2 2 1] 0 0 0 1 0 0 +[2 2 2] 0 0 0 0 0 0 +[3 0 0] 1 0 0 0 1 1 +[3 0 1] 0 0 1 1 0 0 +[3 0 2] 1 0 0 0 0 0 +[3 1 0] 0 1 0 0 1 0 +[3 1 1] 1 0 1 1 0 1 +[3 1 2] 0 0 0 0 0 0 +[3 2 0] 0 0 0 0 1 0 +[3 2 1] 0 0 0 1 0 1 +[3 2 2] 0 0 0 0 0 0 +[4 0 0] 0 0 0 0 1 0 +[4 0 1] 1 0 0 1 0 1 +[4 0 2] 1 1 0 0 0 0 +[4 1 0] 0 1 0 0 1 0 +[4 1 1] 1 0 1 1 0 1 +[4 1 2] 0 0 0 0 0 0 +[4 2 0] 0 0 0 0 1 1 +[4 2 1] 0 0 0 1 0 0 +[4 2 2] 0 0 0 0 0 0 +[5 0 0] 1 0 0 0 1 1 +[5 0 1] 0 0 1 1 0 0 +[5 
0 2] 1 0 0 0 0 0 +[5 1 0] 0 1 0 0 1 0 +[5 1 1] 1 0 1 1 0 1 +[5 1 2] 0 0 0 0 0 0 +[5 2 0] 0 0 0 0 1 0 +[5 2 1] 0 0 0 1 0 1 +[5 2 2] 0 0 0 0 0 0 ` // fmt.Println("ECout Pats") - // fmt.Println(dt.ColumnByName("ECout").Shape.Sizes) - // fmt.Println(dt.ColumnByName("ECout").String()) + // fmt.Println(dt.Column("ECout").Shape().Sizes) + // fmt.Println(dt.Column("ECout").String()) assert.Equal(t, []int{6, 3, 2, 3, 3}, dt.Column("ECout").Shape().Sizes) assert.Equal(t, exop, dt.Column("ECout").String()) From 0511a19223be75be2661fcaec3f404f7131fc403 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 23 Nov 2024 14:51:18 -0800 Subject: [PATCH 14/24] looper returns stop level, egui uses that to pass stop info, so counters can be properly updated --- egui/gui.go | 10 +++--- egui/loopctrl.go | 8 ++--- env/README.md | 16 +++------ env/counter.go | 68 +++++++++++++++++++++++++++++++++++++ env/ctr.go | 73 ---------------------------------------- env/ctrs.go | 64 ----------------------------------- env/curprev.go | 20 +++++++++++ env/curprv.go | 81 --------------------------------------------- env/doc.go | 54 ++++++++---------------------- env/element.go | 44 ------------------------ env/env.go | 40 +++++++++++----------- env/envs.go | 4 +-- env/fixed.go | 25 ++++++++------ env/freq.go | 18 ++++++---- env/mpifixed.go | 29 +++++++++------- looper/run.go | 25 +++++++++----- looper/stacks.go | 23 ++++++++----- looper/step_test.go | 40 +++++++++++++++++----- netview/netdata.go | 2 +- 19 files changed, 245 insertions(+), 399 deletions(-) create mode 100644 env/counter.go delete mode 100644 env/ctr.go delete mode 100644 env/ctrs.go create mode 100644 env/curprev.go delete mode 100644 env/curprv.go delete mode 100644 env/element.go diff --git a/egui/gui.go b/egui/gui.go index 1446aa9b..6f988502 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -8,6 +8,7 @@ package egui import ( "cogentcore.org/core/core" + "cogentcore.org/core/enums" "cogentcore.org/core/events" _ 
"cogentcore.org/core/goal/gosl/slbool/slboolcore" // include to get gui views "cogentcore.org/core/styles" @@ -41,7 +42,8 @@ type GUI struct { Body *core.Body `display:"-"` // OnStop is called when running stopped through the GUI. - OnStop func() + // Should update the network view. + OnStop func(mode, level enums.Enum) } // UpdateWindow triggers an update on window body, @@ -67,13 +69,13 @@ func (gui *GUI) GoUpdateWindow() { // Stopped is called when a run method stops running, // from a separate goroutine (do not call from main event loop). // Updates the IsRunning flag and toolbar. -func (gui *GUI) Stopped() { +func (gui *GUI) Stopped(mode, level enums.Enum) { gui.IsRunning = false if gui.Body == nil { return } if gui.OnStop != nil { - gui.OnStop() + gui.OnStop(mode, level) } gui.GoUpdateWindow() } @@ -108,7 +110,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { tabs.Name = "tabs" gui.Files.Tabber = tabs split.SetTiles(core.TileSplit, core.TileSpan) - split.SetSplits(.2, .5, .8) + split.SetSplits(.2, .7, .8) } // AddNetView adds NetView in tab with given name diff --git a/egui/loopctrl.go b/egui/loopctrl.go index 3201a179..8b1531d0 100644 --- a/egui/loopctrl.go +++ b/egui/loopctrl.go @@ -117,8 +117,8 @@ func (gui *GUI) AddLooperCtrl(p *tree.Plan, loops *looper.Stacks, prefix ...stri gui.IsRunning = true tb.Restyle() go func() { - loops.Run(curMode) - gui.Stopped() + stop := loops.Run(curMode) + gui.Stopped(curMode, stop) }() } }) @@ -139,8 +139,8 @@ func (gui *GUI) AddLooperCtrl(p *tree.Plan, loops *looper.Stacks, prefix ...stri go func() { st := loops.Stacks[curMode] nst := int(stepNSpin.Value) - loops.Step(curMode, nst, st.StepLevel) - gui.Stopped() + stop := loops.Step(curMode, nst, st.StepLevel) + gui.Stopped(curMode, stop) }() } }) diff --git a/env/README.md b/env/README.md index c2742d36..506c0cfc 100644 --- a/env/README.md +++ b/env/README.md @@ -1,24 +1,16 @@ Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/env) -See 
[Wiki Env](https://github.com/emer/emergent/wiki/Env) page for detailed docs. - -Package `env` defines the `Env` interface for environments, which determine the nature and sequence of States that can be used as inputs to a model and it can also accept Action responses from the model that affect how the environment evolves in the future. +Package `env` defines the `Env` interface for environments, which determine the nature and sequence of States as inputs to a model. Action responses from the model can also drive state evolution. By adhering to this interface, it is then easier to mix-and-match environments with models. ![Env / Agent](agent_env_interface.png?raw=true "Logical interface between the agent and the environment: the Environment supplies State to the Agent, and receives Actions from the Agent.") -The overall division of labor is that the model keeps track of the outer-most Run time-scale depending on its own parameters and learning trajectory and the environment is responsible for generating patterns for each run. - -Multiple different environments will typically be used in a model, e.g., one for training and other(s) for testing. Even if these envs all share a common database of patterns, a different Env should be used for each case where different counters and sequences of events etc are presented, which keeps them from interfering with each other. Also, the table.IndexView can be used to allow multiple different Env's to all present different indexed views into a shared common table.Table (e.g., train / test splits). The basic `FixedTable` env implementation uses this. - -Thus, the Env encapsulates all of the counter management logic for each aspect of model training and testing, so that the model itself just needs to manage which Env to use, when, and manage the connection of the Env States as inputs to the model, and vice-versa for Actions on the Env coming from the model. 
- -With the newer `looper` framework, the counters are managed by looper independent of the env. +Multiple different environments will typically be used in a model, e.g., one for training and other(s) for testing. Even if these envs all share a common database of patterns, a different Env should be used for each case where different counters and sequences of events etc are presented, which keeps them from interfering with each other. Also, `table.NewView` can be used to create new views on a common set of pattenrs, so different Envs can present different indexed views. The basic `FixedTable` env implementation uses this. -There is also an `Envs` map that provides a basic container for managing multiple Envs -- the key is typically an `etime.Modes` e.g., `etime.Train` or `etime.Test`. +The standard `String() string` `fmt.Stringer` method must be defined to return a string description of the current environment state, e.g., as a TrialName. A `Label() string` method must be defined to return the Name of the environment, which is typically the Mode of usage (Train vs. Test). -The `EnvDesc` interface provides additional methods (originally included in `Env`) that describe the Counters, States, and Actions, of the Env. Each `Element` of the overall `State` allows annotation about the different elements of state that are available in general. +There is also an `Envs` map that provides a basic container for managing multiple Envs, using a `string` key based on the `Label()` name. The `Step` should update all relevant state elements as appropriate, so these can be queried by the user. Particular paradigms of environments must establish naming conventions for these state elements which then allow the model to use the information appropriately -- the Env interface only provides the most basic framework for establishing these paradigms, and ultimately a given model will only work within a particular paradigm of environments following specific conventions. 
diff --git a/env/counter.go b/env/counter.go new file mode 100644 index 00000000..30949d03 --- /dev/null +++ b/env/counter.go @@ -0,0 +1,68 @@ +// Copyright (c) 2019, The Emergent Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package env + +// Counter maintains a current and previous counter value, +// and a Max value with methods to manage. +type Counter struct { + + // Cur is the current counter value. + Cur int + + // Prev previous counter value, prior to last Incr() call (init to -1) + Prev int `display:"-"` + + // Changed reports if it changed on the last Step() call or not. + Changed bool `display:"-"` + + // Max is the maximum counter value, above which the counter will reset back to 0. + // Only used if > 0. + Max int +} + +// Init initializes counter: Cur = 0, Prev = -1 +func (ct *Counter) Init() { + ct.Prev = -1 + ct.Cur = 0 + ct.Changed = false +} + +// Same resets Changed = false -- good idea to call this on all counters at start of Step +// or can put in an else statement, but that is more error-prone. +func (ct *Counter) Same() { + ct.Changed = false +} + +// Incr increments the counter by 1. If Max > 0 then if Incr >= Max +// the counter is reset to 0 and true is returned. Otherwise false. +func (ct *Counter) Incr() bool { + ct.Changed = true + ct.Prev = ct.Cur + ct.Cur++ + if ct.Max > 0 && ct.Cur >= ct.Max { + ct.Cur = 0 + return true + } + return false +} + +// Set sets the Cur value if different from Cur, while preserving previous value +// and setting Changed appropriately. Returns true if changed. +// does NOT check Cur vs. Max. 
+func (ct *Counter) Set(cur int) bool { + if ct.Cur == cur { + ct.Changed = false + return false + } + ct.Changed = true + ct.Prev = ct.Cur + ct.Cur = cur + return true +} + +// Query returns the current, previous and changed values for this counter +func (ct *Counter) Query() (cur, prev int, chg bool) { + return ct.Cur, ct.Prev, ct.Changed +} diff --git a/env/ctr.go b/env/ctr.go deleted file mode 100644 index ea2fbe2c..00000000 --- a/env/ctr.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package env - -import "github.com/emer/emergent/v2/etime" - -// Counter is a counter that counts increments at a given time scale. -// It keeps track of when it has been incremented or not, and -// retains the previous value. -type Counter struct { - - // current counter value - Cur int - - // previous counter value, prior to last Incr() call (init to -1) - Prv int `display:"-"` - - // did this change on the last Step() call or not? - Chg bool `display:"-"` - - // where relevant, this is a fixed maximum counter value, above which the counter will reset back to 0 -- only used if > 0 - Max int - - // the unit of time scale represented by this counter (just FYI) - Scale etime.Times `display:"-"` -} - -// Init initializes counter -- Cur = 0, Prv = -1 -func (ct *Counter) Init() { - ct.Prv = -1 - ct.Cur = 0 - ct.Chg = false -} - -// Same resets Chg = false -- good idea to call this on all counters at start of Step -// or can put in an else statement, but that is more error-prone. -func (ct *Counter) Same() { - ct.Chg = false -} - -// Incr increments the counter by 1. If Max > 0 then if Incr >= Max -// the counter is reset to 0 and true is returned. Otherwise false. 
-func (ct *Counter) Incr() bool { - ct.Chg = true - ct.Prv = ct.Cur - ct.Cur++ - if ct.Max > 0 && ct.Cur >= ct.Max { - ct.Cur = 0 - return true - } - return false -} - -// Set sets the Cur value if different from Cur, while preserving previous value -// and setting Chg appropriately. Returns true if changed. -// does NOT check Cur vs. Max. -func (ct *Counter) Set(cur int) bool { - if ct.Cur == cur { - ct.Chg = false - return false - } - ct.Chg = true - ct.Prv = ct.Cur - ct.Cur = cur - return true -} - -// Query returns the current, previous and changed values for this counter -func (ct *Counter) Query() (cur, prv int, chg bool) { - return ct.Cur, ct.Prv, ct.Chg -} diff --git a/env/ctrs.go b/env/ctrs.go deleted file mode 100644 index 89e26d49..00000000 --- a/env/ctrs.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package env - -import ( - "fmt" - - "github.com/emer/emergent/v2/estats" - "github.com/emer/emergent/v2/etime" -) - -// Counters contains an ordered slice of timescales, -// and a lookup map of counters by timescale -// used to manage counters in the Env. -type Counters struct { - - // ordered list of the counter timescales, from outer-most (highest) to inner-most (lowest) - Order []etime.Times - - // map of the counters by timescale - Counters map[etime.Times]*Counter -} - -// SetTimes initializes Counters for given mode -// and list of times ordered from highest to lowest -func (cs *Counters) SetTimes(mode string, times ...etime.Times) { - cs.Order = times - cs.Counters = make(map[etime.Times]*Counter, len(times)) - for _, tm := range times { - cs.Counters[tm] = &Counter{Scale: tm} - } -} - -// ByTime returns counter by timescale key -- nil if not found -func (cs *Counters) ByScope(tm etime.Times) *Counter { - return cs.Counters[tm] -} - -// ByTime returns counter by timescale key. 
returns nil, error if not found. -func (cs *Counters) ByTime(tm etime.Times) (*Counter, error) { - ct, ok := cs.Counters[tm] - if ok { - return ct, nil - } - return nil, fmt.Errorf("env.Counters: scope not found: %s", tm.String()) -} - -// Init does Init on all the counters -func (cs *Counters) Init() { - for _, ct := range cs.Counters { - ct.Init() - } -} - -// CountersToStats sets the current counter values to estats Int values -// by their time names only (no eval Mode). -func (cs *Counters) CountersToStats(mode string, stats *estats.Stats) { - for _, ct := range cs.Counters { - tm := ct.Scale.String() - stats.SetInt(mode+":"+tm, ct.Cur) - } -} diff --git a/env/curprev.go b/env/curprev.go new file mode 100644 index 00000000..2626b8cb --- /dev/null +++ b/env/curprev.go @@ -0,0 +1,20 @@ +// Copyright (c) 2019, The Emergent Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package env + +import "cogentcore.org/core/tensor" + +// CurPrev manages current and previous values for basic data types. +type CurPrev[T tensor.DataTypes] struct { + Cur, Prev T +} + +// Set sets the new current value, after saving Cur to Prev. +func (cv *CurPrev[T]) Set(cur T) { + cv.Prev = cv.Cur + cv.Cur = cur +} + +type CurPrevString = CurPrev[string] diff --git a/env/curprv.go b/env/curprv.go deleted file mode 100644 index 6420fa4b..00000000 --- a/env/curprv.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package env - -// CurPrvF32 is basic state management for current and previous values, float32 values -type CurPrvF32 struct { - - // current value - Cur float32 - - // previous value - Prv float32 -} - -// Set sets the new current value, copying Cur to Prv -func (cv *CurPrvF32) Set(cur float32) { - cv.Prv = cv.Cur - cv.Cur = cur -} - -// Incr increments Cur by 1 -func (cv *CurPrvF32) Incr() { - cv.Prv = cv.Cur - cv.Cur += 1.0 -} - -// Diff returns the difference between current and previous values -func (cv *CurPrvF32) Diff() float32 { - return cv.Cur - cv.Prv -} - -////////////////////////////// -// Int - -// CurPrvInt is basic state management for current and previous values, int values -type CurPrvInt struct { - - // current value - Cur int - - // previous value - Prv int -} - -// Set sets the new current value, copying Cur to Prv -func (cv *CurPrvInt) Set(cur int) { - cv.Prv = cv.Cur - cv.Cur = cur -} - -// Incr increments Cur by 1 -func (cv *CurPrvInt) Incr() { - cv.Prv = cv.Cur - cv.Cur++ -} - -// Diff returns the difference between current and previous values -func (cv *CurPrvInt) Diff() int { - return cv.Cur - cv.Prv -} - -////////////////////////////// -// String - -// CurPrvString is basic state management for current and previous values, string values -type CurPrvString struct { - - // current value - Cur string - - // previous value - Prv string -} - -// Set sets the new current value, copying Cur to Prv -func (cv *CurPrvString) Set(cur string) { - cv.Prv = cv.Cur - cv.Cur = cur -} diff --git a/env/doc.go b/env/doc.go index 251e665f..2e981b77 100644 --- a/env/doc.go +++ b/env/doc.go @@ -3,51 +3,25 @@ // license that can be found in the LICENSE file. /* -Package env defines an interface for environments, which determine the -nature and sequence of States that can be used as inputs to a model -and it can also accept Action responses from the model that affect -how the enviroment evolves in the future. 
+Package env defines an interface for environments, which determine the nature and +sequence of States as inputs to a model. Action responses from the model +can also drive state evolution. -By adhering to this interface, it is then easier to mix-and-match -environments with models. +State is comprised of one or more Elements, each of which consists of an +tensor.Values chunk of values that can be obtained by the model. +Likewise, Actions can also have Elements. The Step method is the main +interface for advancing the Env state. -The overall division of labor is that the model keeps track of the outer-most -Run time-scale depending on its own parameters and learning trajectory -and the environment is responsible for generating patterns for each run. - -Multiple different environments will typically be used in a model, e.g., -one for training and other(s) for testing. Even if these envs all share -a common database of patterns, a different Env should be used for each -case where different counters and sequences of events etc are presented, -which keeps them from interfering with each other. Also, the table.IndexView -can be used to allow multiple different Env's to all present different -indexed views into a shared common table.Table (e.g., train / test splits). -The basic FixedTable env implementation uses this. - -Thus, the Env encapsulates all of the counter management logic for each -aspect of model training and testing, so that the model itself just -needs to manange which Env to use, when, and manage the connection of -the Env States as inputs to the model, and vice-versa for Actions on the -Env coming from the model. - -Each Element of the overall State allows annotation about the different -elements of state that are available in general, and the `Step` should -update all relevant state elements as appropriate, so these can be queried -by the user. 
Particular paradigms of environments must establish naming -conventions for these state elements which then allow the model to use -the information appropriately -- the Env interface only provides the most -basic framework for establishing these paradigms, and ultimately a given -model will only work within a particular paradigm of environments following -specific conventions. - -See e.g., env.FixedTable for particular implementation of a fixed Table -of patterns, for one example of a widely used paradigm. +The standard String() string fmt.Stringer method must be defined to return +a string description of the current environment state, e.g., as a TrialName. +A Label() string method must be defined to return the Name of the environment, +which is typically the Mode of usage (Train vs. Test). Typically each specific implementation of this Env interface will have -multiple parameters etc that can be modified to control env behavior -- +multiple parameters etc that can be modified to control env behavior: all of this is paradigm-specific and outside the scope of this basic interface. -See the emergent github wiki for more info: -https://github.com/emer/emergent/v2/wiki/Env +See e.g., env.FixedTable for particular implementation of a fixed Table +of patterns, for one example of a widely used paradigm. */ package env diff --git a/env/element.go b/env/element.go deleted file mode 100644 index c693a7cc..00000000 --- a/env/element.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package env - -// Element specifies one element of State or Action in an environment -type Element struct { - - // name of this element -- must be unique - Name string - - // shape of the tensor for this element -- each element should generally have a well-defined consistent shape to enable the model to process it consistently - Shape []int - - // names of the dimensions within the Shape -- optional but useful for ensuring correct usage - DimNames []string -} - -// Elements is a list of Element info -type Elements []Element - -// // FromSchema copies element data from a table Schema that describes an -// // table.Table -// func (ch *Elements) FromSchema(sc table.Schema) { -// *ch = make(Elements, len(sc)) -// for i, cl := range sc { -// (*ch)[i].FromColumn(&cl) -// } -// } - -// FromColumn copies element data from table Column that describes an -// table.Table -// func (ch *Element) FromColumn(sc *table.Column) { -// ch.Name = sc.Name -// ch.Shape = make([]int, len(sc.CellShape)) -// copy(ch.Shape, sc.CellShape) -// if sc.DimNames != nil { -// ch.DimNames = make([]string, len(sc.DimNames)) -// copy(ch.DimNames, sc.DimNames) -// } else { -// ch.DimNames = nil -// } -// } diff --git a/env/env.go b/env/env.go index afc036cc..3dd02d29 100644 --- a/env/env.go +++ b/env/env.go @@ -5,6 +5,8 @@ package env import ( + "fmt" + "cogentcore.org/core/base/labels" "cogentcore.org/core/tensor" ) @@ -12,35 +14,33 @@ import ( //go:generate core generate -add-types // Env defines an interface for environments, which determine the nature and -// sequence of States that can be used as inputs to a model, and the Env -// also can accept Action responses from the model that affect state evolution. -// -// The Env manages [Counter] values to advance the temporal state of the -// environment, using [etime.Times] standard intervals. +// sequence of States as inputs to a model. Action responses from the model +// can also drive state evolution. 
// // State is comprised of one or more Elements, each of which consists of an -// tensor.Tensor chunk of values that can be obtained by the model. -// Likewise, Actions can also have Elements. The Step method is the main -// interface for advancing the Env state. Counters should be queried -// after calling Step to see if any relevant values have changed, to trigger -// functions in the model (e.g., logging of prior statistics, etc). +// tensor.Values chunk of values that can be obtained by the model. +// Likewise, Actions can also have Elements. The Step method is the main +// interface for advancing the Env state. +// +// The standard String() string fmt.Stringer method must be defined to return +// a string description of the current environment state, e.g., as a TrialName. +// A Label() string method must be defined to return the Name of the environment, +// which is typically the Mode of usage (Train vs. Test). // // Typically each specific implementation of this Env interface will have -// multiple parameters etc that can be modified to control env behavior -- +// multiple parameters etc that can be modified to control env behavior: // all of this is paradigm-specific and outside the scope of this basic interface. type Env interface { + fmt.Stringer labels.Labeler // Init initializes the environment for a given run of the model. // The environment may not care about the run number, but may implement // different parameterizations for different runs (e.g., between-subject - // manipulations). In general the Env can expect that the model will likely + // manipulations). In general the Env can expect that the model will likely // have established a different random seed per run, prior to calling this // method, and that may be sufficient to enable different run-level behavior. 
- // All other initialization / updating beyond this outer-most Run level must - // be managed internally by the Env itself, and the model can query the - // Counter state information to determine when things have updated at different - // time scales. See Step() for important info about state of env after Init + // See Step() for important info about state of env after Init // but prior to first Step() call. Init(run int) @@ -57,7 +57,7 @@ type Env interface { // // This implies that the state just after Init and prior to first Step // call should be an *initialized* state that then allows the first Step - // call to establish the proper *first* state. Typically this means that + // call to establish the proper *first* state. Typically this means that // one or more counters will be set to -1 during Init and then get incremented // to 0 on the first Step call. Step() bool @@ -66,11 +66,11 @@ type Env interface { // based on the current state of the env, as a function of having called Step(). // If no output is available on that element, then nil is returned. // The returned tensor must be treated as read-only as it likely points to original - // source data -- please make a copy before modifying (e.g., Clone() methdod). - State(element string) tensor.Tensor + // source data: please make a copy before modifying (e.g., Clone() methdod). + State(element string) tensor.Values // Action sends tensor data about e.g., responses from model back to act // on the environment and influence its subsequent evolution. // The nature and timing of this input is paradigm dependent. - Action(element string, input tensor.Tensor) + Action(element string, input tensor.Values) } diff --git a/env/envs.go b/env/envs.go index ca2fafad..8829e7c4 100644 --- a/env/envs.go +++ b/env/envs.go @@ -30,7 +30,7 @@ func (es *Envs) Add(evs ...Env) { } // ByMode returns env by Modes evaluation mode as the map key. -// returns nil if not found +// returns nil if not found. 
func (es *Envs) ByMode(mode enums.Enum) Env { return (*es)[mode.String()] } @@ -43,7 +43,7 @@ func ModeDi(mode enums.Enum, di int) string { // ByModeDi returns env by etime.Modes evaluation mode and // data parallel index as the map key, using ModeDi function. -// returns nil if not found +// returns nil if not found. func (es *Envs) ByModeDi(mode enums.Enum, di int) Env { return (*es)[ModeDi(mode, di)] } diff --git a/env/fixed.go b/env/fixed.go index 81df2f58..4a1ff71f 100644 --- a/env/fixed.go +++ b/env/fixed.go @@ -12,14 +12,12 @@ import ( "cogentcore.org/core/base/randx" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" - "github.com/emer/emergent/v2/etime" ) -// FixedTable is a basic Env that manages patterns from an table.Table, with -// either sequential or permuted random ordering, with the Trial counters +// FixedTable is a basic Env that manages patterns from a [table.Table], with +// either sequential or permuted random ordering, with a Trial counter // to record progress and iterations through the table. -// It uses an IndexView indexed view of the Table, so a single shared table -// can be used across different environments, with each having its own unique view. +// Use [table.NewView] to provide a unique indexed view of a shared table. type FixedTable struct { // name of this environment, usually Train vs. Test. Name string @@ -43,10 +41,10 @@ type FixedTable struct { Trial Counter `display:"inline"` // if Table has a Name column, this is the contents of that. - TrialName CurPrvString + TrialName CurPrevString // if Table has a Group column, this is contents of that. - GroupName CurPrvString + GroupName CurPrevString // name of the Name column -- defaults to 'Name'. 
NameCol string @@ -67,6 +65,14 @@ func (ft *FixedTable) Validate() error { func (ft *FixedTable) Label() string { return ft.Name } +func (ft *FixedTable) String() string { + s := ft.TrialName.Cur + if ft.GroupName.Cur != "" { + s = ft.GroupName.Cur + "_" + s + } + return s +} + func (ft *FixedTable) Init(run int) { if ft.NameCol == "" { ft.NameCol = "Name" @@ -74,7 +80,6 @@ func (ft *FixedTable) Init(run int) { if ft.GroupCol == "" { ft.GroupCol = "Group" } - ft.Trial.Scale = etime.Trial ft.Trial.Init() ft.NewOrder() ft.Trial.Cur = -1 // init state -- key so that first Step() = 0 @@ -140,7 +145,7 @@ func (ft *FixedTable) Step() bool { return true } -func (ft *FixedTable) State(element string) tensor.Tensor { +func (ft *FixedTable) State(element string) tensor.Values { et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("FixedTable.State -- could not find element:", element) @@ -148,7 +153,7 @@ func (ft *FixedTable) State(element string) tensor.Tensor { return et } -func (ft *FixedTable) Action(element string, input tensor.Tensor) { +func (ft *FixedTable) Action(element string, input tensor.Values) { // nop } diff --git a/env/freq.go b/env/freq.go index e54fc0da..da1c9315 100644 --- a/env/freq.go +++ b/env/freq.go @@ -12,7 +12,6 @@ import ( "cogentcore.org/core/base/randx" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" - "github.com/emer/emergent/v2/etime" ) // FreqTable is an Env that manages patterns from an table.Table with frequency @@ -51,10 +50,10 @@ type FreqTable struct { Trial Counter `display:"inline"` // if Table has a Name column, this is the contents of that - TrialName CurPrvString + TrialName CurPrevString // if Table has a Group column, this is contents of that - GroupName CurPrvString + GroupName CurPrevString // name of the Name column -- defaults to 'Name' NameCol string @@ -82,6 +81,14 @@ func (ft *FreqTable) Validate() error { func (ft *FreqTable) Label() string { return ft.Name } +func (ft 
*FreqTable) String() string { + s := ft.TrialName.Cur + if ft.GroupName.Cur != "" { + s = ft.GroupName.Cur + "_" + s + } + return s +} + func (ft *FreqTable) Init(run int) { if ft.NameCol == "" { ft.NameCol = "Name" @@ -92,7 +99,6 @@ func (ft *FreqTable) Init(run int) { if ft.FreqCol == "" { ft.FreqCol = "Freq" } - ft.Trial.Scale = etime.Trial ft.Trial.Init() ft.Sample() ft.Trial.Max = len(ft.Order) @@ -167,7 +173,7 @@ func (ft *FreqTable) Step() bool { return true } -func (ft *FreqTable) State(element string) tensor.Tensor { +func (ft *FreqTable) State(element string) tensor.Values { et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("FreqTable.State -- could not find element:", element) @@ -175,7 +181,7 @@ func (ft *FreqTable) State(element string) tensor.Tensor { return et } -func (ft *FreqTable) Action(element string, input tensor.Tensor) { +func (ft *FreqTable) Action(element string, input tensor.Values) { // nop } diff --git a/env/mpifixed.go b/env/mpifixed.go index 0f9a3b79..0edc8b60 100644 --- a/env/mpifixed.go +++ b/env/mpifixed.go @@ -13,15 +13,13 @@ import ( "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" "cogentcore.org/core/tensor/tensormpi" - "github.com/emer/emergent/v2/etime" ) -// MPIFixedTable is an MPI-enabled version of the FixedTable, which is -// a basic Env that manages patterns from an table.Table, with -// either sequential or permuted random ordering, and uses standard Trial -// Time counter to record iterations through the table. -// It uses an IndexView indexed view of the Table, so a single shared table -// can be used across different environments, with each having its own unique view. +// MPIFixedTable is an MPI-enabled version of the [FixedTable], which is +// a basic Env that manages patterns from a [table.Table[, with +// either sequential or permuted random ordering, and a Trial counter to +// record iterations through the table. 
+// Use [table.NewView] to provide a unique indexed view of a shared table. // The MPI version distributes trials across MPI procs, in the Order list. // It is ESSENTIAL that the number of trials (rows) in Table is // evenly divisible by number of MPI procs! @@ -47,10 +45,10 @@ type MPIFixedTable struct { Trial Counter `display:"inline"` // if Table has a Name column, this is the contents of that - TrialName CurPrvString + TrialName CurPrevString // if Table has a Group column, this is contents of that - GroupName CurPrvString + GroupName CurPrevString // name of the Name column -- defaults to 'Name' NameCol string @@ -77,6 +75,14 @@ func (ft *MPIFixedTable) Validate() error { func (ft *MPIFixedTable) Label() string { return ft.Name } +func (ft *MPIFixedTable) String() string { + s := ft.TrialName.Cur + if ft.GroupName.Cur != "" { + s = ft.GroupName.Cur + "_" + s + } + return s +} + func (ft *MPIFixedTable) Init(run int) { if ft.NameCol == "" { ft.NameCol = "Name" @@ -84,7 +90,6 @@ func (ft *MPIFixedTable) Init(run int) { if ft.GroupCol == "" { ft.GroupCol = "Group" } - ft.Trial.Scale = etime.Trial ft.Trial.Init() ft.NewOrder() ft.Trial.Cur = ft.TrialSt - 1 // init state -- key so that first Step() = ft.TrialSt @@ -142,7 +147,7 @@ func (ft *MPIFixedTable) Step() bool { return true } -func (ft *MPIFixedTable) State(element string) tensor.Tensor { +func (ft *MPIFixedTable) State(element string) tensor.Values { et := ft.Table.Column(element).RowTensor(ft.Row()) if et == nil { log.Println("MPIFixedTable.State -- could not find element:", element) @@ -150,7 +155,7 @@ func (ft *MPIFixedTable) State(element string) tensor.Tensor { return et } -func (ft *MPIFixedTable) Action(element string, input tensor.Tensor) { +func (ft *MPIFixedTable) Action(element string, input tensor.Values) { // nop } diff --git a/looper/run.go b/looper/run.go index 0640851a..b0bf3609 100644 --- a/looper/run.go +++ b/looper/run.go @@ -7,20 +7,24 @@ package looper import ( "fmt" "strings" + + 
"cogentcore.org/core/enums" ) func indent(level int) string { return strings.Repeat(" ", level) } -// runLevel implements nested for loops recursively. -// It is set up so that it can be stopped and resumed at any point. -func (ss *Stacks) runLevel(currentLevel int) bool { +// runLevel implements nested run for loops recursively. +// It can be stopped and resumed at any point. +// returns true if the level was completed, and the level where it stopped. +func (ss *Stacks) runLevel(currentLevel int) (bool, enums.Enum) { st := ss.Stacks[ss.Mode] if currentLevel >= len(st.Order) { - return true // Stack overflow, expected at bottom of stack. + return true, st.Order[0] // Stack overflow, should not happen } level := st.Order[currentLevel] + stoppedLevel := level // return value for what level it stopped at loop := st.Loops[level] ctr := &loop.Counter @@ -28,6 +32,7 @@ func (ss *Stacks) runLevel(currentLevel int) bool { stoplev := int64(-1) if st.StopLevel != nil { stoplev = st.StopLevel.Int64() + stoppedLevel = st.StopLevel } stopAtLevelOrLarger := st.Order[currentLevel].Int64() >= stoplev if st.StopFlag && stopAtLevelOrLarger { @@ -36,7 +41,7 @@ func (ss *Stacks) runLevel(currentLevel int) bool { if ss.internalStop { // This should occur before ctr incrementing and before functions. st.StopFlag = false - return false // Don't continue above, e.g. Stop functions + return false, stoppedLevel // Don't continue above, e.g. Stop functions } if st.StopNext && st.Order[currentLevel] == st.StopLevel { st.StopCount -= 1 @@ -63,10 +68,12 @@ func (ss *Stacks) runLevel(currentLevel int) bool { fmt.Printf("%s%s: Skipping Start: %d\n", indent(currentLevel), level.String(), ctr.Cur) } - // Recursion! 
- runComplete := ss.runLevel(currentLevel + 1) + done := true + if currentLevel+1 < len(st.Order) { + done, stoppedLevel = ss.runLevel(currentLevel + 1) + } - if runComplete { + if done { if PrintControlFlow { fmt.Printf("%s%s: End: %d\n", indent(currentLevel), level.String(), ctr.Cur) } @@ -92,5 +99,5 @@ func (ss *Stacks) runLevel(currentLevel int) bool { exitLoop: // Only get to this point if this loop is done. - return true + return true, level } diff --git a/looper/stacks.go b/looper/stacks.go index ec7727df..43f8096a 100644 --- a/looper/stacks.go +++ b/looper/stacks.go @@ -57,43 +57,48 @@ func (ls *Stacks) newInit() { // Run runs the stack of loops for given mode (Train, Test, etc). // This resets any stepping settings for this stack and runs // until completion or stopped externally. -func (ls *Stacks) Run(mode enums.Enum) { +// Returns the level that was running when it stopped. +func (ls *Stacks) Run(mode enums.Enum) enums.Enum { ls.Mode = mode ls.ClearStep(mode) - ls.Cont() + return ls.Cont() } // ResetAndRun calls ResetCountersByMode on this mode // and then Run. This ensures that the Stack is run from // the start, regardless of what state it might have been in. -func (ls *Stacks) ResetAndRun(mode enums.Enum) { +// Returns the level that was running when it stopped. +func (ls *Stacks) ResetAndRun(mode enums.Enum) enums.Enum { ls.ResetCountersByMode(mode) - ls.Run(mode) + return ls.Run(mode) } // Cont continues running based on current state of the stacks. // This is common pathway for Step and Run, which set state and // call Cont. Programatic calling of Step can continue with Cont. -func (ls *Stacks) Cont() { +// Returns the level that was running when it stopped. +func (ls *Stacks) Cont() enums.Enum { ls.isRunning = true ls.internalStop = false - ls.runLevel(0) // 0 Means the top level loop + _, stop := ls.runLevel(0) // 0 Means the top level loop ls.isRunning = false + return stop } // Step numSteps at given stopLevel. 
Use this if you want to do exactly one trial // or two epochs or 50 cycles or whatever. If numSteps <= 0 then the default // number of steps for given step level is used. -func (ls *Stacks) Step(mode enums.Enum, numSteps int, stopLevel enums.Enum) { +// Returns the level that was running when it stopped. +func (ls *Stacks) Step(mode enums.Enum, numSteps int, stopLevel enums.Enum) enums.Enum { ls.Mode = mode st := ls.Stacks[ls.Mode] st.SetStep(numSteps, stopLevel) - ls.Cont() + return ls.Cont() } // ClearStep clears stepping variables from given mode, // so it will run to completion in a subsequent Cont(). -// Called by Run +// Called by Run. func (ls *Stacks) ClearStep(mode enums.Enum) { st := ls.Stacks[ls.Mode] st.ClearStep() diff --git a/looper/step_test.go b/looper/step_test.go index f01962e7..0f234897 100644 --- a/looper/step_test.go +++ b/looper/step_test.go @@ -107,26 +107,50 @@ func TestStep(t *testing.T) { } } else { PrintControlFlow = false - stacks.Step(levels.Train, 1, levels.Cycle) - stacks.Step(levels.Train, 1, levels.Cycle) - stacks.Step(levels.Train, 1, levels.Cycle) - stacks.Step(levels.Train, 1, levels.Cycle) - stacks.Step(levels.Train, 2, levels.Cycle) - stacks.Step(levels.Train, 1, levels.Run) + stop := stacks.Step(levels.Train, 1, levels.Cycle) + if stop != levels.Cycle { + t.Errorf("stop != Cycle: %s", stop) + } + stop = stacks.Step(levels.Train, 1, levels.Cycle) + if stop != levels.Cycle { + t.Errorf("stop != Cycle: %s", stop) + } + stop = stacks.Step(levels.Train, 1, levels.Cycle) + if stop != levels.Cycle { + t.Errorf("stop != Cycle: %s", stop) + } + stop = stacks.Step(levels.Train, 1, levels.Cycle) + if stop != levels.Cycle { + t.Errorf("stop != Cycle: %s", stop) + } + stop = stacks.Step(levels.Train, 2, levels.Cycle) + if stop != levels.Cycle { + t.Errorf("stop != Cycle: %s", stop) + } + stop = stacks.Step(levels.Train, 1, levels.Run) if run.Counter.Cur != 1 { t.Errorf("Incorrect step run") } - stacks.Step(levels.Train, 3, levels.Epoch) + 
if stop != levels.Run { + t.Errorf("stop != Run: %s", stop) + } + stop = stacks.Step(levels.Train, 3, levels.Epoch) if run.Counter.Cur != 1 || epc.Counter.Cur != 3 { t.Errorf("Incorrect step epoch") } + if stop != levels.Epoch { + t.Errorf("stop != Epoch: %s", stop) + } if trialCount != 32 { // 32 = 1*5*4+3*4 t.Errorf("Cycles not counted correctly: %d != 32", trialCount) } - stacks.Step(levels.Train, 2, levels.Trial) + stop = stacks.Step(levels.Train, 2, levels.Trial) if trialCount != 34 { // 34 = 1*5*4+3*4+2 t.Errorf("Cycles not counted correctly: %d != 34", trialCount) } + if stop != levels.Trial { + t.Errorf("stop != Trial: %s", stop) + } } } diff --git a/netview/netdata.go b/netview/netdata.go index ae5fc143..bc8ab337 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -681,7 +681,7 @@ func (nd *NetData) SelectedUnitTable(di int) *table.Table { selnm := nd.PathLay + fmt.Sprintf("[%d]", nd.PathUnIndex) - dt := &table.Table{} + dt := table.New() metadata.SetName(dt, "NetView: "+selnm) metadata.SetTo(dt, "read-only", true) tensor.SetPrecision(dt, 4) From 78a0d48adba37eee78ba27408842b0cc98c1f153 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Mon, 25 Nov 2024 01:31:04 -0800 Subject: [PATCH 15/24] update to latest core goal revision and remove obsolete stuff -- fully building. 
--- confusion/confusion.go | 35 +-- confusion/typegen.go | 9 - econfig/README.md | 209 ------------- econfig/args.go | 263 ----------------- econfig/config.go | 100 ------- econfig/defaults.go | 13 - econfig/econfig_test.go | 286 ------------------ econfig/enum.go | 19 -- econfig/enumgen.go | 48 --- econfig/include.go | 101 ------- econfig/io.go | 67 ----- econfig/testdata/testcfg.toml | 21 -- econfig/testdata/testcfginc.toml | 4 - econfig/testdata/testinc.toml | 3 - econfig/testdata/testinc2.toml | 4 - econfig/testdata/testinc3.toml | 3 - econfig/testdata/testincinc.toml | 3 - econfig/testdata/testincinc2.toml | 1 - econfig/typegen.go | 13 - econfig/usage.go | 63 ---- elog/README.md | 445 ---------------------------- elog/context.go | 309 ------------------- elog/elog_test.go | 45 --- elog/item.go | 148 ---------- elog/logs.go | 475 ------------------------------ elog/stditems.go | 450 ---------------------------- elog/table.go | 76 ----- elog/typegen.go | 78 ----- estats/funcs.go | 68 +---- estats/rasters.go | 47 --- go.mod | 7 +- go.sum | 16 +- 32 files changed, 20 insertions(+), 3409 deletions(-) delete mode 100644 confusion/typegen.go delete mode 100644 econfig/README.md delete mode 100644 econfig/args.go delete mode 100644 econfig/config.go delete mode 100644 econfig/defaults.go delete mode 100644 econfig/econfig_test.go delete mode 100644 econfig/enum.go delete mode 100644 econfig/enumgen.go delete mode 100644 econfig/include.go delete mode 100644 econfig/io.go delete mode 100644 econfig/testdata/testcfg.toml delete mode 100644 econfig/testdata/testcfginc.toml delete mode 100644 econfig/testdata/testinc.toml delete mode 100644 econfig/testdata/testinc2.toml delete mode 100644 econfig/testdata/testinc3.toml delete mode 100644 econfig/testdata/testincinc.toml delete mode 100644 econfig/testdata/testincinc2.toml delete mode 100644 econfig/typegen.go delete mode 100644 econfig/usage.go delete mode 100644 elog/README.md delete mode 100644 elog/context.go delete 
mode 100644 elog/elog_test.go delete mode 100644 elog/item.go delete mode 100644 elog/logs.go delete mode 100644 elog/stditems.go delete mode 100644 elog/table.go delete mode 100644 elog/typegen.go delete mode 100644 estats/rasters.go diff --git a/confusion/confusion.go b/confusion/confusion.go index 554bfbb2..1d60db5d 100644 --- a/confusion/confusion.go +++ b/confusion/confusion.go @@ -6,14 +6,7 @@ package confusion //go:generate core generate -add-types -import ( - "fmt" - "math" - - "cogentcore.org/core/core" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/simat" -) +/* // Matrix computes the confusion matrix, with rows representing // the ground truth correct class, and columns representing the @@ -208,30 +201,4 @@ func (cm *Matrix) SaveCSV(fname core.Filename) { func (cm *Matrix) OpenCSV(fname core.Filename) { tensor.OpenCSV(&cm.Prob, fname, ',') } - -/* -var MatrixProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"SaveCSV", tree.Props{ - "label": "Save CSV...", - "icon": "file-save", - "desc": "Save CSV-formatted confusion probabilities (Probs)", - "Args": tree.PropSlice{ - {"CSV File Name", tree.Props{ - "ext": ".csv", - }}, - }, - }}, - {"OpenCSV", tree.Props{ - "label": "Open CSV...", - "icon": "file-open", - "desc": "Open CSV-formatted confusion probabilities (Probs)", - "Args": tree.PropSlice{ - {"Weights File Name", tree.Props{ - "ext": ".csv", - }}, - }, - }}, - }, -} */ diff --git a/confusion/typegen.go b/confusion/typegen.go deleted file mode 100644 index 4ba23fa4..00000000 --- a/confusion/typegen.go +++ /dev/null @@ -1,9 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. - -package confusion - -import ( - "cogentcore.org/core/types" -) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/confusion.Matrix", IDName: "matrix", Doc: "Matrix computes the confusion matrix, with rows representing\nthe ground truth correct class, and columns representing the\nactual answer produced. 
Correct answers are along the diagonal.", Directives: []types.Directive{{Tool: "git", Directive: "add"}}, Fields: []types.Field{{Name: "Prob", Doc: "normalized probability of confusion: Row = ground truth class, Col = actual response for that class."}, {Name: "Sum", Doc: "incremental sums"}, {Name: "N", Doc: "counts per ground truth (rows)"}, {Name: "Vis", Doc: "visualization using SimMat"}, {Name: "TFPN", Doc: "true pos/neg, false pos/neg for each class, generated from the confusion matrix"}, {Name: "ClassScores", Doc: "precision, recall and F1 score by class"}, {Name: "MatrixScores", Doc: "micro F1, macro F1 and weighted F1 scores for entire matrix ignoring class"}}}) diff --git a/econfig/README.md b/econfig/README.md deleted file mode 100644 index f196aa57..00000000 --- a/econfig/README.md +++ /dev/null @@ -1,209 +0,0 @@ -Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/econfig) - -`econfig` provides methods to set values on a `Config` struct through a (TOML) config file or command-line args (`flags` in Go terminology), with support for setting Network params and values on any other struct as well (e.g., an Env to be constructed later in a ConfigEnv method). - -* Standard usage: - + `cfg := &ss.Config` - + `cfg.Defaults()` -- sets hard-coded defaults -- user should define and call this method first. - + It is better to use the `default:` field tag however because it then shows in `-h` or `--help` usage and in the [Cogent Core](https://cogentcore.org/core) GUI. See [Default Tags](#def_default_tags) for how to specify def values for more complex types. - + `econfig.Config(cfg, "config.toml")` -- sets config values according to the standard order, with given file name specifying the default config file name. - -* Has support for nested `Include` paths, which are processed in the natural deepest-first order. The processed `Config` struct field will contain a list of all such files processed. 
There are two options for include file support: - + `Includes []string` for multiple includes in one config: Config implements the `IncludesPtr() *[]string` method which satisfies the `Includeser` interface, and returns a pointer to the `Includes` field containing a list of config files to include. - + `Include string` for single include in one config: Config implements the `IncludePtr() *string` method which satisfies the `Includer` interface, and returns a pointer to the Include field. - + The default `IncludePaths` includes current dir (`.`) and `configs` directory, which is recommended location to store different configs. - -* Order of setting in `econfig.Config`: - + Apply any `def:` field tag default values. - + Look for `--config`, `--cfg` arg, specifying config file(s) on the command line (comma separated if multiple, with no spaces). - + Fall back on default config file name passed to `Config` function, if arg not found. - + Read any `Include[s]` files in config file in deepest-first (natural) order, then the specified config file last -- includee overwrites included settings. - + Process command-line args based on Config field names, with `.` separator for sub-fields. - -* All field name references in toml files and command-line args are case-insensitive. For args (flags) kebab-case (with either `-` or `_` delimiter) can be used. For bool args, use "No" prefix in any form (e.g., "NoRunLog" or "no-run-log"). Instead of polluting the flags space with all the different options, custom args processing code is used. - -* Args in sub-structs are automatically available with just the field name and also nested within the name of the parent struct field -- for example, `-Run.NEpochs` and just `-NEpochs` (or `-nepochs` lowercase). Use `nest:"+"` to force a field to only be available in its nested form, in case of conflict of names without nesting (which are logged). - -* Is a replacement for `ecmd` and includes the helper methods for saving log files etc. 
- -* A `map[string]any` type can be used for deferred raw params to be applied later (`Network`, `Env` etc). Example: `Network = {'.PFCLayer:Layer.Inhib.Layer.Gi' = '2.4', '#VSPatchPath:Path.Learn.LRate' = '0.01'}` where the key expression contains the [params](../params) selector : path to variable. - -* Supports full set of `Open` (file), `OpenFS` (takes fs.FS arg, e.g., for embedded), `Read` (bytes) methods for loading config files. The overall `Config()` version uses `OpenWithIncludes` which processes includes -- others are just for single files. Also supports `Write` and `Save` methods for saving from current state. - -* If needed, different config file encoding formats can be supported, with TOML being the default (currently only TOML). - -# Special fields, supported types, and field tags - -* To enable include file processing, add a `Includes []string` field and a `func (cfg *Config) IncludesPtr() *[]string { return &cfg.Includes }` method. The include file(s) are read first before the current one. A stack of such includes is created and processed in the natural order encountered, so each includer is applied after the includees, recursively. Note: use `--config` to specify the first config file read -- the `Includes` field is excluded from arg processing because it would be processed _after_ the point where include files are processed. - -* `Field map[string]any` -- allows raw parsing of values that can be applied later. Use this for `Network`, `Env` etc fields. Here are some examples: - -```toml -[Env.Env] - "NItems" = 10 - "NTrials" = 10 -``` - -```toml -[Params.Network] - "#Output:Layer.Inhib.Layer.Gi" = 0.7 - "Path:Path.Learn.LRate.Base" = 0.05 -``` - -* Field tag `default:"value"`, used in the [Cogent Core](https://cogentcore.org/core) GUI, sets the initial default value and is shown for the `-h` or `--help` usage info. 
- -* [kit](https://cogentcore.org/core/ki) registered "enum" `const` types, with names automatically parsed from string values (including bit flags). Must use the [goki stringer](https://github.com/goki/stringer) version to generate `FromString()` method, and register the type like this: `var KitTestEnum = kit.Enums.AddEnum(TestEnumN, kit.NotBitFlag, nil)` -- see [enum.go](enum.go) file for example. - -# `default` Default Tags - -The [Cogent Core](https://cogentcore.org/core) GUI processes `default:"value"` struct tags to highlight values that are not at their defaults. econfig uses these same tags to auto-initialize fields as well, ensuring that the tag and the actual initial value are the same. The value for strings or numbers is just the string representation. For more complex types, here ar some examples: - -* `struct`: specify using standard Go literal expression as a string, with single-quotes `'` used instead of double-quotes around strings, such as the name of the fields: - + `vecint.Vector2i`: `default:"{'X':10,'Y':10}"` - -* `slice`: comma-separated list of values in square braces -- use `'` for internal string boundaries: - + `[]float32`: `default:"[1, 2.14, 3.14]"` - + `[]string`: `default:"{'A', 'bbb bbb', 'c c c'}"` - -* `map`: comma-separated list of key:value in curly braces -- use `'` for internal string boundaries: - + `map[string]float32`: `default:"{'key1': 1, 'key2': 2.14, 'key3': 3.14]"` - -# Standard Config Example - -Here's the `Config` struct from [axon/examples/ra25](https://github.com/emer/axon), which can provide a useful starting point. It uses Params, Run and Log sub-structs to better organize things. For sims with extensive Env config, that should be added as a separate sub-struct as well. The `display:"add-fields"` struct tag shows all of the fields in one big dialog in the GUI -- if you want separate ones, omit that. 
- -```Go -// ParamConfig has config parameters related to sim params -type ParamConfig struct { - - // network parameters - Network map[string]any - - // size of hidden layer -- can use emer.LaySize for 4D layers - Hidden1Size vecint.Vector2i `default:"{'X':10,'Y':10}" nest:"+"` - - // size of hidden layer -- can use emer.LaySize for 4D layers - Hidden2Size vecint.Vector2i `default:"{'X':10,'Y':10}" nest:"+"` - - // Extra Param Sheet name(s) to use (space separated if multiple) -- must be valid name as listed in compiled-in params or loaded params - Sheet string - - // extra tag to add to file names and logs saved from this run - Tag string - - // user note -- describe the run params etc -- like a git commit message for the run - Note string - - // Name of the JSON file to input saved parameters from. - File string `nest:"+"` - - // Save a snapshot of all current param and config settings in a directory named params_ (or _good if Good is true), then quit -- useful for comparing to later changes and seeing multiple views of current params - SaveAll bool `nest:"+"` - - // for SaveAll, save to params_good for a known good params state. This can be done prior to making a new release after all tests are passing -- add results to git to provide a full diff record of all params over time. - Good bool `nest:"+"` -} - -// RunConfig has config parameters related to running the sim -type RunConfig struct { - - // use the GPU for computation -- generally faster even for small models if NData ~16 - GPU bool `default:"true"` - - // number of data-parallel items to process in parallel per trial -- works (and is significantly faster) for both CPU and GPU. Results in an effective mini-batch of learning. 
- NData int `default:"16" min:"1"` - - // number of parallel threads for CPU computation -- 0 = use default - NThreads int `default:"0"` - - // starting run number -- determines the random seed -- runs counts from there -- can do all runs in parallel by launching separate jobs with each run, runs = 1 - Run int `default:"0"` - - // total number of runs to do when running Train - NRuns int `default:"5" min:"1"` - - // total number of epochs per run - NEpochs int `default:"100"` - - // stop run after this number of perfect, zero-error epochs - NZero int `default:"2"` - - // total number of trials per epoch. Should be an even multiple of NData. - NTrials int `default:"32"` - - // how often to run through all the test patterns, in terms of training epochs -- can use 0 or -1 for no testing - TestInterval int `default:"5"` - - // how frequently (in epochs) to compute PCA on hidden representations to measure variance? - PCAInterval int `default:"5"` - - // if non-empty, is the name of weights file to load at start of first run -- for testing - StartWts string -} - -// LogConfig has config parameters related to logging data -type LogConfig struct { - - // if true, save final weights after each run - SaveWts bool - - // if true, save train epoch log to file, as .epc.tsv typically - Epoch bool `default:"true" nest:"+"` - - // if true, save run log to file, as .run.tsv typically - Run bool `default:"true" nest:"+"` - - // if true, save train trial log to file, as .trl.tsv typically. May be large. - Trial bool `default:"false" nest:"+"` - - // if true, save testing epoch log to file, as .tst_epc.tsv typically. In general it is better to copy testing items over to the training epoch log and record there. - TestEpoch bool `default:"false" nest:"+"` - - // if true, save testing trial log to file, as .tst_trl.tsv typically. May be large. 
- TestTrial bool `default:"false" nest:"+"` - - // if true, save network activation etc data from testing trials, for later viewing in netview - NetData bool -} - -// Config is a standard Sim config -- use as a starting point. -type Config struct { - - // specify include files here, and after configuration, it contains list of include files added - Includes []string - - // open the GUI -- does not automatically run -- if false, then runs automatically and quits - GUI bool `default:"true"` - - // log debugging information - Debug bool - - // parameter related configuration options - Params ParamConfig `display:"add-fields"` - - // sim running related configuration options - Run RunConfig `display:"add-fields"` - - // data logging related configuration options - Log LogConfig `display:"add-fields"` -} - -func (cfg *Config) IncludesPtr() *[]string { return &cfg.Includes } - -``` - -# Key design considerations - -* Can set config values from command-line args and/or config file (TOML being the preferred format) (or env vars) - + current axon models only support args. obelisk models only support TOML. conflicts happen. - -* Sims use a Config struct with fields that represents the definitive value of all arg / config settings (vs a `map[string]any`) - + struct provides _compile time_ error checking (and IDE completion) -- very important and precludes map. - + Add Config to Sim so it is visible in the GUI for easy visual debugging etc (current args map is organized by types -- makes it hard to see everything). - -* Enable setting Network or Env params directly: - + Use `Network.`, `Env.`, `TrainEnv.`, `TestEnv.` etc prefixes followed by standard `params` selectors (e.g., `Layer.Act.Gain`) or paths to fields in relevant env. These can be added to Config as `map[string]any` and then applied during ConfigNet, ConfigEnv etc. - -* TOML Go implementations are case insensitive (TOML spec says case sensitive..) 
-- makes sense to use standard Go CamelCase conventions as in every other Go struct. - - diff --git a/econfig/args.go b/econfig/args.go deleted file mode 100644 index d93ff1e0..00000000 --- a/econfig/args.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// note: parsing code adapted from pflag package https://github.com/spf13/pflag -// Copyright (c) 2012 Alex Ogier. All rights reserved. -// Copyright (c) 2012 The Go Authors. All rights reserved. - -package econfig - -import ( - "fmt" - "reflect" - "strings" - - "cogentcore.org/core/base/iox/tomlx" - "cogentcore.org/core/base/mpi" - "cogentcore.org/core/base/reflectx" - "cogentcore.org/core/base/strcase" -) - -// SetFromArgs sets Config values from command-line args, -// based on the field names in the Config struct. -// Returns any args that did not start with a `-` flag indicator. -// For more robust error processing, it is assumed that all flagged args (-) -// must refer to fields in the config, so any that fail to match trigger -// an error. Errors can also result from parsing. -// Errors are automatically logged because these are user-facing. -func SetFromArgs(cfg any, args []string) (nonFlags []string, err error) { - allArgs := make(map[string]reflect.Value) - CommandArgs(allArgs) // need these to not trigger not-found errors - FieldArgNames(cfg, allArgs) - nonFlags, err = ParseArgs(cfg, args, allArgs, true) - if err != nil { - mpi.Println(Usage(cfg)) - } - return -} - -// ParseArgs parses given args using map of all available args -// setting the value accordingly, and returning any leftover args. -// setting errNotFound = true causes args that are not in allArgs to -// trigger an error. Otherwise, it just skips those. 
-func ParseArgs(cfg any, args []string, allArgs map[string]reflect.Value, errNotFound bool) ([]string, error) { - var nonFlags []string - var err error - for len(args) > 0 { - s := args[0] - args = args[1:] - if len(s) == 0 || s[0] != '-' || len(s) == 1 { - nonFlags = append(nonFlags, s) - continue - } - - if s[1] == '-' && len(s) == 2 { // "--" terminates the flags - // f.argsLenAtDash = len(f.args) - nonFlags = append(nonFlags, args...) - break - } - args, err = ParseArg(s, args, allArgs, errNotFound) - if err != nil { - return nonFlags, err - } - } - return nonFlags, nil -} - -func ParseArg(s string, args []string, allArgs map[string]reflect.Value, errNotFound bool) (a []string, err error) { - a = args - name := s[1:] - if name[0] == '-' { - name = name[1:] - } - if len(name) == 0 || name[0] == '-' || name[0] == '=' { - err = fmt.Errorf("econfig.ParseArgs: bad flag syntax: %s", s) - mpi.Println(err) - return - } - - if strings.HasPrefix(name, "test.") { // go test passes args.. - return - } - - split := strings.SplitN(name, "=", 2) - name = split[0] - fval, exists := allArgs[name] - if !exists { - if errNotFound { - err = fmt.Errorf("econfig.ParseArgs: flag name not recognized: %s", name) - mpi.Println(err) - } - return - } - - isbool := reflectx.NonPointerValue(fval).Kind() == reflect.Bool - - var value string - switch { - case len(split) == 2: - // '--flag=arg' - value = split[1] - case isbool: - // '--flag' bare - lcnm := strings.ToLower(name) - negate := false - if len(lcnm) > 3 { - if lcnm[:3] == "no_" || lcnm[:3] == "no-" { - negate = true - } else if lcnm[:2] == "no" { - if _, has := allArgs[lcnm[2:]]; has { // e.g., nogui and gui is on list - negate = true - } - } - } - if negate { - value = "false" - } else { - value = "true" - } - case len(a) > 0: - // '--flag arg' - value = a[0] - a = a[1:] - default: - // '--flag' (arg was required) - err = fmt.Errorf("econfig.ParseArgs: flag needs an argument: %s", s) - mpi.Println(err) - return - } - - err = 
SetArgValue(name, fval, value) - return -} - -// SetArgValue sets given arg name to given value, into settable reflect.Value -func SetArgValue(name string, fval reflect.Value, value string) error { - nptyp := reflectx.NonPointerType(fval.Type()) - vk := nptyp.Kind() - switch { - // todo: enum - // case vk >= reflect.Int && vk <= reflect.Uint64 && kit.Enums.TypeRegistered(nptyp): - // return kit.Enums.SetAnyEnumValueFromString(fval, value) - case vk == reflect.Map: - mval := make(map[string]any) - err := tomlx.ReadBytes(&mval, []byte("tmp="+value)) // use toml decoder - if err != nil { - mpi.Println(err) - return err - } - err = reflectx.CopyMapRobust(fval.Interface(), mval["tmp"]) - if err != nil { - mpi.Println(err) - err = fmt.Errorf("econfig.ParseArgs: not able to set map field from arg: %s val: %s", name, value) - mpi.Println(err) - return err - } - case vk == reflect.Slice: - mval := make(map[string]any) - err := tomlx.ReadBytes(&mval, []byte("tmp="+value)) // use toml decoder - if err != nil { - mpi.Println(err) - return err - } - err = reflectx.CopySliceRobust(fval.Interface(), mval["tmp"]) - if err != nil { - mpi.Println(err) - err = fmt.Errorf("econfig.ParseArgs: not able to set slice field from arg: %s val: %s", name, value) - mpi.Println(err) - return err - } - default: - err := reflectx.SetRobust(fval.Interface(), value) // overkill but whatever - if err != nil { - err := fmt.Errorf("econfig.ParseArgs: not able to set field from arg: %s val: %s", name, value) - mpi.Println(err) - return err - } - } - return nil -} - -// FieldArgNames adds to given args map all the different ways the field names -// can be specified as arg flags, mapping to the reflect.Value -func FieldArgNames(obj any, allArgs map[string]reflect.Value) { - fieldArgNamesStruct(obj, "", false, allArgs) -} - -func addAllCases(nm, path string, pval reflect.Value, allArgs map[string]reflect.Value) { - if nm == "Includes" { - return // skip - } - if path != "" { - nm = path + "." 
+ nm - } - allArgs[nm] = pval - allArgs[strings.ToLower(nm)] = pval - allArgs[strcase.ToKebab(nm)] = pval - allArgs[strcase.ToSnake(nm)] = pval - allArgs[strcase.ToSNAKE(nm)] = pval -} - -// fieldArgNamesStruct returns map of all the different ways the field names -// can be specified as arg flags, mapping to the reflect.Value -func fieldArgNamesStruct(obj any, path string, nest bool, allArgs map[string]reflect.Value) { - if reflectx.AnyIsNil(obj) { - return - } - ov := reflect.ValueOf(obj) - if ov.Kind() == reflect.Pointer && ov.IsNil() { - return - } - val := reflectx.NonPointerValue(ov) - typ := val.Type() - for i := 0; i < typ.NumField(); i++ { - f := typ.Field(i) - fv := val.Field(i) - if reflectx.NonPointerType(f.Type).Kind() == reflect.Struct { - nwPath := f.Name - if path != "" { - nwPath = path + "." + nwPath - } - nwNest := nest - if !nwNest { - neststr, ok := f.Tag.Lookup("nest") - if ok && (neststr == "+" || neststr == "true") { - nwNest = true - } - } - fieldArgNamesStruct(reflectx.PointerValue(fv).Interface(), nwPath, nwNest, allArgs) - continue - } - pval := reflectx.PointerValue(fv) - addAllCases(f.Name, path, pval, allArgs) - if f.Type.Kind() == reflect.Bool { - addAllCases("No"+f.Name, path, pval, allArgs) - } - // now process adding non-nested version of field - if path == "" || nest { - continue - } - neststr, ok := f.Tag.Lookup("nest") - if ok && (neststr == "+" || neststr == "true") { - continue - } - if _, has := allArgs[f.Name]; has { - mpi.Printf("econfig Field: %s.%s cannot be added as a non-nested %s arg because it has already been registered -- add 'nest:'+'' field tag to the one you want to keep only as a nested arg with path, to eliminate this message\n", path, f.Name, f.Name) - continue - } - addAllCases(f.Name, "", pval, allArgs) - if f.Type.Kind() == reflect.Bool { - addAllCases("No"+f.Name, "", pval, allArgs) - } - } -} - -// CommandArgs adds non-field args that control the config process: -// -config -cfg -help -h -func 
CommandArgs(allArgs map[string]reflect.Value) { - allArgs["config"] = reflect.ValueOf(&ConfigFile) - allArgs["cfg"] = reflect.ValueOf(&ConfigFile) - allArgs["help"] = reflect.ValueOf(&Help) - allArgs["h"] = reflect.ValueOf(&Help) -} diff --git a/econfig/config.go b/econfig/config.go deleted file mode 100644 index 7f0707dd..00000000 --- a/econfig/config.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package econfig - -//go:generate core generate -add-types - -import ( - "errors" - "os" - "reflect" - - "cogentcore.org/core/base/fsx" - "cogentcore.org/core/base/mpi" -) - -var ( - // DefaultEncoding is the default encoding format for config files. - // currently toml is the only supported format, but others could be added - // if needed. - DefaultEncoding = "toml" - - // IncludePaths is a list of file paths to try for finding config files - // specified in Include field or via the command line --config --cfg or -c args. - // Set this prior to calling Config -- default is current directory '.' and 'configs' - IncludePaths = []string{".", "configs"} - - // NonFlagArgs are the command-line args that remain after all the flags have - // been processed. This is set after the call to Config. - NonFlagArgs = []string{} - - // ConfigFile is the name of the config file actually loaded, specified by the - // -config or -cfg command-line arg or the default file given in Config - ConfigFile string - - // Help is variable target for -help or -h args - Help bool -) - -// Config is the overall config setting function, processing config files -// and command-line arguments, in the following order: -// - Apply any `default:` field tag default values. -// - Look for `--config`, `--cfg`, or `-c` arg, specifying a config file on the command line. 
-// - Fall back on default config file name passed to `Config` function, if arg not found. -// - Read any `Include[s]` files in config file in deepest-first (natural) order, -// then the specified config file last. -// - if multiple config files are listed, then the first one that exists is used -// - Process command-line args based on Config field names, with `.` separator -// for sub-fields. -// - Boolean flags are set on with plain -flag; use No prefix to turn off -// (or explicitly set values to true or false). -// -// Also processes -help or -h and prints usage and quits immediately. -func Config(cfg any, defaultFile ...string) ([]string, error) { - ConfigFile = "" - - var errs []error - err := SetFromDefaults(cfg) - if err != nil { - errs = append(errs, err) - } - - allArgs := make(map[string]reflect.Value) - CommandArgs(allArgs) - - args := os.Args[1:] - _, err = ParseArgs(cfg, args, allArgs, false) // false = ignore non-matches - - if Help { - mpi.Println(Usage(cfg)) - os.Exit(0) - } - - var cfgFiles []string - if ConfigFile != "" { - files := fsx.FindFilesOnPaths(IncludePaths, ConfigFile) - if len(files) > 0 { - cfgFiles = append(cfgFiles, ConfigFile) - } - } else { - for _, fn := range defaultFile { - files := fsx.FindFilesOnPaths(IncludePaths, fn) - if len(files) > 0 { - cfgFiles = append(cfgFiles, fn) - } - } - } - for _, fn := range cfgFiles { - err = OpenWithIncludes(cfg, fn) - if err != nil { - errs = append(errs, err) - } - } - NonFlagArgs, err = SetFromArgs(cfg, args) - if err != nil { - errs = append(errs, err) - } - return args, errors.Join(errs...) -} diff --git a/econfig/defaults.go b/econfig/defaults.go deleted file mode 100644 index 34acbf15..00000000 --- a/econfig/defaults.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package econfig - -import "cogentcore.org/core/base/reflectx" - -// SetFromDefaults sets Config values from field tag `default:` values. -// Parsing errors are automatically logged. -func SetFromDefaults(cfg any) error { - return reflectx.SetFromDefaultTags(cfg) -} diff --git a/econfig/econfig_test.go b/econfig/econfig_test.go deleted file mode 100644 index f32fc11c..00000000 --- a/econfig/econfig_test.go +++ /dev/null @@ -1,286 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package econfig - -import ( - "fmt" - "reflect" - "sort" - "strings" - "testing" - - "golang.org/x/exp/maps" -) - -// TestSubConfig is a sub-struct with special params -type TestSubConfig struct { - - // number of patterns to create - NPats int `default:"10"` - - // proportion activity of created params - Sparseness float32 `default:"0.15"` -} - -// TestConfig is a testing config -type TestConfig struct { - - // specify include files here, and after configuration, it contains list of include files added - Includes []string - - // open the GUI -- does not automatically run -- if false, then runs automatically and quits - GUI bool `default:"true"` - - // use the GPU for computation - GPU bool `default:"true"` - - // log debugging information - Debug bool - - // important for testing . notation etc - PatParams TestSubConfig - - // network parameters applied after built-in params -- use toml map format: '{key = val, key2 = val2}' where key is 'selector:path' (e.g., '.PFCLayer:Layer.Inhib.Layer.Gi' where '.PFCLayer' is a class) and values should be strings to be consistent with standard params format - Network map[string]any - - // ParamSet name to use -- must be valid name as listed in compiled-in params or loaded params - ParamSet string - - // Name of the JSON file to input saved parameters from. 
- ParamFile string - - // Name of the file to output all parameter data. If not empty string, program should write file(s) and then exit - ParamDocFile string - - // extra tag to add to file names and logs saved from this run - Tag string - - // user note -- describe the run params etc -- like a git commit message for the run - Note string `default:"testing is fun"` - - // starting run number -- determines the random seed -- runs counts from there -- can do all runs in parallel by launching separate jobs with each run, runs = 1 - Run int `default:"0"` - - // total number of runs to do when running Train - Runs int `default:"10"` - - // total number of epochs per run - Epochs int `default:"100"` - - // total number of trials per epoch. Should be an even multiple of NData. - NTrials int `default:"128"` - - // number of data-parallel items to process in parallel per trial -- works (and is significantly faster) for both CPU and GPU. Results in an effective mini-batch of learning. - NData int `default:"16"` - - // if true, save final weights after each run - SaveWeights bool - - // if true, save train epoch log to file, as .epc.tsv typically - EpochLog bool `default:"true"` - - // if true, save run log to file, as .run.tsv typically - RunLog bool `default:"true"` - - // if true, save train trial log to file, as .trl.tsv typically. May be large. - TrialLog bool `default:"true"` - - // if true, save testing epoch log to file, as .tst_epc.tsv typically. In general it is better to copy testing items over to the training epoch log and record there. - TestEpochLog bool `default:"false"` - - // if true, save testing trial log to file, as .tst_trl.tsv typically. May be large. 
- TestTrialLog bool `default:"false"` - - // if true, save network activation etc data from testing trials, for later viewing in netview - NetData bool - - // can set these values by string representation if stringer and registered as an enum with kit - Enum TestEnum - - // ] test slice case - Slice []float32 `default:"[1, 2.14, 3.14]"` - - // ] test string slice case - StrSlice []string `default:"['cat','dog one','dog two']"` -} - -func (cfg *TestConfig) IncludesPtr() *[]string { return &cfg.Includes } - -func TestDefaults(t *testing.T) { - cfg := &TestConfig{} - SetFromDefaults(cfg) - if cfg.Epochs != 100 || cfg.EpochLog != true || cfg.Note != "testing is fun" { - t.Errorf("Main defaults failed to set") - } - if cfg.PatParams.NPats != 10 || cfg.PatParams.Sparseness != 0.15 { - t.Errorf("PatParams defaults failed to set") - } - // fmt.Printf("%#v\n", cfg.Slice) - if len(cfg.Slice) != 3 || cfg.Slice[2] != 3.14 { - t.Errorf("Slice defaults failed to set") - } - if len(cfg.StrSlice) != 3 || cfg.StrSlice[1] != "dog one" { - t.Errorf("StrSlice defaults failed to set") - } -} - -func TestArgsPrint(t *testing.T) { - t.Skip("prints all possible args") - - cfg := &TestConfig{} - allArgs := make(map[string]reflect.Value) - FieldArgNames(cfg, allArgs) - - keys := maps.Keys(allArgs) - sort.Slice(keys, func(i, j int) bool { - return strings.ToLower(keys[i]) < strings.ToLower(keys[j]) - }) - fmt.Println("Args:") - fmt.Println(strings.Join(keys, "\n")) -} - -func TestArgs(t *testing.T) { - cfg := &TestConfig{} - SetFromDefaults(cfg) - // note: cannot use "-Includes=testcfg.toml", - args := []string{"-save-weights", "-nogui", "-no-epoch-log", "--NoRunLog", "--runs=5", "--run", "1", "--TAG", "nice", "--PatParams.Sparseness=0.1", "--Network", "{'.PFCLayer:Layer.Inhib.Gi' = '2.4', '#VSPatchPath:Path.Learn.LRate' = '0.01'}", "-Enum=TestValue2", "-Slice=[3.2, 2.4, 1.9]", "leftover1", "leftover2"} - allArgs := make(map[string]reflect.Value) - FieldArgNames(cfg, allArgs) - leftovers, 
err := ParseArgs(cfg, args, allArgs, true) - if err != nil { - t.Errorf(err.Error()) - } - fmt.Println(leftovers) - if cfg.Runs != 5 || cfg.Run != 1 || cfg.Tag != "nice" || cfg.PatParams.Sparseness != 0.1 || cfg.SaveWeights != true || cfg.GUI != false || cfg.EpochLog != false || cfg.RunLog != false { - t.Errorf("args not set properly: %#v", cfg) - } - if cfg.Enum != TestValue2 { - t.Errorf("args enum from string not set properly: %#v", cfg) - } - if len(cfg.Slice) != 3 || cfg.Slice[2] != 1.9 { - t.Errorf("args Slice not set properly: %#v", cfg) - } - - // if cfg.Network != nil { - // mv := cfg.Network - // for k, v := range mv { - // fmt.Println(k, " = ", v) - // } - // } -} - -func TestOpen(t *testing.T) { - IncludePaths = []string{".", "testdata"} - cfg := &TestConfig{} - err := OpenWithIncludes(cfg, "testcfg.toml") - if err != nil { - t.Errorf(err.Error()) - } - - // fmt.Println("includes:", cfg.Includes) - - // if cfg.Network != nil { - // mv := cfg.Network - // for k, v := range mv { - // fmt.Println(k, " = ", v) - // } - // } - - if cfg.GUI != true || cfg.Tag != "testing" { - t.Errorf("testinc.toml not parsed\n") - } - if cfg.Epochs != 500 || cfg.GPU != true { - t.Errorf("testinc2.toml not parsed\n") - } - if cfg.Note != "something else" { - t.Errorf("testinc3.toml not parsed\n") - } - if cfg.Runs != 8 { - t.Errorf("testinc3.toml didn't overwrite testinc2\n") - } - if cfg.NTrials != 32 { - t.Errorf("testinc.toml didn't overwrite testinc2\n") - } - if cfg.NData != 12 { - t.Errorf("testcfg.toml didn't overwrite testinc3\n") - } - if cfg.Enum != TestValue2 { - t.Errorf("testinc.toml Enum value not parsed\n") - } -} - -func TestUsage(t *testing.T) { - t.Skip("prints usage string") - cfg := &TestConfig{} - us := Usage(cfg) - fmt.Println(us) -} - -func TestSave(t *testing.T) { - // t.Skip("prints usage string") - IncludePaths = []string{".", "testdata"} - cfg := &TestConfig{} - OpenWithIncludes(cfg, "testcfg.toml") - Save(cfg, "testdata/testwrite.toml") -} - -func 
TestConfigOpen(t *testing.T) { - // t.Skip("prints usage string") - IncludePaths = []string{".", "testdata"} - cfg := &TestConfig{} - _, err := Config(cfg) - // no errors for missing config fiels - // if err == nil { - // t.Errorf("should have Config error") - // // } else { - // // fmt.Println(err) - // } - _, err = Config(cfg, "aldfkj.toml") - // if err == nil { - // t.Errorf("should have Config error") - // // } else { - // // fmt.Println(err) - // } - _, err = Config(cfg, "aldfkj.toml", "testcfg.toml") - if err != nil { - t.Error(err) - } -} - -// TestIncConfig is a testing config with Include instead of Includes -type TestIncConfig struct { - - // specify include file here, and after configuration, it contains list of include files added - Include string - - // open the GUI -- does not automatically run -- if false, then runs automatically and quits - GUI bool `default:"true"` - - // extra tag to add to file names and logs saved from this run - Tag string - - // starting run number -- determines the random seed -- runs counts from there -- can do all runs in parallel by launching separate jobs with each run, runs = 1 - Run int `default:"0"` -} - -func (cfg *TestIncConfig) IncludePtr() *string { return &cfg.Include } - -func TestIncOpen(t *testing.T) { - IncludePaths = []string{".", "testdata"} - cfg := &TestIncConfig{} - err := OpenWithIncludes(cfg, "testcfginc.toml") - if err != nil { - t.Errorf(err.Error()) - } - - fmt.Println("include:", cfg.Include) - - if cfg.GUI != true { - t.Errorf("testcfginc.toml not parsed\n") - } - if cfg.Tag != "initial" { - t.Errorf("testincinc.toml not parsed\n") - } -} diff --git a/econfig/enum.go b/econfig/enum.go deleted file mode 100644 index 91282786..00000000 --- a/econfig/enum.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package econfig - -//go:generate core generate - -// TestEnum is an enum type for testing -type TestEnum int32 //enums:enum - -// note: we need to add the Layer extension to avoid naming -// conflicts between layer, pathway and other things. - -const ( - TestValue1 TestEnum = iota - - TestValue2 -) diff --git a/econfig/enumgen.go b/econfig/enumgen.go deleted file mode 100644 index 2e19dbde..00000000 --- a/econfig/enumgen.go +++ /dev/null @@ -1,48 +0,0 @@ -// Code generated by "core generate"; DO NOT EDIT. - -package econfig - -import ( - "cogentcore.org/core/enums" -) - -var _TestEnumValues = []TestEnum{0, 1} - -// TestEnumN is the highest valid value for type TestEnum, plus one. -const TestEnumN TestEnum = 2 - -var _TestEnumValueMap = map[string]TestEnum{`TestValue1`: 0, `TestValue2`: 1} - -var _TestEnumDescMap = map[TestEnum]string{0: ``, 1: ``} - -var _TestEnumMap = map[TestEnum]string{0: `TestValue1`, 1: `TestValue2`} - -// String returns the string representation of this TestEnum value. -func (i TestEnum) String() string { return enums.String(i, _TestEnumMap) } - -// SetString sets the TestEnum value from its string representation, -// and returns an error if the string is invalid. -func (i *TestEnum) SetString(s string) error { - return enums.SetString(i, s, _TestEnumValueMap, "TestEnum") -} - -// Int64 returns the TestEnum value as an int64. -func (i TestEnum) Int64() int64 { return int64(i) } - -// SetInt64 sets the TestEnum value from an int64. -func (i *TestEnum) SetInt64(in int64) { *i = TestEnum(in) } - -// Desc returns the description of the TestEnum value. -func (i TestEnum) Desc() string { return enums.Desc(i, _TestEnumDescMap) } - -// TestEnumValues returns all possible values for the type TestEnum. -func TestEnumValues() []TestEnum { return _TestEnumValues } - -// Values returns all possible values for the type TestEnum. 
-func (i TestEnum) Values() []enums.Enum { return enums.Values(_TestEnumValues) } - -// MarshalText implements the [encoding.TextMarshaler] interface. -func (i TestEnum) MarshalText() ([]byte, error) { return []byte(i.String()), nil } - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (i *TestEnum) UnmarshalText(text []byte) error { return enums.UnmarshalText(i, text, "TestEnum") } diff --git a/econfig/include.go b/econfig/include.go deleted file mode 100644 index f6051313..00000000 --- a/econfig/include.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// note: FindFileOnPaths adapted from viper package https://github.com/spf13/viper -// Copyright (c) 2014 Steve Francia - -package econfig - -import ( - "errors" - "reflect" - - "cogentcore.org/core/base/iox/tomlx" - "cogentcore.org/core/base/reflectx" -) - -// Includeser enables processing of Includes []string field with files to include in Config objects. -type Includeser interface { - // IncludesPtr returns a pointer to the Includes []string field containing file(s) to include - // before processing the current config file. - IncludesPtr() *[]string -} - -// Includer enables processing of Include string field with file to include in Config objects. -type Includer interface { - // IncludePtr returns a pointer to the Include string field containing single file to include - // before processing the current config file. - IncludePtr() *string -} - -// IncludesStack returns the stack of include files in the natural -// order in which they are encountered (nil if none). -// Files should then be read in reverse order of the slice. -// Returns an error if any of the include files cannot be found on IncludePath. -// Does not alter cfg. 
-func IncludesStack(cfg Includeser) ([]string, error) { - clone := reflect.New(reflectx.NonPointerType(reflect.TypeOf(cfg))).Interface().(Includeser) - *clone.IncludesPtr() = *cfg.IncludesPtr() - return includesStackImpl(clone, nil) -} - -// includeStackImpl implements IncludeStack, operating on cloned cfg -// todo: could use a more efficient method to just extract the include field.. -func includesStackImpl(clone Includeser, includes []string) ([]string, error) { - incs := *clone.IncludesPtr() - ni := len(incs) - if ni == 0 { - return includes, nil - } - for i := ni - 1; i >= 0; i-- { - includes = append(includes, incs[i]) // reverse order so later overwrite earlier - } - var errs []error - for _, inc := range incs { - *clone.IncludesPtr() = nil - err := tomlx.OpenFromPaths(clone, inc, IncludePaths...) - if err == nil { - includes, err = includesStackImpl(clone, includes) - if err != nil { - errs = append(errs, err) - } - } else { - errs = append(errs, err) - } - } - return includes, errors.Join(errs...) -} - -// IncludeStack returns the stack of include files in the natural -// order in which they are encountered (nil if none). -// Files should then be read in reverse order of the slice. -// Returns an error if any of the include files cannot be found on IncludePath. -// Does not alter cfg. -func IncludeStack(cfg Includer) ([]string, error) { - clone := reflect.New(reflectx.NonPointerType(reflect.TypeOf(cfg))).Interface().(Includer) - *clone.IncludePtr() = *cfg.IncludePtr() - return includeStackImpl(clone, nil) -} - -// includeStackImpl implements IncludeStack, operating on cloned cfg -// todo: could use a more efficient method to just extract the include field.. -func includeStackImpl(clone Includer, includes []string) ([]string, error) { - inc := *clone.IncludePtr() - if inc == "" { - return includes, nil - } - includes = append(includes, inc) - var errs []error - *clone.IncludePtr() = "" - err := tomlx.OpenFromPaths(clone, inc, IncludePaths...) 
- if err == nil { - includes, err = includeStackImpl(clone, includes) - if err != nil { - errs = append(errs, err) - } - } else { - errs = append(errs, err) - } - return includes, errors.Join(errs...) -} diff --git a/econfig/io.go b/econfig/io.go deleted file mode 100644 index 615bd918..00000000 --- a/econfig/io.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package econfig - -import ( - "io/fs" - "strings" - - "cogentcore.org/core/base/iox/tomlx" - "cogentcore.org/core/base/mpi" -) - -// OpenWithIncludes reads config from given config file, -// looking on IncludePaths for the file, -// and opens any Includes specified in the given config file -// in the natural include order so includee overwrites included settings. -// Is equivalent to Open if there are no Includes. -// Returns an error if any of the include files cannot be found on IncludePath. -func OpenWithIncludes(cfg any, file string) error { - err := tomlx.OpenFromPaths(cfg, file, IncludePaths...) - if err != nil { - return err - } - incsObj, hasIncludes := cfg.(Includeser) - incObj, hasInclude := cfg.(Includer) - if !hasInclude && !hasIncludes { - return nil // no further processing - } - var incs []string - if hasIncludes { - incs, err = IncludesStack(incsObj) - } else { - incs, err = IncludeStack(incObj) - } - ni := len(incs) - if err != nil || ni == 0 { - return err - } - for i := ni - 1; i >= 0; i-- { - inc := incs[i] - err = tomlx.OpenFromPaths(cfg, inc, IncludePaths...) - if err != nil { - mpi.Println(err) - } - } - // reopen original - tomlx.OpenFromPaths(cfg, file, IncludePaths...) - if hasIncludes { - *incsObj.IncludesPtr() = incs - } else { - *incObj.IncludePtr() = strings.Join(incs, ",") - } - return err -} - -// OpenFS reads config from given TOML file, -// using the fs.FS filesystem -- e.g., for embed files. 
-func OpenFS(cfg any, fsys fs.FS, file string) error { - return tomlx.OpenFS(cfg, fsys, file) -} - -// Save writes TOML to given file. -func Save(cfg any, file string) error { - return tomlx.Save(cfg, file) -} diff --git a/econfig/testdata/testcfg.toml b/econfig/testdata/testcfg.toml deleted file mode 100644 index 73bde369..00000000 --- a/econfig/testdata/testcfg.toml +++ /dev/null @@ -1,21 +0,0 @@ -Includes=["testinc.toml"] -gui = true -Tag = "testing" -NData = 12 -Enum = 'TestValue2' # can use string here instead of number - -# inline form: -# PatParams.Sparseness = 0.22 - -# better multi-line form: -[PatParams] - NPats = 14 - Sparseness = 0.22 - -# inline form: -# Network = {'.PFCLayer:Layer.Inhib.Layer.Gi' = '2.4', '#VSPatchPrjn:Prjn.Learn.LRate' = '0.01'} - -# better multi-line form: -[Network] - "#VSPatchPrjn:Prjn.Learn.LRate" = "0.01" - ".PFCLayer:Layer.Inhib.Layer.Gi" = "2.4" diff --git a/econfig/testdata/testcfginc.toml b/econfig/testdata/testcfginc.toml deleted file mode 100644 index e32d764c..00000000 --- a/econfig/testdata/testcfginc.toml +++ /dev/null @@ -1,4 +0,0 @@ -Include="testincinc.toml" -gui = true -Run = 2 - diff --git a/econfig/testdata/testinc.toml b/econfig/testdata/testinc.toml deleted file mode 100644 index 611d5685..00000000 --- a/econfig/testdata/testinc.toml +++ /dev/null @@ -1,3 +0,0 @@ -Includes=["testinc2.toml", "testinc3.toml"] -NTrials=32 - diff --git a/econfig/testdata/testinc2.toml b/econfig/testdata/testinc2.toml deleted file mode 100644 index c6c4deb9..00000000 --- a/econfig/testdata/testinc2.toml +++ /dev/null @@ -1,4 +0,0 @@ -GPU=true -Epochs=500 -Runs=5 # overwritten by testinc3 -NTrials=64 # overwritten by testinc diff --git a/econfig/testdata/testinc3.toml b/econfig/testdata/testinc3.toml deleted file mode 100644 index 70e11149..00000000 --- a/econfig/testdata/testinc3.toml +++ /dev/null @@ -1,3 +0,0 @@ -NData=32 # overwritten in testcfg -Runs=8 # this overwrites testinc2 -Note="something else" diff --git 
a/econfig/testdata/testincinc.toml b/econfig/testdata/testincinc.toml deleted file mode 100644 index 0f12afc1..00000000 --- a/econfig/testdata/testincinc.toml +++ /dev/null @@ -1,3 +0,0 @@ -Include="testincinc2.toml" -Tag = "initial" - diff --git a/econfig/testdata/testincinc2.toml b/econfig/testdata/testincinc2.toml deleted file mode 100644 index f19db5ec..00000000 --- a/econfig/testdata/testincinc2.toml +++ /dev/null @@ -1 +0,0 @@ -Tag = "final" diff --git a/econfig/typegen.go b/econfig/typegen.go deleted file mode 100644 index 4767aa0b..00000000 --- a/econfig/typegen.go +++ /dev/null @@ -1,13 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. - -package econfig - -import ( - "cogentcore.org/core/types" -) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/econfig.TestEnum", IDName: "test-enum", Doc: "TestEnum is an enum type for testing"}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/econfig.Includeser", IDName: "includeser", Doc: "Includeser enables processing of Includes []string field with files to include in Config objects.", Methods: []types.Method{{Name: "IncludesPtr", Doc: "IncludesPtr returns a pointer to the Includes []string field containing file(s) to include\nbefore processing the current config file.", Returns: []string{"*"}}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/econfig.Includer", IDName: "includer", Doc: "Includer enables processing of Include string field with file to include in Config objects.", Methods: []types.Method{{Name: "IncludePtr", Doc: "IncludePtr returns a pointer to the Include string field containing single file to include\nbefore processing the current config file.", Returns: []string{"*string"}}}}) diff --git a/econfig/usage.go b/econfig/usage.go deleted file mode 100644 index ea708529..00000000 --- a/econfig/usage.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) 2023, The Emergent Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package econfig - -import ( - "fmt" - "reflect" - "strings" - - "cogentcore.org/core/base/reflectx" -) - -// Usage returns the usage string for args based on given Config object -func Usage(cfg any) string { - var b strings.Builder - b.WriteString("The following command-line arguments set fields on the Config struct.\n") - b.WriteString("args are case insensitive and kebab-case or snake_case also works\n") - b.WriteString("most can be used without nesting path (e.g. -nepochs instead of -run.nepochs)\n") - b.WriteString("\n") - b.WriteString("-help or -h\tshow available command-line arguments and exit\n") - b.WriteString("-config or -cfg\tspecify filename for loading Config settings\n") - b.WriteString("\n") - usageStruct(cfg, "", &b) - return b.String() -} - -// usageStruct adds usage info to given strings.Builder -func usageStruct(obj any, path string, b *strings.Builder) { - typ := reflectx.NonPointerType(reflect.TypeOf(obj)) - val := reflectx.NonPointerValue(reflect.ValueOf(obj)) - for i := 0; i < typ.NumField(); i++ { - f := typ.Field(i) - fv := val.Field(i) - if reflectx.NonPointerType(f.Type).Kind() == reflect.Struct { - nwPath := f.Name - if path != "" { - nwPath = path + "." + nwPath - } - usageStruct(reflectx.PointerValue(fv).Interface(), nwPath, b) - continue - } - nm := f.Name - if nm == "Includes" { - continue - } - if path != "" { - nm = path + "." 
+ nm - } - b.WriteString(fmt.Sprintf("-%s\n", nm)) - desc, ok := f.Tag.Lookup("desc") - if ok && desc != "" { - b.WriteString("\t") - b.WriteString(desc) - def, ok := f.Tag.Lookup("def") - if ok && def != "" { - b.WriteString(fmt.Sprintf(" (default %s)", def)) - } - } - b.WriteString("\n") - } -} diff --git a/elog/README.md b/elog/README.md deleted file mode 100644 index 54c28a21..00000000 --- a/elog/README.md +++ /dev/null @@ -1,445 +0,0 @@ -# elog - -Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/elog) - -`elog` provides a full infrastructure for recording data of all sorts at multiple time scales and evaluation modes (training, testing, validation, etc). - -The `elog.Item` provides a full definition of each distinct item that is logged with a map of Write functions keyed by a scope string that reflects the time scale and mode. The same function can be used across multiple scopes, or a different function for each scope, etc. - -The Items are written to the table *in the order added*, so you can take advantage of previously computed item values based on the actual ordering of item code. For example, intermediate values can be stored / retrieved from Stats, or from other items on a log, e.g., using `Context.LogItemFloat` function. - -The Items are then processed in `CreateTables()` to create a set of `table.Table` tables to hold the data. - -The `elog.Logs` struct holds all the relevant data and functions for managing the logging process. - -* `Log(mode, time)` does logging, adding a new row - -* `LogRow(mode, time, row)` does logging at given row - -Both of these functions automatically write incrementally to a `tsv` File if it has been opened. - -The `Context` object is passed to the Item Write functions, and has all the info typically needed -- must call `SetContext(stats, net)` on the Logs to provide those elements. Write functions can do most standard things by calling methods on Context -- see that in Docs above for more info. 
- -# Scopes - -Everything is organized according to a `etime.ScopeKey`, which is just a `string`, that is formatted to represent two factors: an **evaluation mode** (standard versions defined by `etime.Modes` enum) and a **time scale** (`etime.Times` enum). - -Standard `etime.Modes` are: -* `Train` -* `Test` -* `Validate` -* `Analyze` -- used for internal representational analysis functions such as PCA, ActRF, SimMat, etc. - -Standard `etime.Times` are based on the [Env](https://github.com/emer/emergent/wiki/Env) `TimeScales` augmented with Leabra / Axon finer-grained scales, including: -* `Cycle` -* `Trial` -* `Epoch` -* `Run` - -Other arbitrary scope values can be used -- there are `Scope` versions of every method that take an arbitrary `etime.ScopeKey` that can be composed using the `ScopeStr` method from any two strings, along with the "plain" versions of these methods that take the standard `mode` and `time` enums for convenience. These enums can themselves also be extended but it is probably easier to just use strings. - -# Examples - -The [ra25](https://github.com/emer/axon/tree/main/examples/ra25) example has a fully updated implementation of this new logging infrastructure. The individual log Items are added in the `logitems.go` file, which keeps the main sim file smaller and easier to navigate. It is also a good idea to put the params in a separate `params.go` file, as we now do in this example. - -## Main Config and Log functions - -The `ConfigLogs` function configures the items, creates the tables, and configures any other log-like entities including spike rasters. 
- -```Go -func (ss *Sim) ConfigLogs() { - ss.ConfigLogItems() - ss.Logs.CreateTables() - ss.Logs.SetContext(&ss.Stats, ss.Net) - // don't plot certain combinations we don't use - ss.Logs.NoPlot(etime.Train, etime.Cycle) - ss.Logs.NoPlot(etime.Test, etime.Run) - // note: Analyze not plotted by default - ss.Logs.SetMeta(etime.Train, etime.Run, "LegendCol", "Params") - ss.Stats.ConfigRasters(ss.Net, ss.Net.LayersByClass()) -} -``` - -There is one master `Log` function that handles any details associated with different levels of logging -- it is called with the scope elements, e.g., `ss.Log(etime.Train, etime.Trial)` - -```Go -// Log is the main logging function, handles special things for different scopes -func (ss *Sim) Log(mode etime.Modes, time etime.Times) { - dt := ss.Logs.Table(mode, time) - row := dt.Rows - switch { - case mode == etime.Test && time == etime.Epoch: - ss.LogTestErrors() - case mode == etime.Train && time == etime.Epoch: - epc := ss.TrainEnv.Epoch.Cur - if (ss.PCAInterval > 0) && ((epc-1)%ss.PCAInterval == 0) { // -1 so runs on first epc - ss.PCAStats() - } - case time == etime.Cycle: - row = ss.Stats.Int("Cycle") - case time == etime.Trial: - row = ss.Stats.Int("Trial") - } - - ss.Logs.LogRow(mode, time, row) // also logs to file, etc - if time == etime.Cycle { - ss.GUI.UpdateCyclePlot(etime.Test, ss.Time.Cycle) - } else { - ss.GUI.UpdatePlot(mode, time) - } - - // post-logging special statistics - switch { - case mode == etime.Train && time == etime.Run: - ss.LogRunStats() - case mode == etime.Train && time == etime.Trial: - epc := ss.TrainEnv.Epoch.Cur - if (ss.PCAInterval > 0) && (epc%ss.PCAInterval == 0) { - ss.Log(etime.Analyze, etime.Trial) - } - } -} -``` - -### Resetting logs - -Often, at the end of the `Log` function, you need to reset logs at a lower level, after the data has been aggregated. This is critical for logs that add rows incrementally, and also when using MPI aggregation. 
- -```Go - if time == etime.Epoch { // Reset Trial log after Epoch - ss.Logs.ResetLog(mode, etime.Trial) - } -``` - -### MPI Aggregation - -When splitting trials across different processors using [mpi](https://github.com/emer/empi), you typically need to gather the trial-level data for aggregating at the epoch level. There is a function that handles this: - -```Go - if ss.UseMPI && time == etime.Epoch { // Must gather data for trial level if doing epoch level - ss.Logs.MPIGatherTableRows(mode, etime.Trial, ss.Comm) - } -``` - -The function switches the aggregated table in place of the local table, so that all the usual functions accessing the trial data will work properly. Because of this, it is essential to do the `ResetLog` or otherwise call `SetNumRows` to restore the trial log back to the proper number of rows -- otherwise it will grow exponentially! - -### Additional stats - -There are various additional analysis functions called here, for example this one that generates summary statistics about the overall performance across runs -- these are stored in the `MiscTables` in the `Logs` object: - -```Go -// LogRunStats records stats across all runs, at Train Run scope -func (ss *Sim) LogRunStats() { - sk := etime.Scope(etime.Train, etime.Run) - lt := ss.Logs.TableDetailsScope(sk) - ix, _ := lt.NamedIndexView("RunStats") - - spl := split.GroupBy(ix, []string{"Params"}) - split.Desc(spl, "FirstZero") - split.Desc(spl, "PctCor") - ss.Logs.MiscTables["RunStats"] = spl.AggsToTable(table.AddAggName) -} -``` - -## Counter Items - -All counters of interest should be written to [estats](https://github.com/emer/emergent/tree/main/estats) `Stats` elements, whenever the counters might be updated, and then logging just reads those stats. 
Here's a `StatCounters` function: - -```Go -// StatCounters saves current counters to Stats, so they are available for logging etc -// Also saves a string rep of them to the GUI, if the GUI is active -func (ss *Sim) StatCounters(train bool) { - ev := ss.TrainEnv - if !train { - ev = ss.TestEnv - } - ss.Stats.SetInt("Run", ss.TrainEnv.Run.Cur) - ss.Stats.SetInt("Epoch", ss.TrainEnv.Epoch.Cur) - ss.Stats.SetInt("Trial", ev.Trial.Cur) - ss.Stats.SetString("TrialName", ev.TrialName.Cur) - ss.Stats.SetInt("Cycle", ss.Time.Cycle) - ss.GUI.NetViewText = ss.Stats.Print([]string{"Run", "Epoch", "Trial", "TrialName", "Cycle", "TrlUnitErr", "TrlErr", "TrlCosDiff"}) -} -``` - -Then they are easily logged -- just showing different Scope expressions here: - -```Go - ss.Logs.AddItem(&elog.Item{ - Name: "Run", - Type: reflect.Int, - Plot: false, - Write: elog.WriteMap{ - etime.Scope(etime.AllModes, etime.AllTimes): func(ctx *elog.Context) { - ctx.SetStatInt("Run") - }}}) -``` - -```Go - ss.Logs.AddItem(&elog.Item{ - Name: "Epoch", - Type: reflect.Int, - Plot: false, - Write: elog.WriteMap{ - etime.Scopes([]etime.Modes{etime.AllModes}, []etime.Times{etime.Epoch, etime.Trial}): func(ctx *elog.Context) { - ctx.SetStatInt("Epoch") - }}}) -``` - -```Go - ss.Logs.AddItem(&elog.Item{ - Name: "Trial", - Type: reflect.Int, - Write: elog.WriteMap{ - etime.Scope(etime.AllModes, etime.Trial): func(ctx *elog.Context) { - ctx.SetStatInt("Trial") - }}}) -``` - -## Performance Stats - -Overall summary performance statistics have multiple Write functions for different scopes, performing aggregation over log data at lower levels: - -```Go - ss.Logs.AddItem(&elog.Item{ - Name: "UnitErr", - Type: reflect.Float64, - Plot: false, - Write: elog.WriteMap{ - etime.Scope(etime.AllModes, etime.Trial): func(ctx *elog.Context) { - ctx.SetStatFloat("TrlUnitErr") - }, etime.Scope(etime.AllModes, etime.Epoch): func(ctx *elog.Context) { - ctx.SetAgg(ctx.Mode, etime.Trial, stats.Mean) - }, 
etime.Scope(etime.AllModes, etime.Run): func(ctx *elog.Context) { - ix := ctx.LastNRows(ctx.Mode, etime.Epoch, 5) - ctx.SetFloat64(agg.Mean(ix, ctx.Item.Name)[0]) - }}}) -``` - -## Copy Stats from Testing (or any other log) - -It is often convenient to have just one log file with both training and testing performance recorded -- this code copies over relevant stats from the testing epoch log to the training epoch log: - -```Go - // Copy over Testing items - stats := []string{"UnitErr", "PctErr", "PctCor", "PctErr2", "CosDiff"} - for _, st := range stats { - stnm := st - tstnm := "Tst" + st - ss.Logs.AddItem(&elog.Item{ - Name: tstnm, - Type: reflect.Float64, - Plot: false, - Write: elog.WriteMap{ - etime.Scope(etime.Train, etime.Epoch): func(ctx *elog.Context) { - ctx.SetFloat64(ctx.ItemFloat(etime.Test, etime.Epoch, stnm)) - }}}) - } -``` - -## Layer Stats - -Iterate over layers of interest (use `LayersByClass` function). It is *essential* to create a local variable inside the loop for the `lnm` variable, which is then captured by the closure (see https://github.com/golang/go/wiki/CommonMistakes): - -```Go - // Standard stats for Ge and AvgAct tuning -- for all hidden, output layers - layers := ss.Net.LayersByClass("Hidden", "Target") - for _, lnm := range layers { - clnm := lnm - ss.Logs.AddItem(&elog.Item{ - Name: clnm + "_ActAvg", - Type: reflect.Float64, - Plot: false, - FixMax: false, - Range: minmax.F32{Max: 1}, - Write: elog.WriteMap{ - etime.Scope(etime.Train, etime.Epoch): func(ctx *elog.Context) { - ly := ctx.Layer(clnm).(axon.AxonLayer).AsAxon() - ctx.SetFloat32(ly.ActAvg.ActMAvg) - }}}) - ... 
- } -``` - -Here's how to log a pathway variable: - -```Go - ss.Logs.AddItem(&elog.Item{ - Name: clnm + "_FF_AvgMaxG", - Type: reflect.Float64, - Plot: false, - Range: minmax.F32{Max: 1}, - Write: elog.WriteMap{ - etime.Scope(etime.Train, etime.Trial): func(ctx *elog.Context) { - ffpj := cly.RecvPath(0).(*axon.Path) - ctx.SetFloat32(ffpj.GScale.AvgMax) - }, etime.Scope(etime.AllModes, etime.Epoch): func(ctx *elog.Context) { - ctx.SetAgg(ctx.Mode, etime.Trial, stats.Mean) - }}}) -``` - -## Layer Activity Patterns - -A log column can be a tensor of any shape -- the `SetLayerTensor` method on the Context grabs the data from the layer into a reused tensor (no memory churning after first initialization), and then stores that tensor into the log column. - -```Go - // input / output layer activity patterns during testing - layers = ss.Net.LayersByClass("Input", "Target") - for _, lnm := range layers { - clnm := lnm - cly := ss.Net.LayerByName(clnm) - ss.Logs.AddItem(&elog.Item{ - Name: clnm + "_Act", - Type: reflect.Float64, - CellShape: cly.Shape().Shp, - FixMax: true, - Range: minmax.F32{Max: 1}, - Write: elog.WriteMap{ - etime.Scope(etime.Test, etime.Trial): func(ctx *elog.Context) { - ctx.SetLayerTensor(clnm, "Act") - }}}) -``` - -## PCA on Activity - -Computing stats on the principal components of variance (PCA) across different input patterns is very informative about the nature of the internal representations in hidden layers. 
The [estats](https://github.com/emer/emergent/tree/main/estats) package has support for this -- it is fairly expensive computationally so we only do this every N epochs (10 or so), calling this method: - -```Go -// PCAStats computes PCA statistics on recorded hidden activation patterns -// from Analyze, Trial log data -func (ss *Sim) PCAStats() { - ss.Stats.PCAStats(ss.Logs.IndexView(etime.Analyze, etime.Trial), "ActM", ss.Net.LayersByClass("Hidden")) - ss.Logs.ResetLog(etime.Analyze, etime.Trial) -} -``` - -Here's how you record the data and log the resulting stats, using the `Analyze` `EvalMode`: - -```Go - // hidden activities for PCA analysis, and PCA results - layers = ss.Net.LayersByClass("Hidden") - for _, lnm := range layers { - clnm := lnm - cly := ss.Net.LayerByName(clnm) - ss.Logs.AddItem(&elog.Item{ - Name: clnm + "_ActM", - Type: reflect.Float64, - CellShape: cly.Shape().Shp, - FixMax: true, - Range: minmax.F32{Max: 1}, - Write: elog.WriteMap{ - etime.Scope(etime.Analyze, etime.Trial): func(ctx *elog.Context) { - ctx.SetLayerTensor(clnm, "ActM") - }}}) - ss.Logs.AddItem(&elog.Item{ - Name: clnm + "_PCA_NStrong", - Type: reflect.Float64, - Plot: false, - Write: elog.WriteMap{ - etime.Scope(etime.Train, etime.Epoch): func(ctx *elog.Context) { - ctx.SetStatFloat(ctx.Item.Name) - }, etime.Scope(etime.AllModes, etime.Run): func(ctx *elog.Context) { - ix := ctx.LastNRows(ctx.Mode, etime.Epoch, 5) - ctx.SetFloat64(agg.Mean(ix, ctx.Item.Name)[0]) - }}}) - ... - } -``` - -## Error by Input Category - -This item creates a tensor column that records the average error for each category of input stimulus (e.g., for images from object categories), using the `split.GroupBy` function for `table`. The `IndexView` function (see also `NamedIndexView`) automatically manages the `table.IndexView` indexed view onto a log table, which is used for all aggregation and further analysis of data, so that you can efficiently analyze filtered subsets of the original data. 
- -```Go - ss.Logs.AddItem(&elog.Item{ - Name: "CatErr", - Type: reflect.Float64, - CellShape: []int{20}, - DimNames: []string{"Cat"}, - Plot: true, - Range: minmax.F32{Min: 0}, - TensorIndex: -1, // plot all values - Write: elog.WriteMap{ - etime.Scope(etime.Test, etime.Epoch): func(ctx *elog.Context) { - ix := ctx.Logs.IndexView(etime.Test, etime.Trial) - spl := split.GroupBy(ix, []string{"Cat"}) - split.AggTry(spl, "Err", stats.Mean) - cats := spl.AggsToTable(table.ColumnNameOnly) - ss.Logs.MiscTables[ctx.Item.Name] = cats - ctx.SetTensor(cats.Columns[1]) - }}}) -``` - -## Confusion matricies - -The [estats](https://github.com/emer/emergent/tree/main/estats) package has a `Confusion` object to manage computation of a confusion matirx -- see [confusion](https://github.com/emer/emergent/tree/main/confusion) for more info. - -## Closest Pattern Stat - -The [estats](https://github.com/emer/emergent/tree/main/estats) package has a `ClosestPat` function that grabs the activity from a given variable in a given layer, and compares it to a list of patterns in a table, returning the pattern that is closest to the layer activity pattern, using the Correlation metric, which is the most robust metric in terms of ignoring differences in overall activity levels. You can also compare that closest pattern name to a (list of) acceptable target names and use that as an error measure. 
- -```Go - row, cor, cnm := ss.Stats.ClosestPat(ss.Net, "Output", "ActM", ss.Pats, "Output", "Name") - ss.Stats.SetString("TrlClosest", cnm) - ss.Stats.SetFloat("TrlCorrel", float64(cor)) - tnm := ss.TrainEnv.TrialName - if cnm == tnm { - ss.Stats.SetFloat("TrlErr", 0) - } else { - ss.Stats.SetFloat("TrlErr", 1) - } -``` - -## Activation-based Receptive Fields - -The [estats](https://github.com/emer/emergent/tree/main/estats) package has support for recording activation-based receptive fields ([actrf](https://github.com/emer/emergent/tree/main/actrf)), which are very useful for decoding what units represent. - -First, initialize the ActRFs in the `ConfigLogs` function, using strings that specify the layer name to record activity from, followed by the source data for the receptive field, which can be *anything* that might help you understand what the units are responding to, including the name of another layer. If it is not another layer name, then the code will look for the name in the `Stats.F32Tensors` map of named tensors. 
- -```Go - ss.Stats.SetF32Tensor("Image", &ss.TestEnv.Vis.ImgTsr) // image used for actrfs, must be there first - ss.Stats.InitActRFs(ss.Net, []string{"V4:Image", "V4:Output", "IT:Image", "IT:Output"}, "ActM") -``` - -To add tabs in the gui to visualize the resulting RFs, add this in your `ConfigGUI` (note also adding a tab to visualize the input Image that is being presented to the network): - -```Go - tg := ss.GUI.TabView.AddNewTab(tensorcore.KiT_TensorGrid, "Image").(*tensorcore.TensorGrid) - tg.SetStretchMax() - ss.GUI.SetGrid("Image", tg) - tg.SetTensor(&ss.TrainEnv.Vis.ImgTsr) - - ss.GUI.AddActRFGridTabs(&ss.Stats.ActRFs) -``` - -At the relevant `Trial` level, call the function to update the RF data based on current network activity state: - -```Go - ss.Stats.UpdateActRFs(ss.Net, "ActM", 0.01) -``` - -Here's a `TestAll` function that manages the testing of a large number of inputs to compute the RFs (often need a large amount of testing data to sample the space sufficiently to get meaningful results): - -```Go -// TestAll runs through the full set of testing items -func (ss *Sim) TestAll() { - ss.TestEnv.Init(ss.TrainEnv.Run.Cur) - ss.Stats.ActRFs.Reset() // initialize prior to testing - for { - ss.TestTrial(true) - ss.Stats.UpdateActRFs(ss.Net, "ActM", 0.01) - _, _, chg := ss.TestEnv.Counter(env.Epoch) - if chg || ss.StopNow { - break - } - } - ss.Stats.ActRFsAvgNorm() // final - ss.GUI.ViewActRFs(&ss.Stats.ActRFs) -} -``` - -## Representational Similarity Analysis (SimMat) - -## Cluster Plots - - diff --git a/elog/context.go b/elog/context.go deleted file mode 100644 index 3b5af40c..00000000 --- a/elog/context.go +++ /dev/null @@ -1,309 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package elog - -import ( - "fmt" - - "cogentcore.org/core/base/errors" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/metric" - "cogentcore.org/core/tensor/stats/stats" - "cogentcore.org/core/tensor/table" - "github.com/emer/emergent/v2/emer" - "github.com/emer/emergent/v2/estats" - "github.com/emer/emergent/v2/etime" -) - -// WriteFunc function that computes and sets log values -// The Context provides information typically needed for logging -type WriteFunc func(ctx *Context) - -// Context provides the context for logging Write functions. -// SetContext must be called on Logs to set the Stats and Net values -// Provides various convenience functions for setting log values -// and other commonly used operations. -type Context struct { - - // pointer to the Logs object with all log data - Logs *Logs - - // pointer to stats - Stats *estats.Stats - - // network - Net emer.Network - - // data parallel index for accessing data from network - Di int - - // current log Item - Item *Item - - // current scope key - Scope etime.ScopeKey - - // current scope eval mode (if standard) - Mode etime.Modes - - // current scope timescale (if standard) - Time etime.Times - - // LogTable with extra data for the table - LogTable *LogTable - - // current table to record value to - Table *table.Table - - // current row in table to write to - Row int -} - -// SetTable sets the current table & scope -- called by WriteItems -func (ctx *Context) SetTable(sk etime.ScopeKey, lt *LogTable, row int) { - ctx.Scope = sk - ctx.LogTable = lt - ctx.Table = lt.Table - ctx.Row = row - ctx.Mode, ctx.Time = sk.ModeAndTime() -} - -// SetFloat64 sets a float64 to current table, item, row -func (ctx *Context) SetFloat64(val float64) { - ctx.Table.SetFloat(ctx.Item.Name, ctx.Row, val) -} - -// SetFloat32 sets a float32 to current table, item, row -func (ctx *Context) SetFloat32(val float32) { - ctx.Table.SetFloat(ctx.Item.Name, ctx.Row, float64(val)) -} - -// SetInt sets an 
int to current table, item, row -func (ctx *Context) SetInt(val int) { - ctx.Table.SetFloat(ctx.Item.Name, ctx.Row, float64(val)) -} - -// SetString sets a string to current table, item, row -func (ctx *Context) SetString(val string) { - ctx.Table.SetString(ctx.Item.Name, ctx.Row, val) -} - -// SetStatFloat sets a Stats Float of given name to current table, item, row -func (ctx *Context) SetStatFloat(name string) { - ctx.Table.SetFloat(ctx.Item.Name, ctx.Row, ctx.Stats.Float(name)) -} - -// SetStatInt sets a Stats int of given name to current table, item, row -func (ctx *Context) SetStatInt(name string) { - ctx.Table.SetFloat(ctx.Item.Name, ctx.Row, float64(ctx.Stats.Int(name))) -} - -// SetStatString sets a Stats string of given name to current table, item, row -func (ctx *Context) SetStatString(name string) { - ctx.Table.SetString(ctx.Item.Name, ctx.Row, ctx.Stats.String(name)) -} - -// SetTensor sets a Tensor to current table, item, row -func (ctx *Context) SetTensor(val tensor.Tensor) { - ctx.Table.SetTensor(ctx.Item.Name, ctx.Row, val) -} - -// SetFloat64Cells sets float64 values to tensor cell -// in current table, item, row -func (ctx *Context) SetFloat64Cells(vals []float64) { - for i, v := range vals { - ctx.Table.SetTensorFloat1D(ctx.Item.Name, ctx.Row, i, v) - } -} - -/////////////////////////////////////////////////// -// Aggregation, data access - -// SetAgg sets an aggregated value computed from given eval mode -// and time scale with same Item name, to current item, row. -// Supports scalar or tensor cells. -// returns aggregated value(s). -func (ctx *Context) SetAgg(mode etime.Modes, time etime.Times, ag stats.Stats) []float64 { - return ctx.SetAggScope(etime.Scope(mode, time), ag) -} - -// SetAggScope sets an aggregated value computed from -// another scope (ScopeKey) with same Item name, to current item, row. -// Supports scalar or tensor cells. -// returns aggregated value(s). 
-func (ctx *Context) SetAggScope(scope etime.ScopeKey, ag stats.Stats) []float64 { - return ctx.SetAggItemScope(scope, ctx.Item.Name, ag) -} - -// SetAggItem sets an aggregated value computed from given eval mode -// and time scale with given Item name, to current item, row. -// Supports scalar or tensor cells. -// returns aggregated value(s). -func (ctx *Context) SetAggItem(mode etime.Modes, time etime.Times, itemNm string, ag stats.Stats) []float64 { - return ctx.SetAggItemScope(etime.Scope(mode, time), itemNm, ag) -} - -// SetAggItemScope sets an aggregated value computed from -// another scope (ScopeKey) with given Item name, to current item, row. -// Supports scalar or tensor cells. -// returns aggregated value(s). -func (ctx *Context) SetAggItemScope(scope etime.ScopeKey, itemNm string, ag stats.Stats) []float64 { - ix := ctx.Logs.IndexViewScope(scope) - vals, err := stats.StatColumn(ix, itemNm, ag) - if err != nil { - fmt.Printf("elog.Context SetAggItemScope for item: %s in scope: %s: could not aggregate item: %s from scope: %s: %s\n", ctx.Item.Name, ctx.Scope, itemNm, scope, err.Error()) - return nil - } - cl, err := ctx.Table.ColumnByName(ctx.Item.Name) - if errors.Log(err) != nil { - return vals - } - if cl.NumDims() > 1 { - ctx.SetFloat64Cells(vals) - } else { - ctx.SetFloat64(vals[0]) - } - return vals -} - -// ItemFloat returns a float64 value of the last row of given item name -// in log for given mode, time -func (ctx *Context) ItemFloat(mode etime.Modes, time etime.Times, itemNm string) float64 { - return ctx.ItemFloatScope(etime.Scope(mode, time), itemNm) -} - -// ItemFloatScope returns a float64 value of the last row of given item name -// in log for given scope. 
-func (ctx *Context) ItemFloatScope(scope etime.ScopeKey, itemNm string) float64 { - dt := ctx.Logs.TableScope(scope) - if dt.Rows == 0 { - return 0 - } - return dt.Float(itemNm, dt.Rows-1) -} - -// ItemString returns a string value of the last row of given item name -// in log for given mode, time -func (ctx *Context) ItemString(mode etime.Modes, time etime.Times, itemNm string) string { - return ctx.ItemStringScope(etime.Scope(mode, time), itemNm) -} - -// ItemStringScope returns a string value of the last row of given item name -// in log for given scope. -func (ctx *Context) ItemStringScope(scope etime.ScopeKey, itemNm string) string { - dt := ctx.Logs.TableScope(scope) - if dt.Rows == 0 { - return "" - } - return dt.StringValue(itemNm, dt.Rows-1) -} - -// ItemTensor returns an tensor.Tensor of the last row of given item name -// in log for given mode, time -func (ctx *Context) ItemTensor(mode etime.Modes, time etime.Times, itemNm string) tensor.Tensor { - return ctx.ItemTensorScope(etime.Scope(mode, time), itemNm) -} - -// ItemTensorScope returns an tensor.Tensor of the last row of given item name -// in log for given scope. -func (ctx *Context) ItemTensorScope(scope etime.ScopeKey, itemNm string) tensor.Tensor { - dt := ctx.Logs.TableScope(scope) - if dt.Rows == 0 { - return nil - } - return dt.Tensor(itemNm, dt.Rows-1) -} - -// ItemColTensor returns an tensor.Tensor of the entire column of given item name -// in log for given mode, time -func (ctx *Context) ItemColTensor(mode etime.Modes, time etime.Times, itemNm string) tensor.Tensor { - return ctx.ItemColTensorScope(etime.Scope(mode, time), itemNm) -} - -// ItemColTensorScope returns an tensor.Tensor of the entire column of given item name -// in log for given scope. 
-func (ctx *Context) ItemColTensorScope(scope etime.ScopeKey, itemNm string) tensor.Tensor { - dt := ctx.Logs.TableScope(scope) - return errors.Log1(dt.ColumnByName(itemNm)) -} - -/////////////////////////////////////////////////// -// Network - -// Layer returns layer by name as the emer.Layer interface. -// May then need to convert to a concrete type depending. -func (ctx *Context) Layer(layNm string) emer.Layer { - return errors.Log1(ctx.Net.AsEmer().EmerLayerByName(layNm)) -} - -// GetLayerTensor gets tensor of Unit values on a layer for given variable -// from current ctx.Di data parallel index. -func (ctx *Context) GetLayerTensor(layNm, unitVar string) *tensor.Float32 { - ly := ctx.Layer(layNm) - tsr := ctx.Stats.F32Tensor(layNm) - ly.AsEmer().UnitValuesTensor(tsr, unitVar, ctx.Di) - return tsr -} - -// GetLayerSampleTensor gets tensor of representative Unit values on a layer for given variable -// from current ctx.Di data parallel index. -func (ctx *Context) GetLayerSampleTensor(layNm, unitVar string) *tensor.Float32 { - ly := ctx.Layer(layNm) - tsr := ctx.Stats.F32Tensor(layNm) - ly.AsEmer().UnitValuesSampleTensor(tsr, unitVar, ctx.Di) - return tsr -} - -// SetLayerTensor sets tensor of Unit values on a layer for given variable -// to current ctx.Di data parallel index. -func (ctx *Context) SetLayerTensor(layNm, unitVar string) *tensor.Float32 { - tsr := ctx.GetLayerTensor(layNm, unitVar) - ctx.SetTensor(tsr) - return tsr -} - -// SetLayerSampleTensor sets tensor of representative Unit values on a layer for given variable -// to current ctx.Di data parallel index. -func (ctx *Context) SetLayerSampleTensor(layNm, unitVar string) *tensor.Float32 { - tsr := ctx.GetLayerSampleTensor(layNm, unitVar) - ctx.SetTensor(tsr) - return tsr -} - -// ClosestPat finds the closest pattern in given column of given pats table to -// given layer activation pattern using given variable. 
Returns the row number, -// correlation value, and value of a column named namecol for that row if non-empty. -// Column must be tensor.Float32 -func (ctx *Context) ClosestPat(layNm, unitVar string, pats *table.Table, colnm, namecol string) (int, float32, string) { - tsr := ctx.SetLayerTensor(layNm, unitVar) - col := errors.Log1(pats.ColumnByName(colnm)) - // note: requires Increasing metric so using Inv - row, cor := metric.ClosestRow32(tsr, col.(*tensor.Float32), metric.InvCorrelation32) - cor = 1 - cor // convert back to correl - nm := "" - if namecol != "" { - nm = pats.StringValue(namecol, row) - } - return row, cor, nm -} - -/////////////////////////////////////////////////// -// IndexViews - -// LastNRows returns an IndexView onto table for given scope with the last -// n rows of the table (only valid rows, if less than n). -// This index view is available later with the "LastNRows" name via -// NamedIndexView functions. -func (ctx *Context) LastNRows(mode etime.Modes, time etime.Times, n int) *table.IndexView { - return ctx.LastNRowsScope(etime.Scope(mode, time), n) -} - -// LastNRowsScope returns an IndexView onto table for given scope with the last -// n rows of the table (only valid rows, if less than n). -// This index view is available later with the "LastNRows" name via -// NamedIndexView functions. -func (ctx *Context) LastNRowsScope(sk etime.ScopeKey, n int) *table.IndexView { - return ctx.Logs.LastNRowsScope(sk, n) -} diff --git a/elog/elog_test.go b/elog/elog_test.go deleted file mode 100644 index fe104ec2..00000000 --- a/elog/elog_test.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package elog - -import ( - "reflect" - "testing" - - "github.com/emer/emergent/v2/etime" -) - -func TestScopeKeyStringing(t *testing.T) { - sk := etime.Scope(etime.Train, etime.Epoch) - if sk != "Train&Epoch" { - t.Errorf("Got unexpected scopekey " + string(sk)) - } - sk2 := etime.Scopes([]etime.Modes{etime.Train, etime.Test}, []etime.Times{etime.Epoch, etime.Cycle}) - if sk2 != "Train|Test&Epoch|Cycle" { - t.Errorf("Got unexpected scopekey " + string(sk2)) - } - modes, times := sk2.ModesAndTimes() - if len(modes) != 2 || len(times) != 2 { - t.Errorf("Error parsing scopekey") - } -} - -func TestItem(t *testing.T) { - item := Item{ - Name: "Testo", - Type: reflect.String, - Write: WriteMap{"Train|Test&Epoch|Cycle": func(ctx *Context) { - // DO NOTHING - }}, - } - item.SetEachScopeKey() - _, ok := item.WriteFunc("Train", "Epoch") - if !ok { - t.Errorf("Error getting compute function") - } - if item.HasMode(etime.Validate) || item.HasTime(etime.Run) { - t.Errorf("Item has mode or time it shouldn't") - } -} diff --git a/elog/item.go b/elog/item.go deleted file mode 100644 index bf457750..00000000 --- a/elog/item.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package elog - -import ( - "reflect" - - "cogentcore.org/core/math32/minmax" - "github.com/emer/emergent/v2/etime" -) - -const ( - // DTrue is deprecated -- just use true - DTrue = true - // DFalse is deprecated -- just use false - DFalse = false -) - -// WriteMap holds log writing functions for scope keys -type WriteMap map[etime.ScopeKey]WriteFunc - -// Item describes one item to be logged -- has all the info -// for this item, across all scopes where it is relevant. 
-type Item struct { //types:add -setters - - // name of column -- must be unique for a table - Name string - - // data type, using tensor types which are isomorphic with arrow.Type - Type reflect.Kind - - // shape of a single cell in the column (i.e., without the row dimension) -- for scalars this is nil -- tensor column will add the outer row dimension to this shape - CellShape []int - - // names of the dimensions within the CellShape -- 'Row' will be added to outer dimension - DimNames []string - - // holds Write functions for different scopes. After processing, the scope key will be a single mode and time, from Scope(mode, time), but the initial specification can lists for each, or the All* option, if there is a Write function that works across scopes - Write WriteMap - - // Whether or not to plot it - Plot bool - - // The minimum and maximum values, for plotting - Range minmax.F32 - - // Whether to fix the minimum in the display - FixMin bool - - // Whether to fix the maximum in the display - FixMax bool - - // Name of other item that has the error bar values for this item -- for plotting - ErrCol string - - // index of tensor to plot -- defaults to 0 -- use -1 to plot all - TensorIndex int - - // specific color for plot -- uses default ordering of colors if empty - Color string - - // map of eval modes that this item has a Write function for - Modes map[string]bool - - // map of times that this item has a Write function for - Times map[string]bool -} - -func (item *Item) WriteFunc(mode, time string) (WriteFunc, bool) { - val, ok := item.Write[etime.ScopeStr(mode, time)] - return val, ok -} - -// SetWriteFuncAll sets the Write function for all existing Modes and Times -// Can be used to replace a Write func after the fact. 
-func (item *Item) SetWriteFuncAll(theFunc WriteFunc) { - for mode := range item.Modes { - for time := range item.Times { - item.Write[etime.ScopeStr(mode, time)] = theFunc - } - } -} - -// SetWriteFuncOver sets the Write function over range of modes and times -func (item *Item) SetWriteFuncOver(modes []etime.Modes, times []etime.Times, theFunc WriteFunc) { - for _, mode := range modes { - for _, time := range times { - item.Write[etime.Scope(mode, time)] = theFunc - } - } -} - -// SetWriteFunc sets Write function for one mode, time -func (item *Item) SetWriteFunc(mode etime.Modes, time etime.Times, theFunc WriteFunc) { - item.SetWriteFuncOver([]etime.Modes{mode}, []etime.Times{time}, theFunc) -} - -// SetEachScopeKey updates the Write map so that it only contains entries -// for a unique Mode,Time pair, where multiple modes and times may have -// originally been specified. -func (item *Item) SetEachScopeKey() { - newWrite := WriteMap{} - doReplace := false - for sk, c := range item.Write { - modes, times := sk.ModesAndTimes() - if len(modes) > 1 || len(times) > 1 { - doReplace = true - for _, m := range modes { - for _, t := range times { - newWrite[etime.ScopeStr(m, t)] = c - } - } - } else { - newWrite[sk] = c - } - } - if doReplace { - item.Write = newWrite - } -} - -// CompileScopes compiles maps of modes and times where this item appears. 
-// Based on the final updated Write map -func (item *Item) CompileScopes() { - item.Modes = make(map[string]bool) - item.Times = make(map[string]bool) - for scope, _ := range item.Write { - modes, times := scope.ModesAndTimes() - for _, mode := range modes { - item.Modes[mode] = true - } - for _, time := range times { - item.Times[time] = true - } - } -} - -func (item *Item) HasMode(mode etime.Modes) bool { - _, has := item.Modes[mode.String()] - return has -} - -func (item *Item) HasTime(time etime.Times) bool { - _, has := item.Times[time.String()] - return has -} diff --git a/elog/logs.go b/elog/logs.go deleted file mode 100644 index 01510c90..00000000 --- a/elog/logs.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package elog - -//go:generate core generate -add-types - -import ( - "fmt" - "log" - "os" - "path/filepath" - "strconv" - - "cogentcore.org/core/base/mpi" - "cogentcore.org/core/tensor/table" - "cogentcore.org/core/tensor/tensormpi" - "github.com/emer/emergent/v2/emer" - "github.com/emer/emergent/v2/estats" - "github.com/emer/emergent/v2/etime" -) - -// LogPrec is precision for saving float values in logs -const LogPrec = 4 - -// LogDir is a directory that is prefixed for saving log files -var LogDir = "" - -// Logs contains all logging state and API for doing logging. -// do AddItem to add any number of items, at different eval mode, time scopes. -// Each Item has its own Write functions, at each scope as neeeded. -// Then call CreateTables to generate log Tables from those items. -// Call Log with a scope to add a new row of data to the log -// and ResetLog to reset the log to empty. -type Logs struct { - - // Tables storing log data, auto-generated from Items. 
- Tables map[etime.ScopeKey]*LogTable - - // holds additional tables not computed from items -- e.g., aggregation results, intermediate computations, etc - MiscTables map[string]*table.Table - - // A list of the items that should be logged. Each item should describe one column that you want to log, and how. Order in list determines order in logs. - Items []*Item `display:"-"` - - // context information passed to logging Write functions -- has all the information needed to compute and write log values -- is updated for each item in turn - Context Context `display:"-"` - - // All the eval modes that appear in any of the items of this log. - Modes map[string]bool `display:"-"` - - // All the timescales that appear in any of the items of this log. - Times map[string]bool `display:"-"` - - // map of item indexes by name, for rapid access to items if they need to be modified after adding. - ItemIndexMap map[string]int `display:"-"` - - // sorted order of table scopes - TableOrder []etime.ScopeKey `display:"-"` -} - -// AddItem adds an item to the list. The items are stored in the order -// they are added, and this order is used for calling the item Write -// functions, so you can rely on that ordering for any sequential -// dependencies across items (e.g., in using intermediate computed values). -// Note: item names must be unique -- use different scopes for Write functions -// where needed. 
-func (lg *Logs) AddItem(item *Item) *Item { - lg.Items = append(lg.Items, item) - if lg.ItemIndexMap == nil { - lg.ItemIndexMap = make(map[string]int) - } - // note: we're not really in a position to track errors in a big list of - // AddItem statements, so don't bother with error return - if _, has := lg.ItemIndexMap[item.Name]; has { - log.Printf("elog.AddItem Warning: item name repeated: %s -- item names must be unique -- use different scopes in their Write functions instead of adding multiple entries\n", item.Name) - } - lg.ItemIndexMap[item.Name] = len(lg.Items) - 1 - return item -} - -// ItemByName returns item by given name, false if not found -func (lg *Logs) ItemByName(name string) (*Item, bool) { - idx, has := lg.ItemIndexMap[name] - if !has { - return nil, false - } - itm := lg.Items[idx] - return itm, true -} - -// SetContext sets the Context for logging Write functions -// to give general access to the stats and network -func (lg *Logs) SetContext(stats *estats.Stats, net emer.Network) { - lg.Context.Logs = lg - lg.Context.Stats = stats - lg.Context.Net = net -} - -// Table returns the table for given mode, time -func (lg *Logs) Table(mode etime.Modes, time etime.Times) *table.Table { - sk := etime.Scope(mode, time) - tb, ok := lg.Tables[sk] - if !ok { - // log.Printf("Table for scope not found: %s\n", sk) - return nil - } - return tb.Table -} - -// TableScope returns the table for given etime.ScopeKey -func (lg *Logs) TableScope(sk etime.ScopeKey) *table.Table { - tb, ok := lg.Tables[sk] - if !ok { - // log.Printf("Table for scope not found: %s\n", sk) - return nil - } - return tb.Table -} - -// MiscTable gets a miscellaneous table, e.g., for misc analysis. -// If it doesn't exist, one is created. 
-func (lg *Logs) MiscTable(name string) *table.Table { - dt, has := lg.MiscTables[name] - if has { - return dt - } - dt = &table.Table{} - lg.MiscTables[name] = dt - return dt -} - -// IndexView returns the Index View of a log table for a given mode, time -// This is used for data aggregation functions over the entire table. -// It should not be altered (don't Filter!) and always shows the whole table. -// See NamedIndexView for custom index views. -func (lg *Logs) IndexView(mode etime.Modes, time etime.Times) *table.IndexView { - return lg.IndexViewScope(etime.Scope(mode, time)) -} - -// IndexViewScope returns the Index View of a log table for given etime.ScopeKey -// This is used for data aggregation functions over the entire table. -// This view should not be altered and always shows the whole table. -// See NamedIndexView for custom index views. -func (lg *Logs) IndexViewScope(sk etime.ScopeKey) *table.IndexView { - lt := lg.Tables[sk] - return lt.GetIndexView() -} - -// NamedIndexView returns a named Index View of a log table for a given mode, time. -// This is used for additional data aggregation, filtering etc. -// When accessing the first time during writing a new row of the log, -// it automatically shows a view of the entire table and returns true for 2nd arg. -// You can then filter, sort, etc as needed. Subsequent calls within same row Write will -// return the last filtered view, and false for 2nd arg -- can then just reuse view. -func (lg *Logs) NamedIndexView(mode etime.Modes, time etime.Times, name string) (*table.IndexView, bool) { - return lg.NamedIndexViewScope(etime.Scope(mode, time), name) -} - -// NamedIndexView returns a named Index View of a log table for a given mode, time. -// This is used for additional data aggregation, filtering etc. -// When accessing the first time during writing a new row of the log, -// it automatically shows a view of the entire table and returns true for 2nd arg. -// You can then filter, sort, etc as needed. 
Subsequent calls within same row Write will -// return the last filtered view, and false for 2nd arg -- can then just reuse view. -func (lg *Logs) NamedIndexViewScope(sk etime.ScopeKey, name string) (*table.IndexView, bool) { - lt := lg.Tables[sk] - return lt.NamedIndexView(name) -} - -// TableDetails returns the LogTable record of associated info for given table -func (lg *Logs) TableDetails(mode etime.Modes, time etime.Times) *LogTable { - return lg.Tables[etime.Scope(mode, time)] -} - -// TableDetailsScope returns the LogTable record of associated info for given table -func (lg *Logs) TableDetailsScope(sk etime.ScopeKey) *LogTable { - return lg.Tables[sk] -} - -// SetMeta sets table meta data for given scope mode, time. -func (lg *Logs) SetMeta(mode etime.Modes, time etime.Times, key, val string) { - lg.SetMetaScope(etime.Scope(mode, time), key, val) -} - -// SetMetaScope sets table meta data for given scope -func (lg *Logs) SetMetaScope(sk etime.ScopeKey, key, val string) { - lt, has := lg.Tables[sk] - if !has { - return - } - lt.Meta[key] = val -} - -// NoPlot sets meta data to not plot for given scope mode, time. -// Typically all combinations of mode and time end up being -// generated, so you have to turn off plotting of cases not used. -func (lg *Logs) NoPlot(mode etime.Modes, time ...etime.Times) { - for _, tm := range time { - lg.NoPlotScope(etime.Scope(mode, tm)) - } -} - -// NoPlotScope sets meta data to not plot for given scope mode, time. -// Typically all combinations of mode and time end up being -// generated, so you have to turn off plotting of cases not used. -func (lg *Logs) NoPlotScope(sk etime.ScopeKey) { - lg.SetMetaScope(sk, "Plot", "false") -} - -// CreateTables creates the log tables based on all the specified log items -// It first calls ProcessItems to instantiate specific scopes. 
-func (lg *Logs) CreateTables() error { - lg.ProcessItems() - tables := make(map[etime.ScopeKey]*LogTable) - tableOrder := make([]etime.ScopeKey, 0) //initial size - var err error - for _, item := range lg.Items { - for scope, _ := range item.Write { - _, has := tables[scope] - modes, times := scope.ModesAndTimes() - if len(modes) != 1 || len(times) != 1 { - err = fmt.Errorf("Unexpected too long modes or times in: " + string(scope)) - log.Println(err) // actually print the err - } - if !has { - dt := lg.NewTable(modes[0], times[0]) - tables[scope] = NewLogTable(dt) - tableOrder = append(tableOrder, scope) - if modes[0] == "Analyze" || modes[0] == "Validate" || modes[0] == "Debug" { - tables[scope].Meta["Plot"] = "false" // don't plot by default - } - } - } - } - lg.Tables = tables - lg.TableOrder = etime.SortScopes(tableOrder) - lg.MiscTables = make(map[string]*table.Table) - - return err -} - -// Log performs logging for given mode, time. -// Adds a new row and Writes all the items. -// and saves data to file if open. -func (lg *Logs) Log(mode etime.Modes, time etime.Times) *table.Table { - sk := etime.Scope(mode, time) - lt := lg.Tables[sk] - return lg.LogRow(mode, time, lt.Table.Rows) -} - -// LogScope performs logging for given etime.ScopeKey -// Adds a new row and Writes all the items. -// and saves data to file if open. -func (lg *Logs) LogScope(sk etime.ScopeKey) *table.Table { - lt := lg.Tables[sk] - return lg.LogRowScope(sk, lt.Table.Rows, 0) -} - -// LogRow performs logging for given mode, time, at given row. -// Saves data to file if open. -func (lg *Logs) LogRow(mode etime.Modes, time etime.Times, row int) *table.Table { - return lg.LogRowScope(etime.Scope(mode, time), row, 0) -} - -// LogRowDi performs logging for given mode, time, at given row, -// using given data parallel index di, which adds to the row and all network -// access routines use this index for accessing network data. -// Saves data to file if open. 
-func (lg *Logs) LogRowDi(mode etime.Modes, time etime.Times, row int, di int) *table.Table { - return lg.LogRowScope(etime.Scope(mode, time), row, di) -} - -// LogRowScope performs logging for given etime.ScopeKey, at given row. -// Saves data to file if open. -// di is a data parallel index, for networks capable of processing input patterns in parallel. -// effective row is row + di -func (lg *Logs) LogRowScope(sk etime.ScopeKey, row int, di int) *table.Table { - lt := lg.Tables[sk] - dt := lt.Table - lg.Context.Di = di - if row < 0 { - row = dt.Rows - } else { - row += di - } - if dt.Rows <= row { - dt.SetNumRows(row + 1) - } - lg.WriteItems(sk, row) - lt.ResetIndexViews() // dirty that so it is regenerated later when needed - lg.WriteLastRowToFile(lt) - return dt -} - -// ResetLog resets the log for given mode, time, at given row. -// by setting number of rows = 0 -// The IndexViews are reset too. -func (lg *Logs) ResetLog(mode etime.Modes, time etime.Times) { - sk := etime.Scope(mode, time) - lt, ok := lg.Tables[sk] - if !ok { - return - } - dt := lt.Table - dt.SetNumRows(0) - lt.ResetIndexViews() -} - -// MPIGatherTableRows calls tensormpi.GatherTableRows on the given log table -// using an "MPI" suffixed MiscTable that is then switched out with the main table, -// so that any subsequent aggregation etc operates as usual on the full set of data. -// IMPORTANT: this switch means that the number of rows in the table MUST be reset -// back to either 0 (e.g., ResetLog) or the target number of rows, after the table -// is used, otherwise it will grow exponentially! 
-func (lg *Logs) MPIGatherTableRows(mode etime.Modes, time etime.Times, comm *mpi.Comm) { - sk := etime.Scope(mode, time) - lt := lg.Tables[sk] - dt := lt.Table - skm := string(sk + "MPI") - mt, has := lg.MiscTables[skm] - if !has { - mt = &table.Table{} - } - tensormpi.GatherTableRows(mt, dt, comm) - lt.Table = mt - lg.MiscTables[skm] = dt // note: actual underlying tables are always being swapped - lt.ResetIndexViews() -} - -// SetLogFile sets the log filename for given scope -func (lg *Logs) SetLogFile(mode etime.Modes, time etime.Times, fnm string) { - lt := lg.TableDetails(mode, time) - if lt == nil { - return - } - if LogDir != "" { - fnm = filepath.Join(LogDir, fnm) - } - var err error - lt.File, err = os.Create(fnm) - if err != nil { - log.Println(err) - lt.File = nil - } else { - fmt.Printf("Saving log to: %s\n", fnm) - } -} - -// CloseLogFiles closes all open log files -func (lg *Logs) CloseLogFiles() { - for _, lt := range lg.Tables { - if lt.File != nil { - lt.File.Close() - lt.File = nil - } - } -} - -/////////////////////////////////////////////////////////////////////////// -// Internal infrastructure below, main user API above - -// WriteItems calls all item Write functions within given scope -// providing the relevant Context for the function. -// Items are processed in the order added, to enable sequential -// dependencies to be used. 
-func (lg *Logs) WriteItems(sk etime.ScopeKey, row int) { - lg.Context.SetTable(sk, lg.Tables[sk], row) - for _, item := range lg.Items { - fun, ok := item.Write[sk] - if ok { - lg.Context.Item = item - // if strings.Contains(string(sk), "Epoch") { - // fmt.Printf("%#v\n", lg.Context.Item) - // } - fun(&lg.Context) - } - } -} - -// WriteLastRowToFile writes the last row of table to file, if File != nil -func (lg *Logs) WriteLastRowToFile(lt *LogTable) { - if lt.File == nil { - return - } - dt := lt.Table - if !lt.WroteHeaders { - dt.WriteCSVHeaders(lt.File, table.Tab) - lt.WroteHeaders = true - } - dt.WriteCSVRow(lt.File, dt.Rows-1, table.Tab) -} - -// ProcessItems is called in CreateTables, after all items have been added. -// It instantiates All scopes, and compiles multi-list scopes into -// single mode, item pairs -func (lg *Logs) ProcessItems() { - lg.CompileAllScopes() - for _, item := range lg.Items { - lg.ItemBindAllScopes(item) - item.SetEachScopeKey() - item.CompileScopes() - } -} - -// CompileAllScopes gathers all the modes and times used across all items -func (lg *Logs) CompileAllScopes() { - lg.Modes = make(map[string]bool) - lg.Times = make(map[string]bool) - for _, item := range lg.Items { - for sk, _ := range item.Write { - modes, times := sk.ModesAndTimes() - for _, m := range modes { - if m == "AllModes" || m == "NoEvalMode" { - continue - } - lg.Modes[m] = true - } - for _, t := range times { - if t == "AllTimes" || t == "NoTime" { - continue - } - lg.Times[t] = true - } - } - } -} - -// ItemBindAllScopes translates the AllModes or AllTimes scopes into -// a concrete list of actual Modes and Times used across all items -func (lg *Logs) ItemBindAllScopes(item *Item) { - newMap := WriteMap{} - for sk, c := range item.Write { - newsk := sk - useAllModes := false - useAllTimes := false - modes, times := sk.ModesAndTimesMap() - for m := range modes { - if m == "AllModes" { - useAllModes = true - } - } - for t := range times { - if t == "AllTimes" { - 
useAllTimes = true - } - } - if useAllModes && useAllTimes { - newsk = etime.ScopesMap(lg.Modes, lg.Times) - } else if useAllModes { - newsk = etime.ScopesMap(lg.Modes, times) - } else if useAllTimes { - newsk = etime.ScopesMap(modes, lg.Times) - } - newMap[newsk] = c - } - item.Write = newMap -} - -// NewTable returns a new table configured for given mode, time scope -func (lg *Logs) NewTable(mode, time string) *table.Table { - dt := &table.Table{} - dt.SetMetaData("name", mode+time+"Log") - dt.SetMetaData("desc", "Record of performance over "+time+" for "+mode) - dt.SetMetaData("read-only", "true") - dt.SetMetaData("precision", strconv.Itoa(LogPrec)) - for _, val := range lg.Items { - // Write is the definive record for which timescales are logged. - if _, ok := val.WriteFunc(mode, time); ok { - dt.AddTensorColumnOfType(val.Type, val.Name, val.CellShape, val.DimNames...) - } - } - return dt -} diff --git a/elog/stditems.go b/elog/stditems.go deleted file mode 100644 index 14ab81bc..00000000 --- a/elog/stditems.go +++ /dev/null @@ -1,450 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package elog - -import ( - "fmt" - "reflect" - "time" - - "cogentcore.org/core/base/errors" - "cogentcore.org/core/math32/minmax" - "cogentcore.org/core/tensor/stats/stats" - "cogentcore.org/core/tensor/table" - "github.com/emer/emergent/v2/emer" - "github.com/emer/emergent/v2/etime" -) - -// AddCounterItems adds given Int counters from Stats, -// typically by recording looper counter values to Stats. 
-func (lg *Logs) AddCounterItems(ctrs ...etime.Times) { - for ci, ctr := range ctrs { - ctrName := ctr.String() // closure - tm := etime.AllTimes - if ctr < etime.Epoch { - tm = ctr - } - itm := lg.AddItem(&Item{ - Name: ctrName, - Type: reflect.Int, - Write: WriteMap{ - etime.Scope(etime.AllModes, tm): func(ctx *Context) { - ctx.SetStatInt(ctrName) - }}}) - if ctr < etime.Epoch { - for ti := ci + 1; ti < len(ctrs); ti++ { - itm.Write[etime.Scope(etime.AllModes, ctrs[ti])] = func(ctx *Context) { - ctx.SetStatInt(ctrName) - } - } - } - } -} - -// AddStdAggs adds standard aggregation items for times up to the penultimate -// time step provided, for given stat item that was created for the final timescale. -func (lg *Logs) AddStdAggs(itm *Item, mode etime.Modes, times ...etime.Times) { - ntimes := len(times) - for i := ntimes - 2; i >= 0; i-- { - tm := times[i] - if tm == etime.Run || tm == etime.Condition { - itm.Write[etime.Scope(mode, tm)] = func(ctx *Context) { - ix := ctx.LastNRows(ctx.Mode, times[i+1], 5) // cached - ctx.SetFloat64(stats.MeanColumn(ix, ctx.Item.Name)[0]) - } - } else { - itm.Write[etime.Scope(mode, times[i])] = func(ctx *Context) { - ctx.SetAgg(ctx.Mode, times[i+1], stats.Mean) - } - } - } -} - -// AddStatAggItem adds a Float64 stat that is aggregated -// with stats.MeanColumn across the given time scales, -// ordered from higher to lower, e.g., Run, Epoch, Trial. -// The statName is the source statistic in stats at the lowest level, -// and is also used for the log item name. 
-// For the Run or Condition level, aggregation is the mean over last 5 rows of prior -// level (Epoch) -func (lg *Logs) AddStatAggItem(statName string, times ...etime.Times) *Item { - ntimes := len(times) - itm := lg.AddItem(&Item{ - Name: statName, - Type: reflect.Float64, - FixMin: true, - // FixMax: true, - Range: minmax.F32{Max: 1}, - Write: WriteMap{ - etime.Scope(etime.AllModes, times[ntimes-1]): func(ctx *Context) { - ctx.SetFloat64(ctx.Stats.Float(statName)) - }}}) - lg.AddStdAggs(itm, etime.AllModes, times...) - return itm -} - -// AddStatFloatNoAggItem adds float statistic(s) of given names -// for just one mode, time, with no aggregation. -// If another item already exists for a different mode / time, this is added -// to it so there aren't any duplicate items. -func (lg *Logs) AddStatFloatNoAggItem(mode etime.Modes, etm etime.Times, stats ...string) { - for _, st := range stats { - stName := st // closure - itm, has := lg.ItemByName(stName) - if has { - itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) { - ctx.SetStatFloat(stName) - } - } else { - lg.AddItem(&Item{ - Name: stName, - Type: reflect.Float64, - Range: minmax.F32{Min: -1}, - Write: WriteMap{ - etime.Scope(mode, etm): func(ctx *Context) { - ctx.SetStatFloat(stName) - }}}) - } - } -} - -// AddStatIntNoAggItem adds int statistic(s) of given names -// for just one mode, time, with no aggregation. -// If another item already exists for a different mode / time, this is added -// to it so there aren't any duplicate items. 
-func (lg *Logs) AddStatIntNoAggItem(mode etime.Modes, etm etime.Times, stats ...string) { - for _, st := range stats { - stName := st // closure - itm, has := lg.ItemByName(stName) - if has { - itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) { - ctx.SetStatInt(stName) - } - } else { - lg.AddItem(&Item{ - Name: stName, - Type: reflect.Int, - Range: minmax.F32{Min: -1}, - Write: WriteMap{ - etime.Scope(mode, etm): func(ctx *Context) { - ctx.SetStatInt(stName) - }}}) - } - } -} - -// AddStatStringItem adds string stat item(s) to given mode and time (e.g., Allmodes, Trial). -// If another item already exists for a different mode / time, this is added -// to it so there aren't any duplicate items. -func (lg *Logs) AddStatStringItem(mode etime.Modes, etm etime.Times, stats ...string) { - for _, st := range stats { - stName := st // closure - itm, has := lg.ItemByName(stName) - if has { - itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) { - ctx.SetStatString(stName) - } - } else { - lg.AddItem(&Item{ - Name: stName, - Type: reflect.String, - Write: WriteMap{ - etime.Scope(mode, etm): func(ctx *Context) { - ctx.SetStatString(stName) - }}}) - } - } -} - -// InitErrStats initializes the base stats variables used for -// AddErrStatAggItems: TrlErr, FirstZero, LastZero, NZero -func (lg *Logs) InitErrStats() { - stats := lg.Context.Stats - if stats == nil { - return - } - stats.SetFloat("TrlErr", 0.0) - stats.SetInt("FirstZero", -1) // critical to reset to -1 - stats.SetInt("LastZero", -1) // critical to reset to -1 - stats.SetInt("NZero", 0) -} - -// AddErrStatAggItems adds Err, PctErr, PctCor items recording overall performance -// from the given statName statistic (e.g., "TrlErr") across the 3 time scales, -// ordered from higher to lower, e.g., Run, Epoch, Trial. 
-func (lg *Logs) AddErrStatAggItems(statName string, times ...etime.Times) { - lg.AddItem(&Item{ - Name: "Err", - Type: reflect.Float64, - FixMin: true, - FixMax: true, - Range: minmax.F32{Max: 1}, - Write: WriteMap{ - etime.Scope(etime.AllModes, times[2]): func(ctx *Context) { - ctx.SetStatFloat(statName) - }}}) - lg.AddItem(&Item{ - Name: "PctErr", - Type: reflect.Float64, - FixMin: true, - FixMax: true, - Range: minmax.F32{Max: 1}, - Write: WriteMap{ - etime.Scope(etime.Train, times[1]): func(ctx *Context) { - pcterr := ctx.SetAggItem(ctx.Mode, times[2], "Err", stats.Mean)[0] - epc := ctx.Stats.Int("Epoch") - if ctx.Stats.Int("FirstZero") < 0 && pcterr == 0 { - ctx.Stats.SetInt("FirstZero", epc) - } - if pcterr == 0 { - nzero := ctx.Stats.Int("NZero") - ctx.Stats.SetInt("NZero", nzero+1) - ctx.Stats.SetInt("LastZero", epc) - } else { - ctx.Stats.SetInt("NZero", 0) - } - }, etime.Scope(etime.Test, times[1]): func(ctx *Context) { - ctx.SetAggItem(ctx.Mode, times[2], "Err", stats.Mean) - }, etime.Scope(etime.AllModes, times[0]): func(ctx *Context) { - ix := ctx.LastNRows(ctx.Mode, times[1], 5) // cached - ctx.SetFloat64(stats.MeanColumn(ix, ctx.Item.Name)[0]) - }}}) - lg.AddItem(&Item{ - Name: "PctCor", - Type: reflect.Float64, - FixMin: true, - FixMax: true, - Range: minmax.F32{Max: 1}, - Write: WriteMap{ - etime.Scope(etime.AllModes, times[1]): func(ctx *Context) { - ctx.SetFloat64(1 - ctx.ItemFloatScope(ctx.Scope, "PctErr")) - }, etime.Scope(etime.AllModes, times[0]): func(ctx *Context) { - ix := ctx.LastNRows(ctx.Mode, times[1], 5) // cached - ctx.SetFloat64(stats.MeanColumn(ix, ctx.Item.Name)[0]) - }}}) - - lg.AddItem(&Item{ - Name: "FirstZero", - Type: reflect.Float64, - Range: minmax.F32{Min: -1}, - Write: WriteMap{ - etime.Scope(etime.Train, times[0]): func(ctx *Context) { - ctx.SetStatInt("FirstZero") - }}}) - - lg.AddItem(&Item{ - Name: "LastZero", - Type: reflect.Float64, - Range: minmax.F32{Min: -1}, - Write: WriteMap{ - etime.Scope(etime.Train, 
times[0]): func(ctx *Context) { - ctx.SetStatInt("LastZero") - }}}) - -} - -// AddPerTrlMSec adds a log item that records PerTrlMSec log item that records -// the time taken to process one trial. itemName is PerTrlMSec by default. -// and times are relevant 3 times to record, ordered higher to lower, -// e.g., Run, Epoch, Trial -func (lg *Logs) AddPerTrlMSec(itemName string, times ...etime.Times) *Item { - return lg.AddItem(&Item{ - Name: itemName, - Type: reflect.Float64, - Write: WriteMap{ - etime.Scope(etime.Train, times[1]): func(ctx *Context) { - nm := ctx.Item.Name - tmr := ctx.Stats.StopTimer(nm) - trls := ctx.Logs.Table(ctx.Mode, times[2]) - tmr.N = trls.Rows - pertrl := float64(tmr.Avg()) / float64(time.Millisecond) - if ctx.Row == 0 { - pertrl = 0 // first one is always inaccruate - } - ctx.Stats.SetFloat(nm, pertrl) - ctx.SetFloat64(pertrl) - tmr.ResetStart() - }, etime.Scope(etime.AllModes, times[0]): func(ctx *Context) { - ix := ctx.LastNRows(ctx.Mode, times[1], 5) - ctx.SetFloat64(stats.MeanColumn(ix, ctx.Item.Name)[0]) - }}}) -} - -// RunStats records descriptive values for given stats across all runs, -// at Train Run scope, saving to RunStats misc table -func (lg *Logs) RunStats(stats ...string) { - sk := etime.Scope(etime.Train, etime.Run) - lt := lg.TableDetailsScope(sk) - ix, _ := lt.NamedIndexView("RunStats") - - // spl := split.GroupBy(ix, "RunName") - // for _, st := range stats { - // split.DescColumn(spl, st) - // } - // lg.MiscTables["RunStats"] = spl.AggsToTable(table.AddAggName) -} - -// AddLayerTensorItems adds tensor recording items for given variable, -// classes of layers, mode and time (e.g., Test, Trial). -// If another item already exists for a different mode / time, this is added -// to it so there aren't any duplicate items. -// di is a data parallel index di, for networks capable of processing input patterns in parallel. 
-func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.Modes, etm etime.Times, layClasses ...string) { - en := net.AsEmer() - layers := en.LayersByClass(layClasses...) - for _, lnm := range layers { - clnm := lnm - cly := errors.Log1(en.EmerLayerByName(clnm)) - itmNm := clnm + "_" + varNm - itm, has := lg.ItemByName(itmNm) - if has { - itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) { - ctx.SetLayerSampleTensor(clnm, varNm) - } - } else { - lg.AddItem(&Item{ - Name: itmNm, - Type: reflect.Float32, - CellShape: cly.AsEmer().GetSampleShape().Sizes, - FixMin: true, - Range: minmax.F32{Max: 1}, - Write: WriteMap{ - etime.Scope(mode, etm): func(ctx *Context) { - ctx.SetLayerSampleTensor(clnm, varNm) - }}}) - } - } -} - -// AddCopyFromFloatItems adds items that copy from one log to another, -// adding the given prefix string to each. -// if toTimes has more than 1 item, subsequent times are AggMean aggregates of first one. -// float64 type. -func (lg *Logs) AddCopyFromFloatItems(toMode etime.Modes, toTimes []etime.Times, fmMode etime.Modes, fmTime etime.Times, prefix string, itemNames ...string) { - for _, st := range itemNames { - stnm := st - tonm := prefix + st - itm := lg.AddItem(&Item{ - Name: tonm, - Type: reflect.Float64, - Write: WriteMap{ - etime.Scope(toMode, toTimes[0]): func(ctx *Context) { - ctx.SetFloat64(ctx.ItemFloat(fmMode, fmTime, stnm)) - }}}) - for i := 1; i < len(toTimes); i++ { - i := i - itm.Write[etime.Scope(toMode, toTimes[i])] = func(ctx *Context) { - ctx.SetAgg(ctx.Mode, toTimes[i-1], stats.Mean) - } - } - } -} - -// PlotItems turns on Plot flag for given items -func (lg *Logs) PlotItems(itemNames ...string) { - for _, nm := range itemNames { - itm, has := lg.ItemByName(nm) - if !has { - fmt.Printf("elog.PlotItems: item named: %s not found\n", nm) - continue - } - itm.Plot = true - } -} - -// SetFloatMinItems turns off the FixMin flag for given items -func (lg *Logs) SetFloatMinItems(itemNames ...string) { - for 
_, nm := range itemNames { - itm, has := lg.ItemByName(nm) - if !has { - fmt.Printf("elog.SetFloatMinItems: item named: %s not found\n", nm) - continue - } - itm.FixMin = false - } -} - -// SetFloatMaxItems turns off the FixMax flag for given items -func (lg *Logs) SetFloatMaxItems(itemNames ...string) { - for _, nm := range itemNames { - itm, has := lg.ItemByName(nm) - if !has { - fmt.Printf("elog.SetFloatMaxItems: item named: %s not found\n", nm) - continue - } - itm.FixMax = false - } -} - -// SetFixMaxItems sets the FixMax flag and Range Max val for given items -func (lg *Logs) SetFixMaxItems(max float32, itemNames ...string) { - for _, nm := range itemNames { - itm, has := lg.ItemByName(nm) - if !has { - fmt.Printf("elog.SetFixMaxItems: item named: %s not found\n", nm) - continue - } - itm.FixMax = true - itm.Range.Max = max - } -} - -// SetFixMinItems sets the FixMin flag and Range Min val for given items -func (lg *Logs) SetFixMinItems(min float32, itemNames ...string) { - for _, nm := range itemNames { - itm, has := lg.ItemByName(nm) - if !has { - fmt.Printf("elog.SetFixMinItems: item named: %s not found\n", nm) - continue - } - itm.FixMin = true - itm.Range.Min = min - } -} - -// LastNRows returns an IndexView onto table for given scope with the last -// n rows of the table (only valid rows, if less than n). -// This index view is available later with the "LastNRows" name via -// NamedIndexView functions. -func (lg *Logs) LastNRows(mode etime.Modes, time etime.Times, n int) *table.IndexView { - return lg.LastNRowsScope(etime.Scope(mode, time), n) -} - -// LastNRowsScope returns an IndexView onto table for given scope with the last -// n rows of the table (only valid rows, if less than n). -// This index view is available later with the "LastNRows" name via -// NamedIndexView functions. 
-func (lg *Logs) LastNRowsScope(sk etime.ScopeKey, n int) *table.IndexView { - ix, isnew := lg.NamedIndexViewScope(sk, "LastNRows") - if !isnew { - return ix - } - if n > ix.Len()-1 { - n = ix.Len() - 1 - } - if ix.Indexes == nil { // should not happen - ix.Indexes = make([]int, ix.Table.Rows) - } - ix.Indexes = ix.Indexes[ix.Len()-n:] - return ix -} - -// log filenames - -// LogFilename returns a standard log file name as netName_runName_logName.tsv -func LogFilename(logName, netName, runName string) string { - return netName + "_" + runName + "_" + logName + ".tsv" -} - -// SetLogFile sets the log file for given mode and time, -// using given logName (extension), netName and runName, -// if the Config flag is set. -func SetLogFile(logs *Logs, configOn bool, mode etime.Modes, time etime.Times, logName, netName, runName string) { - if !configOn { - return - } - fnm := LogFilename(logName, netName, runName) - logs.SetLogFile(mode, time, fnm) -} diff --git a/elog/table.go b/elog/table.go deleted file mode 100644 index e159ebae..00000000 --- a/elog/table.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package elog - -import ( - "os" - - "cogentcore.org/core/tensor/table" -) - -// LogTable contains all the data for one log table -type LogTable struct { - - // Actual data stored. - Table *table.Table - - // arbitrary meta-data for each table, e.g., hints for plotting: Plot = false to not plot, XAxis, LegendCol - Meta map[string]string - - // Index View of the table -- automatically updated when a new row of data is logged to the table. - IndexView *table.IndexView `display:"-"` - - // named index views onto the table that can be saved and used across multiple items -- these are reset to nil after a new row is written -- see NamedIndexView funtion for more details. 
- NamedViews map[string]*table.IndexView `display:"-"` - - // File to store the log into. - File *os.File `display:"-"` - - // true if headers for File have already been written - WroteHeaders bool `display:"-"` -} - -// NewLogTable returns a new LogTable entry for given table, initializing values -func NewLogTable(dt *table.Table) *LogTable { - lt := &LogTable{Table: dt} - lt.Meta = make(map[string]string) - lt.NamedViews = make(map[string]*table.IndexView) - return lt -} - -// GetIndexView returns the index view for the whole table. -// It is reset to nil after log row is written, and if nil -// then it is initialized to reflect current rows. -func (lt *LogTable) GetIndexView() *table.IndexView { - if lt.IndexView == nil { - lt.IndexView = table.NewIndexView(lt.Table) - } - return lt.IndexView -} - -// NamedIndexView returns a named Index View of the table, and true -// if this index view was newly created to show entire table (else false). -// This is used for additional data aggregation, filtering etc. -// It is reset to nil after log row is written, and if nil -// then it is initialized to reflect current rows as a starting point (returning true). -// Thus, the bool return value can be used for re-using cached indexes. -func (lt *LogTable) NamedIndexView(name string) (*table.IndexView, bool) { - ix, has := lt.NamedViews[name] - isnew := false - if !has || ix == nil { - ix = table.NewIndexView(lt.Table) - lt.NamedViews[name] = ix - isnew = true - } - return ix, isnew -} - -// ResetIndexViews resets all IndexViews -- after log row is written -func (lt *LogTable) ResetIndexViews() { - lt.IndexView = nil - for nm := range lt.NamedViews { - lt.NamedViews[nm] = nil - } -} diff --git a/elog/typegen.go b/elog/typegen.go deleted file mode 100644 index fb015ada..00000000 --- a/elog/typegen.go +++ /dev/null @@ -1,78 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. 
- -package elog - -import ( - "reflect" - - "cogentcore.org/core/math32/minmax" - "cogentcore.org/core/types" -) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.WriteFunc", IDName: "write-func", Doc: "WriteFunc function that computes and sets log values\nThe Context provides information typically needed for logging"}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.Context", IDName: "context", Doc: "Context provides the context for logging Write functions.\nSetContext must be called on Logs to set the Stats and Net values\nProvides various convenience functions for setting log values\nand other commonly used operations.", Fields: []types.Field{{Name: "Logs", Doc: "pointer to the Logs object with all log data"}, {Name: "Stats", Doc: "pointer to stats"}, {Name: "Net", Doc: "network"}, {Name: "Di", Doc: "data parallel index for accessing data from network"}, {Name: "Item", Doc: "current log Item"}, {Name: "Scope", Doc: "current scope key"}, {Name: "Mode", Doc: "current scope eval mode (if standard)"}, {Name: "Time", Doc: "current scope timescale (if standard)"}, {Name: "LogTable", Doc: "LogTable with extra data for the table"}, {Name: "Table", Doc: "current table to record value to"}, {Name: "Row", Doc: "current row in table to write to"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.WriteMap", IDName: "write-map", Doc: "WriteMap holds log writing functions for scope keys"}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.Item", IDName: "item", Doc: "Item describes one item to be logged -- has all the info\nfor this item, across all scopes where it is relevant.", Directives: []types.Directive{{Tool: "types", Directive: "add", Args: []string{"-setters"}}}, Fields: []types.Field{{Name: "Name", Doc: "name of column -- must be unique for a table"}, {Name: "Type", Doc: "data type, using tensor types which are isomorphic with arrow.Type"}, 
{Name: "CellShape", Doc: "shape of a single cell in the column (i.e., without the row dimension) -- for scalars this is nil -- tensor column will add the outer row dimension to this shape"}, {Name: "DimNames", Doc: "names of the dimensions within the CellShape -- 'Row' will be added to outer dimension"}, {Name: "Write", Doc: "holds Write functions for different scopes. After processing, the scope key will be a single mode and time, from Scope(mode, time), but the initial specification can lists for each, or the All* option, if there is a Write function that works across scopes"}, {Name: "Plot", Doc: "Whether or not to plot it"}, {Name: "Range", Doc: "The minimum and maximum values, for plotting"}, {Name: "FixMin", Doc: "Whether to fix the minimum in the display"}, {Name: "FixMax", Doc: "Whether to fix the maximum in the display"}, {Name: "ErrCol", Doc: "Name of other item that has the error bar values for this item -- for plotting"}, {Name: "TensorIndex", Doc: "index of tensor to plot -- defaults to 0 -- use -1 to plot all"}, {Name: "Color", Doc: "specific color for plot -- uses default ordering of colors if empty"}, {Name: "Modes", Doc: "map of eval modes that this item has a Write function for"}, {Name: "Times", Doc: "map of times that this item has a Write function for"}}}) - -// SetName sets the [Item.Name]: -// name of column -- must be unique for a table -func (t *Item) SetName(v string) *Item { t.Name = v; return t } - -// SetType sets the [Item.Type]: -// data type, using tensor types which are isomorphic with arrow.Type -func (t *Item) SetType(v reflect.Kind) *Item { t.Type = v; return t } - -// SetCellShape sets the [Item.CellShape]: -// shape of a single cell in the column (i.e., without the row dimension) -- for scalars this is nil -- tensor column will add the outer row dimension to this shape -func (t *Item) SetCellShape(v ...int) *Item { t.CellShape = v; return t } - -// SetDimNames sets the [Item.DimNames]: -// names of the dimensions within the 
CellShape -- 'Row' will be added to outer dimension -func (t *Item) SetDimNames(v ...string) *Item { t.DimNames = v; return t } - -// SetWrite sets the [Item.Write]: -// holds Write functions for different scopes. After processing, the scope key will be a single mode and time, from Scope(mode, time), but the initial specification can lists for each, or the All* option, if there is a Write function that works across scopes -func (t *Item) SetWrite(v WriteMap) *Item { t.Write = v; return t } - -// SetPlot sets the [Item.Plot]: -// Whether or not to plot it -func (t *Item) SetPlot(v bool) *Item { t.Plot = v; return t } - -// SetRange sets the [Item.Range]: -// The minimum and maximum values, for plotting -func (t *Item) SetRange(v minmax.F32) *Item { t.Range = v; return t } - -// SetFixMin sets the [Item.FixMin]: -// Whether to fix the minimum in the display -func (t *Item) SetFixMin(v bool) *Item { t.FixMin = v; return t } - -// SetFixMax sets the [Item.FixMax]: -// Whether to fix the maximum in the display -func (t *Item) SetFixMax(v bool) *Item { t.FixMax = v; return t } - -// SetErrCol sets the [Item.ErrCol]: -// Name of other item that has the error bar values for this item -- for plotting -func (t *Item) SetErrCol(v string) *Item { t.ErrCol = v; return t } - -// SetTensorIndex sets the [Item.TensorIndex]: -// index of tensor to plot -- defaults to 0 -- use -1 to plot all -func (t *Item) SetTensorIndex(v int) *Item { t.TensorIndex = v; return t } - -// SetColor sets the [Item.Color]: -// specific color for plot -- uses default ordering of colors if empty -func (t *Item) SetColor(v string) *Item { t.Color = v; return t } - -// SetModes sets the [Item.Modes]: -// map of eval modes that this item has a Write function for -func (t *Item) SetModes(v map[string]bool) *Item { t.Modes = v; return t } - -// SetTimes sets the [Item.Times]: -// map of times that this item has a Write function for -func (t *Item) SetTimes(v map[string]bool) *Item { t.Times = v; return t } - 
-var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.Logs", IDName: "logs", Doc: "Logs contains all logging state and API for doing logging.\ndo AddItem to add any number of items, at different eval mode, time scopes.\nEach Item has its own Write functions, at each scope as neeeded.\nThen call CreateTables to generate log Tables from those items.\nCall Log with a scope to add a new row of data to the log\nand ResetLog to reset the log to empty.", Fields: []types.Field{{Name: "Tables", Doc: "Tables storing log data, auto-generated from Items."}, {Name: "MiscTables", Doc: "holds additional tables not computed from items -- e.g., aggregation results, intermediate computations, etc"}, {Name: "Items", Doc: "A list of the items that should be logged. Each item should describe one column that you want to log, and how. Order in list determines order in logs."}, {Name: "Context", Doc: "context information passed to logging Write functions -- has all the information needed to compute and write log values -- is updated for each item in turn"}, {Name: "Modes", Doc: "All the eval modes that appear in any of the items of this log."}, {Name: "Times", Doc: "All the timescales that appear in any of the items of this log."}, {Name: "ItemIndexMap", Doc: "map of item indexes by name, for rapid access to items if they need to be modified after adding."}, {Name: "TableOrder", Doc: "sorted order of table scopes"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/elog.LogTable", IDName: "log-table", Doc: "LogTable contains all the data for one log table", Fields: []types.Field{{Name: "Table", Doc: "Actual data stored."}, {Name: "Meta", Doc: "arbitrary meta-data for each table, e.g., hints for plotting: Plot = false to not plot, XAxis, LegendCol"}, {Name: "IndexView", Doc: "Index View of the table -- automatically updated when a new row of data is logged to the table."}, {Name: "NamedViews", Doc: "named index views onto the table that 
can be saved and used across multiple items -- these are reset to nil after a new row is written -- see NamedIndexView funtion for more details."}, {Name: "File", Doc: "File to store the log into."}, {Name: "WroteHeaders", Doc: "true if headers for File have already been written"}}}) diff --git a/estats/funcs.go b/estats/funcs.go index 577eb04d..b32b75d4 100644 --- a/estats/funcs.go +++ b/estats/funcs.go @@ -4,14 +4,7 @@ package estats -import ( - "cogentcore.org/core/base/errors" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/metric" - "cogentcore.org/core/tensor/stats/stats" - "cogentcore.org/core/tensor/table" - "github.com/emer/emergent/v2/emer" -) +/* // funcs contains misc stats functions @@ -76,61 +69,4 @@ func (st *Stats) ClosestPat(net emer.Network, layNm, unitVar string, di int, pat } return row, cor, nm } - -////////////////////////////////////////////// -// PCA Stats - -// PCAStrongThr is the threshold for counting PCA eigenvalues as "strong" -// Applies to SVD as well. -var PCAStrongThr = 0.01 - -// PCAStats computes PCA statistics on recorded hidden activation patterns -// on given log table (IndexView), and given list of layer names -// and variable name -- columns named "layer_var". -// Helpful for measuring the overall information (variance) in the representations -// to detect a common failure mode where a few patterns dominate over everything ("hogs"). -// Records Float stats as: -// layer_PCA_NStrong: number of eigenvalues above the PCAStrongThr threshold -// layer_PCA_Top5: average strength of top 5 eigenvalues -// layer_PCA_Next5: average strength of next 5 eigenvalues -// layer_PCA_Rest: average strength of remaining eigenvalues (if more than 10 total eigens) -// Uses SVD to compute much more efficiently than official PCA. 
-func (st *Stats) PCAStats(ix *table.Table, varNm string, layers []string) { - // svd.Cond = PCAStrongThr - covar := tensor.NewFloat64() - evecs := tensor.NewFloat64() - _ = evecs - evals := tensor.NewFloat64() - for _, lnm := range layers { - col := ix.Column(lnm + "_" + varNm) - metric.CovarianceMatrixOut(metric.Covariance, col, covar) - // matrix.SVDOut(covar, evecs, evals) // todo - ln := len(evals.Values) - var nstr float64 // nstr := float64(svd.Rank) this didn't work.. - for i, v := range evals.Values { - if v < PCAStrongThr { - nstr = float64(i) - break - } - } - var top5, next5 float64 - for i := 0; i < 5; i++ { - if ln >= 5 { - top5 += evals.Values[i] - } - if ln >= 10 { - next5 += evals.Values[i+5] - } - } - st.SetFloat(lnm+"_PCA_NStrong", nstr) - st.SetFloat(lnm+"_PCA_Top5", top5/5) - st.SetFloat(lnm+"_PCA_Next5", next5/5) - if ln > 10 { - sum := stats.Sum(evals).Float1D(0) - ravg := (sum - (top5 + next5)) / float64(ln-10) - st.SetFloat(lnm+"_PCA_Rest", ravg) - } else { - st.SetFloat(lnm+"_PCA_Rest", 0) - } - } -} +*/ diff --git a/estats/rasters.go b/estats/rasters.go deleted file mode 100644 index 18ea1d89..00000000 --- a/estats/rasters.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) 2022, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package estats - -import ( - "cogentcore.org/core/base/errors" - "cogentcore.org/core/tensor" - "github.com/emer/emergent/v2/emer" -) - -// ConfigRasters configures spike rasters for given maximum number of cycles -// and layer names. 
-func (st *Stats) ConfigRasters(net emer.Network, maxCyc int, layers []string) { - st.Rasters = layers - for _, lnm := range st.Rasters { - ly := errors.Log1(net.AsEmer().EmerLayerByName(lnm)).AsEmer() - sr := st.F32Tensor("Raster_" + lnm) - nu := len(ly.SampleIndexes) - if nu == 0 { - nu = ly.Shape.Len() - } - sr.SetShapeSizes(nu, maxCyc) - } -} - -// SetRasterCol sets column of given raster from data -func (st *Stats) SetRasterCol(sr, tsr *tensor.Float32, col int) { - for ni, v := range tsr.Values { - sr.Set(v, ni, col) - } -} - -// RasterRec records data from layers configured with ConfigRasters -// using variable name, for given cycle number (X axis index) -// di is a data parallel index di, for networks capable of processing input patterns in parallel. -func (st *Stats) RasterRec(net emer.Network, cyc int, varNm string, di int) { - for _, lnm := range st.Rasters { - tsr := st.SetLayerSampleTensor(net, lnm, varNm, di) - sr := st.F32Tensor("Raster_" + lnm) - if sr.DimSize(1) <= cyc { - continue - } - st.SetRasterCol(sr, tsr, cyc) - } -} diff --git a/go.mod b/go.mod index 63bdcd86..3416182b 100644 --- a/go.mod +++ b/go.mod @@ -3,8 +3,7 @@ module github.com/emer/emergent/v2 go 1.22 require ( - cogentcore.org/core v0.3.5 - github.com/BurntSushi/toml v1.3.2 + cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b github.com/stretchr/testify v1.9.0 golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 ) @@ -16,10 +15,13 @@ require ( github.com/anthonynsimon/bild v0.13.0 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect + github.com/bramvdbogaerde/go-scp v1.4.0 // indirect github.com/chewxy/math32 v1.10.1 // indirect github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725 // indirect + github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/dlclark/regexp2 
v1.11.0 // indirect + github.com/ergochat/readline v0.1.2 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-gl/glfw/v3.3/glfw v0.0.0-20240506104042-037f3cc74f2a // indirect github.com/goki/freetype v1.0.5 // indirect @@ -40,6 +42,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect + golang.org/x/crypto v0.26.0 // indirect golang.org/x/image v0.18.0 // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.28.0 // indirect diff --git a/go.sum b/go.sum index ef74d145..88417fce 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,8 @@ -cogentcore.org/core v0.3.5 h1:qTW/+GSUbeYYCSi29zdAQ/fnjWg2/E/aTbAMbfAFGfI= -cogentcore.org/core v0.3.5/go.mod h1:q3OrXihYG2aTE2oG785ad5b6aJ3wzSCXeeD2SSLKWIE= +cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b h1:kVgx3WPYmCQKY/y5WCgKdVGEYvDgMbPuYH8kUYxr85o= +cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b/go.mod h1:q3OrXihYG2aTE2oG785ad5b6aJ3wzSCXeeD2SSLKWIE= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4 h1:6lx9xzJAhdjq0LvVfbITeC3IH9Fzvo1aBahyPu2FuG8= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4/go.mod h1:FChzXi1izqzdPb6BiNZmcZLGyTYiT61iGx9Rxx9GNeI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/Masterminds/vcs v1.13.3 h1:IIA2aBdXvfbIM+yl/eTnL4hb1XwdpvuQLglAix1gweE= github.com/Masterminds/vcs v1.13.3/go.mod h1:TiE7xuEjl1N4j016moRd6vezp6e6Lz23gypeXfzXeW8= github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU= @@ -20,10 +18,14 @@ github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiE 
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/bramvdbogaerde/go-scp v1.4.0 h1:jKMwpwCbcX1KyvDbm/PDJuXcMuNVlLGi0Q0reuzjyKY= +github.com/bramvdbogaerde/go-scp v1.4.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= github.com/chewxy/math32 v1.10.1 h1:LFpeY0SLJXeaiej/eIp2L40VYfscTvKh/FSEZ68uMkU= github.com/chewxy/math32 v1.10.1/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs= github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725 h1:Zf2ixJY1PsN3huZa0Uof7VKYuoBbdxJCEbKdhKhHqwE= github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725/go.mod h1:ciqaxChrmRRMU1SnI5OE12Cn3QWvOKO+e5nSy+N9S1o= +github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56 h1:Fz1uHiFCHnijFcMXzn36KLamcx5q4pxoR5rKCrcXIcQ= +github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56/go.mod h1:+MGpZ0srBmeJ7aaOLTdVss8WLolt0/y/plVHLpxgd3A= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -35,6 +37,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/ergochat/readline v0.1.2 h1:zxiwQB8DyTLD0HSWthJlnvs5E2X1qnyXZ44RFf1jRlg= +github.com/ergochat/readline v0.1.2/go.mod 
h1:o3ux9QLHLm77bq7hDB21UTm6HlV2++IPDMfIfKDuOgY= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= @@ -106,6 +110,8 @@ github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA= golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= golang.org/x/image v0.0.0-20190703141733-d6a02ce849c9/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -121,6 +127,8 @@ golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU= +golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= From 
362e1fd027aa77b30c23cfb826d8e47042b6c6f5 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Mon, 25 Nov 2024 01:43:07 -0800 Subject: [PATCH 16/24] fix tests --- decoder/linear.go | 2 +- decoder/linear_test.go | 30 +++++++------- paths/prjn_test.go | 90 ++++++++++++++++++++--------------------- popcode/popcode_test.go | 6 +-- 4 files changed, 64 insertions(+), 64 deletions(-) diff --git a/decoder/linear.go b/decoder/linear.go index 61a451f3..5d4f91f0 100644 --- a/decoder/linear.go +++ b/decoder/linear.go @@ -61,7 +61,7 @@ type Linear struct { // Layer is the subset of emer.Layer that is used by this code type Layer interface { Name() string - UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) error + UnitValuesTensor(tsr tensor.Values, varNm string, di int) error Shape() *tensor.Shape } diff --git a/decoder/linear_test.go b/decoder/linear_test.go index 9ffbf38c..2f7da726 100644 --- a/decoder/linear_test.go +++ b/decoder/linear_test.go @@ -14,19 +14,19 @@ import ( // TestLayer implements a Layer type TestLayer struct { - tensors map[string]tensor.Tensor + tensors map[string]tensor.Values } func (tl *TestLayer) Name() string { return "TestLayer" } -func (tl *TestLayer) UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) error { +func (tl *TestLayer) UnitValuesTensor(tsr tensor.Values, varNm string, di int) error { src, ok := tl.tensors[varNm] if !ok { return fmt.Errorf("bad key: %s", varNm) } - tsr.CopyShapeFrom(src) + tensor.SetShapeFrom(tsr, src) tsr.CopyFrom(src) return nil } @@ -94,58 +94,58 @@ func TestLinearLogistic(t *testing.T) { func TestInputPool1D(t *testing.T) { dec := Linear{} - shape := tensor.NewShape([]int{1, 5, 6, 6}) + shape := tensor.NewShape(1, 5, 6, 6) vals := make([]float32, shape.Len()) for i := range vals { vals[i] = float32(i) } - tsr := tensor.NewFloat32(shape.Sizes) + tsr := tensor.NewFloat32(shape.Sizes...) 
tsr.SetNumRows(1) for i := range tsr.Values { tsr.Values[i] = vals[i] } - layer := TestLayer{tensors: map[string]tensor.Tensor{"var0": tsr}} + layer := TestLayer{tensors: map[string]tensor.Values{"var0": tsr}} dec.InitPool(2, &layer, 0, IdentityFunc) dec.Input("var0", 0) - expected := tsr.SubSpace([]int{0, 0}).(*tensor.Float32).Values + expected := tsr.SubSpace(0, 0).(*tensor.Float32).Values assert.Equal(t, expected, dec.Inputs) dec.InitPool(2, &layer, 1, IdentityFunc) dec.Input("var0", 0) - expected = tsr.SubSpace([]int{0, 1}).(*tensor.Float32).Values + expected = tsr.SubSpace(0, 1).(*tensor.Float32).Values assert.Equal(t, expected, dec.Inputs) } func TestInputPool2D(t *testing.T) { dec := Linear{} - shape := tensor.NewShape([]int{2, 5, 6, 6}) + shape := tensor.NewShape(2, 5, 6, 6) vals := make([]float32, shape.Len()) for i := range vals { vals[i] = float32(i) } - tsr := tensor.NewFloat32(shape.Sizes) + tsr := tensor.NewFloat32(shape.Sizes...) for i := range tsr.Values { tsr.Values[i] = vals[i] } - layer := TestLayer{tensors: map[string]tensor.Tensor{"var0": tsr}} + layer := TestLayer{tensors: map[string]tensor.Values{"var0": tsr}} dec.InitPool(2, &layer, 0, IdentityFunc) dec.Input("var0", 0) - expected := tsr.SubSpace([]int{0, 0}).(*tensor.Float32).Values + expected := tsr.SubSpace(0, 0).(*tensor.Float32).Values assert.Equal(t, expected, dec.Inputs) dec.InitPool(2, &layer, 1, IdentityFunc) dec.Input("var0", 0) - expected = tsr.SubSpace([]int{0, 1}).(*tensor.Float32).Values + expected = tsr.SubSpace(0, 1).(*tensor.Float32).Values assert.Equal(t, expected, dec.Inputs) dec.InitPool(2, &layer, 5, IdentityFunc) dec.Input("var0", 0) - expected = tsr.SubSpace([]int{1, 0}).(*tensor.Float32).Values + expected = tsr.SubSpace(1, 0).(*tensor.Float32).Values assert.Equal(t, expected, dec.Inputs) dec.InitPool(2, &layer, 9, IdentityFunc) dec.Input("var0", 0) - expected = tsr.SubSpace([]int{1, 4}).(*tensor.Float32).Values + expected = tsr.SubSpace(1, 4).(*tensor.Float32).Values 
assert.Equal(t, expected, dec.Inputs) } diff --git a/paths/prjn_test.go b/paths/prjn_test.go index f504c0af..4cc95ac4 100644 --- a/paths/prjn_test.go +++ b/paths/prjn_test.go @@ -22,8 +22,8 @@ func CheckAllN(ns *tensor.Int32, trg int, t *testing.T) { } func TestFull(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{3, 4}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(3, 4) sNtot := send.Len() rNtot := recv.Len() @@ -52,7 +52,7 @@ func TestFull(t *testing.T) { } func TestFullSelf(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) + send := tensor.NewShape(2, 3) sNtot := send.Len() @@ -76,8 +76,8 @@ func TestFullSelf(t *testing.T) { } func TestOneToOne(t *testing.T) { - send := tensor.NewShape([]int{3, 2}) - recv := tensor.NewShape([]int{3, 2}) + send := tensor.NewShape(3, 2) + recv := tensor.NewShape(3, 2) pj := NewOneToOne() sendn, recvn, cons := pj.Connect(send, recv, false) @@ -97,8 +97,8 @@ func TestOneToOne(t *testing.T) { } func TestPoolOneToOne(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 1, 2}) - recv := tensor.NewShape([]int{2, 3, 1, 2}) + send := tensor.NewShape(2, 3, 1, 2) + recv := tensor.NewShape(2, 3, 1, 2) sNu := send.DimSize(2) * send.DimSize(3) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -131,8 +131,8 @@ func TestPoolOneToOne(t *testing.T) { } func TestPoolOneToOneRecv(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{2, 3, 1, 2}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(2, 3, 1, 2) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -160,8 +160,8 @@ func TestPoolOneToOneRecv(t *testing.T) { } func TestPoolOneToOneSend(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 1, 2}) - recv := tensor.NewShape([]int{2, 3}) + send := tensor.NewShape(2, 3, 1, 2) + recv := tensor.NewShape(2, 3) sNu := send.DimSize(2) * send.DimSize(3) @@ -184,8 +184,8 @@ func TestPoolOneToOneSend(t *testing.T) { } func TestPoolTile(t *testing.T) { - send := 
tensor.NewShape([]int{4, 4, 1, 2}) - recv := tensor.NewShape([]int{2, 2, 1, 3}) + send := tensor.NewShape(4, 4, 1, 2) + recv := tensor.NewShape(2, 2, 1, 3) sNu := send.DimSize(2) * send.DimSize(3) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -216,16 +216,16 @@ func TestPoolTile(t *testing.T) { CheckAllN(sendn, rNu, t) CheckAllN(recvn, pj.Size.X*pj.Size.Y*sNu, t) - // send = tensor.NewShape([]int{4, 4, 3, 3}) - // recv = tensor.NewShape([]int{2, 2, 2, 2}) + // send = tensor.NewShape(4, 4, 3, 3) + // recv = tensor.NewShape(2, 2, 2, 2) // wts := &tensor.Float32{} // pj.TopoWeights(send, recv, wts) // fmt.Printf("topo wts\n%v\n", wts) } func TestPoolTileRecip(t *testing.T) { - send := tensor.NewShape([]int{4, 4, 1, 2}) - recv := tensor.NewShape([]int{2, 2, 1, 3}) + send := tensor.NewShape(4, 4, 1, 2) + recv := tensor.NewShape(2, 2, 1, 3) sNu := send.DimSize(2) * send.DimSize(3) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -277,16 +277,16 @@ func TestPoolTileRecip(t *testing.T) { CheckAllN(sendn, pj.Size.X*pj.Size.Y*sNu, t) CheckAllN(recvn, rNu, t) - // send = tensor.NewShape([]int{4, 4, 3, 3}) - // recv = tensor.NewShape([]int{2, 2, 2, 2}) + // send = tensor.NewShape(4, 4, 3, 3) + // recv = tensor.NewShape(2, 2, 2, 2) // wts := &tensor.Float32{} // pj.TopoWeights(send, recv, wts) // fmt.Printf("topo wts\n%v\n", wts) } func TestPoolTile2(t *testing.T) { - send := tensor.NewShape([]int{5, 4, 1, 2}) - recv := tensor.NewShape([]int{5, 4, 2, 1}) + send := tensor.NewShape(5, 4, 1, 2) + recv := tensor.NewShape(5, 4, 2, 1) sNu := send.DimSize(2) * send.DimSize(3) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -347,8 +347,8 @@ func TestPoolTile2(t *testing.T) { } func TestPoolTileRecip2(t *testing.T) { - send := tensor.NewShape([]int{5, 4, 1, 2}) - recv := tensor.NewShape([]int{5, 4, 2, 1}) + send := tensor.NewShape(5, 4, 1, 2) + recv := tensor.NewShape(5, 4, 2, 1) sNu := send.DimSize(2) * send.DimSize(3) rNu := recv.DimSize(2) * recv.DimSize(3) @@ -410,8 +410,8 @@ func 
TestPoolTileRecip2(t *testing.T) { } func TestUniformRand(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{3, 4}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(3, 4) sNtot := send.Len() rNtot := recv.Len() @@ -480,8 +480,8 @@ func TestUniformRand(t *testing.T) { } func TestUniformRandLg(t *testing.T) { - send := tensor.NewShape([]int{20, 30}) - recv := tensor.NewShape([]int{30, 40}) + send := tensor.NewShape(20, 30) + recv := tensor.NewShape(30, 40) sNtot := send.Len() rNtot := recv.Len() @@ -512,8 +512,8 @@ func TestUniformRandLg(t *testing.T) { } func TestUniformRandSelf(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{2, 3}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(2, 3) sNtot := send.Len() rNtot := recv.Len() @@ -554,8 +554,8 @@ func TestUniformRandSelf(t *testing.T) { } func TestPoolUniformRand(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 2, 3}) - recv := tensor.NewShape([]int{2, 3, 3, 4}) + send := tensor.NewShape(2, 3, 2, 3) + recv := tensor.NewShape(2, 3, 3, 4) sNtot := send.Len() rNtot := recv.Len() @@ -670,8 +670,8 @@ func TestPoolUniformRand(t *testing.T) { } func TestPoolUniformRandLg(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 20, 30}) - recv := tensor.NewShape([]int{2, 3, 30, 40}) + send := tensor.NewShape(2, 3, 20, 30) + recv := tensor.NewShape(2, 3, 30, 40) sNtot := send.Len() rNtot := recv.Len() @@ -701,8 +701,8 @@ func TestPoolUniformRandLg(t *testing.T) { } func TestPoolUniformRandSelf(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 2, 3}) - recv := tensor.NewShape([]int{2, 3, 2, 3}) + send := tensor.NewShape(2, 3, 2, 3) + recv := tensor.NewShape(2, 3, 2, 3) sNtot := send.Len() rNtot := recv.Len() @@ -733,8 +733,8 @@ func TestPoolUniformRandSelf(t *testing.T) { } func TestPoolSameUnit(t *testing.T) { - send := tensor.NewShape([]int{1, 2, 2, 3}) - recv := tensor.NewShape([]int{1, 2, 2, 3}) + send := tensor.NewShape(1, 
2, 2, 3) + recv := tensor.NewShape(1, 2, 2, 3) sNp := send.DimSize(0) * send.DimSize(1) rNp := recv.DimSize(0) * recv.DimSize(1) @@ -764,8 +764,8 @@ func TestPoolSameUnit(t *testing.T) { } func TestPoolSameUnitRecv(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{1, 2, 2, 3}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(1, 2, 2, 3) rNp := recv.DimSize(0) * recv.DimSize(1) pj := NewPoolSameUnit() @@ -793,8 +793,8 @@ func TestPoolSameUnitRecv(t *testing.T) { } func TestPoolSameUnitSend(t *testing.T) { - send := tensor.NewShape([]int{1, 2, 2, 3}) - recv := tensor.NewShape([]int{2, 3}) + send := tensor.NewShape(1, 2, 2, 3) + recv := tensor.NewShape(2, 3) sNp := send.DimSize(0) * send.DimSize(1) @@ -817,8 +817,8 @@ func TestPoolSameUnitSend(t *testing.T) { } func TestRect(t *testing.T) { - send := tensor.NewShape([]int{2, 3}) - recv := tensor.NewShape([]int{2, 3}) + send := tensor.NewShape(2, 3) + recv := tensor.NewShape(2, 3) pj := NewRect() pj.Size.Set(2, 1) @@ -842,8 +842,8 @@ func TestRect(t *testing.T) { } func TestPoolRect(t *testing.T) { - send := tensor.NewShape([]int{2, 3, 2, 2}) - recv := tensor.NewShape([]int{2, 3, 2, 2}) + send := tensor.NewShape(2, 3, 2, 2) + recv := tensor.NewShape(2, 3, 2, 2) pj := NewPoolRect() pj.Size.Set(2, 1) diff --git a/popcode/popcode_test.go b/popcode/popcode_test.go index 55cb35f0..38cafef0 100644 --- a/popcode/popcode_test.go +++ b/popcode/popcode_test.go @@ -92,7 +92,7 @@ func TestPopCode2D(t *testing.T) { CmprFloats(valsY, corValues, "valsY for 11 units", t) var pat tensor.Float32 - pat.SetShape([]int{11, 11}) + pat.SetShapeSizes(11, 11) pc.Encode(&pat, math32.Vec2(0.3, 0.9), Set) // fmt.Printf("pat for 0.5: %v\n", pat) @@ -119,7 +119,7 @@ func TestPopCode2DMulti(t *testing.T) { var pat tensor.Float32 // note: usually you'd use a larger pattern size for multiple values - pat.SetShape([]int{11, 11}) + pat.SetShapeSizes(11, 11) pc.Encode(&pat, math32.Vec2(0.1, 0.9), Set) 
pc.Encode(&pat, math32.Vec2(0.9, 0.1), Add) @@ -203,7 +203,7 @@ func TestTwoDWrap(t *testing.T) { pc.Clip = false var pat tensor.Float32 - pat.SetShape([]int{21, 21}) + pat.SetShapeSizes(21, 21) tangs := []float32{-179, -90, 0, 90, 179} for _, ang := range tangs { v := math32.Vec2(ang, .5) From cd1be9360f675b0bb3d68f543b9ac39578b0be34 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 30 Nov 2024 22:44:30 -0800 Subject: [PATCH 17/24] plot selected unit working, with styling --- netview/netdata.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/netview/netdata.go b/netview/netdata.go index bc8ab337..301b128d 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -20,6 +20,7 @@ import ( "cogentcore.org/core/base/metadata" "cogentcore.org/core/core" "cogentcore.org/core/math32" + "cogentcore.org/core/plot" "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/table" @@ -637,29 +638,29 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm) plt := plotcore.NewPlotEditor(b) - // plt.Options.Title = "NetView " + selnm - // plt.Options.XAxis = "Rec" b.AddTopBar(func(bar *core.Frame) { core.NewToolbar(bar).Maker(plt.MakeToolbar) }) dt := nd.SelectedUnitTable(nv.Di) - plt.SetTable(dt) - for _, vnm := range nd.UnVars { vp, ok := nv.VarOptions[vnm] if !ok { continue } disp := (vnm == nv.Var) - _ = disp min := vp.Range.Min if min < 0 && vp.Range.FixMin && vp.MinMax.Min >= 0 { min = 0 // netview uses -1..1 but not great for graphs unless needed } - // plt.SetColumnOptions(vnm, disp, vp.Range.FixMin, min, vp.Range.FixMax, vp.Range.Max) + dc := dt.Column(vnm) + plot.AddStylerTo(dc, func(s *plot.Style) { + s.On = disp + s.Range.SetMin(float64(min)).SetMax(float64(vp.Range.Max)) + }) } + plt.SetTable(dt) b.RunWindow() return dt, plt @@ -700,11 +701,11 @@ func (nd *NetData) 
SelectedUnitTable(di int) *table.Table { for ri := 0; ri < ln; ri++ { ridx := nd.RecIndex(ri) - dt.Columns.Values[0].SetFloat(float64(ri), 0, ri) + dt.Columns.Values[0].SetFloat1D(float64(ri), ri) for vi := 0; vi < vlen; vi++ { idx := ridx*nvu + vi*nd.MaxData*nu + di*nu + uidx1d val := ld.Data[idx] - dt.Columns.Values[0].SetFloat(float64(val), vi+1, ri) + dt.Columns.Values[vi+1].SetFloat1D(float64(val), ri) } } return dt From 2bf1680528dfd06393902536fb9aae7d4f0c7826 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sun, 1 Dec 2024 01:10:49 -0800 Subject: [PATCH 18/24] NetView plot selected unit uses tensorfs.CurRoot and databrowser.CurTabber so data is avail from sim. --- egui/gui.go | 1 + netview/netdata.go | 33 ++++++++++++++++++++------------- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/egui/gui.go b/egui/gui.go index 6f988502..94e9a96d 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -107,6 +107,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { gui.Files = databrowser.NewDataTree(fform) tabs := databrowser.NewTabs(split) gui.Tabs = tabs + databrowser.CurTabber = tabs tabs.Name = "tabs" gui.Files.Tabber = tabs split.SetTiles(core.TileSplit, core.TileSpan) diff --git a/netview/netdata.go b/netview/netdata.go index 301b128d..e5544e9d 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -23,7 +23,9 @@ import ( "cogentcore.org/core/plot" "cogentcore.org/core/plot/plotcore" "cogentcore.org/core/tensor" + "cogentcore.org/core/tensor/databrowser" "cogentcore.org/core/tensor/table" + "cogentcore.org/core/tensor/tensorfs" "github.com/emer/emergent/v2/emer" "github.com/emer/emergent/v2/ringidx" ) @@ -625,7 +627,8 @@ func (nd *NetData) WriteJSON(w io.Writer) error { // } // PlotSelectedUnit opens a window with a plot of all the data for the -// currently selected unit. +// currently selected unit, saving data to the [tensorfs.CurRoot]/NetView +// directory. 
// Useful for replaying detailed trace for units of interest. func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //types:add nd := &nv.Data @@ -635,15 +638,7 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t } selnm := nd.PathLay + fmt.Sprintf("[%d]", nd.PathUnIndex) - - b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm) - plt := plotcore.NewPlotEditor(b) - - b.AddTopBar(func(bar *core.Frame) { - core.NewToolbar(bar).Maker(plt.MakeToolbar) - }) dt := nd.SelectedUnitTable(nv.Di) - for _, vnm := range nd.UnVars { vp, ok := nv.VarOptions[vnm] if !ok { @@ -660,10 +655,22 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t s.Range.SetMin(float64(min)).SetMax(float64(vp.Range.Max)) }) } - plt.SetTable(dt) - - b.RunWindow() - return dt, plt + if tensorfs.CurRoot != nil && databrowser.CurTabber != nil { + dir := tensorfs.CurRoot.RecycleDir("NetView") + udir := dir.RecycleDir(selnm) + tensorfs.DirFromTable(udir, dt) + plt := databrowser.CurTabber.PlotTensorFS(udir) + return dt, plt + } else { + b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm) + plt := plotcore.NewPlotEditor(b) + plt.SetTable(dt) + b.AddTopBar(func(bar *core.Frame) { + core.NewToolbar(bar).Maker(plt.MakeToolbar) + }) + b.RunWindow() + return dt, plt + } } // SelectedUnitTable returns a table with all of the data for the From 4ebc8c8521227ffb08cb83c884af497d39866df6 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Thu, 12 Dec 2024 01:25:58 -0800 Subject: [PATCH 19/24] netview does Current OnShow -- activated on tab switch. network goes through interface for Read / Write weights so can sync to from gpu in axon. 
--- emer/network.go | 12 ++++++++++++ emer/typegen.go | 18 ++++++------------ emer/weights.go | 12 ++++++------ netview/netview.go | 5 ++++- 4 files changed, 28 insertions(+), 19 deletions(-) diff --git a/emer/network.go b/emer/network.go index 4ceb612f..b7b0bab3 100644 --- a/emer/network.go +++ b/emer/network.go @@ -8,6 +8,7 @@ package emer import ( "fmt" + "io" "log" "os" "strings" @@ -125,6 +126,17 @@ type Network interface { // zeroctr:"+" or "-" = control whether zero-centering is used // Note: this is typically a global list so do not modify! SynVarProps() map[string]string + + // ReadWeightsJSON reads network weights from the receiver-side perspective + // in a JSON text format. Reads entire file into a temporary weights.Weights + // structure that is then passed to Layers etc using SetWeights method. + // Call the NetworkBase version followed by any post-load updates. + ReadWeightsJSON(r io.Reader) error + + // WriteWeightsJSON writes the weights from this network + // from the receiver-side perspective in a JSON text format. + // Call the NetworkBase version after pre-load updates. 
+ WriteWeightsJSON(w io.Writer) error } // NetworkBase defines the basic data for a neural network, diff --git a/emer/typegen.go b/emer/typegen.go index 83a93c6c..b1cb38d6 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,22 +6,16 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "TypeNumber", Doc: "TypeNumber is the numerical value for the type or category\nof layer, defined by the algorithm (and usually set by an enum).", Returns: []string{"int"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitValue1D", Doc: "UnitValue1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two 
layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.", Args: []string{"w", "depth"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base 
functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically. Use to access Name via interface.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "TypeNumber", Doc: "TypeNumber is the numerical value for the type or category\nof layer, defined by the algorithm (and usually set by an enum).", Returns: []string{"int"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitValue1D", Doc: "UnitValue1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two 
layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values; useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.", Args: []string{"w", "depth"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. 
This can be space separated\nwith multple classes."}, {Name: "Doc", Doc: "Doc contains documentation about the layer.\nThis is displayed in a tooltip in the network view."}, {Name: "Off", Doc: "Off turns off the layer, removing from all computations.\nThis provides a convenient way to dynamically test for\nthe contributions of the layer, for example."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}, {Name: "ParamsHistory", Doc: "provides a history of parameters applied to the layer"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetParams", IDName: "net-params", Doc: "NetParams handles standard parameters for a Network\n(use econfig and a Config struct for other configuration params).\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSheets", Doc: "optional additional sheets of parameters to apply after Base -- 
can use multiple names separated by spaces (don't put spaces in Sheet names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Network", Doc: "the network to apply parameters to"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LaySize", IDName: "lay-size", Doc: "LaySize contains parameters for size of layers", Fields: []types.Field{{Name: "Y", Doc: "Y (vertical) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer"}, {Name: "X", Doc: "X (horizontal) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer"}, {Name: "PoolY", Doc: "Y (vertical) size of each pool in units, only for 4D layers (inner dimension)"}, {Name: "PoolX", Doc: "Y (horizontal) size of each pool in units, only for 4D layers (inner dimension)"}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetSize", IDName: "net-size", Doc: "NetSize is a network schema for holding a params for layer sizes.\nValues can be queried for getting sizes when configuring the network.\nUses params.Flex to support flexible parameter specification"}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a 
pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Doc", Doc: "Doc contains documentation about the layer.\nThis is displayed in a tooltip in the network view."}, {Name: "Off", Doc: "Off turns off the layer, removing from all computations.\nThis provides a convenient way to dynamically test for\nthe contributions of the layer, for example."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.VarCategory", IDName: "var-category", Doc: "VarCategory represents one category of unit, synapse variables.", Fields: []types.Field{{Name: "Cat", Doc: "Category name."}, {Name: "Doc", Doc: "Documentation of the category, used as a tooltip."}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the minimal interface for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nMost of the standard expected functionality is defined in the\nNetworkBase struct, and this 
interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the network as an *emer.NetworkBase,\nto access base functionality.", Returns: []string{"NetworkBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "NumLayers", Doc: "NumLayers returns the number of layers in the network.", Returns: []string{"int"}}, {Name: "EmerLayer", Doc: "EmerLayer returns layer as emer.Layer interface at given index.\nDoes not do extra bounds checking.", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network."}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on\nthe units in this network.\nThis list determines what is shown in the 
NetView\n(and the order of vars list).\nNot all layers need to support all variables,\nbut must safely return math32.NaN() for unsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\n\t- range:\"##\" = +- range around 0 for default display scaling\n\t- min:\"##\" max:\"##\" = min, max display range\n\t- auto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\n\t- zeroctr:\"+\" or \"-\" = control whether zero-centering is used\n\t- desc:\"txt\" tooltip description of the variable\n\t- cat:\"cat\" variable category, for category tabs", Returns: []string{"map[string]string"}}, {Name: "VarCategories", Doc: "VarCategories is a list of unit & synapse variable categories,\nwhich organizes the variables into separate tabs in the network view.\nUsing categories results in a more compact display and makes it easier\nto find variables.\nSet the 'cat' property in the UnitVarProps, SynVarProps for each variable.\nIf no categories returned, the default is Unit, Wt.", Returns: []string{"VarCategory"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables\non the synapses in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all pathways need to support all variables,\nbut must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following 
properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the minimal interface for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nMost of the standard expected functionality is defined in the\nNetworkBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the network as an *emer.NetworkBase,\nto access base functionality.", Returns: []string{"NetworkBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "NumLayers", Doc: "NumLayers returns the number of layers in the network.", Returns: []string{"int"}}, {Name: "EmerLayer", Doc: "EmerLayer returns layer as emer.Layer interface at given index.\nDoes not do extra bounds checking.", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network."}, 
{Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on\nthe units in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all layers need to support all variables,\nbut must safely return math32.NaN() for unsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\n\t- range:\"##\" = +- range around 0 for default display scaling\n\t- min:\"##\" max:\"##\" = min, max display range\n\t- auto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\n\t- zeroctr:\"+\" or \"-\" = control whether zero-centering is used\n\t- desc:\"txt\" tooltip description of the variable\n\t- cat:\"cat\" variable category, for category tabs", Returns: []string{"map[string]string"}}, {Name: "VarCategories", Doc: "VarCategories is a list of unit & synapse variable categories,\nwhich organizes the variables into separate tabs in the network view.\nUsing categories results in a more compact display and makes it easier\nto find variables.\nSet the 'cat' property in the UnitVarProps, SynVarProps for each variable.\nIf no categories 
returned, the default is Unit, Wt.", Returns: []string{"VarCategory"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables\non the synapses in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all pathways need to support all variables,\nbut must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "ReadWeightsJSON", Doc: "ReadWeightsJSON reads network weights from the receiver-side perspective\nin a JSON text format. 
Reads entire file into a temporary weights.Weights\nstructure that is then passed to Layers etc using SetWeights method.\nCall the NetworkBase version followed by any post-load updates.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this network\nfrom the receiver-side perspective in a JSON text format.\nCall the NetworkBase version after pre-load updates.", Args: []string{"w"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetworkBase", IDName: "network-base", Doc: "NetworkBase defines the basic data for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.", Methods: []types.Method{{Name: "SaveWeightsJSON", Doc: "SaveWeightsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWeightsJSON", Doc: "OpenWeightsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. 
If filename has .gz extension, then file is gzip uncompressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}}, Fields: []types.Field{{Name: "EmerNetwork", Doc: "EmerNetwork provides access to the emer.Network interface\nmethods for functions defined in the NetworkBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitNetwork function."}, {Name: "Name", Doc: "overall name of network, which helps discriminate if there are multiple."}, {Name: "WeightsFile", Doc: "filename of last weights file loaded or saved."}, {Name: "LayerNameMap", Doc: "map of name to layers, for EmerLayerByName methods"}, {Name: "LayerClassMap", Doc: "map from class name to layer names."}, {Name: "MinPos", Doc: "minimum display position in network"}, {Name: "MaxPos", Doc: "maximum display position in network"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}, {Name: "Rand", Doc: "random number generator for the network.\nall random calls must use this.\nSet seed here for weight initialization values."}, {Name: "RandSeed", Doc: "Random seed to be set at the start of configuring\nthe network and initializing the weights.\nSet this to get a different set of weights."}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetworkBase", IDName: "network-base", Doc: "NetworkBase defines the basic data for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.", Methods: []types.Method{{Name: "SaveWeightsJSON", Doc: "SaveWeightsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. 
If filename has .gz extension, then file is gzip compressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWeightsJSON", Doc: "OpenWeightsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. If filename has .gz extension, then file is gzip uncompressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}}, Fields: []types.Field{{Name: "EmerNetwork", Doc: "EmerNetwork provides access to the emer.Network interface\nmethods for functions defined in the NetworkBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitNetwork function."}, {Name: "Name", Doc: "overall name of network, which helps discriminate if there are multiple."}, {Name: "WeightsFile", Doc: "filename of last weights file loaded or saved."}, {Name: "LayerNameMap", Doc: "map of name to layers, for EmerLayerByName methods"}, {Name: "MinPos", Doc: "minimum display position in network"}, {Name: "MaxPos", Doc: "maximum display position in network"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}, {Name: "Rand", Doc: "random number generator for the network.\nall random calls must use this.\nSet seed here for weight initialization values."}, {Name: "RandSeed", Doc: "Random seed to be set at the start of configuring\nthe network and initializing the weights.\nSet this to get a different set of weights."}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization 
(NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "TypeNumber", Doc: "TypeNumber is the numerical value for the type or category\nof path, defined by the algorithm (and usually set by an enum).", Returns: []string{"int"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). 
Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse,\nusing the natural ordering of the synapses (sender based for Axon),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Pathway.", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", 
Doc: "WriteWeightsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.", Args: []string{"w", "depth"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically. Use to access Name via interface.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "TypeNumber", Doc: "TypeNumber is the numerical value for the type or category\nof path, defined by the algorithm (and usually set by an enum).", Returns: []string{"int"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. 
The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse,\nusing the natural ordering of the synapses (sender based for Axon),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all 
parameters in the Pathway.", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.", Args: []string{"w", "depth"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThe same struct token is added to the Recv and Send\nlayer path lists,", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitPath function."}, {Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. 
This can be space separated\nwith multple classes."}, {Name: "Doc", Doc: "Doc contains documentation about the pathway.\nThis is displayed in a tooltip in the network view."}, {Name: "Notes", Doc: "can record notes about this pathway here."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}, {Name: "Off", Doc: "Off inactivates this pathway, allowing for easy experimentation."}, {Name: "ParamsHistory", Doc: "provides a history of parameters applied to the layer"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThe same struct token is added to the Recv and Send\nlayer path lists,", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitPath function."}, {Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. 
This can be space separated\nwith multple classes."}, {Name: "Doc", Doc: "Doc contains documentation about the pathway.\nThis is displayed in a tooltip in the network view."}, {Name: "Notes", Doc: "can record notes about this pathway here."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}, {Name: "Off", Doc: "Off inactivates this pathway, allowing for easy experimentation."}}}) diff --git a/emer/weights.go b/emer/weights.go index 7ad3a25f..26c101a9 100644 --- a/emer/weights.go +++ b/emer/weights.go @@ -34,11 +34,11 @@ func (nt *NetworkBase) SaveWeightsJSON(filename core.Filename) error { //types:a ext := filepath.Ext(string(filename)) if ext == ".gz" { gzr := gzip.NewWriter(fp) - err = nt.WriteWeightsJSON(gzr) + err = nt.EmerNetwork.WriteWeightsJSON(gzr) gzr.Close() } else { bw := bufio.NewWriter(fp) - err = nt.WriteWeightsJSON(bw) + err = nt.EmerNetwork.WriteWeightsJSON(bw) bw.Flush() } return err @@ -61,9 +61,9 @@ func (nt *NetworkBase) OpenWeightsJSON(filename core.Filename) error { //types:a log.Println(err) return err } - return nt.ReadWeightsJSON(gzr) + return nt.EmerNetwork.ReadWeightsJSON(gzr) } else { - return nt.ReadWeightsJSON(bufio.NewReader(fp)) + return nt.EmerNetwork.ReadWeightsJSON(bufio.NewReader(fp)) } } @@ -85,9 +85,9 @@ func (nt *NetworkBase) OpenWeightsFS(fsys fs.FS, filename string) error { log.Println(err) return err } - return nt.ReadWeightsJSON(gzr) + return nt.EmerNetwork.ReadWeightsJSON(gzr) } else { - return nt.ReadWeightsJSON(bufio.NewReader(fp)) + return nt.EmerNetwork.ReadWeightsJSON(bufio.NewReader(fp)) } } diff --git a/netview/netview.go b/netview/netview.go index 7e485102..964dda3a 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -123,6 +123,9 @@ func (nv *NetView) Init() { laysGp := xyz.NewGroup(se) laysGp.Name = "Layers" }) + w.OnShow(func(e events.Event) { + nv.Current() + }) }) tree.AddChildAt(nv, "counters", func(w *core.Text) { 
w.SetText("Counters: " + strings.Repeat(" ", 200)). @@ -665,7 +668,7 @@ func (nv *NetView) UnitValColor(lay emer.Layer, idx1d int, raw float32, hasval b clr = NilColor } } else { - clp := nv.CurVarOptions.Range.ClipValue(raw) + clp := nv.CurVarOptions.Range.ClampValue(raw) norm := nv.CurVarOptions.Range.NormValue(clp) var op float32 if nv.CurVarOptions.ZeroCtr { From 1d210e2484d86dbcd6893debf3a9cae6eb70db1e Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Thu, 12 Dec 2024 13:43:34 -0800 Subject: [PATCH 20/24] netview layout fixes including counter text sizing using pw which eliminates growth issues... --- netview/netview.go | 3 ++- netview/toolbars.go | 12 ++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/netview/netview.go b/netview/netview.go index 964dda3a..46bacfcd 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -130,7 +130,8 @@ func (nv *NetView) Init() { tree.AddChildAt(nv, "counters", func(w *core.Text) { w.SetText("Counters: " + strings.Repeat(" ", 200)). Styler(func(s *styles.Style) { - s.Grow.Set(1, 0) + s.Max.X.Pw(95) + s.Min.X.Pw(95) }) w.Updater(func() { if w.Text != nv.CurCtrs && nv.CurCtrs != "" { diff --git a/netview/toolbars.go b/netview/toolbars.go index af7e0cf8..acb991b5 100644 --- a/netview/toolbars.go +++ b/netview/toolbars.go @@ -78,6 +78,10 @@ func (nv *NetView) MakeToolbar(p *tree.Plan) { }) tree.Add(p, func(w *core.Spinner) { w.SetMin(0).SetStep(1).SetValue(float32(nv.Di)).SetTooltip(ditp) + w.Styler(func(s *styles.Style) { + s.Max.X.Ch(9) + s.Min.X.Ch(9) + }) w.OnChange(func(e events.Event) { maxData := nv.Net.MaxParallelData() md := int(w.Value) @@ -137,6 +141,10 @@ func (nv *NetView) MakeToolbar(p *tree.Plan) { }) tree.AddAt(p, "minSpin", func(w *core.Spinner) { minSpin = w + w.Styler(func(s *styles.Style) { + s.Min.X.Ch(12) + s.Max.X.Ch(12) + }) w.SetValue(vp.Range.Min). 
OnChange(func(e events.Event) { vp := nv.VarOptions[nv.Var] @@ -197,6 +205,10 @@ func (nv *NetView) MakeToolbar(p *tree.Plan) { tree.AddAt(p, "maxSpin", func(w *core.Spinner) { maxSpin = w + w.Styler(func(s *styles.Style) { + s.Min.X.Ch(12) + s.Max.X.Ch(12) + }) w.SetValue(vp.Range.Max).OnChange(func(e events.Event) { vp := nv.VarOptions[nv.Var] vp.Range.SetMax(w.Value) From ea37889cc253b3979ad423029cd3d2be08695e88 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Thu, 19 Dec 2024 16:04:11 -0800 Subject: [PATCH 21/24] update to Dir instead of RecycleDir, and bigger min / max spinners --- netview/netdata.go | 4 ++-- netview/toolbars.go | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/netview/netdata.go b/netview/netdata.go index e5544e9d..6fbef2eb 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -656,8 +656,8 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t }) } if tensorfs.CurRoot != nil && databrowser.CurTabber != nil { - dir := tensorfs.CurRoot.RecycleDir("NetView") - udir := dir.RecycleDir(selnm) + dir := tensorfs.CurRoot.Dir("NetView") + udir := dir.Dir(selnm) tensorfs.DirFromTable(udir, dt) plt := databrowser.CurTabber.PlotTensorFS(udir) return dt, plt diff --git a/netview/toolbars.go b/netview/toolbars.go index acb991b5..06f439db 100644 --- a/netview/toolbars.go +++ b/netview/toolbars.go @@ -142,8 +142,8 @@ func (nv *NetView) MakeToolbar(p *tree.Plan) { tree.AddAt(p, "minSpin", func(w *core.Spinner) { minSpin = w w.Styler(func(s *styles.Style) { - s.Min.X.Ch(12) - s.Max.X.Ch(12) + s.Min.X.Ch(15) + s.Max.X.Ch(15) }) w.SetValue(vp.Range.Min). 
OnChange(func(e events.Event) { @@ -206,8 +206,8 @@ func (nv *NetView) MakeToolbar(p *tree.Plan) { tree.AddAt(p, "maxSpin", func(w *core.Spinner) { maxSpin = w w.Styler(func(s *styles.Style) { - s.Min.X.Ch(12) - s.Max.X.Ch(12) + s.Min.X.Ch(15) + s.Max.X.Ch(15) }) w.SetValue(vp.Range.Max).OnChange(func(e events.Event) { vp := nv.VarOptions[nv.Var] From e41ccf1bdc5ff064b96c203c097c3f142c022841 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 20 Dec 2024 02:13:31 -0800 Subject: [PATCH 22/24] go mod update to latest core --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 3416182b..6dba6bf4 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/emer/emergent/v2 go 1.22 require ( - cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b + cogentcore.org/core v0.3.8-0.20241219232922-740dc0f59957 github.com/stretchr/testify v1.9.0 golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 ) From cdb9fc4a3d252adcec6bd40ff3272799d7c63084 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Sun, 22 Dec 2024 16:30:52 -0800 Subject: [PATCH 23/24] update to new cogent lab repo --- README.md | 2 +- actrf/actrf.go | 4 ++-- actrf/actrfs.go | 2 +- actrf/mpi.go | 4 ++-- actrf/running.go | 2 +- chem/stater.go | 2 +- decoder/linear.go | 4 ++-- decoder/linear_test.go | 2 +- decoder/softmax.go | 4 ++-- egui/gui.go | 14 +++++++------- emer/layer.go | 2 +- emer/network.go | 2 +- env/curprev.go | 2 +- env/env.go | 2 +- env/fixed.go | 6 +++--- env/freq.go | 6 +++--- env/mpifixed.go | 8 ++++---- esg/rule.go | 2 +- estats/plots.go | 4 ++-- estats/stats.go | 4 ++-- go.mod | 15 +++++++-------- go.sum | 35 +++++++++++++++++----------------- netview/laymesh.go | 2 +- netview/netdata.go | 16 ++++++++-------- patgen/configpats.go | 2 +- patgen/configvocab.go | 4 ++-- patgen/configvocabpats_test.go | 2 +- patgen/flip.go | 4 ++-- patgen/permuted.go | 6 +++--- patgen/rand.go | 2 +- patgen/reshape.go | 4 ++-- patgen/shuffle.go | 4 ++-- paths/circle.go | 2 +- paths/full.go | 2 +- paths/onetoone.go | 2 +- paths/pattern.go | 2 +- paths/poolonetoone.go | 2 +- paths/poolrect.go | 2 +- paths/poolsameunit.go | 2 +- paths/pooltile.go | 2 +- paths/pooltilesub.go | 2 +- paths/poolunifrnd.go | 4 ++-- paths/prjn_test.go | 2 +- paths/rect.go | 2 +- paths/uniformrand.go | 4 ++-- popcode/popcode2d.go | 2 +- popcode/popcode_test.go | 2 +- 47 files changed, 104 insertions(+), 104 deletions(-) diff --git a/README.md b/README.md index 50ad6e92..dd2dd661 100644 --- a/README.md +++ b/README.md @@ -111,7 +111,7 @@ Here are the other packages from [Cogent Core](https://github.com/cogentcore/cor * [envs](https://github.com/emer/envs) has misc standalone environments that can be good starting points, including managing files, visual images, etc. -* [ttail](https://github.com/cogentcore/core/tree/main/tensor/cmd/ttail) is a `tail` program for interactively viewing tabular (csv, tsv, etc) log files in a terminal CLI environment! 
`go install cogentcore.org/core/tensor/cmd/ttail@latest` from anywhere to install. +* [ttail](https://github.com/cogentcore/core/tree/main/tensor/cmd/ttail) is a `tail` program for interactively viewing tabular (csv, tsv, etc) log files in a terminal CLI environment! `go install cogentcore.org/lab/tensor/cmd/ttail@latest` from anywhere to install. * [eTorch](https://github.com/emer/etorch) is the emergent interface to PyTorch models, providing emergent GUI NetView etc for these models. diff --git a/actrf/actrf.go b/actrf/actrf.go index 8f1e0805..cc8e87b3 100644 --- a/actrf/actrf.go +++ b/actrf/actrf.go @@ -9,8 +9,8 @@ package actrf import ( "slices" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/stats" + "cogentcore.org/lab/stats/stats" + "cogentcore.org/lab/tensor" ) // RF is used for computing an activation-based receptive field. diff --git a/actrf/actrfs.go b/actrf/actrfs.go index b7da8825..6bedaf3f 100644 --- a/actrf/actrfs.go +++ b/actrf/actrfs.go @@ -8,7 +8,7 @@ import ( "fmt" "cogentcore.org/core/base/errors" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" ) // RFs manages multiple named RF's -- each one must be initialized first diff --git a/actrf/mpi.go b/actrf/mpi.go index bebed8b9..8bb40764 100644 --- a/actrf/mpi.go +++ b/actrf/mpi.go @@ -5,8 +5,8 @@ package actrf import ( - "cogentcore.org/core/base/mpi" - "cogentcore.org/core/tensor/tensormpi" + "cogentcore.org/lab/base/mpi" + "cogentcore.org/lab/tensor/tensormpi" ) // MPISum aggregates RF Sum data across all processors in given mpi communicator. 
diff --git a/actrf/running.go b/actrf/running.go index b67d2bdc..eba9f323 100644 --- a/actrf/running.go +++ b/actrf/running.go @@ -4,7 +4,7 @@ package actrf -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // RunningAvg computes a running-average activation-based receptive field // for activities act relative to source activations src (the thing we're projecting rf onto) diff --git a/chem/stater.go b/chem/stater.go index 48d3c8d7..eb505f5d 100644 --- a/chem/stater.go +++ b/chem/stater.go @@ -4,7 +4,7 @@ package chem -import "cogentcore.org/core/tensor/table" +import "cogentcore.org/lab/table" // The Stater interface defines the functions implemented for State // structures containing chem state variables. diff --git a/decoder/linear.go b/decoder/linear.go index 5d4f91f0..bb3306b6 100644 --- a/decoder/linear.go +++ b/decoder/linear.go @@ -9,9 +9,9 @@ package decoder import ( "fmt" - "cogentcore.org/core/base/mpi" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/base/mpi" + "cogentcore.org/lab/tensor" ) type ActivationFunc func(float32) float32 diff --git a/decoder/linear_test.go b/decoder/linear_test.go index 2f7da726..6ae5c4cd 100644 --- a/decoder/linear_test.go +++ b/decoder/linear_test.go @@ -8,7 +8,7 @@ import ( "fmt" "testing" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/stretchr/testify/assert" ) diff --git a/decoder/softmax.go b/decoder/softmax.go index 5c289e22..00351c79 100644 --- a/decoder/softmax.go +++ b/decoder/softmax.go @@ -15,9 +15,9 @@ import ( "path/filepath" "sort" - "cogentcore.org/core/base/mpi" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/base/mpi" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/emer" ) diff --git a/egui/gui.go b/egui/gui.go index 94e9a96d..9e5f4c5f 100644 --- a/egui/gui.go +++ b/egui/gui.go @@ -10,15 +10,15 @@ import ( "cogentcore.org/core/core" "cogentcore.org/core/enums" 
"cogentcore.org/core/events" - _ "cogentcore.org/core/goal/gosl/slbool/slboolcore" // include to get gui views "cogentcore.org/core/styles" - "cogentcore.org/core/tensor/databrowser" + _ "cogentcore.org/lab/gosl/slbool/slboolcore" // include to get gui views + "cogentcore.org/lab/lab" "github.com/emer/emergent/v2/netview" ) // GUI manages all standard elements of a simulation Graphical User Interface type GUI struct { - databrowser.Browser + lab.Browser // how many cycles between updates of cycle-level plots CycleUpdateInterval int @@ -104,10 +104,10 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { s.Overflow.Set(styles.OverflowAuto) s.Grow.Set(1, 1) }) - gui.Files = databrowser.NewDataTree(fform) - tabs := databrowser.NewTabs(split) + gui.Files = lab.NewDataTree(fform) + tabs := lab.NewTabs(split) gui.Tabs = tabs - databrowser.CurTabber = tabs + lab.CurTabber = tabs tabs.Name = "tabs" gui.Files.Tabber = tabs split.SetTiles(core.TileSplit, core.TileSpan) @@ -116,7 +116,7 @@ func (gui *GUI) MakeBody(sim any, appname, title, about string) { // AddNetView adds NetView in tab with given name func (gui *GUI) AddNetView(tabName string) *netview.NetView { - nv := databrowser.NewTab(gui.Tabs, tabName, func(tab *core.Frame) *netview.NetView { + nv := lab.NewTab(gui.Tabs, tabName, func(tab *core.Frame) *netview.NetView { nv := netview.NewNetView(tab) nv.Var = "Act" // tb.OnFinal(events.Click, func(e events.Event) { diff --git a/emer/layer.go b/emer/layer.go index 559eb1b3..6d350880 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -12,7 +12,7 @@ import ( "cogentcore.org/core/base/slicesx" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/params" "github.com/emer/emergent/v2/relpos" "github.com/emer/emergent/v2/weights" diff --git a/emer/network.go b/emer/network.go index b7b0bab3..cd47591f 100644 --- a/emer/network.go +++ b/emer/network.go @@ -14,9 +14,9 @@ import ( "strings" 
"cogentcore.org/core/base/errors" - "cogentcore.org/core/base/randx" "cogentcore.org/core/core" "cogentcore.org/core/math32" + "cogentcore.org/lab/base/randx" "github.com/emer/emergent/v2/relpos" ) diff --git a/env/curprev.go b/env/curprev.go index 2626b8cb..3f6db67b 100644 --- a/env/curprev.go +++ b/env/curprev.go @@ -4,7 +4,7 @@ package env -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // CurPrev manages current and previous values for basic data types. type CurPrev[T tensor.DataTypes] struct { diff --git a/env/env.go b/env/env.go index 3dd02d29..9d1b8392 100644 --- a/env/env.go +++ b/env/env.go @@ -8,7 +8,7 @@ import ( "fmt" "cogentcore.org/core/base/labels" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" ) //go:generate core generate -add-types diff --git a/env/fixed.go b/env/fixed.go index 4a1ff71f..ef4591c4 100644 --- a/env/fixed.go +++ b/env/fixed.go @@ -9,9 +9,9 @@ import ( "log" "math/rand" - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/table" + "cogentcore.org/lab/tensor" ) // FixedTable is a basic Env that manages patterns from a [table.Table], with diff --git a/env/freq.go b/env/freq.go index da1c9315..719c6a28 100644 --- a/env/freq.go +++ b/env/freq.go @@ -9,9 +9,9 @@ import ( "log" "math" - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/table" + "cogentcore.org/lab/tensor" ) // FreqTable is an Env that manages patterns from an table.Table with frequency diff --git a/env/mpifixed.go b/env/mpifixed.go index 0edc8b60..034d1a88 100644 --- a/env/mpifixed.go +++ b/env/mpifixed.go @@ -9,10 +9,10 @@ import ( "log" "math/rand" - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/table" - "cogentcore.org/core/tensor/tensormpi" + 
"cogentcore.org/lab/base/randx" + "cogentcore.org/lab/table" + "cogentcore.org/lab/tensor" + "cogentcore.org/lab/tensor/tensormpi" ) // MPIFixedTable is an MPI-enabled version of the [FixedTable], which is diff --git a/esg/rule.go b/esg/rule.go index e7ba392a..83d3adb1 100644 --- a/esg/rule.go +++ b/esg/rule.go @@ -11,7 +11,7 @@ import ( "math/rand" "strings" - "cogentcore.org/core/base/randx" + "cogentcore.org/lab/base/randx" ) // RuleTypes are different types of rules (i.e., how the items are selected) diff --git a/estats/plots.go b/estats/plots.go index ad5651f8..694ff18d 100644 --- a/estats/plots.go +++ b/estats/plots.go @@ -5,8 +5,8 @@ package estats import ( - "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/plotcore" + "cogentcore.org/lab/table" ) func ConfigPCAPlot(plt *plotcore.PlotEditor, dt *table.Table, nm string) { diff --git a/estats/stats.go b/estats/stats.go index 12bf1ce4..6a1297ba 100644 --- a/estats/stats.go +++ b/estats/stats.go @@ -10,8 +10,8 @@ import ( "fmt" "cogentcore.org/core/base/timer" - "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/plotcore" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/decoder" ) diff --git a/go.mod b/go.mod index 6dba6bf4..cb473b2a 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,8 @@ module github.com/emer/emergent/v2 go 1.22 require ( - cogentcore.org/core v0.3.8-0.20241219232922-740dc0f59957 + cogentcore.org/core v0.3.8-0.20241223001113-dbeb6f356523 + cogentcore.org/lab v0.0.0-20241223001418-86d359d14cb5 github.com/stretchr/testify v1.9.0 golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 ) @@ -17,7 +18,7 @@ require ( github.com/aymerick/douceur v0.2.0 // indirect github.com/bramvdbogaerde/go-scp v1.4.0 // indirect github.com/chewxy/math32 v1.10.1 // indirect - github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725 // indirect + github.com/cogentcore/webgpu 
v0.0.0-20241212004832-ad7475f3b4dd // indirect github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/dlclark/regexp2 v1.11.0 // indirect @@ -31,7 +32,6 @@ require ( github.com/hack-pad/hackpadfs v0.2.1 // indirect github.com/hack-pad/safejs v0.1.1 // indirect github.com/jinzhu/copier v0.4.0 // indirect - github.com/kr/text v0.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect @@ -41,14 +41,13 @@ require ( github.com/pelletier/go-toml/v2 v2.1.2-0.20240227203013-2b69615b5d55 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/rogpeppe/go-internal v1.12.0 // indirect - golang.org/x/crypto v0.26.0 // indirect + golang.org/x/crypto v0.31.0 // indirect golang.org/x/image v0.18.0 // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.28.0 // indirect - golang.org/x/sync v0.8.0 // indirect - golang.org/x/sys v0.23.0 // indirect - golang.org/x/text v0.17.0 // indirect + golang.org/x/sync v0.10.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect golang.org/x/tools v0.24.0 // indirect gonum.org/v1/gonum v0.15.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 88417fce..4aeb2b88 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,7 @@ -cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b h1:kVgx3WPYmCQKY/y5WCgKdVGEYvDgMbPuYH8kUYxr85o= -cogentcore.org/core v0.3.6-0.20241125091405-29e40fa36d9b/go.mod h1:q3OrXihYG2aTE2oG785ad5b6aJ3wzSCXeeD2SSLKWIE= +cogentcore.org/core v0.3.8-0.20241223001113-dbeb6f356523 h1:wPx13YqgkwzAzhnnUhaGKp3tnJXJmgUCp+alKj/0Ocs= +cogentcore.org/core v0.3.8-0.20241223001113-dbeb6f356523/go.mod 
h1:Ipnb14B+l0qLzjvcvCIhnDucV9H9RyuqS4knZz5kB8A= +cogentcore.org/lab v0.0.0-20241223001418-86d359d14cb5 h1:wO8tyjVp3U8SphHiK+Pwx8ahHtLIGJRq+P1HOwX6gXM= +cogentcore.org/lab v0.0.0-20241223001418-86d359d14cb5/go.mod h1:mwkhWd1mVPzjjtXga3iMRE/80RiZHvNHZSx87la9BJc= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4 h1:6lx9xzJAhdjq0LvVfbITeC3IH9Fzvo1aBahyPu2FuG8= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4/go.mod h1:FChzXi1izqzdPb6BiNZmcZLGyTYiT61iGx9Rxx9GNeI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -22,15 +24,14 @@ github.com/bramvdbogaerde/go-scp v1.4.0 h1:jKMwpwCbcX1KyvDbm/PDJuXcMuNVlLGi0Q0re github.com/bramvdbogaerde/go-scp v1.4.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= github.com/chewxy/math32 v1.10.1 h1:LFpeY0SLJXeaiej/eIp2L40VYfscTvKh/FSEZ68uMkU= github.com/chewxy/math32 v1.10.1/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs= -github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725 h1:Zf2ixJY1PsN3huZa0Uof7VKYuoBbdxJCEbKdhKhHqwE= -github.com/cogentcore/webgpu v0.0.0-20240906154609-e35089e9a725/go.mod h1:ciqaxChrmRRMU1SnI5OE12Cn3QWvOKO+e5nSy+N9S1o= +github.com/cogentcore/webgpu v0.0.0-20241212004832-ad7475f3b4dd h1:wmOdOGOfQDY/hmiQTWzoM59SskQSjrMz91jWv0gt6Yg= +github.com/cogentcore/webgpu v0.0.0-20241212004832-ad7475f3b4dd/go.mod h1:ciqaxChrmRRMU1SnI5OE12Cn3QWvOKO+e5nSy+N9S1o= github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56 h1:Fz1uHiFCHnijFcMXzn36KLamcx5q4pxoR5rKCrcXIcQ= github.com/cogentcore/yaegi v0.0.0-20240724064145-e32a03faad56/go.mod h1:+MGpZ0srBmeJ7aaOLTdVss8WLolt0/y/plVHLpxgd3A= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= 
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= @@ -89,8 +90,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= @@ -110,8 +111,8 @@ github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.26.0 
h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= -golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA= golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= golang.org/x/image v0.0.0-20190703141733-d6a02ce849c9/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -121,17 +122,17 @@ golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0= golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= -golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU= -golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= +golang.org/x/term 
v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24= golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ= gonum.org/v1/gonum v0.15.0 h1:2lYxjRbTYyxkJxlhC+LvJIx3SsANPdRybu1tGj9/OrQ= diff --git a/netview/laymesh.go b/netview/laymesh.go index f5690551..55b5cfda 100644 --- a/netview/laymesh.go +++ b/netview/laymesh.go @@ -7,8 +7,8 @@ package netview import ( "cogentcore.org/core/gpu/shape" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" "cogentcore.org/core/xyz" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/emer" ) diff --git a/netview/netdata.go b/netview/netdata.go index 6fbef2eb..aabd3396 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -20,12 +20,12 @@ import ( "cogentcore.org/core/base/metadata" "cogentcore.org/core/core" "cogentcore.org/core/math32" - "cogentcore.org/core/plot" - "cogentcore.org/core/plot/plotcore" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/databrowser" - "cogentcore.org/core/tensor/table" - "cogentcore.org/core/tensor/tensorfs" + "cogentcore.org/lab/lab" + "cogentcore.org/lab/plot" + "cogentcore.org/lab/plotcore" + "cogentcore.org/lab/table" + "cogentcore.org/lab/tensor" + "cogentcore.org/lab/tensorfs" "github.com/emer/emergent/v2/emer" "github.com/emer/emergent/v2/ringidx" ) @@ -655,11 +655,11 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t s.Range.SetMin(float64(min)).SetMax(float64(vp.Range.Max)) }) } - if tensorfs.CurRoot != nil && 
databrowser.CurTabber != nil { + if tensorfs.CurRoot != nil && lab.CurTabber != nil { dir := tensorfs.CurRoot.Dir("NetView") udir := dir.Dir(selnm) tensorfs.DirFromTable(udir, dt) - plt := databrowser.CurTabber.PlotTensorFS(udir) + plt := lab.CurTabber.PlotTensorFS(udir) return dt, plt } else { b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm) diff --git a/patgen/configpats.go b/patgen/configpats.go index 254efca8..f9e1cd1c 100644 --- a/patgen/configpats.go +++ b/patgen/configpats.go @@ -10,7 +10,7 @@ import ( "slices" "cogentcore.org/core/base/metadata" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/table" ) // InitPats initiates patterns to be used in MixPats diff --git a/patgen/configvocab.go b/patgen/configvocab.go index 7b382747..73942d0f 100644 --- a/patgen/configvocab.go +++ b/patgen/configvocab.go @@ -13,8 +13,8 @@ import ( "slices" "cogentcore.org/core/base/errors" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/stats" + "cogentcore.org/lab/stats/stats" + "cogentcore.org/lab/tensor" ) // Vocab is a map of named tensors that contain patterns used for creating diff --git a/patgen/configvocabpats_test.go b/patgen/configvocabpats_test.go index a11bdbe7..b812777c 100644 --- a/patgen/configvocabpats_test.go +++ b/patgen/configvocabpats_test.go @@ -5,7 +5,7 @@ import ( "slices" "testing" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/table" "github.com/stretchr/testify/assert" "golang.org/x/exp/maps" ) diff --git a/patgen/flip.go b/patgen/flip.go index 46ea45f0..51a22a40 100644 --- a/patgen/flip.go +++ b/patgen/flip.go @@ -5,8 +5,8 @@ package patgen import ( - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/tensor" ) // FlipBits turns nOff bits that are currently On to Off and diff --git a/patgen/permuted.go b/patgen/permuted.go index acbd7a28..7c05e03a 100644 --- a/patgen/permuted.go +++ 
b/patgen/permuted.go @@ -10,10 +10,10 @@ import ( "log" "math" - "cogentcore.org/core/base/randx" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/stats/metric" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/stats/metric" + "cogentcore.org/lab/tensor" ) // PermutedBinary sets the given tensor to contain nOn onVal values and the diff --git a/patgen/rand.go b/patgen/rand.go index 481c6ae9..2e2befb4 100644 --- a/patgen/rand.go +++ b/patgen/rand.go @@ -4,7 +4,7 @@ package patgen -import "cogentcore.org/core/base/randx" +import "cogentcore.org/lab/base/randx" var ( // RandSource is a random source to use for all random numbers used in patgen diff --git a/patgen/reshape.go b/patgen/reshape.go index 6ce8ab70..d5b96528 100644 --- a/patgen/reshape.go +++ b/patgen/reshape.go @@ -9,8 +9,8 @@ import ( "reflect" "cogentcore.org/core/core" - "cogentcore.org/core/tensor" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/table" + "cogentcore.org/lab/tensor" ) // ReshapeCpp fixes C++ emergent table shape which is reversed from Go. 
diff --git a/patgen/shuffle.go b/patgen/shuffle.go index 484696a3..15c1c36f 100644 --- a/patgen/shuffle.go +++ b/patgen/shuffle.go @@ -5,8 +5,8 @@ package patgen import ( - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor/table" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/table" ) // Shuffle shuffles rows in specified columns in the table independently diff --git a/paths/circle.go b/paths/circle.go index 2291e0ce..aee709b6 100644 --- a/paths/circle.go +++ b/paths/circle.go @@ -7,7 +7,7 @@ package paths import ( "cogentcore.org/core/math32" "cogentcore.org/core/math32/vecint" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/edge" "github.com/emer/emergent/v2/efuns" ) diff --git a/paths/full.go b/paths/full.go index 2201a3f8..587a7638 100644 --- a/paths/full.go +++ b/paths/full.go @@ -4,7 +4,7 @@ package paths -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // Full implements full all-to-all pattern of connectivity between two layers type Full struct { diff --git a/paths/onetoone.go b/paths/onetoone.go index b40cc099..60cde4e6 100644 --- a/paths/onetoone.go +++ b/paths/onetoone.go @@ -4,7 +4,7 @@ package paths -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // OneToOne implements point-to-point one-to-one pattern of connectivity between two layers type OneToOne struct { diff --git a/paths/pattern.go b/paths/pattern.go index a41ce341..694c0366 100644 --- a/paths/pattern.go +++ b/paths/pattern.go @@ -7,7 +7,7 @@ package paths //go:generate core generate -add-types import ( - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" ) // Pattern defines a pattern of connectivity between two layers. 
diff --git a/paths/poolonetoone.go b/paths/poolonetoone.go index 29cc7ad9..dac45be0 100644 --- a/paths/poolonetoone.go +++ b/paths/poolonetoone.go @@ -4,7 +4,7 @@ package paths -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // PoolOneToOne implements one-to-one connectivity between pools within layers. // Pools are the outer-most two dimensions of a 4D layer shape. diff --git a/paths/poolrect.go b/paths/poolrect.go index 7521b63d..58385ca0 100644 --- a/paths/poolrect.go +++ b/paths/poolrect.go @@ -7,7 +7,7 @@ package paths import ( "cogentcore.org/core/math32" "cogentcore.org/core/math32/vecint" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/edge" ) diff --git a/paths/poolsameunit.go b/paths/poolsameunit.go index 418d00aa..f2d9e6d2 100644 --- a/paths/poolsameunit.go +++ b/paths/poolsameunit.go @@ -4,7 +4,7 @@ package paths -import "cogentcore.org/core/tensor" +import "cogentcore.org/lab/tensor" // PoolSameUnit connects a given unit to the unit at the same index // across all the pools in a layer. 
diff --git a/paths/pooltile.go b/paths/pooltile.go index 1f238236..0f2ad1ee 100644 --- a/paths/pooltile.go +++ b/paths/pooltile.go @@ -11,7 +11,7 @@ import ( "cogentcore.org/core/math32" "cogentcore.org/core/math32/minmax" "cogentcore.org/core/math32/vecint" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/edge" "github.com/emer/emergent/v2/efuns" ) diff --git a/paths/pooltilesub.go b/paths/pooltilesub.go index 446e0c3e..70c24232 100644 --- a/paths/pooltilesub.go +++ b/paths/pooltilesub.go @@ -11,7 +11,7 @@ import ( "cogentcore.org/core/math32" "cogentcore.org/core/math32/minmax" "cogentcore.org/core/math32/vecint" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/edge" "github.com/emer/emergent/v2/efuns" ) diff --git a/paths/poolunifrnd.go b/paths/poolunifrnd.go index aeaa184f..491b5348 100644 --- a/paths/poolunifrnd.go +++ b/paths/poolunifrnd.go @@ -8,8 +8,8 @@ import ( "math" "sort" - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/tensor" ) // PoolUniformRand implements random pattern of connectivity between pools within layers. 
diff --git a/paths/prjn_test.go b/paths/prjn_test.go index 4cc95ac4..c3c7d1f7 100644 --- a/paths/prjn_test.go +++ b/paths/prjn_test.go @@ -7,7 +7,7 @@ package paths import ( "testing" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/stretchr/testify/assert" ) diff --git a/paths/rect.go b/paths/rect.go index 45fc4ec7..8adcb8da 100644 --- a/paths/rect.go +++ b/paths/rect.go @@ -7,7 +7,7 @@ package paths import ( "cogentcore.org/core/math32" "cogentcore.org/core/math32/vecint" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" "github.com/emer/emergent/v2/edge" ) diff --git a/paths/uniformrand.go b/paths/uniformrand.go index ee67c083..3d7bd914 100644 --- a/paths/uniformrand.go +++ b/paths/uniformrand.go @@ -9,8 +9,8 @@ import ( "math/rand" "sort" - "cogentcore.org/core/base/randx" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/base/randx" + "cogentcore.org/lab/tensor" ) // UniformRand implements uniform random pattern of connectivity between two layers diff --git a/popcode/popcode2d.go b/popcode/popcode2d.go index c8a14b5a..2ba5674a 100644 --- a/popcode/popcode2d.go +++ b/popcode/popcode2d.go @@ -10,7 +10,7 @@ import ( "sort" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" ) // popcode.TwoD provides encoding and decoding of population diff --git a/popcode/popcode_test.go b/popcode/popcode_test.go index 38cafef0..00c8e239 100644 --- a/popcode/popcode_test.go +++ b/popcode/popcode_test.go @@ -8,7 +8,7 @@ import ( "testing" "cogentcore.org/core/math32" - "cogentcore.org/core/tensor" + "cogentcore.org/lab/tensor" ) // difTol is the numerical difference tolerance for comparing vs. target values From 38ee131acdf55d7ae35a79baf7f31a557a515e8e Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Mon, 23 Dec 2024 12:45:20 -0800 Subject: [PATCH 24/24] update to latest lab tabber --- netview/netdata.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netview/netdata.go b/netview/netdata.go index aabd3396..3bad6afe 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -659,7 +659,7 @@ func (nv *NetView) PlotSelectedUnit() (*table.Table, *plotcore.PlotEditor) { //t dir := tensorfs.CurRoot.Dir("NetView") udir := dir.Dir(selnm) tensorfs.DirFromTable(udir, dt) - plt := lab.CurTabber.PlotTensorFS(udir) + plt := lab.CurTabber.AsLab().PlotTensorFS(udir) return dt, plt } else { b := core.NewBody("netview-selectedunit").SetTitle("NetView SelectedUnit Plot: " + selnm)