Perf - Add scale_by_ave() function.
Change-Id: I3072869e4c7520c8222d1dae9e1404d4e4f73215
Reviewed-on: https://skia-review.googlesource.com/c/buildbot/+/220750
Reviewed-by: Joe Gregorio <jcgregorio@google.com>
Commit-Queue: Joe Gregorio <jcgregorio@google.com>
diff --git a/go/calc/funcs.go b/go/calc/funcs.go
index fd4390b..47e0e22 100644
--- a/go/calc/funcs.go
+++ b/go/calc/funcs.go
@@ -556,3 +556,38 @@
}
var traceStepFunc = TraceStepFunc{}
+
+// ScaleByAveFunc implements Func and computes a new trace that is scaled by
+// 1/(average of all values in the trace).
+//
+// vec32.MISSING_DATA_SENTINEL values are not taken into account for the ave.
+// If the entire vector is vec32.MISSING_DATA_SENTINEL then the result is also
+// all vec32.MISSING_DATA_SENTINEL.
+type ScaleByAveFunc struct{}
+
+// Eval implements the Func interface.
+func (ScaleByAveFunc) Eval(ctx *Context, node *Node) (Rows, error) {
+	if len(node.Args) != 1 {
+		return nil, fmt.Errorf("scale_by_ave() takes a single argument")
+	}
+	if node.Args[0].Typ != NodeFunc {
+		return nil, fmt.Errorf("scale_by_ave() takes a function argument")
+	}
+	rows, err := node.Args[0].Eval(ctx)
+	if err != nil {
+		return nil, fmt.Errorf("scale_by_ave() failed evaluating argument: %s", err)
+	}
+
+	ret := Rows{}
+	for key, r := range rows {
+		// Work on a copy so the source row is left untouched.
+		row := vec32.Dup(r)
+		mean := vec32.Mean(row)
+		// ScaleBy divides each value by mean, i.e. multiplies by 1/mean,
+		// mapping any resulting NaN/Inf to the missing-data sentinel.
+		vec32.ScaleBy(row, mean)
+		ret["scale_by_ave("+key+")"] = row
+	}
+
+	return ret, nil
+}
+
+// Describe returns a human-readable description of scale_by_ave().
+func (ScaleByAveFunc) Describe() string {
+	return `Computes a new trace that is scaled by 1/(ave) where ave is the average of the input trace.`
+}
+
+// scaleByAveFunc is the singleton instance registered under "scale_by_ave" in parser.go.
+var scaleByAveFunc = ScaleByAveFunc{}
diff --git a/go/calc/parser.go b/go/calc/parser.go
index a008ba6..8d464bd 100644
--- a/go/calc/parser.go
+++ b/go/calc/parser.go
@@ -106,6 +106,7 @@
"trace_stddev": traceStdDevFunc,
"trace_cov": traceCovFunc,
"step": traceStepFunc,
+ "scale_by_ave": scaleByAveFunc,
},
}
}
diff --git a/go/vec32/vec.go b/go/vec32/vec.go
index 04a31d9..b85fd66 100644
--- a/go/vec32/vec.go
+++ b/go/vec32/vec.go
@@ -50,6 +50,21 @@
return mean, stddev, nil
}
+// ScaleBy divides each non-sentinel value in the slice by 'b', converting
+// resulting NaNs and Infs into sentinel values.
+func ScaleBy(a []float32, b float32) {
+	for i, v := range a {
+		// Missing-data entries pass through unchanged.
+		if v == MISSING_DATA_SENTINEL {
+			continue
+		}
+		q := v / b
+		// Division by zero (or by a denormal) can produce NaN/Inf; record
+		// those as missing data rather than propagating them.
+		if math.IsNaN(float64(q)) || math.IsInf(float64(q), 0) {
+			q = MISSING_DATA_SENTINEL
+		}
+		a[i] = q
+	}
+}
+
// Norm normalizes the slice to a mean of 0 and a standard deviation of 1.0.
// The minStdDev is the minimum standard deviation that is normalized. Slices
// with a standard deviation less than that are not normalized for variance.
diff --git a/go/vec32/vec_test.go b/go/vec32/vec_test.go
index 5df67e5..bd4929b 100644
--- a/go/vec32/vec_test.go
+++ b/go/vec32/vec_test.go
@@ -335,6 +335,38 @@
}
}
+// TestScaleBy checks ScaleBy over overflow-to-sentinel, divide-by-zero, and
+// the normal scaling case, with sentinel values passed through untouched.
+func TestScaleBy(t *testing.T) {
+	unittest.SmallTest(t)
+	testCases := []struct {
+		Slice    []float32
+		Scale    float32
+		Expected []float32
+	}{
+		{
+			// Dividing by the smallest nonzero float overflows to +Inf -> sentinel.
+			Slice:    []float32{e, 0, 2, 3},
+			Scale:    math.SmallestNonzeroFloat32,
+			Expected: []float32{e, 0, e, e},
+		},
+		{
+			// Dividing by zero produces NaN/Inf -> sentinel everywhere.
+			Slice:    []float32{e, 0, -1, 2},
+			Scale:    0,
+			Expected: []float32{e, e, e, e},
+		},
+		{
+			// Normal case: each non-sentinel value is divided by the scale.
+			Slice:    []float32{e, 0, -2, 2},
+			Scale:    2,
+			Expected: []float32{e, 0, -1, 1},
+		},
+	}
+	for _, tc := range testCases {
+		v := Dup(tc.Slice)
+		ScaleBy(v, tc.Scale)
+		if got, want := v, tc.Expected; !vecNear(got, want) {
+			// Was "Mean(%v)": a copy-paste error that named the wrong function
+			// and dropped the scale factor from the failure message.
+			t.Errorf("ScaleBy(%v, %v) Got %v Want %v", tc.Slice, tc.Scale, got, want)
+		}
+	}
+}
+
func TestFillStep(t *testing.T) {
unittest.SmallTest(t)
testCases := []struct {