[gold] Replace SummaryMap with []summary.TriageStatus
Name is more clear (I think) and the map part wasn't really being used.
Additionally, presuming it was a map would make the summary misbehave
if there were two different corpora with the same test name.
Bug: skia:9080
Change-Id: I55fa73d094b6c5b5fdb79cd83481394421dcf69d
Reviewed-on: https://skia-review.googlesource.com/c/buildbot/+/251976
Commit-Queue: Kevin Lubick <kjlubick@google.com>
Reviewed-by: Ben Wagner aka dogben <benjaminwagner@google.com>
diff --git a/golden/go/indexer/indexer.go b/golden/go/indexer/indexer.go
index 6a25f6a..0a66fa1 100644
--- a/golden/go/indexer/indexer.go
+++ b/golden/go/indexer/indexer.go
@@ -47,7 +47,7 @@
searchIndexConfig
// The indices of these arrays are the int values of types.IgnoreState
dCounters [2]digest_counter.DigestCounter
- summaries [2]summary.SummaryMap
+ summaries [2]countsAndBlames
paramsetSummaries [2]paramsets.ParamSummary
cpxTile types.ComplexTile
@@ -58,6 +58,9 @@
testNames types.TestNameSet
}
+// countsAndBlames makes the type declaration of SearchIndex a little nicer to read.
+type countsAndBlames []*summary.TriageStatus
+
type searchIndexConfig struct {
diffStore diff.DiffStore
expectationsStore expstorage.ExpectationsStore
@@ -73,7 +76,7 @@
searchIndexConfig: sic,
// The indices of these slices are the int values of types.IgnoreState
dCounters: [2]digest_counter.DigestCounter{},
- summaries: [2]summary.SummaryMap{},
+ summaries: [2]countsAndBlames{},
paramsetSummaries: [2]paramsets.ParamSummary{},
cpxTile: cpxTile,
}
@@ -87,7 +90,7 @@
expectationsStore: exp,
},
dCounters: dc,
- summaries: [2]summary.SummaryMap{},
+ summaries: [2]countsAndBlames{},
paramsetSummaries: pm,
blamer: b,
cpxTile: cpxTile,
@@ -125,20 +128,20 @@
}
// GetSummaries implements the IndexSearcher interface.
-func (idx *SearchIndex) GetSummaries(is types.IgnoreState) summary.SummaryMap {
+func (idx *SearchIndex) GetSummaries(is types.IgnoreState) []*summary.TriageStatus {
return idx.summaries[is]
}
// CalcSummaries implements the IndexSearcher interface.
-func (idx *SearchIndex) CalcSummaries(query url.Values, is types.IgnoreState, head bool) (summary.SummaryMap, error) {
+func (idx *SearchIndex) CalcSummaries(query url.Values, is types.IgnoreState, head bool) ([]*summary.TriageStatus, error) {
dCounter := idx.dCounters[is]
- smc := summary.SummaryMapConfig{
+ smc := summary.Utils{
ExpectationsStore: idx.expectationsStore,
DiffStore: idx.diffStore,
DigestCounter: dCounter,
Blamer: idx.blamer,
}
- return summary.NewSummaryMap(smc, idx.cpxTile.GetTile(is), nil, query, head)
+ return summary.Calculate(smc, idx.cpxTile.GetTile(is), nil, query, head)
}
// GetParamsetSummary implements the IndexSearcher interface.
@@ -372,9 +375,11 @@
dCounters: lastIdx.dCounters, // stay the same even if expectations change.
paramsetSummaries: lastIdx.paramsetSummaries, // stay the same even if expectations change.
- summaries: [2]summary.SummaryMap{
- lastIdx.summaries[types.ExcludeIgnoredTraces], // immutable, but may be replaced if
- lastIdx.summaries[types.IncludeIgnoredTraces], // expectations change
+ summaries: [2]countsAndBlames{
+ // the objects inside the summaries are immutable, but may be replaced if expectations
+ // are recalculated for a subset of tests.
+ lastIdx.summaries[types.ExcludeIgnoredTraces],
+ lastIdx.summaries[types.IncludeIgnoredTraces],
},
blamer: nil, // This will need to be recomputed if expectations change.
@@ -419,18 +424,20 @@
idx := state.(*SearchIndex)
for _, is := range types.IgnoreStates {
dCounter := idx.dCounters[is]
- smc := summary.SummaryMapConfig{
+ smc := summary.Utils{
ExpectationsStore: idx.expectationsStore,
DiffStore: idx.diffStore,
DigestCounter: dCounter,
Blamer: idx.blamer,
}
- sum, err := summary.NewSummaryMap(smc, idx.cpxTile.GetTile(is), idx.testNames, nil, true)
+ sum, err := summary.Calculate(smc, idx.cpxTile.GetTile(is), idx.testNames, nil, true)
if err != nil {
- return skerr.Fmt("Could not calculate summaries with ignore state %d: %s", is, err)
+ return skerr.Wrapf(err, "calculating summaries for %d tests with ignore state %v", len(idx.testNames), is)
}
- if len(idx.testNames) > 0 && idx.summaries[is] != nil {
- idx.summaries[is] = idx.summaries[is].Combine(sum)
+	// If we have recalculated only a subset of tests, we want to keep the results from
+	// the previous scans, overwriting the old entries with what we have just recomputed.
+ if len(idx.testNames) > 0 && len(idx.summaries[is]) > 0 {
+ idx.summaries[is] = summary.MergeSorted(idx.summaries[is], sum)
} else {
idx.summaries[is] = sum
}
diff --git a/golden/go/indexer/indexer_test.go b/golden/go/indexer/indexer_test.go
index 35a3bea..2e40688 100644
--- a/golden/go/indexer/indexer_test.go
+++ b/golden/go/indexer/indexer_test.go
@@ -91,10 +91,10 @@
}).Return(nil)
// The summary and counter are computed in indexer, so we should spot check their data.
- summaryMatcher := mock.MatchedBy(func(sm summary.SummaryMap) bool {
+ summaryMatcher := mock.MatchedBy(func(sm []*summary.TriageStatus) bool {
// There's only one untriaged digest for each test
- assert.Equal(t, types.DigestSlice{data.AlphaUntriaged1Digest}, sm[data.AlphaTest].UntHashes)
- assert.Equal(t, types.DigestSlice{data.BetaUntriaged1Digest}, sm[data.BetaTest].UntHashes)
+ assert.Equal(t, types.DigestSlice{data.AlphaUntriaged1Digest}, sm[0].UntHashes)
+ assert.Equal(t, types.DigestSlice{data.BetaUntriaged1Digest}, sm[1].UntHashes)
return true
})
@@ -157,7 +157,11 @@
// Make sure PrecomputeDiffs is only told to recompute BetaTest.
tn := types.TestNameSet{data.BetaTest: true}
- async(mdw.On("PrecomputeDiffs", testutils.AnyContext, mock.AnythingOfType("summary.SummaryMap"), tn, mock.AnythingOfType("*digest_counter.Counter"), mock.AnythingOfType("*digesttools.Impl")).Return(nil))
+ summaryMatcher := mock.MatchedBy(func(sm []*summary.TriageStatus) bool {
+ assert.Len(t, sm, 2)
+ return true
+ })
+ async(mdw.On("PrecomputeDiffs", testutils.AnyContext, summaryMatcher, tn, mock.AnythingOfType("*digest_counter.Counter"), mock.AnythingOfType("*digesttools.Impl")).Return(nil))
ic := IndexerConfig{
EventBus: meb,
@@ -168,8 +172,8 @@
ixr, err := New(ic, 0)
require.NoError(t, err)
- alphaOnly := summary.SummaryMap{
- data.AlphaTest: {
+ alphaOnly := []*summary.TriageStatus{
+ {
Name: data.AlphaTest,
Untriaged: 1,
UntHashes: types.DigestSlice{data.AlphaUntriaged1Digest},
@@ -181,7 +185,7 @@
expectationsStore: mes,
warmer: mdw,
},
- summaries: [2]summary.SummaryMap{alphaOnly, alphaOnly},
+ summaries: [2]countsAndBlames{alphaOnly, alphaOnly},
dCounters: [2]digest_counter.DigestCounter{
digest_counter.New(partialTile),
digest_counter.New(fullTile),
@@ -203,13 +207,14 @@
require.NotNil(t, actualIndex)
sm := actualIndex.GetSummaries(types.ExcludeIgnoredTraces)
- require.Contains(t, sm, data.AlphaTest)
- require.Contains(t, sm, data.BetaTest)
+ require.Len(t, sm, 2)
+ assert.Equal(t, data.AlphaTest, sm[0].Name)
+ assert.Equal(t, data.BetaTest, sm[1].Name)
// Spot check the summaries themselves.
- require.Equal(t, types.DigestSlice{data.AlphaUntriaged1Digest}, sm[data.AlphaTest].UntHashes)
+ require.Equal(t, types.DigestSlice{data.AlphaUntriaged1Digest}, sm[0].UntHashes)
- require.Equal(t, &summary.Summary{
+ require.Equal(t, &summary.TriageStatus{
Name: data.BetaTest,
Pos: 1,
Neg: 0,
@@ -218,7 +223,7 @@
Num: 1,
Corpus: "gm",
Blame: []blame.WeightedBlame{},
- }, sm[data.BetaTest])
+ }, sm[1])
// Block until all async calls are finished so the assertExpectations calls
// can properly check that their functions were called.
wg.Wait()
diff --git a/golden/go/indexer/mocks/IndexSearcher.go b/golden/go/indexer/mocks/IndexSearcher.go
index b1ca601..6d3009f 100644
--- a/golden/go/indexer/mocks/IndexSearcher.go
+++ b/golden/go/indexer/mocks/IndexSearcher.go
@@ -25,15 +25,15 @@
}
// CalcSummaries provides a mock function with given fields: query, is, head
-func (_m *IndexSearcher) CalcSummaries(query url.Values, is types.IgnoreState, head bool) (summary.SummaryMap, error) {
+func (_m *IndexSearcher) CalcSummaries(query url.Values, is types.IgnoreState, head bool) ([]*summary.TriageStatus, error) {
ret := _m.Called(query, is, head)
- var r0 summary.SummaryMap
- if rf, ok := ret.Get(0).(func(url.Values, types.IgnoreState, bool) summary.SummaryMap); ok {
+ var r0 []*summary.TriageStatus
+ if rf, ok := ret.Get(0).(func(url.Values, types.IgnoreState, bool) []*summary.TriageStatus); ok {
r0 = rf(query, is, head)
} else {
if ret.Get(0) != nil {
- r0 = ret.Get(0).(summary.SummaryMap)
+ r0 = ret.Get(0).([]*summary.TriageStatus)
}
}
@@ -158,15 +158,15 @@
}
// GetSummaries provides a mock function with given fields: is
-func (_m *IndexSearcher) GetSummaries(is types.IgnoreState) summary.SummaryMap {
+func (_m *IndexSearcher) GetSummaries(is types.IgnoreState) []*summary.TriageStatus {
ret := _m.Called(is)
- var r0 summary.SummaryMap
- if rf, ok := ret.Get(0).(func(types.IgnoreState) summary.SummaryMap); ok {
+ var r0 []*summary.TriageStatus
+ if rf, ok := ret.Get(0).(func(types.IgnoreState) []*summary.TriageStatus); ok {
r0 = rf(is)
} else {
if ret.Get(0) != nil {
- r0 = ret.Get(0).(summary.SummaryMap)
+ r0 = ret.Get(0).([]*summary.TriageStatus)
}
}
diff --git a/golden/go/indexer/types.go b/golden/go/indexer/types.go
index 52a28ca..7d59e4e 100644
--- a/golden/go/indexer/types.go
+++ b/golden/go/indexer/types.go
@@ -40,11 +40,11 @@
DigestCountsByQuery(query url.Values, is types.IgnoreState) digest_counter.DigestCount
// GetSummaries returns all summaries that were computed for this index.
- GetSummaries(is types.IgnoreState) summary.SummaryMap
+ GetSummaries(is types.IgnoreState) []*summary.TriageStatus
// CalcSummaries returns those summaries that match the given inputs. They may
// be filtered by any of: query, is at head or not.
- CalcSummaries(query url.Values, is types.IgnoreState, head bool) (summary.SummaryMap, error)
+ CalcSummaries(query url.Values, is types.IgnoreState, head bool) ([]*summary.TriageStatus, error)
// GetParamsetSummary Returns the ParamSetSummary that matches the given test/digest.
GetParamsetSummary(test types.TestName, digest types.Digest, is types.IgnoreState) paramtools.ParamSet
diff --git a/golden/go/search/digest_table.go b/golden/go/search/digest_table.go
index f8e2bf6..e477cd6 100644
--- a/golden/go/search/digest_table.go
+++ b/golden/go/search/digest_table.go
@@ -93,10 +93,17 @@
}
// Get the summaries of all tests in the result.
+
testSummaries := idx.GetSummaries(types.ExcludeIgnoredTraces)
dtSummaries := make(map[types.TestName]*frontend.DTSummary, len(uniqueTests))
for testName := range uniqueTests {
- dtSummaries[testName] = dtSummaryFromSummary(testSummaries[testName])
+ i := sort.Search(len(testSummaries), func(i int) bool {
+ // FIXME(kjlubick): if two corpora have tests of the same name, this will not work.
+ return testSummaries[i].Name >= testName
+ })
+ if i < len(testSummaries) && testSummaries[i].Name == testName {
+ dtSummaries[testName] = dtSummaryFromSummary(testSummaries[i])
+ }
}
ret := &frontend.DigestTable{
@@ -323,7 +330,7 @@
func (c *dtDiffMetricsSlice) Less(i, j int) bool { return c.lessFn(c, i, j) }
func (c *dtDiffMetricsSlice) Swap(i, j int) { c.data[i], c.data[j] = c.data[j], c.data[i] }
-func dtSummaryFromSummary(sum *summary.Summary) *frontend.DTSummary {
+func dtSummaryFromSummary(sum *summary.TriageStatus) *frontend.DTSummary {
return &frontend.DTSummary{
Pos: sum.Pos,
Neg: sum.Neg,
diff --git a/golden/go/summary/summary.go b/golden/go/summary/summary.go
index 7cc3366..f0c3307 100644
--- a/golden/go/summary/summary.go
+++ b/golden/go/summary/summary.go
@@ -1,4 +1,4 @@
-// summary summarizes the current state of triaging.
+// Package summary summarizes the current state of triaging.
package summary
import (
@@ -19,32 +19,28 @@
"go.skia.org/infra/golden/go/types/expectations"
)
-// TODO(kjlubick) This data type does not do well if multiple corpora have the same test name.
-// Additionally, in all the uses of this (poorly named) object, we just iterate over everything.
-// Therefore, it should be straight forward enough to remove this type and use []Summary
-// everywhere.
-type SummaryMap map[types.TestName]*Summary
-
-// Summary contains rolled up metrics for one test.
+// TriageStatus contains rolled up digest counts/blames for one test in one corpus.
// It is immutable and should be thread safe.
-type Summary struct {
+type TriageStatus struct {
+ // TODO(kjlubick) Change Name/Corpus to be a more generic "Grouping"
Name types.TestName `json:"name"`
- Diameter int `json:"diameter"`
+ Corpus string `json:"corpus"`
Pos int `json:"pos"`
Neg int `json:"neg"`
Untriaged int `json:"untriaged"`
- UntHashes types.DigestSlice `json:"untHashes"`
Num int `json:"num"`
- Corpus string `json:"corpus"`
+ UntHashes types.DigestSlice `json:"untHashes"`
Blame []blame.WeightedBlame `json:"blame"`
+ // currently unused
+ Diameter int `json:"diameter"`
}
// TODO(jcgregorio) Make diameter faster, and also make the actual diameter
// metric better. Until then disable it.
const computeDiameter = false
-// SummaryMapConfig is a helper struct for calculating SummaryMap.
-type SummaryMapConfig struct {
+// Utils is a helper struct filled with types needed to compute the summaries.
+type Utils struct {
ExpectationsStore expstorage.ExpectationsStore
DiffStore diff.DiffStore // only needed if computeDiameter = true
@@ -52,59 +48,81 @@
Blamer blame.Blamer
}
-// NewSummaryMap creates a new instance of Summaries.
-func NewSummaryMap(smc SummaryMapConfig, tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) (SummaryMap, error) {
+// Calculate calculates a slice of TriageStatus for the given data. It will have its entries
+// sorted by TestName first, then sorted by Corpus in the event of a tie.
+func Calculate(smc Utils, tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) ([]*TriageStatus, error) {
return smc.calcSummaries(tile, testNames, query, head)
}
-// Combine creates a new SummaryMap from this and the passed
-// in map. The passed in map will "win" in the event there are tests
-// in both.
-func (s SummaryMap) Combine(other SummaryMap) SummaryMap {
- copied := make(SummaryMap, len(s))
- for k, v := range s {
- copied[k] = v
- }
+// MergeSorted creates a new []*TriageStatus from the two passed-in
+// slices. The newOnes entries will "win" in the event there are tests
+// in both. We assume that the two passed in slices are sorted by TestName,Corpus already.
+func MergeSorted(existing, newOnes []*TriageStatus) []*TriageStatus {
+ ret := make([]*TriageStatus, 0, len(existing)+len(newOnes))
- for k, v := range other {
- copied[k] = v
+ // Basic algorithm for merging two sorted arrays, with a small tweak to have
+ // the second one win for an exact match on Name and Corpus.
+ i, j := 0, 0
+ for i < len(existing) && j < len(newOnes) {
+ e, n := existing[i], newOnes[j]
+ if e.Name == n.Name && e.Corpus == n.Corpus {
+ ret = append(ret, n)
+ i++
+ j++
+ } else if e.Name > n.Name || (e.Name == n.Name && e.Corpus > n.Corpus) {
+ ret = append(ret, n)
+ j++
+ } else {
+ ret = append(ret, e)
+ i++
+ }
}
- return copied
+ // Only one of these will actually append something, since j or i are at the end.
+ ret = append(ret, newOnes[j:]...)
+ ret = append(ret, existing[i:]...)
+
+ return ret
+}
+
+type grouping struct {
+ test types.TestName
+ corpus string
}
// tracePair is used to hold traces, along with their ids.
type tracePair struct {
id tiling.TraceId
- tr tiling.Trace
+ tr *types.GoldenTrace
}
-// calcSummaries returns a Summary of the given tile. If testNames is not empty,
+// calcSummaries returns a TriageStatus of the given tile. If testNames is not empty,
// then restrict the results to only tests with those names. If query is not empty,
// it will be used as an additional filter. Finally, if head is true, only consider
// the single most recent digest per trace.
-func (s *SummaryMapConfig) calcSummaries(tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) (SummaryMap, error) {
+func (s *Utils) calcSummaries(tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) ([]*TriageStatus, error) {
defer shared.NewMetricsTimer("calc_summaries_total").Stop()
sklog.Infof("CalcSummaries: head %v", head)
- ret := SummaryMap{}
+ var ret []*TriageStatus
e, err := s.ExpectationsStore.Get()
if err != nil {
return nil, skerr.Wrapf(err, "getting expectations")
}
// Filter down to just the traces we are interested in, based on query.
- filtered := map[types.TestName][]*tracePair{}
+ filtered := map[grouping][]*tracePair{}
t := shared.NewMetricsTimer("calc_summaries_filter_traces")
for id, tr := range tile.Traces {
- name := types.TestName(tr.Params()[types.PRIMARY_KEY_FIELD])
- if len(testNames) > 0 && !testNames[name] {
+ gt := tr.(*types.GoldenTrace)
+ if len(testNames) > 0 && !testNames[gt.TestName()] {
continue
}
if tiling.Matches(tr, query) {
- if slice, ok := filtered[name]; ok {
- filtered[name] = append(slice, &tracePair{tr: tr, id: id})
+ k := grouping{test: gt.TestName(), corpus: gt.Corpus()}
+ if slice, ok := filtered[k]; ok {
+ filtered[k] = append(slice, &tracePair{tr: gt, id: id})
} else {
- filtered[name] = []*tracePair{{tr: tr, id: id}}
+ filtered[k] = []*tracePair{{tr: gt, id: id}}
}
}
}
@@ -115,51 +133,50 @@
// Now create summaries for each test using the filtered set of traces.
t = shared.NewMetricsTimer("calc_summaries_tally")
lastCommitIndex := tile.LastCommitIndex()
- for name, traces := range filtered {
+ for k, traces := range filtered {
digestMap := types.DigestSet{}
- corpus := ""
- for _, trid := range traces {
- corpus = trid.tr.Params()[types.CORPUS_FIELD]
+ for _, pair := range traces {
if head {
// Find the last non-missing value in the trace.
for i := lastCommitIndex; i >= 0; i-- {
- if trid.tr.IsMissing(i) {
+ if pair.tr.IsMissing(i) {
continue
} else {
- digestMap[trid.tr.(*types.GoldenTrace).Digests[i]] = true
+ digestMap[pair.tr.Digests[i]] = true
break
}
}
} else {
// Use the digestsByTrace if available, otherwise just inspect the trace.
- if t, ok := digestsByTrace[trid.id]; ok {
- for k := range t {
- digestMap[k] = true
+ if t, ok := digestsByTrace[pair.id]; ok {
+ for d := range t {
+ digestMap[d] = true
}
} else {
for i := lastCommitIndex; i >= 0; i-- {
- if !trid.tr.IsMissing(i) {
- digestMap[trid.tr.(*types.GoldenTrace).Digests[i]] = true
+ if !pair.tr.IsMissing(i) {
+ digestMap[pair.tr.Digests[i]] = true
}
}
}
}
}
- ret[name] = s.makeSummary(name, e, corpus, digestMap.Keys())
+ ret = append(ret, s.makeSummary(k.test, e, k.corpus, digestMap.Keys()))
}
t.Stop()
+ // Sort for determinism and to allow clients to use binary search.
+ t = shared.NewMetricsTimer("calc_summaries_sort")
+ sort.Slice(ret, func(i, j int) bool {
+ return ret[i].Name < ret[j].Name || (ret[i].Name == ret[j].Name && ret[i].Corpus < ret[j].Corpus)
+ })
+ t.Stop()
+
return ret, nil
}
-// DigestInfo is test name and a digest found in that test. Returned from Search.
-type DigestInfo struct {
- Test types.TestName `json:"test"`
- Digest types.Digest `json:"digest"`
-}
-
-// makeSummary returns a Summary for the given digests.
-func (s *SummaryMapConfig) makeSummary(name types.TestName, exp expectations.ReadOnly, corpus string, digests types.DigestSlice) *Summary {
+// makeSummary returns a TriageStatus for the given digests.
+func (s *Utils) makeSummary(name types.TestName, exp expectations.ReadOnly, corpus string, digests types.DigestSlice) *TriageStatus {
pos := 0
neg := 0
unt := 0
@@ -186,7 +203,7 @@
if computeDiameter {
d = diameter(diamDigests, s.DiffStore)
}
- return &Summary{
+ return &TriageStatus{
Name: name,
Diameter: d,
Pos: pos,
diff --git a/golden/go/summary/summary_test.go b/golden/go/summary/summary_test.go
index ebe0f96..86bdf58 100644
--- a/golden/go/summary/summary_test.go
+++ b/golden/go/summary/summary_test.go
@@ -85,98 +85,128 @@
func TestSummaryMap_AllGMsWithIgnores(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), nil, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
+ sum := computeHelper(t, makeTileWithIgnores(), nil, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
require.Len(t, sum, 2)
- triageCountsCorrect(t, sum, FirstTest, 2, 1, 0)
- triageCountsCorrect(t, sum, SecondTest, 0, 1, 1)
- assert.NotContains(t, sum, ThirdTest) // no gms for ThirdTest
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Empty(t, s1.UntHashes)
// The only 2 untriaged digests for this test ignored because they were 565
- assert.Empty(t, sum[FirstTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"ggg"}, sum[SecondTest].UntHashes)
+ triageCountsCorrect(t, s1, 2, 1, 0)
+
+ s2 := find(sum, SecondTest)
+ require.NotNil(t, s2)
+ assert.Equal(t, types.DigestSlice{"ggg"}, s2.UntHashes)
+ triageCountsCorrect(t, s2, 0, 1, 1)
+ // no gms for ThirdTest
}
func TestSummaryMap_AllGMsFullTile(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeFullTile(), nil, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
+ sum := computeHelper(t, makeFullTile(), nil, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
require.Len(t, sum, 2)
- triageCountsCorrect(t, sum, FirstTest, 2, 1, 2)
- triageCountsCorrect(t, sum, SecondTest, 0, 1, 1)
- assert.NotContains(t, sum, ThirdTest) // no gms for ThirdTest
- assert.Equal(t, types.DigestSlice{"ccc", "ddd"}, sum[FirstTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"ggg"}, sum[SecondTest].UntHashes)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Equal(t, types.DigestSlice{"ccc", "ddd"}, s1.UntHashes)
+ triageCountsCorrect(t, s1, 2, 1, 2)
+
+ s2 := find(sum, SecondTest)
+ require.NotNil(t, s2)
+ assert.Equal(t, types.DigestSlice{"ggg"}, s2.UntHashes)
+ triageCountsCorrect(t, s2, 0, 1, 1)
+ // no gms for ThirdTest
}
func TestSummaryMap_FirstTestFullTile(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeFullTile(), types.TestNameSet{FirstTest: true}, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
+ sum := computeHelper(t, makeFullTile(), types.TestNameSet{FirstTest: true}, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
require.Len(t, sum, 1)
- triageCountsCorrect(t, sum, FirstTest, 2, 1, 2)
- assert.Equal(t, types.DigestSlice{"ccc", "ddd"}, sum[FirstTest].UntHashes)
- assert.NotContains(t, sum, SecondTest)
- assert.NotContains(t, sum, ThirdTest)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Equal(t, types.DigestSlice{"ccc", "ddd"}, s1.UntHashes)
+ triageCountsCorrect(t, s1, 2, 1, 2)
+	// Only FirstTest results should be here, rest are known to be absent since slice length == 1.
}
func TestSummaryMap_FirstTestIgnores(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), types.TestNameSet{FirstTest: true}, nil, false)
+ sum := computeHelper(t, makeTileWithIgnores(), types.TestNameSet{FirstTest: true}, nil, false)
require.Len(t, sum, 1)
- triageCountsCorrect(t, sum, FirstTest, 2, 1, 0)
- // Again, the only untriaged hashes are removed from the ignore
- assert.Empty(t, sum[FirstTest].UntHashes)
- assert.NotContains(t, sum, SecondTest)
- assert.NotContains(t, sum, ThirdTest)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Empty(t, s1.UntHashes)
+ triageCountsCorrect(t, s1, 2, 1, 0)
+	// Only FirstTest results should be here, rest are known to be absent since slice length == 1.
}
func TestSummaryMap_8888Or565Ignores(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"8888", "565"}}, false)
+ sum := computeHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"8888", "565"}}, false)
require.Len(t, sum, 3)
- triageCountsCorrect(t, sum, FirstTest, 1, 1, 0)
- triageCountsCorrect(t, sum, SecondTest, 0, 1, 1)
- triageCountsCorrect(t, sum, ThirdTest, 0, 0, 1)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Empty(t, s1.UntHashes)
// Even though we queried for the 565, the untriaged ones won't show up because of ignores.
- assert.Empty(t, sum[FirstTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"ggg"}, sum[SecondTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"jjj"}, sum[ThirdTest].UntHashes)
+ triageCountsCorrect(t, s1, 1, 1, 0)
+
+ s2 := find(sum, SecondTest)
+ require.NotNil(t, s2)
+ assert.Equal(t, types.DigestSlice{"ggg"}, s2.UntHashes)
+ triageCountsCorrect(t, s2, 0, 1, 1)
+
+ s3 := find(sum, ThirdTest)
+ assert.NotNil(t, s3)
+ assert.Equal(t, types.DigestSlice{"jjj"}, s3.UntHashes)
+ triageCountsCorrect(t, s3, 0, 0, 1)
}
func TestSummaryMap_8888Or565IgnoresHead(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"8888", "565"}}, true)
+ sum := computeHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"8888", "565"}}, true)
require.Len(t, sum, 3)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Empty(t, s1.UntHashes)
// These numbers are are a bit lower because we are only looking at head.
// Those with missing digests should "pull forward" their last result (see ThirdTest)
- triageCountsCorrect(t, sum, FirstTest, 0, 1, 0)
- triageCountsCorrect(t, sum, SecondTest, 0, 0, 1)
- triageCountsCorrect(t, sum, ThirdTest, 0, 0, 1)
- assert.Empty(t, sum[FirstTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"ggg"}, sum[SecondTest].UntHashes)
- assert.Equal(t, types.DigestSlice{"jjj"}, sum[ThirdTest].UntHashes)
+ triageCountsCorrect(t, s1, 0, 1, 0)
+
+ s2 := find(sum, SecondTest)
+ require.NotNil(t, s2)
+ assert.Equal(t, types.DigestSlice{"ggg"}, s2.UntHashes)
+ triageCountsCorrect(t, s2, 0, 0, 1)
+
+ s3 := find(sum, ThirdTest)
+ assert.NotNil(t, s3)
+ assert.Equal(t, types.DigestSlice{"jjj"}, s3.UntHashes)
+ triageCountsCorrect(t, s3, 0, 0, 1)
}
func TestSummaryMap_GPUConfigIgnores(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"gpu"}}, false)
+ sum := computeHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"gpu"}}, false)
require.Len(t, sum, 1)
- // Only one digest should be found, and it is not triaged.
- triageCountsCorrect(t, sum, FirstTest, 1, 0, 0)
- require.Empty(t, sum[FirstTest].UntHashes)
+ s1 := find(sum, FirstTest)
+ require.NotNil(t, s1)
+ assert.Empty(t, s1.UntHashes)
+ // Only one digest should be found, and it is positive.
+ triageCountsCorrect(t, s1, 1, 0, 0)
+ // No other tests have gpu config
}
func TestSummaryMap_UnknownConfigIgnores(t *testing.T) {
unittest.SmallTest(t)
- sum := summaryMapHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"unknown"}}, false)
- require.Equal(t, 0, len(sum))
+ sum := computeHelper(t, makeTileWithIgnores(), nil, url.Values{"config": {"unknown"}}, false)
+ require.Empty(t, sum)
}
-func summaryMapHelper(t *testing.T, tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) SummaryMap {
+func computeHelper(t *testing.T, tile *tiling.Tile, testNames types.TestNameSet, query url.Values, head bool) []*TriageStatus {
mes := &mocks.ExpectationsStore{}
defer mes.AssertExpectations(t)
@@ -186,14 +216,14 @@
blamer, err := blame.New(makeFullTile(), makeExpectations())
require.NoError(t, err)
- smc := SummaryMapConfig{
+ smc := Utils{
ExpectationsStore: mes,
DiffStore: nil, // diameter is disabled, so this can be nil.
DigestCounter: dc,
Blamer: blamer,
}
- sum, err := NewSummaryMap(smc, tile, testNames, query, head)
+ sum, err := Calculate(smc, tile, testNames, query, head)
require.NoError(t, err)
return sum
}
@@ -204,8 +234,8 @@
unittest.SmallTest(t)
sum := bugRevertHelper(t, url.Values{types.CORPUS_FIELD: {"gm"}}, false)
- require.Equal(t, SummaryMap{
- bug_revert.TestOne: {
+ require.Equal(t, []*TriageStatus{
+ {
Name: bug_revert.TestOne,
Pos: 1,
Untriaged: 1,
@@ -219,7 +249,7 @@
},
},
},
- bug_revert.TestTwo: {
+ {
Name: bug_revert.TestTwo,
Pos: 2,
Untriaged: 2,
@@ -246,8 +276,8 @@
unittest.SmallTest(t)
sum := bugRevertHelper(t, url.Values{types.CORPUS_FIELD: {"gm"}}, true)
- require.Equal(t, SummaryMap{
- bug_revert.TestOne: {
+ require.Equal(t, []*TriageStatus{
+ {
Name: bug_revert.TestOne,
Pos: 1,
Untriaged: 0,
@@ -263,7 +293,7 @@
},
},
},
- bug_revert.TestTwo: {
+ {
Name: bug_revert.TestTwo,
Pos: 2,
Untriaged: 1,
@@ -288,10 +318,10 @@
unittest.SmallTest(t)
sum := bugRevertHelper(t, url.Values{types.CORPUS_FIELD: {"does-not-exist"}}, false)
- require.Equal(t, SummaryMap{}, sum)
+ require.Empty(t, sum)
}
-func bugRevertHelper(t *testing.T, query url.Values, head bool) SummaryMap {
+func bugRevertHelper(t *testing.T, query url.Values, head bool) []*TriageStatus {
mes := &mocks.ExpectationsStore{}
defer mes.AssertExpectations(t)
@@ -301,14 +331,14 @@
blamer, err := blame.New(bug_revert.MakeTestTile(), bug_revert.MakeTestExpectations())
require.NoError(t, err)
- smc := SummaryMapConfig{
+ smc := Utils{
ExpectationsStore: mes,
DiffStore: nil, // diameter is disabled, so this can be nil.
DigestCounter: dc,
Blamer: blamer,
}
- sum, err := NewSummaryMap(smc, bug_revert.MakeTestTile(), nil, query, head)
+ sum, err := Calculate(smc, bug_revert.MakeTestTile(), nil, query, head)
require.NoError(t, err)
return sum
}
@@ -316,7 +346,6 @@
// TestSummaryMap_OverlappingCorpora makes sure that if we have two corpora that share a test name,
// we handle things correctly.
func TestSummaryMap_OverlappingCorpora(t *testing.T) {
- t.Skip("currently broken because of how SummaryMap is designed")
unittest.SmallTest(t)
const corpusOneUntriaged = "1114c84eaa5dde4a247c93d9b93a136e"
@@ -365,25 +394,61 @@
blamer, err := blame.New(tile, &e)
require.NoError(t, err)
- smc := SummaryMapConfig{
+ smc := Utils{
ExpectationsStore: mes,
DiffStore: nil, // diameter is disabled, so this can be nil.
DigestCounter: dc,
Blamer: blamer,
}
- sum, err := NewSummaryMap(smc, tile, nil, nil, true)
+ sum, err := Calculate(smc, tile, nil, nil, true)
require.NoError(t, err)
assert.Len(t, sum, 2)
+ require.Equal(t, []*TriageStatus{
+ {
+ Name: bug_revert.TestOne,
+ Untriaged: 1,
+ UntHashes: types.DigestSlice{corpusOneUntriaged},
+ Num: 1,
+ Corpus: corpusOne,
+ Blame: []blame.WeightedBlame{
+ {
+ Author: bug_revert.InnocentAuthor,
+ Prob: 0.5,
+ },
+ {
+ Author: bug_revert.BuggyAuthor,
+ Prob: 0.5,
+ },
+ },
+ },
+ {
+ Name: bug_revert.TestOne,
+ Untriaged: 1,
+ UntHashes: types.DigestSlice{corpusTwoUntriaged},
+ Num: 1,
+ Corpus: corpusTwo,
+ Blame: []blame.WeightedBlame{
+ {
+ Author: bug_revert.InnocentAuthor,
+ Prob: 0.5,
+ },
+ {
+ Author: bug_revert.BuggyAuthor,
+ Prob: 0.5,
+ },
+ },
+ },
+ }, sum)
}
-// TestCombine ensures we can combine two summaries to make sure
-// the Blames and test names are properly combined.
-func TestCombine(t *testing.T) {
+// TestMergeSorted ensures we can combine two slices of TriageStatus to make sure
+// we merge them correctly.
+func TestMergeSorted(t *testing.T) {
unittest.SmallTest(t)
- first := SummaryMap{
- FirstTest: {
+ first := []*TriageStatus{
+ {
Name: FirstTest,
Diameter: 4,
Pos: 2,
@@ -399,7 +464,7 @@
},
},
},
- SecondTest: {
+ {
Name: SecondTest,
Diameter: 14,
Pos: 12,
@@ -421,8 +486,8 @@
},
}
- second := SummaryMap{
- FirstTest: {
+ second := []*TriageStatus{
+ {
Name: FirstTest,
Diameter: 24,
Pos: 22,
@@ -433,7 +498,27 @@
Corpus: "gm",
Blame: []blame.WeightedBlame{},
},
- ThirdTest: {
+ {
+ Name: SecondTest,
+ Diameter: 14,
+ Pos: 12,
+ Neg: 13,
+ Untriaged: 1,
+ UntHashes: types.DigestSlice{BetaDigest},
+ Num: 26,
+ Corpus: "zzz",
+ Blame: []blame.WeightedBlame{
+ {
+ Author: "other@example.com",
+ Prob: 0.5,
+ },
+ {
+ Author: "test@example.com",
+ Prob: 0.5,
+ },
+ },
+ },
+ {
Name: ThirdTest,
Diameter: 34,
Pos: 32,
@@ -451,28 +536,33 @@
},
}
- result := first.Combine(second)
-
- // Originals first and second should be unchanged
+ res := MergeSorted(first, second)
+ // first and second should remain unchanged
require.Len(t, first, 2)
- require.Len(t, second, 2)
- require.Len(t, result, 3)
+ assert.Equal(t, FirstTest, first[0].Name)
+ assert.Equal(t, SecondTest, first[1].Name)
+ require.Len(t, second, 3)
+ assert.Equal(t, FirstTest, second[0].Name)
+ assert.Equal(t, SecondTest, second[1].Name)
+ assert.Equal(t, ThirdTest, second[2].Name)
- require.Len(t, first[FirstTest].Blame, 1)
- require.Len(t, second[FirstTest].Blame, 0)
- require.Len(t, result[FirstTest].Blame, 0)
-
- require.Contains(t, result, FirstTest)
- require.Contains(t, result, SecondTest)
- require.Contains(t, result, ThirdTest)
+ require.Equal(t, []*TriageStatus{second[0], first[1], second[1], second[2]}, res)
}
-func triageCountsCorrect(t *testing.T, sum SummaryMap, name types.TestName, pos, neg, unt int) {
- s, ok := sum[name]
- require.True(t, ok, "Could not find %s in %#v", name, sum)
- assert.Equal(t, pos, s.Pos, "Postive count wrong")
- assert.Equal(t, neg, s.Neg, "Negative count wrong")
- assert.Equal(t, unt, s.Untriaged, "Untriaged count wrong")
+func find(sum []*TriageStatus, name types.TestName) *TriageStatus {
+ for _, dft := range sum {
+ if dft.Name == name {
+ return dft
+ }
+ }
+ return nil
+}
+
+func triageCountsCorrect(t *testing.T, ts *TriageStatus, pos, neg, unt int) {
+ require.NotNil(t, ts)
+ assert.Equal(t, pos, ts.Pos, "Positive count wrong")
+ assert.Equal(t, neg, ts.Neg, "Negative count wrong")
+ assert.Equal(t, unt, ts.Untriaged, "Untriaged count wrong")
}
const (
diff --git a/golden/go/warmer/mocks/DiffWarmer.go b/golden/go/warmer/mocks/DiffWarmer.go
index 3604019..335e9be 100644
--- a/golden/go/warmer/mocks/DiffWarmer.go
+++ b/golden/go/warmer/mocks/DiffWarmer.go
@@ -21,11 +21,11 @@
}
// PrecomputeDiffs provides a mock function with given fields: ctx, summaries, testNames, dCounter, diffFinder
-func (_m *DiffWarmer) PrecomputeDiffs(ctx context.Context, summaries summary.SummaryMap, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error {
+func (_m *DiffWarmer) PrecomputeDiffs(ctx context.Context, summaries []*summary.TriageStatus, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error {
ret := _m.Called(ctx, summaries, testNames, dCounter, diffFinder)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, summary.SummaryMap, types.TestNameSet, digest_counter.DigestCounter, digesttools.ClosestDiffFinder) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, []*summary.TriageStatus, types.TestNameSet, digest_counter.DigestCounter, digesttools.ClosestDiffFinder) error); ok {
r0 = rf(ctx, summaries, testNames, dCounter, diffFinder)
} else {
r0 = ret.Error(0)
diff --git a/golden/go/warmer/warmer.go b/golden/go/warmer/warmer.go
index aee0ab2..6590bb5 100644
--- a/golden/go/warmer/warmer.go
+++ b/golden/go/warmer/warmer.go
@@ -24,7 +24,7 @@
// has those diffs pre-drawn and can serve them quickly to the frontend.
// If testNames is not empty, only those the diffs for those names will be
// precomputed.
- PrecomputeDiffs(ctx context.Context, summaries summary.SummaryMap, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error
+ PrecomputeDiffs(ctx context.Context, summaries []*summary.TriageStatus, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error
}
type WarmerImpl struct {
@@ -36,7 +36,7 @@
}
// PrecomputeDiffs implements the DiffWarmer interface
-func (w *WarmerImpl) PrecomputeDiffs(ctx context.Context, summaries summary.SummaryMap, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error {
+func (w *WarmerImpl) PrecomputeDiffs(ctx context.Context, summaries []*summary.TriageStatus, testNames types.TestNameSet, dCounter digest_counter.DigestCounter, diffFinder digesttools.ClosestDiffFinder) error {
defer shared.NewMetricsTimer("warmer_loop").Stop()
err := diffFinder.Precompute(ctx)
if err != nil {
@@ -47,7 +47,8 @@
// context signals us to stop).
var firstErr error
errCount := 0
- for test, sum := range summaries {
+ for _, sum := range summaries {
+ test := sum.Name
if ctx.Err() != nil {
sklog.Warningf("PrecomputeDiffs stopped by context error: %s", ctx.Err())
break
diff --git a/golden/go/warmer/warmer_test.go b/golden/go/warmer/warmer_test.go
index 2b25831..c17642f 100644
--- a/golden/go/warmer/warmer_test.go
+++ b/golden/go/warmer/warmer_test.go
@@ -49,22 +49,8 @@
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Positive).Return(nil, nil).Once()
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Negative).Return(nil, nil).Once()
- sm := summary.SummaryMap{
- data.AlphaTest: &summary.Summary{
- Name: data.AlphaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.AlphaUntriaged1Digest},
- // warmer doesn't care about elided fields
- },
- data.BetaTest: &summary.Summary{
- Name: data.BetaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.BetaUntriaged1Digest},
- },
- }
-
w := New()
- require.NoError(t, w.PrecomputeDiffs(context.Background(), sm, nil, mdc, mdf))
+ require.NoError(t, w.PrecomputeDiffs(context.Background(), makeComputedSummaries(), nil, mdc, mdf))
}
// TestPrecomputeDiffsErrors tests to see if we keep going after some diffstore errors happen
@@ -99,22 +85,8 @@
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Positive).Return(nil, nil).Once()
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Negative).Return(nil, errors.New("sentient AI error")).Once()
- sm := summary.SummaryMap{
- data.AlphaTest: &summary.Summary{
- Name: data.AlphaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.AlphaUntriaged1Digest},
- // warmer doesn't care about elided fields
- },
- data.BetaTest: &summary.Summary{
- Name: data.BetaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.BetaUntriaged1Digest},
- },
- }
-
w := New()
- err := w.PrecomputeDiffs(context.Background(), sm, nil, mdc, mdf)
+ err := w.PrecomputeDiffs(context.Background(), makeComputedSummaries(), nil, mdc, mdf)
require.Error(t, err)
assert.Contains(t, err.Error(), "and 1 other error")
}
@@ -132,24 +104,10 @@
// No calls to ClosestDigest, since we have a cancelled context.
- sm := summary.SummaryMap{
- data.AlphaTest: &summary.Summary{
- Name: data.AlphaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.AlphaUntriaged1Digest},
- // warmer doesn't care about elided fields
- },
- data.BetaTest: &summary.Summary{
- Name: data.BetaTest,
- Untriaged: 1,
- UntHashes: types.DigestSlice{data.BetaUntriaged1Digest},
- },
- }
-
w := New()
ctx, cancel := context.WithCancel(context.Background())
cancel()
- err := w.PrecomputeDiffs(ctx, sm, nil, mdc, mdf)
+ err := w.PrecomputeDiffs(ctx, makeComputedSummaries(), nil, mdc, mdf)
require.Error(t, err)
assert.Equal(t, context.Canceled, err)
}
@@ -185,20 +143,22 @@
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Positive).Return(nil, nil).Once()
mdf.On("ClosestDigest", testutils.AnyContext, data.BetaTest, data.BetaUntriaged1Digest, expectations.Negative).Return(nil, nil).Once()
- sm := summary.SummaryMap{
- data.AlphaTest: &summary.Summary{
+ w := New()
+ require.NoError(t, w.PrecomputeDiffs(context.Background(), makeComputedSummaries(), types.TestNameSet{data.BetaTest: true}, mdc, mdf))
+}
+
+func makeComputedSummaries() []*summary.TriageStatus {
+ return []*summary.TriageStatus{
+ {
Name: data.AlphaTest,
Untriaged: 1,
UntHashes: types.DigestSlice{data.AlphaUntriaged1Digest},
// warmer doesn't care about elided fields
},
- data.BetaTest: &summary.Summary{
+ {
Name: data.BetaTest,
Untriaged: 1,
UntHashes: types.DigestSlice{data.BetaUntriaged1Digest},
},
}
-
- w := New()
- require.NoError(t, w.PrecomputeDiffs(context.Background(), sm, types.TestNameSet{data.BetaTest: true}, mdc, mdf))
}
diff --git a/golden/go/web/web.go b/golden/go/web/web.go
index 3158d26..99db2b9 100644
--- a/golden/go/web/web.go
+++ b/golden/go/web/web.go
@@ -217,7 +217,8 @@
// map [groupid] [test] TestRollup
rollups := map[string]map[types.TestName]TestRollup{}
- for test, s := range untriagedSummaries {
+ for _, s := range untriagedSummaries {
+ test := s.Name
for _, d := range s.UntHashes {
dist := idx.GetBlame(test, d, commits)
if dist.IsEmpty() {
@@ -969,7 +970,7 @@
idx := wh.Indexer.GetIndex()
corpus, hasSourceType := q.TraceValues[types.CORPUS_FIELD]
- sumSlice := []*summary.Summary{}
+ sumSlice := []*summary.TriageStatus{}
if !q.IncludeIgnores && q.Head && len(q.TraceValues) == 1 && hasSourceType {
sumMap := idx.GetSummaries(types.ExcludeIgnoredTraces)
for _, sum := range sumMap {
@@ -1000,13 +1001,13 @@
}
// includeSummary returns true if the given summary matches the query flags.
-func includeSummary(s *summary.Summary, q *query.Search) bool {
+func includeSummary(s *summary.TriageStatus, q *query.Search) bool {
return ((s.Pos > 0) && (q.Pos)) ||
((s.Neg > 0) && (q.Neg)) ||
((s.Untriaged > 0) && (q.Unt))
}
-type SummarySlice []*summary.Summary
+type SummarySlice []*summary.TriageStatus
func (p SummarySlice) Len() int { return len(p) }
func (p SummarySlice) Less(i, j int) bool { return p[i].Untriaged > p[j].Untriaged }