<!--
The res/js/status.js file must be included before this file.
This is an HTML Import-able file that contains the definition
of the following elements:
<commits-data-sk>
This element performs an ajax request to the status backend, parses the json response and
returns bindable properties to be used to render the various components of the status page.
This element takes some filter inputs (i.e. filter and search) and if
either changes, the data will be re-filtered to reflect that.
To use this file import it:
<link href="/res/imp/commits-data-sk.html" rel="import" />
Usage:
<commits-data-sk></commits-data-sk>
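An illustrative, fuller binding (the host element and its bound
variables, e.g. autorollCurrent, are hypothetical; the attribute names
match the properties documented below):
<commits-data-sk
repo="skia"
commits_to_load="35"
reload="60"
filter="[[filter]]"
search="[[search]]"
autoroll_current="[[autorollCurrent]]"
autoroll_last="[[autorollLast]]"
commits="{{commits}}"
build_details="{{buildDetails}}"
builders="{{builders}}"
loading="{{loading}}">
</commits-data-sk>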
Properties:
// inputs
autoroll_current: String, the commit hash of the current autoroll.
autoroll_last: String, the commit hash of the previous autoroll.
commits_to_load: Number, the number of commits to load from the backend.
filter: String, the builder filter to be used.
reload: Number, how often (in seconds) to reload the data.
search: String, the string to be used if filter is "search".
// outputs
branch_heads: Array<Object>, an array of hashes and branch names of the commits.
build_details: Object, a map of commit hash to an object that has the build results by builder.
builders: Object, a map of the builder names to an object that has, among other things, category, subcategory, comments and master.
builds: Object, a map of the builder names to an object that maps build numbers to build results.
categories: Object, a map of the builder categories to an object that has the subcategories and the colspan (total number of included builders).
category_list: Array<String>, an array of the builder category names.
commits: Array<Object>, the commit objects, ordered from most recent to oldest.
commits_map: Object, a map of commit hash to commit objects.
loading: Boolean, if the data is being fetched from the server or parsed.
num_builders: Number, the number of builders with data, after filtering.
purple_builds: Array<Object>, the builds that ended in an exception, as {name, commit} objects.
relanded_map: Object, a map of an issue number that was relanded to the commit that relands it.
reverted_map: Object, a map of an issue number that was reverted to the commit that reverts it.
task_scheduler_url: String, the task scheduler URL reported by the backend.
time_points: Object, a map of commit hash to the time point marker (e.g. "-1h") to draw at that commit.
Methods:
None.
Events:
None.
-->
<link rel="import" href="/res/imp/bower_components/iron-ajax/iron-ajax.html">
<link rel="import" href="/res/common/imp/timer-sk.html">
<dom-module id="commits-data-sk">
<template>
<iron-ajax id="data"
loading="{{_activeAJAX}}"
auto url="{{_computeURL(repo)}}"
handle-as="json"
params="[[_params]]"
timeout="60000"
last-response="{{_data}}">
</iron-ajax>
<timer-sk period="[[reload]]" on-trigger="_reload"></timer-sk>
</template>
<script>
(function() {
var COLOR_BUILD_PENDING = "rgba(230, 171, 2, 0.0)";
var COLOR_BUILD_SUCCESS = "rgba(102, 166, 30, 0.3)";
var COLOR_BUILD_FAILED = "#D95F02";
var COLOR_BUILD_EXCEPTION = "#7570B3";
var VALID_BUILDER_CATEGORIES = ["Build", "Housekeeper", "Infra", "Perf", "Test", "Upload"];
var BUILDBOT_RESULT_SUCCESS = 0;
var BUILDBOT_RESULT_WARNINGS = 1;
var BUILDBOT_RESULT_FAILURE = 2;
var BUILDBOT_RESULT_SKIPPED = 3;
var BUILDBOT_RESULT_EXCEPTION = 4;
var BUILDBOT_RESULT_CANCELLED = 5;
var FILTER_ALL = "all";
var FILTER_INTERESTING = "interesting";
var FILTER_FAILURES = "failures";
var FILTER_FAIL_NO_COMMENT = "nocomment";
var FILTER_COMMENTS = "comments";
var FILTER_SEARCH = "search";
var TIME_POINTS = [
{
label:"-1h",
offset: 60 * 60 * 1000,
},
{
label:"-3h",
offset: 3 * 60 * 60 * 1000,
},
{
label:"-1d",
offset: 24 * 60 * 60 * 1000,
},
];
// shortCommit returns the first 7 characters of a commit hash.
function shortCommit(commit) {
return commit.substring(0, 7);
}
// shortAuthor shortens the commit author field by returning the
// parenthesized email address if it exists. If it does not exist, the
// entire author field is used.
function shortAuthor(author) {
var re = /.*\((.+)\)/;
var match = re.exec(author);
var res = author;
if (match) {
res = match[1];
}
return res.split("@")[0];
}
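// For example (illustrative input): shortAuthor("Jane Doe (jdoe@google.com)")
// extracts the parenthesized email and returns "jdoe"; a bare
// "jdoe@google.com" also becomes "jdoe".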
// shortSubject truncates a commit subject line to 72 characters if needed.
// If the text was shortened, the last three characters are replaced by
// ellipsis.
function shortSubject(subject) {
return sk.truncate(subject, 72);
}
// findIssueAndReviewTool returns an object literal of the form
// {issue, patchStorage}. patchStorage will be either Gerrit or Rietveld and
// issue will be the CL number.
// If an issue cannot be determined then an empty string is returned for
// both issue and patchStorage.
function findIssueAndReviewTool(commit) {
// See if it is a Gerrit CL.
var gerritRE = /(.|[\r\n])*Reviewed-on: https:\/\/skia-review.googlesource.com\/([0-9]*)/g;
var gerritTokens = gerritRE.exec(commit.body);
if (gerritTokens) {
return {
issue: gerritTokens[gerritTokens.length - 1],
patchStorage: 'gerrit'
};
}
// See if it is a Rietveld CL.
var rietveldRE = /(.|[\r\n])*Review-Url: https:\/\/codereview.chromium.org\/([0-9]*)/g;
var rietveldTokens = rietveldRE.exec(commit.body);
if (rietveldTokens) {
return {
issue: rietveldTokens[rietveldTokens.length - 1],
patchStorage: 'rietveld'
};
}
// Could not find a CL number; return empty strings.
return {
issue: '',
patchStorage: ''
};
}
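// For example (issue numbers made up for illustration), a commit body
// containing the line
// Reviewed-on: https://skia-review.googlesource.com/12345
// yields {issue: "12345", patchStorage: "gerrit"}, while one containing
// Review-Url: https://codereview.chromium.org/987654
// yields {issue: "987654", patchStorage: "rietveld"}.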
function isGerritIssue(commit) {
return commit.patchStorage === 'gerrit';
}
function isRietveldIssue(commit) {
return commit.patchStorage === 'rietveld';
}
// Return whether this commit reverted another commit.
function isRevert(commit) {
if (isGerritIssue(commit)) {
// The subject of the CL needs to start with 'Revert "' for Gerrit
// issues.
return /^Revert "/g.test(commit.subject);
} else if (isRietveldIssue(commit)) {
// The subject of the CL needs to start with 'Revert ' or 'Reland ' for
// Rietveld issues.
return (/^Revert /g.test(commit.subject) || /^Reland /g.test(commit.subject));
}
return false;
}
// Return whether this commit relanded another commit.
function isReland(commit) {
if (isGerritIssue(commit)) {
// For Gerrit issues, a reland is a revert of a revert, so its subject
// contains nested Revert blocks, e.g. 'Revert "Revert "...'.
var numRevertBlocks = 0;
var tmpSubject = commit.subject;
var m;
while ((m = /^Revert "(.*)/g.exec(tmpSubject)) != null) {
tmpSubject = m[1]; // Move on to the next block.
numRevertBlocks++;
}
if (numRevertBlocks == 0) {
return false;
} else if (numRevertBlocks >= 2) {
// There are two or more nested Revert blocks. This means that it is
// a reland.
return true;
}
} else if (isRietveldIssue(commit)) {
// For Rietveld issues the subject of the CL needs to start with
// 'Reland '.
return /^Reland /g.test(commit.subject);
}
return false;
}
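// For example (made-up subjects): isRevert returns true for
// 'Revert "Add foo to bar"', and isReland returns true for
// 'Revert "Revert "Add foo to bar""' because that subject contains two
// nested Revert blocks.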
// Find and return the issue number of the reverted commit that is the specified number of
// levels deep.
function findRevertedIssue(commit, levelsDeep) {
var levelsDeepStr = "";
var patt;
if (isGerritIssue(commit)) {
for (var i = 0; i < levelsDeep; i++) {
levelsDeepStr += "\\> ";
}
patt = new RegExp("(.|[\\r\\n])*" + levelsDeepStr + "Reviewed-on\\: https://skia-review\\.googlesource\\.com/([0-9]*)", "g");
var tokens = patt.exec(commit.body);
if (tokens) {
return tokens[tokens.length - 1];
}
} else if (isRietveldIssue(commit)) {
var contents = commit.subject;
for (var i = 1; i < levelsDeep; i++) {
// Search in the body of a commit if the level is deeper than 1.
contents = commit.body;
levelsDeepStr += "\\> ";
}
patt = new RegExp("(.|[\\r\\n])*" + levelsDeepStr + "(Revert of |Reland of ).*https://codereview\\.chromium\\.org/([0-9]*)/ \\)");
var tokens = patt.exec(contents);
if (tokens) {
return tokens[tokens.length - 1];
}
}
return '';
}
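// levelsDeep corresponds to the number of "> " quote prefixes expected
// before the review URL in the commit message. For example (issue number
// made up for illustration), a Gerrit reland that quotes the reverted CL's
// description contains a line like
// > > Reviewed-on: https://skia-review.googlesource.com/12345
// so findRevertedIssue(commit, 2) returns "12345".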
function getBuildColor(build) {
if (!build.finished) {
return COLOR_BUILD_PENDING;
} else if (build.results == BUILDBOT_RESULT_SUCCESS ||
build.results == BUILDBOT_RESULT_WARNINGS ||
build.results == BUILDBOT_RESULT_SKIPPED) {
return COLOR_BUILD_SUCCESS;
} else if (build.results == BUILDBOT_RESULT_FAILURE) {
return COLOR_BUILD_FAILED;
} else if (build.results == BUILDBOT_RESULT_EXCEPTION ||
build.results == BUILDBOT_RESULT_CANCELLED) {
return COLOR_BUILD_EXCEPTION;
}
console.log("Unknown color for build "+build);
return COLOR_BUILD_PENDING;
}
function maybeSetBuildUrls(build) {
for (var i = 0; i < build.properties.length; i++) {
if (build.properties[i][0] == "taskURL") {
build.url = build.properties[i][1];
}
if (build.properties[i][0] == "taskRetryURL") {
build.retryUrl = build.properties[i][1];
}
if (build.properties[i][0] == "taskSpecTasklistURL") {
build.dashboardBuilderUrl = build.properties[i][1];
}
if (build.properties[i][0] == "botTasklistURL") {
build.dashboardSlaveUrl = build.properties[i][1];
}
if (build.properties[i][0] == "botDetailURL") {
build.slaveHostUrl = build.properties[i][1];
}
}
}
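// For example, a build whose properties array contains an entry whose
// first element is "taskURL" gets build.url set to that entry's second
// element; the other recognized property names populate the remaining
// URL fields in the same way.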
Polymer({
is: "commits-data-sk",
properties: {
// input only
autoroll_current:{
type: String,
value: "",
},
autoroll_last: {
type: String,
value: "",
},
commits_to_load: {
type: Number,
value: 35,
},
filter: {
type: String,
observer:"_filterBuilders",
},
reload: {
type: Number,
value: 60,
},
repo: {
type: String,
},
search: {
type: String,
observer:"_searchChanged",
},
// output only
branch_heads: {
type: Array,
value: function() {
return [];
},
notify:true,
},
build_details: {
type: Object,
value: function() {
return {};
},
notify:true,
},
builds: {
type: Object,
value: function() {
return {};
},
notify:true,
},
builders: {
type: Object,
value: function() {
return {};
},
notify:true,
},
categories: {
type: Object,
value: function() {
return {};
},
notify:true,
},
category_list: {
type: Array,
value: function() {
return [];
},
notify:true,
},
commits: {
type: Array,
notify:true,
},
commits_map: {
type: Object,
notify:true,
},
loading: {
type: Boolean,
notify: true,
computed: "_or(_activeAJAX, _filtering)",
},
num_builders: {
type: Number,
notify:true,
},
purple_builds: {
type: Array,
notify: true,
},
relanded_map: {
type: Object,
value: function() {
return {};
},
notify:true,
},
reverted_map: {
type: Object,
value: function() {
return {};
},
notify:true,
},
task_scheduler_url: {
type: String,
notify: true,
},
time_points: {
type: Object,
value: function() {
return {};
},
notify:true,
},
//private
_activeAJAX: {
type: Boolean,
value: false,
},
_filtering: {
type: Boolean,
value: false,
},
_params: {
type: Object,
computed: "_getUrlParams(commits_to_load)",
},
_data: {
type: Object,
value: function() {
return {};
},
observer:"",
},
},
observers: [
"_parseData(_data, autoroll_current, autoroll_last)",
],
_computeURL: function(repo) {
return "/json/"+repo+"/commits/";
},
_getUrlParams: function(commits_to_load) {
return {"n":commits_to_load};
},
_or: function(a,b) {
return a || b;
},
_makeCommitsMap: function(arr) {
if (!arr || arr.length == 0) {
this.set("commits_map", {});
return;
}
var m = {};
arr.forEach(function(c){
m[c.hash] = c;
});
this.set("commits_map", m);
},
_reload: function() {
this.$.data.generateRequest();
},
_parseData: function(data, autoroll_current, autoroll_last) {
if (!data || !data.commits) {
console.log("Nothing to parse");
return;
}
this.set("_filtering", true);
console.time("processData");
for (var i = 0; i < data.commits.length; i++) {
data.commits[i].comments = data.comments[data.commits[i].hash] || [];
}
this.lastLoaded = new Date().toLocaleTimeString();
this.task_scheduler_url = data.task_scheduler_url;
var commits = data.commits;
// Prepare build data.
console.log("Preparing build data.");
var builds = {};
var builders = {};
var build_details = this.build_details;
for (var k in data.builds) {
build_details[k] = data.builds[k];
}
for (var i = 0; i < commits.length; i++) {
var commit = commits[i];
commit.shortAuthor = shortAuthor(commit.author);
commit.shortHash = shortCommit(commit.hash);
commit.shortSubject = shortSubject(commit.subject);
var c = findIssueAndReviewTool(commit);
commit.issue = c.issue;
commit.patchStorage = c.patchStorage;
commit.isRevert = isRevert(commit);
if (commit.isRevert && findRevertedIssue(commit, 1)) {
this.reverted_map[findRevertedIssue(commit, 1)] = commit;
}
commit.isReland = isReland(commit);
if (commit.isReland && findRevertedIssue(commit, 2)) {
this.relanded_map[findRevertedIssue(commit, 2)] = commit;
}
commit.ignoreFailure = commit.comments &&
commit.comments.length > 0 &&
commit.comments[commit.comments.length-1].ignoreFailure;
commit.displayClass = {};
for (var builder in build_details[commit.hash]) {
var build = build_details[commit.hash][builder];
build.color = getBuildColor(build);
maybeSetBuildUrls(build);
if (!builds[builder]) {
// This is the first time we've seen this builder.
builds[builder] = {};
var builderDetails = {
"comments": data.builders[builder] || [],
"builder": builder,
"master": build.master,
// We're traveling backward in time, so the first build we
// find for a given builder is its most recent.
"color": build.color,
"flaky": false,
"ignoreFailure": false,
};
if (build.dashboardBuilderUrl) {
builderDetails.url = build.dashboardBuilderUrl;
}
var split = builder.split("-");
if (split.length >= 2 && VALID_BUILDER_CATEGORIES.indexOf(split[0]) != -1) {
builderDetails.category = split[0];
builderDetails.subcategory = split[1];
}
if (builderDetails.comments.length > 0) {
builderDetails.flaky = !!builderDetails.comments[builderDetails.comments.length-1].flaky;
builderDetails.ignoreFailure = !!builderDetails.comments[builderDetails.comments.length-1].ignoreFailure;
}
builders[builder] = builderDetails;
}
builds[builder][build.number] = build;
// Figure out the display class to use.
var classes = [CLASS_BUILD_SINGLE];
if (i > 0) {
// We are drawing from most recent on back in time. prevCommit is really the "next"
// commit in a temporal timeline. But, it was the previously drawn commit, so the
// name sticks.
var prevCommit = commits[i-1];
var prevDetails = build_details[prevCommit.hash] || {};
if (prevCommit.parent.indexOf(commit.hash) === -1) {
// We skipped one or more commits. This is likely due to a branch. We need to find the last drawn commit whose parent is this one.
prevCommit = undefined;
for (var j = i-1; j>= 0; j--) {
if (commits[j].parent.indexOf(commit.hash) !== -1) {
prevCommit = commits[j];
break;
}
}
if (prevCommit) {
// If the previously drawn commit does not exist, it basically means we are the
// head of the branch. If it does exist, we change it to have a dashed bottom
// and for this commit to have a dashed top.
prevDetails = build_details[prevCommit.hash] || {};
var prevBuild = prevDetails[builder];
// Only continue drawing if it's actually the same build
if (prevBuild && prevBuild.number == build.number) {
classes = [CLASS_BUILD_BOTTOM, CLASS_DASHED_TOP];
if (prevCommit.displayClass[builder].indexOf(CLASS_BUILD_SINGLE) >= 0) {
prevCommit.displayClass[builder] = [CLASS_BUILD_TOP, CLASS_DASHED_BOTTOM];
} else {
prevCommit.displayClass[builder] = [CLASS_BUILD_MIDDLE, CLASS_DASHED_BOTTOM];
}
}
}
} else if (prevDetails) {
var prevBuild = prevDetails[builder];
// Only continue drawing if it's actually the same build
if (prevBuild && prevBuild.number == build.number) {
classes = [CLASS_BUILD_BOTTOM];
var prevClasses = prevCommit.displayClass[builder];
if (prevClasses.indexOf(CLASS_BUILD_SINGLE) >= 0) {
prevCommit.displayClass[builder] = [CLASS_BUILD_TOP];
} else if (prevClasses.indexOf(CLASS_BUILD_BOTTOM) >= 0) {
var j = prevClasses.indexOf(CLASS_BUILD_BOTTOM);
prevClasses[j] = CLASS_BUILD_MIDDLE;
prevCommit.displayClass[builder] = prevClasses;
}
}
}
}
commit.displayClass[builder] = classes;
}
}
this._makeCommitsMap(commits);
this.builds = builds;
this.set("build_details", build_details);
this.set("builders", builders);
this._filterBuilders();
// Add autoroll tags as branch heads.
var filteredBranchHeads = [];
for (var i = 0; i < data.branch_heads.length; i++) {
var b = data.branch_heads[i];
filteredBranchHeads.push(b);
}
if (autoroll_last) {
filteredBranchHeads.push({
name: "last DEPS roll",
head: autoroll_last,
});
}
if (autoroll_current) {
filteredBranchHeads.push({
name: "current DEPS roll",
head: autoroll_current,
});
}
this.set("branch_heads", filteredBranchHeads);
var timeIdx = 0;
var now = new Date();
var time_points = {};
// Skip any time point cutoffs that are more recent than the newest commit.
while ((timeIdx < TIME_POINTS.length) && (now - TIME_POINTS[timeIdx].offset) > new Date(commits[0].timestamp)) {
timeIdx++;
}
// Going backwards in time, we place a marker at the current commit if it
// happened after the cutoff and the following (older) commit happened before
// it. Once we find a cutoff, start looking for the next time point.
var commitIdx = 0;
while (commitIdx < (commits.length - 1) && timeIdx < TIME_POINTS.length) {
var c = commits[commitIdx];
var curr = new Date(c.timestamp);
var next = new Date(commits[commitIdx+1].timestamp);
if ((now - TIME_POINTS[timeIdx].offset) <= curr && (now - TIME_POINTS[timeIdx].offset) > next) {
time_points[c.hash] = TIME_POINTS[timeIdx];
timeIdx++;
// We don't increment commitIdx because we want to double check the current cutoff.
// Example: commit A happened 59 minutes ago and commit B happened 1.3 days ago.
// The time point between them should be the -1d one, not the -1h one. Since time_points
// is based off of commit, we can recheck and replace the shorter cutoffs if necessary.
} else {
commitIdx++;
}
}
// Check for the last commit as well, except we don't compare it to the following commit.
var last = commits[commits.length - 1];
if ((timeIdx < TIME_POINTS.length) && (now - TIME_POINTS[timeIdx].offset) <= new Date(last.timestamp)) {
time_points[last.hash] = TIME_POINTS[timeIdx];
}
console.log("Prepared build data.");
console.timeEnd("processData");
this.set("_filtering", false);
// Actually draw the commits.
this.set("commits", commits);
this.set("time_points", time_points);
console.timeEnd("totalLoadTime");
},
// Filter the builders according to the currently selected filter (this.filter).
_filterBuilders: function() {
console.time("filterBuilders");
var filteredBuilders = [];
var selected = this.filter;
if (selected == FILTER_ALL) {
for (var builder in this.builders) {
filteredBuilders.push(builder);
}
} else if (selected == FILTER_INTERESTING || selected == FILTER_FAILURES || selected == FILTER_FAIL_NO_COMMENT) {
for (var builder in this.builders) {
var failed = false;
var succeeded = false;
for (var buildNum in this.builds[builder]) {
var build = this.builds[builder][buildNum];
if (!build.finished) {
continue;
}
// If interesting or "failing w/o comment" is selected, compute ignoreFailure
// and skip this build if it belongs entirely to commits that have been ignored.
if (selected === FILTER_INTERESTING || selected === FILTER_FAIL_NO_COMMENT) {
var commits = build.commits || [];
var isIgnored = commits.length > 0;
commits.forEach(function(c){
var o = this.commits_map[c];
isIgnored = isIgnored && o && o.ignoreFailure;
}.bind(this));
if (isIgnored) {
continue;
}
}
if (build.results == BUILDBOT_RESULT_SUCCESS) {
succeeded = true;
} else {
failed = true;
}
if (selected == FILTER_INTERESTING) {
if (succeeded && failed && !this.builders[builder].ignoreFailure) {
filteredBuilders.push(builder);
break;
}
} else if (selected == FILTER_FAILURES) {
if (failed) {
filteredBuilders.push(builder);
break;
}
} else if (selected == FILTER_FAIL_NO_COMMENT) {
if (build.results != BUILDBOT_RESULT_SUCCESS && this.builders[builder].comments.length == 0) {
if (!build.comments || build.comments.length == 0) {
filteredBuilders.push(builder);
break;
}
}
}
}
}
} else if (selected == FILTER_COMMENTS) {
for (var builder in this.builders) {
if (this.builders[builder].comments.length > 0) {
filteredBuilders.push(builder);
continue;
}
for (var buildNum in this.builds[builder]) {
var build = this.builds[builder][buildNum];
if (!build.finished) {
continue;
}
if (build.comments && build.comments.length > 0) {
filteredBuilders.push(builder);
break;
}
}
}
} else if (selected == FILTER_SEARCH) {
var matchText = this.search || "";
for (var builder in this.builders) {
if (builder.toLowerCase().match(matchText.toLowerCase())) {
filteredBuilders.push(builder);
}
}
} else {
console.error("Invalid builder filter selection: " + selected);
}
sk.sortStrings(filteredBuilders);
this.num_builders = filteredBuilders.length;
var categories = {};
var categoryList = [];
var purpleBuilds = [];
for (var i = 0; i < filteredBuilders.length; i++) {
var builderName = filteredBuilders[i];
var category = this.builders[builderName].category;
if (!category) {
category = "Other";
}
if (!categories[category]) {
categories[category] = {
colspan: 0,
subcategoryList: [],
subcategories: {},
};
categoryList.push(category);
}
var subcategory = this.builders[builderName].subcategory;
if (!subcategory) {
subcategory = "Other";
}
if (!categories[category].subcategories[subcategory]) {
categories[category].subcategories[subcategory] = {
builders: [],
};
categories[category].subcategoryList.push(subcategory);
}
categories[category].subcategories[subcategory].builders.push(builderName);
categories[category].colspan++;
// Find any purple builds for this builder.
for (var buildNum in this.builds[builderName]) {
var build = this.builds[builderName][buildNum];
if (build.results === BUILDBOT_RESULT_EXCEPTION) {
// TODO(borenet): This is a bit of a hack.
// Jobs are named after build, test, or perf tasks, but not
// uploads. If this is an upload, trim the prefix.
var jobName = builderName;
if (jobName.startsWith("Upload-")) {
jobName = jobName.substring("Upload-".length);
}
purpleBuilds.push({"name": jobName, "commit": build.got_revision});
}
}
}
this.set("categories", categories);
this.set("category_list", categoryList);
this.set("purple_builds", purpleBuilds);
console.timeEnd("filterBuilders");
},
_searchChanged: function() {
// This callback fires every time the user presses a key inside the
// input box. Updating the display can be really slow when there are
// a lot of builders, so we should wait until the user is done typing
// before re-filtering.
if (this.builderSearchChangedTimeout) {
window.clearTimeout(this.builderSearchChangedTimeout);
}
this.builderSearchChangedTimeout = window.setTimeout(function() {
this.builderSearchChangedTimeout = null;
// If "search" is already selected, filter the builders.
if (this.filter == "search") {
this._filterBuilders();
}
}.bind(this), 400);
}
});
})();
</script>
</dom-module>