blob: d72e5bf5ab371dcc9dc78505e1bc55d73b27be99 [file] [log] [blame]
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit checks for the buildbot code."""
import subprocess
def _MakeFileFilter(input_api, include_extensions=None,
"""Return a filter to pass to AffectedSourceFiles.
The filter will include all files with a file extension in include_extensions,
and will ignore all files with a file extension in exclude_extensions.
If include_extensions is empty, all files, even those without any extension,
are included.
white_list = ['.+')]
if include_extensions:
white_list = ['.+\.%s$' % ext)
for ext in include_extensions]
# If black_list is empty, the InputApi default is used, so always include at
# least one regexp.
black_list = ['^$')]
if exclude_extensions:
black_list = ['.+\.%s$' % ext)
for ext in exclude_extensions]
return lambda x: input_api.FilterSourceFile(x, white_list=white_list,
def _CheckNonAscii(input_api, output_api):
  """Check for non-ASCII characters and throw warnings if any are found.

  Returns:
    A list containing a single PresubmitPromptWarning listing every offending
    file and line, or an empty list if all affected files are pure ASCII.
  """
  results = []
  files_with_unicode_lines = []
  # We keep track of the longest file (in line count) so that we can pad the
  # numbers when displaying output. This makes it easier to see the indention.
  max_lines_in_any_file = 0
  FILE_EXTENSIONS = ['bat', 'cfg', 'cmd', 'conf', 'css', 'gyp', 'gypi', 'htm',
                     'html', 'js', 'json', 'ps1', 'py', 'sh', 'tac', 'yaml']
  file_filter = _MakeFileFilter(input_api, FILE_EXTENSIONS)
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    unicode_lines = []
    # Binary mode so that raw bytes are checked, independent of any locale.
    with open(affected_filepath, 'rb') as f:
      total_lines = 0
      for line in f:
        total_lines += 1
        try:
          # Any byte outside the ASCII range makes this raise.
          line.decode('ascii')
        except UnicodeDecodeError:
          unicode_lines.append((total_lines, line.rstrip()))
    if unicode_lines:
      files_with_unicode_lines.append((affected_filepath, unicode_lines))
      # Only files that will be reported need to influence the padding width.
      if total_lines > max_lines_in_any_file:
        max_lines_in_any_file = total_lines

  if files_with_unicode_lines:
    padding = len(str(max_lines_in_any_file))
    long_text = 'The following files contain non-ASCII characters:\n'
    for filename, unicode_lines in files_with_unicode_lines:
      long_text += '  %s\n' % filename
      for line_num, line in unicode_lines:
        # Right-justify the line numbers so the offending text lines up.
        long_text += '    %s: %s\n' % (str(line_num).rjust(padding), line)
      long_text += '\n'
    results.append(output_api.PresubmitPromptWarning(
        message='Some files contain non-ASCII characters.',
        long_text=long_text))

  return results
def _CheckBannedGoAPIs(input_api, output_api):
  """Check go source code for functions and packages that should not be used.

  Returns:
    A list containing a single PresubmitPromptWarning covering all matches, or
    an empty list if no banned API is used in the changed Go lines.
  """
  # TODO(benjaminwagner): A textual search is easy, but it would be more
  # accurate to parse and analyze the source due to package aliases.

  # A list of tuples of a regex to match an API and a suggested replacement for
  # that API.
  # NOTE(review): several replacement strings look truncated in this copy (the
  # package path after 'in' is missing) -- restore them from history.
  banned_replacements = [
      (r'\breflect\.DeepEqual\b', 'DeepEqual in'),
      (r'\bgithub\.com/golang/glog\b', ''),
      (r'\bgithub\.com/skia-dev/glog\b', ''),
      (r'\bhttp\.Get\b', 'NewTimeoutClient in'),
      (r'\bhttp\.Head\b', 'NewTimeoutClient in'),
      (r'\bhttp\.Post\b', 'NewTimeoutClient in'),
      # NOTE(review): the regex for this entry was lost; http.PostForm fits the
      # surrounding net/http helpers -- confirm against history.
      (r'\bhttp\.PostForm\b', 'NewTimeoutClient in'),
      (r'\bos\.Interrupt\b', 'AtExit in'),
      (r'\bsignal\.Notify\b', 'AtExit in'),
      (r'\bsyscall.SIGINT\b', 'AtExit in'),
      (r'\bsyscall.SIGTERM\b', 'AtExit in'),
  ]

  # Compile each regex once, outside the per-file/per-line loops below.
  compiled_replacements = []
  for (regexp, replacement) in banned_replacements:
    compiled_re = input_api.re.compile(regexp)
    compiled_replacements.append((compiled_re, replacement))

  errors = []
  file_filter = _MakeFileFilter(input_api, ['go'])
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for (compiled_re, replacement) in compiled_replacements:
        match = compiled_re.search(line)
        if match:
          errors.append('%s:%s: Instead of %s, please use %s.' % (
              affected_filepath, line_num, match.group(), replacement))

  if errors:
    return [output_api.PresubmitPromptWarning('\n'.join(errors))]
  return []
def CheckChange(input_api, output_api):
  """Presubmit checks for the change on upload or commit.

  The presubmit checks have been handpicked from the list of canned checks
  provided by depot_tools.

  The following are the presubmit checks:
  * Pylint is run if the change contains any .py files.
  * Enforces max length for all lines is 100.
  * Checks that the user didn't add TODO(name) without an owner.
  * Checks that there is no stray whitespace at source lines end.
  * Checks that there are no tab characters in any of the text files.
  """
  results = []

  # NOTE(review): the contents of this blacklist were lost in this copy;
  # presumably path regexps excluded from Pylint -- restore from history.
  pylint_blacklist = [
  ]
  pylint_disabled_warnings = (
      'F0401',  # Unable to import.
      'E0611',  # No name in module.
      'W0232',  # Class has no __init__ method.
      'E1002',  # Use of super on an old style class.
      'W0403',  # Relative import used.
      'R0201',  # Method could be a function.
      'E1003',  # Using class name in super.
      'W0613',  # Unused argument.
  )
  results += input_api.canned_checks.RunPylint(
      input_api, output_api,
      disabled_warnings=pylint_disabled_warnings,
      black_list=pylint_blacklist)

  # Use 100 for max length for files other than python. Python length is
  # already checked during the Pylint above. No max length for Go files.
  IGNORE_LINE_LENGTH = ['go', 'html', 'py']
  file_filter = _MakeFileFilter(input_api,
                                exclude_extensions=IGNORE_LINE_LENGTH)
  results += input_api.canned_checks.CheckLongLines(input_api, output_api, 100,
      source_file_filter=file_filter)

  file_filter = _MakeFileFilter(input_api)
  results += input_api.canned_checks.CheckChangeTodoHasOwner(
      input_api, output_api, source_file_filter=file_filter)
  results += input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
      input_api, output_api, source_file_filter=file_filter)

  # CheckChangeHasNoTabs automatically ignores makefiles.
  IGNORE_TABS = ['go']
  file_filter = _MakeFileFilter(input_api, exclude_extensions=IGNORE_TABS)
  # Pass the filter built just above; without it the Go exclusion is a no-op.
  results += input_api.canned_checks.CheckChangeHasNoTabs(
      input_api, output_api, source_file_filter=file_filter)

  results += _CheckBannedGoAPIs(input_api, output_api)

  return results
def CheckChangeOnUpload(input_api, output_api):
  """Run the shared presubmit checks plus the non-ASCII warning on upload."""
  # Non-ASCII characters may be intentional, so they produce warnings on
  # upload only, not on commit.
  return (CheckChange(input_api, output_api) +
          _CheckNonAscii(input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Run the shared presubmit checks on commit."""
  return CheckChange(input_api, output_api)