+ ) | set('%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)
+ ) | set('%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)
+ ) | set('%s@chops-service-accounts.iam.gserviceaccount.com' % s
+ for s in ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
+ 'wpt-autoroller', 'chrome-weblayer-builder',
+ 'lacros-version-skew-roller', 'skylab-test-cros-roller',
+ 'infra-try-recipes-tester', 'lacros-tracking-roller',
+ 'lacros-sdk-version-roller', 'chrome-automated-expectation',
+ 'chromium-automated-expectation', 'chrome-branch-day',
+ 'chromium-autosharder')
+ ) | set('%s@skia-public.iam.gserviceaccount.com' % s
+ for s in ('chromium-autoroll', 'chromium-release-autoroll')
+ ) | set('%s@skia-corp.google.com.iam.gserviceaccount.com' % s
+ for s in ('chromium-internal-autoroll',)
+ ) | set('%s@owners-cleanup-prod.google.com.iam.gserviceaccount.com' % s
+ for s in ('swarming-tasks',)
+ ) | set('%s@fuchsia-infra.iam.gserviceaccount.com' % s
+ for s in ('global-integration-try-builder',
+ 'global-integration-ci-builder')
+ ) | set('%s@prod.google.com' % s
+ for s in ('chops-security-borg',
+ 'chops-security-cronjobs-cpesuggest'))
+
+_INVALID_GRD_FILE_LINE = [
+ (r'<file lang=.* path=.*', 'Path should come before lang in GRD files.')
+]
+
+def _IsCPlusPlusFile(input_api, file_path):
+ """Returns True if this file contains C++-like code (and not Python,
+ Go, Java, MarkDown, ...)"""
+
+ ext = input_api.os_path.splitext(file_path)[1]
+ # This list is compatible with CppChecker.IsCppFile but we should
+ # consider adding ".c" to it. If we do that we can use this function
+ # at more places in the code.
+ return ext in (
+ '.h',
+ '.cc',
+ '.cpp',
+ '.m',
+ '.mm',
+ )
+
+
+def _IsCPlusPlusHeaderFile(input_api, file_path):
+ return input_api.os_path.splitext(file_path)[1] == ".h"
+
+
+def _IsJavaFile(input_api, file_path):
+ return input_api.os_path.splitext(file_path)[1] == ".java"
+
+
+def _IsProtoFile(input_api, file_path):
+ return input_api.os_path.splitext(file_path)[1] == ".proto"
+
+
+def _IsXmlOrGrdFile(input_api, file_path):
+ ext = input_api.os_path.splitext(file_path)[1]
+ return ext in ('.grd', '.xml')
+
+
+def _IsMojomFile(input_api, file_path):
+ return input_api.os_path.splitext(file_path)[1] == ".mojom"
+
+
+def CheckNoUpstreamDepsOnClank(input_api, output_api):
+ """Prevent additions of dependencies from the upstream repo on //clank."""
+ # clank can depend on clank
+ if input_api.change.RepositoryRoot().endswith('clank'):
+ return []
+ build_file_patterns = [
+ r'(.+/)?BUILD\.gn',
+ r'.+\.gni',
+ ]
+ excluded_files = [r'build[/\\]config[/\\]android[/\\]config\.gni']
+ bad_pattern = input_api.re.compile(r'^[^#]*//clank')
+
+ error_message = 'Disallowed import on //clank in an upstream build file:'
+
+ def FilterFile(affected_file):
+ return input_api.FilterSourceFile(affected_file,
+ files_to_check=build_file_patterns,
+ files_to_skip=excluded_files)
+
+ problems = []
+ for f in input_api.AffectedSourceFiles(FilterFile):
+ local_path = f.LocalPath()
+ for line_number, line in f.ChangedContents():
+ if (bad_pattern.search(line)):
+ problems.append('%s:%d\n %s' %
+ (local_path, line_number, line.strip()))
+ if problems:
+ return [output_api.PresubmitPromptOrNotify(error_message, problems)]
+ else:
+ return []
+
+
+def CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
+ """Attempts to prevent use of functions intended only for testing in
+ non-testing code. For now this is just a best-effort implementation
+ that ignores header files and may have some false positives. A
+ better implementation would probably need a proper C++ parser.
+ """
+ # We only scan .cc files and the like, as declarations of for-testing
+ # functions in header files are hard to distinguish from calls to such
+ # functions without a proper C++ parser.
+ file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
+
+ base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
+ inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' %
+ base_function_pattern)
+ comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
+ allowlist_pattern = input_api.re.compile(r'// IN-TEST$')
+ exclusion_pattern = input_api.re.compile(
+ r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' %
+ (base_function_pattern, base_function_pattern))
+ # Avoid a false positive in this case, where the method name, the ::, and
+ # the closing { are all on different lines due to line wrapping.
+ # HelperClassForTesting::
+ # HelperClassForTesting(
+ # args)
+ # : member(0) {}
+ method_defn_pattern = input_api.re.compile(r'[A-Za-z0-9_]+::$')
+
+ def FilterFile(affected_file):
+ files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
+ input_api.DEFAULT_FILES_TO_SKIP)
+ return input_api.FilterSourceFile(
+ affected_file,
+ files_to_check=file_inclusion_pattern,
+ files_to_skip=files_to_skip)
+
+ problems = []
+ for f in input_api.AffectedSourceFiles(FilterFile):
+ local_path = f.LocalPath()
+ in_method_defn = False
+ for line_number, line in f.ChangedContents():
+ if (inclusion_pattern.search(line)
+ and not comment_pattern.search(line)
+ and not exclusion_pattern.search(line)
+ and not allowlist_pattern.search(line)
+ and not in_method_defn):
+ problems.append('%s:%d\n %s' %
+ (local_path, line_number, line.strip()))
+ in_method_defn = method_defn_pattern.search(line)
+
+ if problems:
+ return [
+ output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
+ ]
+ else:
+ return []
+
+
+def CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
+ """This is a simplified version of
+ CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
+ """
+ javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
+ javadoc_end_re = input_api.re.compile(r'^\s*\*/')
+ name_pattern = r'ForTest(s|ing)?'
+ # Describes an occurrence of "ForTest*" inside a // comment.
+ comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
+ # Describes @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
+ annotation_re = input_api.re.compile(r'@VisibleForTesting\(')
+ # Catch calls.
+ inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
+ # Ignore definitions. (Comments are ignored separately.)
+ exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)
+ allowlist_re = input_api.re.compile(r'// IN-TEST$')
+
+ problems = []
+ sources = lambda x: input_api.FilterSourceFile(
+ x,
+ files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
+ DEFAULT_FILES_TO_SKIP),
+ files_to_check=[r'.*\.java$'])
+ for f in input_api.AffectedFiles(include_deletes=False,
+ file_filter=sources):
+ local_path = f.LocalPath()
+ is_inside_javadoc = False
+ for line_number, line in f.ChangedContents():
+ if is_inside_javadoc and javadoc_end_re.search(line):
+ is_inside_javadoc = False
+ if not is_inside_javadoc and javadoc_start_re.search(line):
+ is_inside_javadoc = True
+ if is_inside_javadoc:
+ continue
+ if (inclusion_re.search(line) and not comment_re.search(line)
+ and not annotation_re.search(line)
+ and not allowlist_re.search(line)
+ and not exclusion_re.search(line)):
+ problems.append('%s:%d\n %s' %
+ (local_path, line_number, line.strip()))
+
+ if problems:
+ return [
+ output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
+ ]
+ else:
+ return []
+
+
+def CheckNoIOStreamInHeaders(input_api, output_api):
+ """Checks to make sure no .h files include <iostream>."""
+ files = []
+ pattern = input_api.re.compile(r'^#include\s*<iostream>',
+ input_api.re.MULTILINE)
+ for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+ if not f.LocalPath().endswith('.h'):
+ continue
+ contents = input_api.ReadFile(f)
+ if pattern.search(contents):
+ files.append(f)
+
+ if len(files):
+ return [
+ output_api.PresubmitError(
+ 'Do not #include <iostream> in header files, since it inserts static '
+ 'initialization into every file including the header. Instead, '
+ '#include <ostream>. See http://crbug.com/94794', files)
+ ]
+ return []
+
+
+def CheckNoStrCatRedefines(input_api, output_api):
+ """Checks no windows headers with StrCat redefined are included directly."""
+ files = []
+ files_to_check = (r'.+%s' % _HEADER_EXTENSIONS,
+ r'.+%s' % _IMPLEMENTATION_EXTENSIONS)
+ files_to_skip = (input_api.DEFAULT_FILES_TO_SKIP +
+ _NON_BASE_DEPENDENT_PATHS)
+ sources_filter = lambda f: input_api.FilterSourceFile(
+ f, files_to_check=files_to_check, files_to_skip=files_to_skip)
+
+ pattern_deny = input_api.re.compile(
+ r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
+ input_api.re.MULTILINE)
+ pattern_allow = input_api.re.compile(
+ r'^#include\s"base/win/windows_defines.inc"', input_api.re.MULTILINE)
+ for f in input_api.AffectedSourceFiles(sources_filter):
+ contents = input_api.ReadFile(f)
+ if pattern_deny.search(
+ contents) and not pattern_allow.search(contents):
+ files.append(f.LocalPath())
+
+ if len(files):
+ return [
+ output_api.PresubmitError(
+ 'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
+ 'directly since they pollute code with the StrCat macro. Instead, '
+ 'include the matching header from base/win. See http://crbug.com/856536',
+ files)
+ ]
+ return []
+
+
+def _CheckNoUNIT_TESTInSourceFiles(input_api, f):
+ problems = []
+
+ unit_test_macro = input_api.re.compile(
+ r'^\s*#.*(?:ifn?def\s+UNIT_TEST|defined\s*\(?\s*UNIT_TEST\s*\)?)(?:$|\s+)')
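+ # A rough sketch of what this pattern accepts (for illustration): it
+ # matches preprocessor lines such as '#ifdef UNIT_TEST' and
+ # '#if defined(UNIT_TEST)', but not identifiers that merely contain the
+ # substring, e.g. '#ifdef UNIT_TEST_HELPERS'.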
+ for line_num, line in f.ChangedContents():
+ if unit_test_macro.match(line):
+ problems.append(' %s:%d' % (f.LocalPath(), line_num))
+
+ return problems
+
+
+def CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
+ """Checks to make sure no source files use UNIT_TEST."""
+ problems = []
+ for f in input_api.AffectedFiles():
+ if (not f.LocalPath().endswith(('.cc', '.mm'))):
+ continue
+ problems.extend(
+ _CheckNoUNIT_TESTInSourceFiles(input_api, f))
+
+ if not problems:
+ return []
+ return [
+ output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
+ '\n'.join(problems))
+ ]
+
+
+def CheckNoDISABLETypoInTests(input_api, output_api):
+ """Checks to prevent attempts to disable tests with DISABLE_ prefix.
+
+ This test warns if somebody tries to disable a test with the DISABLE_ prefix
+ instead of DISABLED_. To filter false positives, reports are only generated
+ if a corresponding MAYBE_ line exists.
+ """
+ problems = []
+
+ # The following two patterns are looked for in tandem - is a test labeled
+ # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED_)?
+ maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
+ disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')
+
+ # This is for the case that a test is disabled on all platforms.
+ full_disable_pattern = input_api.re.compile(
+ r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
+ input_api.re.MULTILINE)
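+ # e.g. a 'TEST_F(MySuite, DISABLE_Foo)' invocation is caught by this
+ # pattern even when no corresponding MAYBE_ line exists.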
+
+ for f in input_api.AffectedFiles(False):
+ if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
+ continue
+
+ # Search for MAYBE_, DISABLE_ pairs.
+ disable_lines = {} # Maps of test name to line number.
+ maybe_lines = {}
+ for line_num, line in f.ChangedContents():
+ disable_match = disable_pattern.search(line)
+ if disable_match:
+ disable_lines[disable_match.group(1)] = line_num
+ maybe_match = maybe_pattern.search(line)
+ if maybe_match:
+ maybe_lines[maybe_match.group(1)] = line_num
+
+ # Search for DISABLE_ occurrences within a TEST() macro.
+ disable_tests = set(disable_lines.keys())
+ maybe_tests = set(maybe_lines.keys())
+ for test in disable_tests.intersection(maybe_tests):
+ problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))
+
+ contents = input_api.ReadFile(f)
+ full_disable_match = full_disable_pattern.search(contents)
+ if full_disable_match:
+ problems.append(' %s' % f.LocalPath())
+
+ if not problems:
+ return []
+ return [
+ output_api.PresubmitPromptWarning(
+ 'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
+ '\n'.join(problems))
+ ]
+
+
+def CheckForgettingMAYBEInTests(input_api, output_api):
+ """Checks to make sure tests disabled conditionally are not missing a
+ corresponding MAYBE_ prefix.
+ """
+ # Expect at least a lowercase character in the test name. This helps rule
+ # out false positives with macros wrapping the actual test name.
+ define_maybe_pattern = input_api.re.compile(
+ r'^\#define MAYBE_(?P<test_name>\w*[a-z]\w*)')
+ # The test_maybe_pattern needs to handle all of these forms. The standard:
+ # IN_PROC_TEST_F(SyncTest, MAYBE_Start) {
+ # With a wrapper macro around the test name:
+ # IN_PROC_TEST_F(SyncTest, E2E_ENABLED(MAYBE_Start)) {
+ # And the odd-ball NACL_BROWSER_TEST_F format:
+ # NACL_BROWSER_TEST_F(NaClBrowserTest, SimpleLoad, {
+ # The optional E2E_ENABLED-style is handled with (\w*\()?
+ # The NACL_BROWSER_TEST_F pattern is handled by allowing a trailing comma or
+ # trailing ')'.
+ test_maybe_pattern = (
+ r'^\s*\w*TEST[^(]*\(\s*\w+,\s*(\w*\()?MAYBE_{test_name}[\),]')
+ suite_maybe_pattern = r'^\s*\w*TEST[^(]*\(\s*MAYBE_{test_name}[\),]'
+ warnings = []
+
+ # Read the entire files. We can't just read the affected lines, since
+ # forgetting to add MAYBE_ on a change would not show up otherwise.
+ for f in input_api.AffectedFiles(False):
+ if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
+ continue
+ contents = input_api.ReadFile(f)
+ lines = contents.splitlines(True)
+ current_position = 0
+ warning_test_names = set()
+ for line_num, line in enumerate(lines, start=1):
+ current_position += len(line)
+ maybe_match = define_maybe_pattern.search(line)
+ if maybe_match:
+ test_name = maybe_match.group('test_name')
+ # Do not warn twice for the same test.
+ if (test_name in warning_test_names):
+ continue
+ warning_test_names.add(test_name)
+
+ # Attempt to find the corresponding MAYBE_ test or suite, starting from
+ # the current position.
+ test_match = input_api.re.compile(
+ test_maybe_pattern.format(test_name=test_name),
+ input_api.re.MULTILINE).search(contents, current_position)
+ suite_match = input_api.re.compile(
+ suite_maybe_pattern.format(test_name=test_name),
+ input_api.re.MULTILINE).search(contents, current_position)
+ if not test_match and not suite_match:
+ warnings.append(
+ output_api.PresubmitPromptWarning(
+ '%s:%d found MAYBE_ defined without corresponding test %s'
+ % (f.LocalPath(), line_num, test_name)))
+ return warnings
+
+
+def CheckDCHECK_IS_ONHasBraces(input_api, output_api):
+ """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
+ errors = []
+ pattern = input_api.re.compile(r'\bDCHECK_IS_ON\b(?!\(\))',
+ input_api.re.MULTILINE)
+ for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+ if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
+ continue
+ for lnum, line in f.ChangedContents():
+ if input_api.re.search(pattern, line):
+ errors.append(
+ output_api.PresubmitError((
+ '%s:%d: Use of DCHECK_IS_ON() must be written as "#if '
+ + 'DCHECK_IS_ON()", not forgetting the parentheses.') %
+ (f.LocalPath(), lnum)))
+ return errors
+
+
+# TODO(crbug/1138055): Reimplement CheckUmaHistogramChangesOnUpload check in a
+# more reliable way. See
+# https://chromium-review.googlesource.com/c/chromium/src/+/2500269
+
+
+def CheckFlakyTestUsage(input_api, output_api):
+ """Check that FlakyTest annotation is our own instead of the android one"""
+ pattern = input_api.re.compile(r'import android.test.FlakyTest;')
+ files = []
+ for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+ if f.LocalPath().endswith('Test.java'):
+ if pattern.search(input_api.ReadFile(f)):
+ files.append(f)
+ if len(files):
+ return [
+ output_api.PresubmitError(
+ 'Use org.chromium.base.test.util.FlakyTest instead of '
+ 'android.test.FlakyTest', files)
+ ]
+ return []
+
+
+def CheckNoDEPSGIT(input_api, output_api):
+ """Make sure .DEPS.git is never modified manually."""
+ if any(f.LocalPath().endswith('.DEPS.git')
+ for f in input_api.AffectedFiles()):
+ return [
+ output_api.PresubmitError(
+ 'Never commit changes to .DEPS.git. This file is maintained by an\n'
+ 'automated system based on what\'s in DEPS and your changes will be\n'
+ 'overwritten.\n'
+ 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
+ 'get-the-code#Rolling_DEPS\n'
+ 'for more information')
+ ]
+ return []
+
+
+def CheckCrosApiNeedBrowserTest(input_api, output_api):
+ """Check new crosapi should add browser test."""
+ has_new_crosapi = False
+ has_browser_test = False
+ for f in input_api.AffectedFiles():
+ path = f.LocalPath()
+ if (path.startswith('chromeos/crosapi/mojom') and
+ _IsMojomFile(input_api, path) and f.Action() == 'A'):
+ has_new_crosapi = True
+ if path.endswith('browsertest.cc') or path.endswith('browser_test.cc'):
+ has_browser_test = True
+ if has_new_crosapi and not has_browser_test:
+ return [
+ output_api.PresubmitPromptWarning(
+ 'You are adding a new crosapi, but no file ending with '
+ 'browsertest.cc is being added or modified. It is important '
+ 'to add crosapi browser test coverage to avoid version '
+ 'skew issues.\n'
+ 'Check //docs/lacros/test_instructions.md for more information.'
+ )
+ ]
+ return []
+
+
+def CheckValidHostsInDEPSOnUpload(input_api, output_api):
+ """Checks that DEPS file deps are from allowed_hosts."""
+ # Run only if DEPS file has been modified to annoy fewer bystanders.
+ if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
+ return []
+ # Outsource work to gclient verify
+ try:
+ gclient_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
+ 'third_party', 'depot_tools',
+ 'gclient.py')
+ input_api.subprocess.check_output(
+ [input_api.python3_executable, gclient_path, 'verify'],
+ stderr=input_api.subprocess.STDOUT)
+ return []
+ except input_api.subprocess.CalledProcessError as error:
+ return [
+ output_api.PresubmitError(
+ 'DEPS file must have only git dependencies.',
+ long_text=error.output)
+ ]
+
+
+def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
+ ban_rule):
+ """Helper method for checking for banned constructs.
+
+ Returns a list of strings composed of the name of the file, the line
+ number where the match was found, and the explanation from |ban_rule|,
+ in case the rule's banned pattern matches the line passed as parameter.
+ """
+ result = []
+
+ # Ignore comments about banned types.
+ if input_api.re.search(r"^ *//", line):
+ return result
+ # A // nocheck comment will bypass this error.
+ if line.endswith(" nocheck"):
+ return result
+
+ matched = False
+ if ban_rule.pattern[0:1] == '/':
+ regex = ban_rule.pattern[1:]
+ if input_api.re.search(regex, line):
+ matched = True
+ elif ban_rule.pattern in line:
+ matched = True
+
+ if matched:
+ result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
+ for line in ban_rule.explanation:
+ result.append(' %s' % line)
+
+ return result
+
+
+def CheckNoBannedFunctions(input_api, output_api):
+ """Make sure that banned functions are not used."""
+ warnings = []
+ errors = []
+
+ def IsExcludedFile(affected_file, excluded_paths):
+ if not excluded_paths:
+ return False
+
+ local_path = affected_file.LocalPath()
+ # Consistently use / as path separator to simplify the writing of regex
+ # expressions.
+ local_path = local_path.replace(input_api.os_path.sep, '/')
+ for item in excluded_paths:
+ if input_api.re.match(item, local_path):
+ return True
+ return False
+
+ def IsIosObjcFile(affected_file):
+ local_path = affected_file.LocalPath()
+ if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m',
+ '.h'):
+ return False
+ basename = input_api.os_path.basename(local_path)
+ if 'ios' in basename.split('_'):
+ return True
+ for sep in (input_api.os_path.sep, input_api.os_path.altsep):
+ if sep and 'ios' in local_path.split(sep):
+ return True
+ return False
+
+ def CheckForMatch(affected_file, line_num: int, line: str,
+ ban_rule: BanRule):
+ if IsExcludedFile(affected_file, ban_rule.excluded_paths):
+ return
+
+ problems = _GetMessageForMatchingType(input_api, f, line_num, line,
+ ban_rule)
+ if problems:
+ if ban_rule.treat_as_error is not None and ban_rule.treat_as_error:
+ errors.extend(problems)
+ else:
+ warnings.extend(problems)
+
+ file_filter = lambda f: f.LocalPath().endswith('.java')
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_JAVA_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ file_filter = lambda f: f.LocalPath().endswith(('.js', '.ts'))
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_JAVASCRIPT_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_OBJC_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_IOS_OBJC_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ egtest_filter = lambda f: f.LocalPath().endswith('_egtest.mm')
+ for f in input_api.AffectedFiles(file_filter=egtest_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_IOS_EGTEST_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_CPP_FUNCTIONS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+ file_filter = lambda f: f.LocalPath().endswith('.mojom')
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_MOJOM_PATTERNS:
+ CheckForMatch(f, line_num, line, ban_rule)
+
+
+ result = []
+ if (warnings):
+ result.append(
+ output_api.PresubmitPromptWarning('Banned functions were used.\n' +
+ '\n'.join(warnings)))
+ if (errors):
+ result.append(
+ output_api.PresubmitError('Banned functions were used.\n' +
+ '\n'.join(errors)))
+ return result
+
+def CheckNoLayoutCallsInTests(input_api, output_api):
+ """Make sure there are no explicit calls to View::Layout() in tests"""
+ warnings = []
+ ban_rule = BanRule(
+ r'/(\.|->)Layout\(\);',
+ (
+ 'Direct calls to View::Layout() are not allowed in tests. '
+ 'If the view must be laid out here, use RunScheduledLayout(view). It '
+ 'is found in //ui/views/test/views_test_utils.h. '
+ 'See http://crbug.com/1350521 for more details.',
+ ),
+ False,
+ )
+ file_filter = lambda f: input_api.re.search(
+ r'_(unittest|browsertest|ui_test).*\.(cc|mm)$', f.LocalPath())
+ for f in input_api.AffectedFiles(file_filter = file_filter):
+ for line_num, line in f.ChangedContents():
+ problems = _GetMessageForMatchingType(input_api, f,
+ line_num, line,
+ ban_rule)
+ if problems:
+ warnings.extend(problems)
+ result = []
+ if (warnings):
+ result.append(
+ output_api.PresubmitPromptWarning(
+ 'Banned call to View::Layout() in tests.\n\n' + '\n'.join(warnings)))
+ return result
+
+def _CheckAndroidNoBannedImports(input_api, output_api):
+ """Make sure that banned java imports are not used."""
+ errors = []
+
+ file_filter = lambda f: f.LocalPath().endswith('.java')
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ for ban_rule in _BANNED_JAVA_IMPORTS:
+ # Consider merging this into the above function. There is no
+ # real difference anymore other than helping with a little
+ # bit of boilerplate text. Doing so means things like
+ # `treat_as_error` will also be uniformly handled.
+ problems = _GetMessageForMatchingType(input_api, f, line_num,
+ line, ban_rule)
+ if problems:
+ errors.extend(problems)
+ result = []
+ if (errors):
+ result.append(
+ output_api.PresubmitError('Banned imports were used.\n' +
+ '\n'.join(errors)))
+ return result
+
+
+def CheckNoPragmaOnce(input_api, output_api):
+ """Make sure that banned functions are not used."""
+ files = []
+ pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
+ for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+ if not f.LocalPath().endswith('.h'):
+ continue
+ if f.LocalPath().endswith('com_imported_mstscax.h'):
+ continue
+ contents = input_api.ReadFile(f)
+ if pattern.search(contents):
+ files.append(f)
+
+ if files:
+ return [
+ output_api.PresubmitError(
+ 'Do not use #pragma once in header files.\n'
+ 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
+ files)
+ ]
+ return []
+
+
+def CheckNoTrinaryTrueFalse(input_api, output_api):
+ """Checks to make sure we don't introduce use of foo ? true : false."""
+ problems = []
+ pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
+ for f in input_api.AffectedFiles():
+ if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
+ continue
+
+ for line_num, line in f.ChangedContents():
+ if pattern.match(line):
+ problems.append(' %s:%d' % (f.LocalPath(), line_num))
+
+ if not problems:
+ return []
+ return [
+ output_api.PresubmitPromptWarning(
+ 'Please consider avoiding the "? true : false" pattern if possible.\n'
+ + '\n'.join(problems))
+ ]
+
+
+def CheckUnwantedDependencies(input_api, output_api):
+ """Runs checkdeps on #include and import statements added in this
+ change. Breaking - rules is an error, breaking ! rules is a
+ warning.
+ """
+ # Return early if no relevant file types were modified.
+ for f in input_api.AffectedFiles():
+ path = f.LocalPath()
+ if (_IsCPlusPlusFile(input_api, path) or _IsProtoFile(input_api, path)
+ or _IsJavaFile(input_api, path)):
+ break
+ else:
+ return []
+
+ import sys
+ # We need to wait until we have an input_api object and use this
+ # roundabout construct to import checkdeps because this file is
+ # eval-ed and thus doesn't have __file__.
+ original_sys_path = sys.path
+ try:
+ sys.path = sys.path + [
+ input_api.os_path.join(input_api.PresubmitLocalPath(),
+ 'buildtools', 'checkdeps')
+ ]
+ import checkdeps
+ from rules import Rule
+ finally:
+ # Restore sys.path to what it was before.
+ sys.path = original_sys_path
+
+ added_includes = []
+ added_imports = []
+ added_java_imports = []
+ for f in input_api.AffectedFiles():
+ if _IsCPlusPlusFile(input_api, f.LocalPath()):
+ changed_lines = [line for _, line in f.ChangedContents()]
+ added_includes.append([f.AbsoluteLocalPath(), changed_lines])
+ elif _IsProtoFile(input_api, f.LocalPath()):
+ changed_lines = [line for _, line in f.ChangedContents()]
+ added_imports.append([f.AbsoluteLocalPath(), changed_lines])
+ elif _IsJavaFile(input_api, f.LocalPath()):
+ changed_lines = [line for _, line in f.ChangedContents()]
+ added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
+
+ deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
+
+ error_descriptions = []
+ warning_descriptions = []
+ error_subjects = set()
+ warning_subjects = set()
+
+ for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
+ added_includes):
+ path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
+ description_with_path = '%s\n %s' % (path, rule_description)
+ if rule_type == Rule.DISALLOW:
+ error_descriptions.append(description_with_path)
+ error_subjects.add("#includes")
+ else:
+ warning_descriptions.append(description_with_path)
+ warning_subjects.add("#includes")
+
+ for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
+ added_imports):
+ path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
+ description_with_path = '%s\n %s' % (path, rule_description)
+ if rule_type == Rule.DISALLOW:
+ error_descriptions.append(description_with_path)
+ error_subjects.add("imports")
+ else:
+ warning_descriptions.append(description_with_path)
+ warning_subjects.add("imports")
+
+ for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
+ added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
+ path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
+ description_with_path = '%s\n %s' % (path, rule_description)
+ if rule_type == Rule.DISALLOW:
+ error_descriptions.append(description_with_path)
+ error_subjects.add("imports")
+ else:
+ warning_descriptions.append(description_with_path)
+ warning_subjects.add("imports")
+
+ results = []
+ if error_descriptions:
+ results.append(
+ output_api.PresubmitError(
+ 'You added one or more %s that violate checkdeps rules.' %
+ " and ".join(error_subjects), error_descriptions))
+ if warning_descriptions:
+ results.append(
+ output_api.PresubmitPromptOrNotify(
+ 'You added one or more %s of files that are temporarily\n'
+ 'allowed but being removed. Can you avoid introducing the\n'
+ '%s? See relevant DEPS file(s) for details and contacts.' %
+ (" and ".join(warning_subjects), "/".join(warning_subjects)),
+ warning_descriptions))
+ return results
+
+
+def CheckFilePermissions(input_api, output_api):
+ """Check that all files have their permissions properly set."""
+ if input_api.platform == 'win32':
+ return []
+ checkperms_tool = input_api.os_path.join(input_api.PresubmitLocalPath(),
+ 'tools', 'checkperms',
+ 'checkperms.py')
+ args = [
+ input_api.python3_executable, checkperms_tool, '--root',
+ input_api.change.RepositoryRoot()
+ ]
+ with input_api.CreateTemporaryFile() as file_list:
+ for f in input_api.AffectedFiles():
+ # checkperms.py file/directory arguments must be relative to the
+ # repository.
+ file_list.write((f.LocalPath() + '\n').encode('utf8'))
+ file_list.close()
+ args += ['--file-list', file_list.name]
+ try:
+ input_api.subprocess.check_output(args)
+ return []
+ except input_api.subprocess.CalledProcessError as error:
+ return [
+ output_api.PresubmitError('checkperms.py failed:',
+ long_text=error.output.decode(
+ 'utf-8', 'ignore'))
+ ]
+
+
+def CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
+ """Makes sure we don't include ui/aura/window_property.h
+ in header files.
+ """
+ pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
+ errors = []
+ for f in input_api.AffectedFiles():
+ if not f.LocalPath().endswith('.h'):
+ continue
+ for line_num, line in f.ChangedContents():
+ if pattern.match(line):
+ errors.append(' %s:%d' % (f.LocalPath(), line_num))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'Header files should not include ui/aura/window_property.h',
+ errors))
+ return results
+
+
+def CheckNoInternalHeapIncludes(input_api, output_api):
+ """Makes sure we don't include any headers from
+ third_party/blink/renderer/platform/heap/impl or
+ third_party/blink/renderer/platform/heap/v8_wrapper from files outside of
+ third_party/blink/renderer/platform/heap
+ """
+ impl_pattern = input_api.re.compile(
+ r'^\s*#include\s*"third_party/blink/renderer/platform/heap/impl/.*"')
+ v8_wrapper_pattern = input_api.re.compile(
+ r'^\s*#include\s*"third_party/blink/renderer/platform/heap/v8_wrapper/.*"'
+ )
+ # Consistently use / as path separator to simplify the writing of regex
+ # expressions.
+ file_filter = lambda f: not input_api.re.match(
+ r"^third_party/blink/renderer/platform/heap/.*",
+ f.LocalPath().replace(input_api.os_path.sep, '/'))
+ errors = []
+
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ if impl_pattern.match(line) or v8_wrapper_pattern.match(line):
+ errors.append(' %s:%d' % (f.LocalPath(), line_num))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'Do not include files from third_party/blink/renderer/platform/heap/impl'
+ ' or third_party/blink/renderer/platform/heap/v8_wrapper. Use the '
+ 'relevant counterparts from third_party/blink/renderer/platform/heap',
+ errors))
+ return results
+
+
+def _CheckForVersionControlConflictsInFile(input_api, f):
+ pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
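+ # This catches the three marker forms, e.g. '<<<<<<< HEAD',
+ # '>>>>>>> their-branch', and a line consisting solely of '======='.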
+ errors = []
+ for line_num, line in f.ChangedContents():
+ if f.LocalPath().endswith(('.md', '.rst', '.txt')):
+ # First-level headers in markdown look a lot like version control
+ # conflict markers. http://daringfireball.net/projects/markdown/basics
+ continue
+ if pattern.match(line):
+ errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
+ return errors
+
+
+def CheckForVersionControlConflicts(input_api, output_api):
+ """Usually this is not intentional and will cause a compile failure."""
+ errors = []
+ for f in input_api.AffectedFiles():
+ errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'Version control conflict markers found, please resolve.',
+ errors))
+ return results
+
+
+def CheckGoogleSupportAnswerUrlOnUpload(input_api, output_api):
+ pattern = input_api.re.compile(r'support\.google\.com/chrome.*/answer')
+ errors = []
+ for f in input_api.AffectedFiles():
+ for line_num, line in f.ChangedContents():
+ if pattern.search(line):
+ errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Found Google support URL addressed by answer number. Please replace '
+ 'with a p= identifier instead. See crbug.com/679462\n',
+ errors))
+ return results
+
+
+def CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
+ def FilterFile(affected_file):
+ """Filter function for use with input_api.AffectedSourceFiles,
+ below. This filters out everything except non-test files from
+ top-level directories that generally speaking should not hard-code
+ service URLs (e.g. src/android_webview/, src/content/ and others).
+ """
+ return input_api.FilterSourceFile(
+ affected_file,
+ files_to_check=[r'^(android_webview|base|content|net)/.*'],
+ files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
+ input_api.DEFAULT_FILES_TO_SKIP))
+
+ base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
+ '\.(com|net)[^"]*"')
+ comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
+ pattern = input_api.re.compile(base_pattern)
+ problems = [] # items are (filename, line_number, line)
+ for f in input_api.AffectedSourceFiles(FilterFile):
+ for line_num, line in f.ChangedContents():
+ if not comment_pattern.search(line) and pattern.search(line):
+ problems.append((f.LocalPath(), line_num, line))
+
+ if problems:
+ return [
+ output_api.PresubmitPromptOrNotify(
+ 'Most layers below src/chrome/ should not hardcode service URLs.\n'
+ 'Are you sure this is correct?', [
+ ' %s:%d: %s' % (problem[0], problem[1], problem[2])
+ for problem in problems
+ ])
+ ]
+ else:
+ return []
+
+
+def CheckChromeOsSyncedPrefRegistration(input_api, output_api):
+ """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""
+
+ def FileFilter(affected_file):
+ """Includes directories known to be Chrome OS only."""
+ return input_api.FilterSourceFile(
+ affected_file,
+ files_to_check=(
+ '^ash/',
+ '^chromeos/', # Top-level src/chromeos.
+ '.*/chromeos/', # Any path component.
+ '^components/arc',
+ '^components/exo'),
+ files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))
+
+ prefs = []
+ priority_prefs = []
+ for f in input_api.AffectedFiles(file_filter=FileFilter):
+ for line_num, line in f.ChangedContents():
+ if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF',
+ line):
+ prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
+ prefs.append(' %s' % line)
+ if input_api.re.search(
+ 'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
+ priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
+ priority_prefs.append(' %s' % line)
+
+ results = []
+ if (prefs):
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Preferences were registered as SYNCABLE_PREF and will be controlled '
+ 'by browser sync settings. If these prefs should be controlled by OS '
+ 'sync settings use SYNCABLE_OS_PREF instead.\n' +
+ '\n'.join(prefs)))
+ if (priority_prefs):
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
+ 'controlled by browser sync settings. If these prefs should be '
+ 'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
+ 'instead.\n' + '\n'.join(priority_prefs)))
+ return results
+
+
+def CheckNoAbbreviationInPngFileName(input_api, output_api):
+ """Makes sure there are no abbreviations in the name of PNG files.
+ The native_client_sdk directory is excluded because it has auto-generated PNG
+ files for documentation.
+ """
+ errors = []
+ files_to_check = [r'.*\.png$']
+ files_to_skip = [r'^native_client_sdk/',
+ r'^services/test/',
+ r'^third_party/blink/web_tests/',
+ ]
+ file_filter = lambda f: input_api.FilterSourceFile(
+ f, files_to_check=files_to_check, files_to_skip=files_to_skip)
+ abbreviation = input_api.re.compile(r'.+_[a-z]\.png|.+_[a-z]_.*\.png')
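+ # For example, 'icon_h.png' and 'icon_h_hover.png' are flagged as
+ # abbreviations, while 'icon_hover.png' is not.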
+ for f in input_api.AffectedFiles(include_deletes=False,
+ file_filter=file_filter):
+ file_name = input_api.os_path.split(f.LocalPath())[1]
+ if abbreviation.search(file_name):
+ errors.append(' %s' % f.LocalPath())
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'The name of PNG files should not have abbreviations.\n'
+ 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
+ 'Contact oshima@chromium.org if you have questions.', errors))
+ return results
+
+def CheckNoProductIconsAddedToPublicRepo(input_api, output_api):
+ """Heuristically identifies product icons based on their file name and reminds
+ contributors not to add them to the Chromium repository.
+ """
+ errors = []
+ files_to_check = [r'.*google.*\.png$|.*google.*\.svg$|.*google.*\.icon$']
+ file_filter = lambda f: input_api.FilterSourceFile(
+ f, files_to_check=files_to_check)
+ for f in input_api.AffectedFiles(include_deletes=False,
+ file_filter=file_filter):
+ errors.append(' %s' % f.LocalPath())
+
+ results = []
+ if errors:
+ # Give warnings instead of errors on presubmit --all and presubmit
+ # --files.
+ message_type = (output_api.PresubmitNotifyResult if input_api.no_diffs
+ else output_api.PresubmitError)
+ results.append(
+ message_type(
+ 'Trademarked images should not be added to the public repo. '
+ 'See crbug.com/944754', errors))
+ return results
+
+
+def _ExtractAddRulesFromParsedDeps(parsed_deps):
+ """Extract the rules that add dependencies from a parsed DEPS file.
+
+ Args:
+ parsed_deps: the locals dictionary from evaluating the DEPS file."""
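+ # A minimal illustration (hypothetical input): given
+ # {'include_rules': ['+base', '-chrome'],
+ # 'specific_include_rules': {r'.*_test\.cc': ['!net']}}
+ # this returns {'base', 'net'}: only '+' and '!' rules are kept, with
+ # the leading character stripped.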
+ add_rules = set()
+ add_rules.update([
+ rule[1:] for rule in parsed_deps.get('include_rules', [])
+ if rule.startswith('+') or rule.startswith('!')
+ ])
+ for _, rules in parsed_deps.get('specific_include_rules', {}).items():
+ add_rules.update([
+ rule[1:] for rule in rules
+ if rule.startswith('+') or rule.startswith('!')
+ ])
+ return add_rules
+
+
+def _ParseDeps(contents):
+ """Simple helper for parsing DEPS files."""
+
+ # Stubs for handling special syntax in the root DEPS file.
+ class _VarImpl:
+ def __init__(self, local_scope):
+ self._local_scope = local_scope
+
+ def Lookup(self, var_name):
+ """Implements the Var syntax."""
+ try:
+ return self._local_scope['vars'][var_name]
+ except KeyError:
+ raise Exception('Var is not defined: %s' % var_name)
+
+ local_scope = {}
+ global_scope = {
+ 'Var': _VarImpl(local_scope).Lookup,
+ 'Str': str,
+ }
+
+ exec(contents, global_scope, local_scope)
+ return local_scope
+
+
+def _CalculateAddedDeps(os_path, old_contents, new_contents):
+ """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
+ a set of DEPS entries that we should look up.
+
+ For a directory (rather than a specific filename) we fake a path to
+ a specific filename by adding /DEPS. This is chosen as a file that
+ will seldom or never be subject to per-file include_rules.
+ """
+ # We ignore deps entries on auto-generated directories.
+ AUTO_GENERATED_DIRS = ['grit', 'jni']
+
+ old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
+ new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))
+
+ added_deps = new_deps.difference(old_deps)
+
+ results = set()
+ for added_dep in added_deps:
+ if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
+ continue
+ # Assume that a rule that ends in .h is a rule for a specific file.
+ if added_dep.endswith('.h'):
+ results.add(added_dep)
+ else:
+ results.add(os_path.join(added_dep, 'DEPS'))
+ return results
+
+
+def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
+ """When a dependency prefixed with + is added to a DEPS file, we
+ want to make sure that the change is reviewed by an OWNER of the
+ target file or directory, to avoid layering violations from being
+ introduced. This check verifies that this happens.
+ """
+ # We rely on Gerrit's code-owners to check approvals.
+ # input_api.gerrit is always set for Chromium, but other projects
+ # might not use Gerrit.
+ if not input_api.gerrit or input_api.no_diffs:
+ return []
+ if 'PRESUBMIT_SKIP_NETWORK' in input_api.environ:
+ return []
+ try:
+ if (input_api.change.issue and
+ input_api.gerrit.IsOwnersOverrideApproved(
+ input_api.change.issue)):
+ # Skip OWNERS check when Owners-Override label is approved. This is
+ # intended for global owners, trusted bots, and on-call sheriffs.
+ # Review is still required for these changes.
+ return []
+ except Exception as e:
+ return [output_api.PresubmitPromptWarning(
+ 'Failed to retrieve owner override status - %s' % str(e))]
+
+ virtual_depended_on_files = set()
+
+ # Consistently use / as path separator to simplify the writing of regex
+ # expressions.
+ file_filter = lambda f: not input_api.re.match(
+ r"^third_party/blink/.*",
+ f.LocalPath().replace(input_api.os_path.sep, '/'))
+ for f in input_api.AffectedFiles(include_deletes=False,
+ file_filter=file_filter):
+ filename = input_api.os_path.basename(f.LocalPath())
+ if filename == 'DEPS':
+ virtual_depended_on_files.update(
+ _CalculateAddedDeps(input_api.os_path,
+ '\n'.join(f.OldContents()),
+ '\n'.join(f.NewContents())))
+
+ if not virtual_depended_on_files:
+ return []
+
+ if input_api.is_committing:
+ if input_api.tbr:
+ return [
+ output_api.PresubmitNotifyResult(
+ '--tbr was specified, skipping OWNERS check for DEPS additions'
+ )
+ ]
+ # TODO(dcheng): Make this generate an error on dry runs if the reviewer
+ # is not added, to prevent review serialization.
+ if input_api.dry_run:
+ return [
+ output_api.PresubmitNotifyResult(
+ 'This is a dry run, skipping OWNERS check for DEPS additions'
+ )
+ ]
+ if not input_api.change.issue:
+ return [
+ output_api.PresubmitError(
+ "DEPS approval by OWNERS check failed: this change has "
+ "no change number, so we can't check it for approvals.")
+ ]
+ output = output_api.PresubmitError
+ else:
+ output = output_api.PresubmitNotifyResult
+
+ owner_email, reviewers = (
+ input_api.canned_checks.GetCodereviewOwnerAndReviewers(
+ input_api, None, approval_needed=input_api.is_committing))
+
+ owner_email = owner_email or input_api.change.author_email
+
+ approval_status = input_api.owners_client.GetFilesApprovalStatus(
+ virtual_depended_on_files, reviewers.union([owner_email]), [])
+ missing_files = [
+ f for f in virtual_depended_on_files
+ if approval_status[f] != input_api.owners_client.APPROVED
+ ]
+
+ # We strip the /DEPS part that was added by _CalculateAddedDeps to
+ # fake a path to a file in a directory.
+ def StripDeps(path):
+ start_deps = path.rfind('/DEPS')
+ if start_deps != -1:
+ return path[:start_deps]
+ else:
+ return path
+
+ unapproved_dependencies = [
+ "'+%s'," % StripDeps(path) for path in missing_files
+ ]
+
+ if unapproved_dependencies:
+ output_list = [
+ output(
+ 'You need LGTM from owners of depends-on paths in DEPS that were '
+ 'modified in this CL:\n %s' %
+ '\n '.join(sorted(unapproved_dependencies)))
+ ]
+ suggested_owners = input_api.owners_client.SuggestOwners(
+ missing_files, exclude=[owner_email])
+ output_list.append(
+ output('Suggested missing target path OWNERS:\n %s' %
+ '\n '.join(suggested_owners or [])))
+ return output_list
+
+ return []
+
+
+# TODO: add unit tests.
+def CheckSpamLogging(input_api, output_api):
+ file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
+ files_to_skip = (
+ _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
+ input_api.DEFAULT_FILES_TO_SKIP + (
+ r"^base/fuchsia/scoped_fx_logger\.cc$",
+ r"^base/logging\.h$",
+ r"^base/logging\.cc$",
+ r"^base/task/thread_pool/task_tracker\.cc$",
+ r"^chrome/app/chrome_main_delegate\.cc$",
+ r"^chrome/browser/ash/arc/enterprise/cert_store/arc_cert_installer\.cc$",
+ r"^chrome/browser/ash/policy/remote_commands/user_command_arc_job\.cc$",
+ r"^chrome/browser/chrome_browser_main\.cc$",
+ r"^chrome/browser/ui/startup/startup_browser_creator\.cc$",
+ r"^chrome/browser/browser_switcher/bho/.*",
+ r"^chrome/browser/diagnostics/diagnostics_writer\.cc$",
+ r"^chrome/chrome_cleaner/.*",
+ r"^chrome/chrome_elf/dll_hash/dll_hash_main\.cc$",
+ r"^chrome/installer/setup/.*",
+ r"^chromecast/",
+ r"^components/cast",
+ r"^components/media_control/renderer/media_playback_options\.cc$",
+ r"^components/policy/core/common/policy_logger\.cc$",
+ r"^components/viz/service/display/"
+ r"overlay_strategy_underlay_cast\.cc$",
+ r"^components/zucchini/.*",
+ # TODO(peter): Remove exception. https://crbug.com/534537
+ r"^content/browser/notifications/"
+ r"notification_event_dispatcher_impl\.cc$",
+ r"^content/common/gpu/client/gl_helper_benchmark\.cc$",
+ r"^courgette/courgette_minimal_tool\.cc$",
+ r"^courgette/courgette_tool\.cc$",
+ r"^extensions/renderer/logging_native_handler\.cc$",
+ r"^fuchsia_web/common/init_logging\.cc$",
+ r"^fuchsia_web/runners/common/web_component\.cc$",
+ r"^fuchsia_web/shell/.*\.cc$",
+ r"^headless/app/headless_shell\.cc$",
+ r"^ipc/ipc_logging\.cc$",
+ r"^native_client_sdk/",
+ r"^remoting/base/logging\.h$",
+ r"^remoting/host/.*",
+ r"^sandbox/linux/.*",
+ r"^storage/browser/file_system/dump_file_system\.cc$",
+ r"^tools/",
+ r"^ui/base/resource/data_pack\.cc$",
+ r"^ui/aura/bench/bench_main\.cc$",
+ r"^ui/ozone/platform/cast/",
+ r"^ui/base/x/xwmstartupcheck/"
+ r"xwmstartupcheck\.cc$"))
+ source_file_filter = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
+
+ log_info = set([])
+ printf = set([])
+
+ for f in input_api.AffectedSourceFiles(source_file_filter):
+ for _, line in f.ChangedContents():
+ if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
+ log_info.add(f.LocalPath())
+ elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
+ log_info.add(f.LocalPath())
+
+ if input_api.re.search(r"\bprintf\(", line):
+ printf.add(f.LocalPath())
+ elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
+ printf.add(f.LocalPath())
+
+ if log_info:
+ return [
+ output_api.PresubmitError(
+ 'These files spam the console log with LOG(INFO):',
+ items=log_info)
+ ]
+ if printf:
+ return [
+ output_api.PresubmitError(
+ 'These files spam the console log with printf/fprintf:',
+ items=printf)
+ ]
+ return []
+
+
+def CheckForAnonymousVariables(input_api, output_api):
+ """These types are all expected to hold locks while in scope and
+ so should never be anonymous (which causes them to be immediately
+ destroyed)."""
+ they_who_must_be_named = [
+ 'base::AutoLock',
+ 'base::AutoReset',
+ 'base::AutoUnlock',
+ 'SkAutoAlphaRestore',
+ 'SkAutoBitmapShaderInstall',
+ 'SkAutoBlitterChoose',
+ 'SkAutoBounderCommit',
+ 'SkAutoCallProc',
+ 'SkAutoCanvasRestore',
+ 'SkAutoCommentBlock',
+ 'SkAutoDescriptor',
+ 'SkAutoDisableDirectionCheck',
+ 'SkAutoDisableOvalCheck',
+ 'SkAutoFree',
+ 'SkAutoGlyphCache',
+ 'SkAutoHDC',
+ 'SkAutoLockColors',
+ 'SkAutoLockPixels',
+ 'SkAutoMalloc',
+ 'SkAutoMaskFreeImage',
+ 'SkAutoMutexAcquire',
+ 'SkAutoPathBoundsUpdate',
+ 'SkAutoPDFRelease',
+ 'SkAutoRasterClipValidate',
+ 'SkAutoRef',
+ 'SkAutoTime',
+ 'SkAutoTrace',
+ 'SkAutoUnref',
+ ]
+ anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
+ # bad: base::AutoLock(lock.get());
+ # not bad: base::AutoLock lock(lock.get());
+ bad_pattern = input_api.re.compile(anonymous)
+ # good: new base::AutoLock(lock.get())
+ good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
+ errors = []
+
+ for f in input_api.AffectedFiles():
+ if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
+ continue
+ for linenum, line in f.ChangedContents():
+ if bad_pattern.search(line) and not good_pattern.search(line):
+ errors.append('%s:%d' % (f.LocalPath(), linenum))
+
+ if errors:
+ return [
+ output_api.PresubmitError(
+ 'These lines create anonymous variables that need to be named:',
+ items=errors)
+ ]
+ return []
+
+
+def CheckUniquePtrOnUpload(input_api, output_api):
+ # Returns whether |template_str| is of the form <T, U...> for some types T
+ # and U. Assumes that |template_str| is already in the form <...>.
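+ # Sketch of the intended behavior: HasMoreThanOneArg('<T, U>') is True,
+ # while HasMoreThanOneArg('<std::vector<std::pair<T, U>>>') is False,
+ # since that comma sits at nesting level 3 rather than level 1.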
+ def HasMoreThanOneArg(template_str):
+ # Level of <...> nesting.
+ nesting = 0
+ for c in template_str:
+ if c == '<':
+ nesting += 1
+ elif c == '>':
+ nesting -= 1
+ elif c == ',' and nesting == 1:
+ return True
+ return False
+
+ file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
+ sources = lambda affected_file: input_api.FilterSourceFile(
+ affected_file,
+ files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
+ DEFAULT_FILES_TO_SKIP),
+ files_to_check=file_inclusion_pattern)
+
+ # Pattern to capture a single "<...>" block of template arguments. It can
+ # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
+ # cannot handle branching structures, such as "<pair<set<T>,set<U>>>". The
+ # latter would likely require counting that < and > match, which is not
+ # expressible in regular languages. Should the need arise, one can introduce
+ # limited counting (matching up to a total number of nesting depth), which
+ # should cover all practical cases for already a low nesting limit.
+ template_arg_pattern = (
+ r'<[^>]*' # Opening block of <.
+ r'>([^<]*>)?') # Closing block of >.
+ # Prefix expressing that whatever follows is not already inside a <...>
+ # block.
+ not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
+ null_construct_pattern = input_api.re.compile(
+ not_inside_template_arg_pattern + r'\bstd::unique_ptr' +
+ template_arg_pattern + r'\(\)')
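+ # Rough illustration of the prefix at work: 'return std::unique_ptr<T>()'
+ # matches, whereas the same text appearing inside another template
+ # argument list, e.g. 'A<std::unique_ptr<T>()>', does not, because it is
+ # preceded by '<'.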
+
+ # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
+ template_arg_no_array_pattern = (
+ r'<[^>]*[^]]' # Opening block of <.
+ r'>([^(<]*[^]]>)?') # Closing block of >.
+ # Prefix saying that what follows is the start of an expression.
+ start_of_expr_pattern = r'(=|\breturn|^)\s*'
+ # Suffix saying that what follows are call parentheses with a non-empty list
+ # of arguments.
+ nonempty_arg_list_pattern = r'\(([^)]|$)'
+ # Put the template argument into a capture group for deeper examination later.
+ return_construct_pattern = input_api.re.compile(
+ start_of_expr_pattern + r'std::unique_ptr' + '(?P<template_arg>' +
+ template_arg_no_array_pattern + ')' + nonempty_arg_list_pattern)
+
+ problems_constructor = []
+ problems_nullptr = []
+ for f in input_api.AffectedSourceFiles(sources):
+ for line_number, line in f.ChangedContents():
+ # Disallow:
+ # return std::unique_ptr<T>(foo);
+ # bar = std::unique_ptr<T>(foo);
+ # But allow:
+ # return std::unique_ptr<T[]>(foo);
+ # bar = std::unique_ptr<T[]>(foo);
+ # And also allow cases when the second template argument is present. Those
+ # cases cannot be handled by std::make_unique:
+ # return std::unique_ptr<T, U>(foo);
+ # bar = std::unique_ptr<T, U>(foo);
+ local_path = f.LocalPath()
+ return_construct_result = return_construct_pattern.search(line)
+ if return_construct_result and not HasMoreThanOneArg(
+ return_construct_result.group('template_arg')):
+ problems_constructor.append(
+ '%s:%d\n %s' % (local_path, line_number, line.strip()))
+ # Disallow:
+ # std::unique_ptr<T>()
+ if null_construct_pattern.search(line):
+ problems_nullptr.append(
+ '%s:%d\n %s' % (local_path, line_number, line.strip()))
+
+ errors = []
+ if problems_nullptr:
+ errors.append(
+ output_api.PresubmitPromptWarning(
+ 'The following files use std::unique_ptr<T>(). Use nullptr instead.',
+ problems_nullptr))
+ if problems_constructor:
+ errors.append(
+ output_api.PresubmitError(
+ 'The following files use explicit std::unique_ptr constructor. '
+ 'Use std::make_unique<T>() instead, or use base::WrapUnique if '
+ 'std::make_unique is not an option.', problems_constructor))
+ return errors
+
+
+def CheckUserActionUpdate(input_api, output_api):
+ """Checks if any new user action has been added."""
+ if any('actions.xml' == input_api.os_path.basename(f)
+ for f in input_api.LocalPaths()):
+ # If actions.xml is already included in the changelist, the PRESUBMIT
+ # for actions.xml will do a more complete presubmit check.
+ return []
+
+ file_inclusion_pattern = [r'.*\.(cc|mm)$']
+ files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
+ input_api.DEFAULT_FILES_TO_SKIP)
+ file_filter = lambda f: input_api.FilterSourceFile(
+ f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
+
+ action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
+ current_actions = None
+ for f in input_api.AffectedFiles(file_filter=file_filter):
+ for line_num, line in f.ChangedContents():
+ match = input_api.re.search(action_re, line)
+ if match:
+ # Load the contents of tools/metrics/actions/actions.xml into
+ # memory. It's loaded only once.
+ if not current_actions:
+ with open('tools/metrics/actions/actions.xml',
+ encoding='utf-8') as actions_f:
+ current_actions = actions_f.read()
+ # Search for the matched user action name in |current_actions|.
+ for action_name in match.groups():
+ action = 'name="{0}"'.format(action_name)
+ if action not in current_actions:
+ return [
+ output_api.PresubmitPromptWarning(
+ 'File %s line %d: %s is missing in '
+ 'tools/metrics/actions/actions.xml. Please run '
+ 'tools/metrics/actions/extract_actions.py to update.'
+ % (f.LocalPath(), line_num, action_name))
+ ]
+ return []
+
+
+def _ImportJSONCommentEater(input_api):
+ import sys
+ sys.path = sys.path + [
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
+ 'json_comment_eater')
+ ]
+ import json_comment_eater
+ return json_comment_eater
+
+
+def _GetJSONParseError(input_api, filename, eat_comments=True):
+ try:
+ contents = input_api.ReadFile(filename)
+ if eat_comments:
+ json_comment_eater = _ImportJSONCommentEater(input_api)
+ contents = json_comment_eater.Nom(contents)
+
+ input_api.json.loads(contents)
+ except ValueError as e:
+ return e
+ return None
+
+
+def _GetIDLParseError(input_api, filename):
+ try:
+ contents = input_api.ReadFile(filename)
+ for i, char in enumerate(contents):
+ if not char.isascii():
+ return (
+ 'Non-ascii character "%s" (ord %d) found at offset %d.' %
+ (char, ord(char), i))
+ idl_schema = input_api.os_path.join(input_api.PresubmitLocalPath(),
+ 'tools', 'json_schema_compiler',
+ 'idl_schema.py')
+ process = input_api.subprocess.Popen(
+ [input_api.python3_executable, idl_schema],
+ stdin=input_api.subprocess.PIPE,
+ stdout=input_api.subprocess.PIPE,
+ stderr=input_api.subprocess.PIPE,
+ universal_newlines=True)
+ (_, error) = process.communicate(input=contents)
+ return error or None
+ except ValueError as e:
+ return e
+
+
+def CheckParseErrors(input_api, output_api):
+ """Check that IDL and JSON files do not contain syntax errors."""
+ actions = {
+ '.idl': _GetIDLParseError,
+ '.json': _GetJSONParseError,
+ }
+ # Most JSON files are preprocessed and support comments, but these do not.
+ json_no_comments_patterns = [
+ r'^testing/',
+ ]
+ # Only run IDL checker on files in these directories.
+ idl_included_patterns = [
+ r'^chrome/common/extensions/api/',
+ r'^extensions/common/api/',
+ ]
+
+ def get_action(affected_file):
+ filename = affected_file.LocalPath()
+ return actions.get(input_api.os_path.splitext(filename)[1])
+
+ def FilterFile(affected_file):
+ action = get_action(affected_file)
+ if not action:
+ return False
+ path = affected_file.LocalPath()
+
+ if _MatchesFile(input_api,
+ _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS, path):
+ return False
+
+ if (action == _GetIDLParseError
+ and not _MatchesFile(input_api, idl_included_patterns, path)):
+ return False
+ return True
+
+ results = []
+ for affected_file in input_api.AffectedFiles(file_filter=FilterFile,
+ include_deletes=False):
+ action = get_action(affected_file)
+ kwargs = {}
+ if (action == _GetJSONParseError
+ and _MatchesFile(input_api, json_no_comments_patterns,
+ affected_file.LocalPath())):
+ kwargs['eat_comments'] = False
+ parse_error = action(input_api, affected_file.AbsoluteLocalPath(),
+ **kwargs)
+ if parse_error:
+ results.append(
+ output_api.PresubmitError(
+ '%s could not be parsed: %s' %
+ (affected_file.LocalPath(), parse_error)))
+ return results
+
+
+def CheckJavaStyle(input_api, output_api):
+ """Runs checkstyle on changed java files and returns errors if any exist."""
+
+ # Return early if no java files were modified.
+ if not any(
+ _IsJavaFile(input_api, f.LocalPath())
+ for f in input_api.AffectedFiles()):
+ return []
+
+ import sys
+ original_sys_path = sys.path
+ try:
+ sys.path = sys.path + [
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
+ 'android', 'checkstyle')
+ ]
+ import checkstyle
+ finally:
+ # Restore sys.path to what it was before.
+ sys.path = original_sys_path
+
+ return checkstyle.run_presubmit(
+ input_api,
+ output_api,
+ files_to_skip=_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP)
+
+
+def CheckPythonDevilInit(input_api, output_api):
+ """Checks to make sure devil is initialized correctly in python scripts."""
+ script_common_initialize_pattern = input_api.re.compile(
+ r'script_common\.InitializeEnvironment\(')
+ devil_env_config_initialize = input_api.re.compile(
+ r'devil_env\.config\.Initialize\(')
+
+ errors = []
+
+ sources = lambda affected_file: input_api.FilterSourceFile(
+ affected_file,
+ files_to_skip=(_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP + (
+ r'^build/android/devil_chromium\.py',
+ r'^third_party/.*',
+ )),
+ files_to_check=[r'.*\.py$'])
+
+ for f in input_api.AffectedSourceFiles(sources):
+ for line_num, line in f.ChangedContents():
+ if (script_common_initialize_pattern.search(line)
+ or devil_env_config_initialize.search(line)):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'Devil initialization should always be done using '
+ 'devil_chromium.Initialize() in the chromium project, to use better '
+ 'defaults for dependencies (e.g. an up-to-date version of adb).',
+ errors))
+
+ return results
+
+
+def _MatchesFile(input_api, patterns, path):
+ # Consistently use / as path separator to simplify the writing of regex
+ # expressions.
+ path = path.replace(input_api.os_path.sep, '/')
+ for pattern in patterns:
+ if input_api.re.search(pattern, path):
+ return True
+ return False
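+# For illustration: on Windows a path such as
+#     chrome\common\extensions\api\foo.idl
+# is first normalized to chrome/common/extensions/api/foo.idl, so patterns
+# like r'^chrome/common/extensions/api/' behave the same on all platforms.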
+
+
+def _ChangeHasSecurityReviewer(input_api, owners_file):
+ """Returns True iff the CL has a reviewer from SECURITY_OWNERS.
+
+ Args:
+ input_api: The presubmit input API.
+ owners_file: OWNERS file with required reviewers. Typically, this is
+ something like ipc/SECURITY_OWNERS.
+
+ Note: if the presubmit is running for commit rather than for upload, this
+ only returns True if a security reviewer has also approved the CL.
+ """
+ # Owners-Override should bypass all additional OWNERS enforcement checks.
+ # A CR+1 vote will still be required to land this change.
+ if (input_api.change.issue and input_api.gerrit.IsOwnersOverrideApproved(
+ input_api.change.issue)):
+ return True
+
+ owner_email, reviewers = (
+ input_api.canned_checks.GetCodereviewOwnerAndReviewers(
+ input_api,
+ None,
+ approval_needed=input_api.is_committing and not input_api.dry_run))
+
+ security_owners = input_api.owners_client.ListOwners(owners_file)
+ return any(owner in reviewers for owner in security_owners)
+
+
+@dataclass
+class _SecurityProblemWithItems:
+ problem: str
+ items: Sequence[str]
+
+
+@dataclass
+class _MissingSecurityOwnersResult:
+ owners_file_problems: Sequence[_SecurityProblemWithItems]
+ has_security_sensitive_files: bool
+ missing_reviewer_problem: Optional[_SecurityProblemWithItems]
+
+
+def _FindMissingSecurityOwners(input_api,
+ output_api,
+ file_patterns: Sequence[str],
+ excluded_patterns: Sequence[str],
+ required_owners_file: str,
+ custom_rule_function: Optional[Callable] = None
+ ) -> _MissingSecurityOwnersResult:
+ """Find OWNERS files missing per-file rules for security-sensitive files.
+
+ Args:
+ input_api: the PRESUBMIT input API object.
+ output_api: the PRESUBMIT output API object.
+ file_patterns: basename patterns that require a corresponding per-file
+ security restriction.
+ excluded_patterns: path patterns that should be exempted from
+ requiring a security restriction.
+ required_owners_file: path to the required OWNERS file, e.g.
+ ipc/SECURITY_OWNERS
+ custom_rule_function: If not None, will be called with `input_api` and
+ the current file under consideration. Returning True will add an
+ exact match per-file rule check for the current file.
+ """
+
+ # `to_check` is a mapping of an OWNERS file path to Patterns.
+ #
+ # Patterns is a dictionary mapping glob patterns (suitable for use in
+ # per-file rules) to a PatternEntry.
+ #
+ # PatternEntry is a dictionary with two keys:
+ # - 'files': the files that are matched by this pattern
+ # - 'rules': the per-file rules needed for this pattern
+ #
+ # For example, if we expect an OWNERS file to contain rules for *.mojom and
+ # *_struct_traits*.*, Patterns might look like this:
+ # {
+ # '*.mojom': {
+ # 'files': ...,
+ # 'rules': [
+ # 'per-file *.mojom=set noparent',
+ # 'per-file *.mojom=file://ipc/SECURITY_OWNERS',
+ # ],
+ # },
+ # '*_struct_traits*.*': {
+ # 'files': ...,
+ # 'rules': [
+ # 'per-file *_struct_traits*.*=set noparent',
+ # 'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
+ # ],
+ # },
+ # }
+ to_check = {}
+ files_to_review = []
+
+ def AddPatternToCheck(file, pattern):
+ owners_file = input_api.os_path.join(
+ input_api.os_path.dirname(file.LocalPath()), 'OWNERS')
+ if owners_file not in to_check:
+ to_check[owners_file] = {}
+ if pattern not in to_check[owners_file]:
+ to_check[owners_file][pattern] = {
+ 'files': [],
+ 'rules': [
+ f'per-file {pattern}=set noparent',
+ f'per-file {pattern}=file://{required_owners_file}',
+ ]
+ }
+ to_check[owners_file][pattern]['files'].append(file.LocalPath())
+ files_to_review.append(file.LocalPath())
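+ # For illustration (hypothetical paths): with
+ # required_owners_file='ipc/SECURITY_OWNERS', a change adding
+ # foo/bar/baz.mojom records that foo/bar/OWNERS must contain:
+ #     per-file *.mojom=set noparent
+ #     per-file *.mojom=file://ipc/SECURITY_OWNERS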
+
+ # Only enforce security OWNERS rules for a directory if that directory has a
+ # file that matches `file_patterns`. For example, if a directory only
+ # contains *.mojom files and no *_messages*.h files, the check should only
+ # ensure that rules for *.mojom files are present.
+ for file in input_api.AffectedFiles(include_deletes=False):
+ file_basename = input_api.os_path.basename(file.LocalPath())
+ if custom_rule_function is not None and custom_rule_function(
+ input_api, file):
+ AddPatternToCheck(file, file_basename)
+ continue
+
+ if any(
+ input_api.fnmatch.fnmatch(file.LocalPath(), pattern)
+ for pattern in excluded_patterns):
+ continue
+
+ for pattern in file_patterns:
+ # Unlike `excluded_patterns`, `file_patterns` is checked only against the
+ # file's basename.
+ if input_api.fnmatch.fnmatch(file_basename, pattern):
+ AddPatternToCheck(file, pattern)
+ break
+
+ has_security_sensitive_files = bool(to_check)
+
+ # Check if any newly added lines in OWNERS files intersect with required
+ # per-file OWNERS lines. If so, ensure that a security reviewer is included.
+ # This is a hack, but is needed because the OWNERS check (by design) ignores
+ # new OWNERS entries; otherwise, a non-owner could add someone as a new
+ # OWNER and have that newly-added OWNER self-approve their own addition.
+ newly_covered_files = []
+ for file in input_api.AffectedFiles(include_deletes=False):
+ if file.LocalPath() not in to_check:
+ continue
+ for _, line in file.ChangedContents():
+ for _, entry in to_check[file.LocalPath()].items():
+ if line in entry['rules']:
+ newly_covered_files.extend(entry['files'])
+
+ missing_reviewer_problem = None
+ if newly_covered_files and not _ChangeHasSecurityReviewer(
+ input_api, required_owners_file):
+ missing_reviewer_problem = _SecurityProblemWithItems(
+ f'Review from an owner in {required_owners_file} is required for '
+ 'the following newly-added files:',
+ [f'{file}' for file in sorted(set(newly_covered_files))])
+
+ # Go through the OWNERS files to check, filtering out rules that are already
+ # present in that OWNERS file.
+ for owners_file, patterns in to_check.items():
+ try:
+ lines = set(
+ input_api.ReadFile(
+ input_api.os_path.join(input_api.change.RepositoryRoot(),
+ owners_file)).splitlines())
+ for entry in patterns.values():
+ entry['rules'] = [
+ rule for rule in entry['rules'] if rule not in lines
+ ]
+ except IOError:
+ # No OWNERS file, so all the rules are definitely missing.
+ continue
+
+ # All the remaining lines weren't found in OWNERS files, so emit an error.
+ owners_file_problems = []
+
+ for owners_file, patterns in to_check.items():
+ missing_lines = []
+ files = []
+ for _, entry in patterns.items():
+ files.extend(entry['files'])
+ missing_lines.extend(entry['rules'])
+ if missing_lines:
+ joined_missing_lines = '\n'.join(missing_lines)
+ owners_file_problems.append(
+ _SecurityProblemWithItems(
+ 'Found missing OWNERS lines for security-sensitive files. '
+ f'Please add the following lines to {owners_file}:\n'
+ f'{joined_missing_lines}\n\nTo ensure security review for:',
+ files))
+
+ return _MissingSecurityOwnersResult(owners_file_problems,
+ has_security_sensitive_files,
+ missing_reviewer_problem)
+
+
+def _CheckChangeForIpcSecurityOwners(input_api, output_api):
+ # Whether or not a file affects IPC is (mostly) determined by a simple list
+ # of filename patterns.
+ file_patterns = [
+ # Legacy IPC:
+ '*_messages.cc',
+ '*_messages*.h',
+ '*_param_traits*.*',
+ # Mojo IPC:
+ '*.mojom',
+ '*_mojom_traits*.*',
+ '*_type_converter*.*',
+ # Android native IPC:
+ '*.aidl',
+ ]
+
+ excluded_patterns = [
+ # These third_party directories do not contain IPCs, but contain files
+ # matching the above patterns, which trigger false positives.
+ 'third_party/crashpad/*',
+ 'third_party/blink/renderer/platform/bindings/*',
+ 'third_party/protobuf/benchmarks/python/*',
+ 'third_party/win_build_output/*',
+ # Enum-only mojoms used for web metrics, so no security review needed.
+ 'third_party/blink/public/mojom/use_counter/metrics/*',
+ # These files are just used to communicate between class loaders running
+ # in the same process.
+ 'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
+ 'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',
+ ]
+
+ def IsMojoServiceManifestFile(input_api, file):
+ manifest_pattern = input_api.re.compile(r'manifests?\.(cc|h)$')
+ test_manifest_pattern = input_api.re.compile(r'test_manifests?\.(cc|h)')
+ if not manifest_pattern.search(file.LocalPath()):
+ return False
+
+ if test_manifest_pattern.search(file.LocalPath()):
+ return False
+
+ # All actual service manifest files should contain at least one
+ # qualified reference to service_manager::Manifest.
+ return any('service_manager::Manifest' in line
+ for line in file.NewContents())
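+ # Illustrative example (hypothetical file): services/foo/manifest.cc
+ # matches manifest_pattern, is not a test manifest, and, if it mentions
+ # service_manager::Manifest, gets an exact-basename per-file rule check
+ # via custom_rule_function below.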
+
+ return _FindMissingSecurityOwners(
+ input_api,
+ output_api,
+ file_patterns,
+ excluded_patterns,
+ 'ipc/SECURITY_OWNERS',
+ custom_rule_function=IsMojoServiceManifestFile)
+
+
+def _CheckChangeForFuchsiaSecurityOwners(input_api, output_api):
+ file_patterns = [
+ # Component specifications.
+ '*.cml', # Component Framework v2.
+ '*.cmx', # Component Framework v1.
+
+ # Fuchsia IDL protocol specifications.
+ '*.fidl',
+ ]
+
+ # Don't check for owners files for changes in these directories.
+ excluded_patterns = [
+ 'third_party/crashpad/*',
+ ]
+
+ return _FindMissingSecurityOwners(input_api, output_api, file_patterns,
+ excluded_patterns,
+ 'build/fuchsia/SECURITY_OWNERS')
+
+
+def CheckSecurityOwners(input_api, output_api):
+ """Checks that various security-sensitive files have an IPC OWNERS rule."""
+ ipc_results = _CheckChangeForIpcSecurityOwners(input_api, output_api)
+ fuchsia_results = _CheckChangeForFuchsiaSecurityOwners(
+ input_api, output_api)
+
+ if ipc_results.has_security_sensitive_files:
+ output_api.AppendCC('ipc-security-reviews@chromium.org')
+
+ results = []
+
+ missing_reviewer_problems = []
+ if ipc_results.missing_reviewer_problem:
+ missing_reviewer_problems.append(ipc_results.missing_reviewer_problem)
+ if fuchsia_results.missing_reviewer_problem:
+ missing_reviewer_problems.append(
+ fuchsia_results.missing_reviewer_problem)
+
+ # Missing reviewers are an error unless there's no issue number
+ # associated with this branch; in that case, the presubmit is being run
+ # with --all or --files.
+ #
+ # Note that upload should never be an error; otherwise, it would be
+ # impossible to upload changes at all.
+ if input_api.is_committing and input_api.change.issue:
+ make_presubmit_message = output_api.PresubmitError
+ else:
+ make_presubmit_message = output_api.PresubmitNotifyResult
+ for problem in missing_reviewer_problems:
+ results.append(
+ make_presubmit_message(problem.problem, items=problem.items))
+
+ owners_file_problems = []
+ owners_file_problems.extend(ipc_results.owners_file_problems)
+ owners_file_problems.extend(fuchsia_results.owners_file_problems)
+
+ for problem in owners_file_problems:
+ # Missing per-file rules are always an error. While swarming and caching
+ # means that uploading a patchset with updated OWNERS files and sending
+ # it to the CQ again should not have a large incremental cost, it is
+ # still frustrating to discover the error only after the change has
+ # already been uploaded.
+ results.append(
+ output_api.PresubmitError(problem.problem, items=problem.items))
+
+ return results
+
+
+def _GetFilesUsingSecurityCriticalFunctions(input_api):
+ """Checks affected files for changes to security-critical calls. This
+ function checks the full change diff, to catch both additions/changes
+ and removals.
+
+ Returns a dict keyed by file name, and the value is a set of detected
+ functions.
+ """
+ # Map of function pretty name (displayed in an error) to the pattern to
+ # match it with.
+ _PATTERNS_TO_CHECK = {
+ 'content::GetServiceSandboxType<>()': 'GetServiceSandboxType\\<'
+ }
+ _PATTERNS_TO_CHECK = {
+ k: input_api.re.compile(v)
+ for k, v in _PATTERNS_TO_CHECK.items()
+ }
+
+ # We don't want to trigger on strings within this file.
+ def presubmit_file_filter(f):
+ return 'PRESUBMIT.py' != input_api.os_path.split(f.LocalPath())[1]
+
+ # Scan all affected files for changes touching _PATTERNS_TO_CHECK.
+ files_to_functions = {}
+ for f in input_api.AffectedFiles(file_filter=presubmit_file_filter):
+ diff = f.GenerateScmDiff()
+ for line in diff.split('\n'):
+ # Not using just RightHandSideLines() because removing a
+ # call to a security-critical function can be just as important
+ # as adding or changing the arguments.
+ if line.startswith('-') or (line.startswith('+')
+ and not line.startswith('++')):
+ for name, pattern in _PATTERNS_TO_CHECK.items():
+ if pattern.search(line):
+ path = f.LocalPath()
+ files_to_functions.setdefault(path, set()).add(name)
+ return files_to_functions
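+# Illustrative diff lines (hypothetical): both
+#     -  return content::GetServiceSandboxType<OldService>();
+#     +  return content::GetServiceSandboxType<NewService>();
+# are recorded above, while '+++ b/path' diff headers are excluded by the
+# startswith('++') test.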
+
+
+def CheckSecurityChanges(input_api, output_api):
+ """Checks that changes involving security-critical functions are reviewed
+ by the security team.
+ """
+ files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
+ if not files_to_functions:
+ return []
+
+ owners_file = 'ipc/SECURITY_OWNERS'
+ if _ChangeHasSecurityReviewer(input_api, owners_file):
+ return []
+
+ msg = 'The following files change calls to security-sensitive functions\n' \
+ 'that need to be reviewed by {}.\n'.format(owners_file)
+ for path, names in files_to_functions.items():
+ msg += ' {}\n'.format(path)
+ for name in names:
+ msg += ' {}\n'.format(name)
+ msg += '\n'
+
+ if input_api.is_committing:
+ output = output_api.PresubmitError
+ else:
+ output = output_api.PresubmitNotifyResult
+ return [output(msg)]
+
+
+def CheckSetNoParent(input_api, output_api):
+ """Checks that set noparent is only used together with an OWNERS file in
+ //build/OWNERS.setnoparent (see also
+ //docs/code_reviews.md#owners-files-details)
+ """
+ # Return early if no OWNERS files were modified.
+ if not any(f.LocalPath().endswith('OWNERS')
+ for f in input_api.AffectedFiles(include_deletes=False)):
+ return []
+
+ errors = []
+
+ allowed_owners_files_file = 'build/OWNERS.setnoparent'
+ allowed_owners_files = set()
+ with open(allowed_owners_files_file, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ allowed_owners_files.add(line)
+
+ per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')
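+ # For example, the OWNERS line
+ #     per-file *.mojom=file://ipc/SECURITY_OWNERS
+ # parses into glob '*.mojom' and directive 'file://ipc/SECURITY_OWNERS'.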
+
+ for f in input_api.AffectedFiles(include_deletes=False):
+ if not f.LocalPath().endswith('OWNERS'):
+ continue
+
+ found_owners_files = set()
+ found_set_noparent_lines = dict()
+
+ # Parse the OWNERS file.
+ for lineno, line in enumerate(f.NewContents(), 1):
+ line = line.strip()
+ if line.startswith('set noparent'):
+ found_set_noparent_lines[''] = lineno
+ if line.startswith('file://'):
+ if line in allowed_owners_files:
+ found_owners_files.add('')
+ if line.startswith('per-file'):
+ match = per_file_pattern.match(line)
+ if match:
+ glob = match.group(1).strip()
+ directive = match.group(2).strip()
+ if directive == 'set noparent':
+ found_set_noparent_lines[glob] = lineno
+ if directive.startswith('file://'):
+ if directive in allowed_owners_files:
+ found_owners_files.add(glob)
+
+ # Check that every set noparent line has a corresponding file:// line
+ # listed in build/OWNERS.setnoparent. An exception is made for top level
+ # directories since src/OWNERS shouldn't review them.
+ linux_path = f.LocalPath().replace(input_api.os_path.sep, '/')
+ if (linux_path.count('/') != 1
+ and linux_path not in _EXCLUDED_SET_NO_PARENT_PATHS):
+ for set_noparent_line in found_set_noparent_lines:
+ if set_noparent_line in found_owners_files:
+ continue
+ errors.append(' %s:%d' %
+ (linux_path,
+ found_set_noparent_lines[set_noparent_line]))
+
+ results = []
+ if errors:
+ if input_api.is_committing:
+ output = output_api.PresubmitError
+ else:
+ output = output_api.PresubmitPromptWarning
+ results.append(
+ output(
+ 'Found the following "set noparent" restrictions in OWNERS files that '
+ 'do not include owners from build/OWNERS.setnoparent:',
+ long_text='\n\n'.join(errors)))
+ return results
+
+
+def CheckUselessForwardDeclarations(input_api, output_api):
+ """Checks that added or removed lines in non third party affected
+ header files do not lead to new useless class or struct forward
+ declaration.
+ """
+ results = []
+ class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
+ input_api.re.MULTILINE)
+ struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
+ input_api.re.MULTILINE)
+ for f in input_api.AffectedFiles(include_deletes=False):
+ if (f.LocalPath().startswith('third_party')
+ and not f.LocalPath().startswith('third_party/blink')
+ and not f.LocalPath().startswith('third_party\\blink')):
+ continue
+
+ if not f.LocalPath().endswith('.h'):
+ continue
+
+ contents = input_api.ReadFile(f)
+ fwd_decls = input_api.re.findall(class_pattern, contents)
+ fwd_decls.extend(input_api.re.findall(struct_pattern, contents))
+
+ useless_fwd_decls = []
+ for decl in fwd_decls:
+ count = sum(1 for _ in input_api.re.finditer(
+ r'\b%s\b' % input_api.re.escape(decl), contents))
+ if count == 1:
+ useless_fwd_decls.append(decl)
+
+ if not useless_fwd_decls:
+ continue
+
+ for line in f.GenerateScmDiff().splitlines():
+ if (line.startswith('-') and not line.startswith('--')
+ or line.startswith('+') and not line.startswith('++')):
+ for decl in useless_fwd_decls:
+ if input_api.re.search(r'\b%s\b' % decl, line[1:]):
+ results.append(
+ output_api.PresubmitPromptWarning(
+ '%s: %s forward declaration is no longer needed'
+ % (f.LocalPath(), decl)))
+ useless_fwd_decls.remove(decl)
+
+ return results
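+# Illustrative example (hypothetical header): if foo.h gained or lost
+#     class Foo;
+# and the token Foo appears nowhere else in the file, the count above is 1
+# (the declaration itself), so the declaration is reported as useless.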
+
+
+def _CheckAndroidDebuggableBuild(input_api, output_api):
+ """Checks that code uses BuildInfo.isDebugAndroid() instead of
+ Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
+ this is a debuggable build of Android.
+ """
+ build_type_check_pattern = input_api.re.compile(
+ r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
+
+ errors = []
+
+ sources = lambda affected_file: input_api.FilterSourceFile(
+ affected_file,
+ files_to_skip=(
+ _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
+ DEFAULT_FILES_TO_SKIP + (
+ r"^android_webview/support_library/boundary_interfaces/",
+ r"^chrome/android/webapk/.*",
+ r'^third_party/.*',
+ r"tools/android/customtabs_benchmark/.*",
+ r"webview/chromium/License.*",
+ )),
+ files_to_check=[r'.*\.java$'])
+
+ for f in input_api.AffectedSourceFiles(sources):
+ for line_num, line in f.ChangedContents():
+ if build_type_check_pattern.search(line):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+
+ if errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
+ ' Please use BuildInfo.isDebugAndroid() instead.', errors))
+
+ return results
+
+# TODO: add unit tests
+def _CheckAndroidToastUsage(input_api, output_api):
+ """Checks that code uses org.chromium.ui.widget.Toast instead of
+ android.widget.Toast (Chromium Toast doesn't force hardware
+ acceleration on low-end devices, saving memory).
+ """
+ toast_import_pattern = input_api.re.compile(
+ r'^import android\.widget\.Toast;$')
+
+ errors = []
+
+ sources = lambda affected_file: input_api.FilterSourceFile(
+ affected_file,
+ files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
+ DEFAULT_FILES_TO_SKIP + (r'^chromecast/.*',
+ r'^remoting/.*')),
+ files_to_check=[r'.*\.java$'])
+
+ for f in input_api.AffectedSourceFiles(sources):
+ for line_num, line in f.ChangedContents():
+ if toast_import_pattern.search(line):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'android.widget.Toast usage is detected. Android toasts use hardware'
+ ' acceleration, and can be\ncostly on low-end devices. Please use'
+ ' org.chromium.ui.widget.Toast instead.\n'
+ 'Contact dskiba@chromium.org if you have any questions.',
+ errors))
+
+ return results
+
+
+def _CheckAndroidCrLogUsage(input_api, output_api):
+ """Checks that new logs using org.chromium.base.Log:
+ - Are using 'TAG' as variable name for the tags (warn)
+ - Are using a tag that is at most 20 characters long (error)
+ """
+
+ # Do not check format of logs in the given files
+ cr_log_check_excluded_paths = [
+ # //chrome/android/webapk cannot depend on //base
+ r"^chrome/android/webapk/.*",
+ # WebView license viewer code cannot depend on //base; used in stub APK.
+ r"^android_webview/glue/java/src/com/android/"
+ r"webview/chromium/License.*",
+ # The customtabs_benchmark is a small app that does not depend on Chromium
+ # java pieces.
+ r"tools/android/customtabs_benchmark/.*",
+ ]
+
+ cr_log_import_pattern = input_api.re.compile(
+ r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
+ class_in_base_pattern = input_api.re.compile(
+ r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
+ has_some_log_import_pattern = input_api.re.compile(r'^import .*\.Log;$',
+ input_api.re.MULTILINE)
+ # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
+ log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
+ log_decl_pattern = input_api.re.compile(
+ r'static final String TAG = "(?P<name>(.*))"')
+ rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')
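+ # For example, 'Log.d(TAG, "size: " + size);' captures tag 'TAG' and
+ # passes, while 'Log.d("MyTag", msg);' captures '"MyTag' and is reported
+ # by the TAG check below.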
+
+ REF_MSG = ('See docs/android_logging.md for more info.')
+ sources = lambda x: input_api.FilterSourceFile(
+ x,
+ files_to_check=[r'.*\.java$'],
+ files_to_skip=cr_log_check_excluded_paths)
+
+ tag_decl_errors = []
+ tag_length_errors = []
+ tag_errors = []
+ tag_with_dot_errors = []
+ util_log_errors = []
+
+ for f in input_api.AffectedSourceFiles(sources):
+ file_content = input_api.ReadFile(f)
+ has_modified_logs = False
+ # Per line checks
+ if (cr_log_import_pattern.search(file_content)
+ or (class_in_base_pattern.search(file_content)
+ and not has_some_log_import_pattern.search(file_content))):
+ # Checks to run for files using cr log
+ for line_num, line in f.ChangedContents():
+ if rough_log_decl_pattern.search(line):
+ has_modified_logs = True
+
+ # Check if the new line is doing some logging
+ match = log_call_pattern.search(line)
+ if match:
+ has_modified_logs = True
+
+ # Make sure it uses "TAG"
+ if not match.group('tag') == 'TAG':
+ tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
+ else:
+ # Report non cr Log function calls in changed lines
+ for line_num, line in f.ChangedContents():
+ if log_call_pattern.search(line):
+ util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ # Per file checks
+ if has_modified_logs:
+ # Make sure the tag is using the "cr" prefix and is not too long
+ match = log_decl_pattern.search(file_content)
+ tag_name = match.group('name') if match else None
+ if not tag_name:
+ tag_decl_errors.append(f.LocalPath())
+ elif len(tag_name) > 20:
+ tag_length_errors.append(f.LocalPath())
+ elif '.' in tag_name:
+ tag_with_dot_errors.append(f.LocalPath())
+
+ results = []
+ if tag_decl_errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Please define your tags using the suggested format:\n'
+ ' private static final String TAG = "<package tag>";\n'
+ 'They will be prepended with "cr_" automatically.\n' + REF_MSG,
+ tag_decl_errors))
+
+ if tag_length_errors:
+ results.append(
+ output_api.PresubmitError(
+ 'The tag length is restricted by the system to be at most '
+ '20 characters.\n' + REF_MSG, tag_length_errors))
+
+ if tag_errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Please use a variable named "TAG" for your log tags.\n' +
+ REF_MSG, tag_errors))
+
+ if util_log_errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
+ util_log_errors))
+
+ if tag_with_dot_errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'Dots in log tags cause them to be elided in crash reports.\n' +
+ REF_MSG, tag_with_dot_errors))
+
+ return results
+
+
+def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
+ """Checks that junit.framework.* is no longer used."""
+ deprecated_junit_framework_pattern = input_api.re.compile(
+ r'^import junit\.framework\..*;', input_api.re.MULTILINE)
+ sources = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=[r'.*\.java$'], files_to_skip=None)
+ errors = []
+ for f in input_api.AffectedFiles(file_filter=sources):
+ for line_num, line in f.ChangedContents():
+ if deprecated_junit_framework_pattern.search(line):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'APIs from junit.framework.* are deprecated; please use the JUnit4'
+ ' framework (org.junit.*) from //third_party/junit. Contact'
+ ' yolandyan@chromium.org if you have any questions.', errors))
+ return results
+
+
+def _CheckAndroidTestJUnitInheritance(input_api, output_api):
+ """Checks that if new Java test classes have inheritance.
+ Either the new test class is JUnit3 test or it is a JUnit4 test class
+ with a base class, either case is undesirable.
+ """
+ class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')
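+ # For example, a newly added file containing
+ #     public class FooTest extends TestBase {
+ # is flagged: the declaration sets the flag and ' extends ' appears
+ # before the opening brace clears it.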
+
+ sources = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=[r'.*Test\.java$'], files_to_skip=None)
+ errors = []
+ for f in input_api.AffectedFiles(file_filter=sources):
+ if not f.OldContents():
+ class_declaration_start_flag = False
+ for line_num, line in f.ChangedContents():
+ if class_declaration_pattern.search(line):
+ class_declaration_start_flag = True
+ if class_declaration_start_flag and ' extends ' in line:
+ errors.append('%s:%d' % (f.LocalPath(), line_num))
+ if '{' in line:
+ class_declaration_start_flag = False
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitPromptWarning(
+ 'The newly created files include Test classes that inherit from a'
+ ' base class. Please do not use inheritance in JUnit4 tests or add'
+ ' new JUnit3 tests. Contact yolandyan@chromium.org if you have any'
+ ' questions.', errors))
+ return results
+
+
+def _CheckAndroidTestAnnotationUsage(input_api, output_api):
+ """Checks that android.test.suitebuilder.annotation.* is no longer used."""
+ deprecated_annotation_import_pattern = input_api.re.compile(
+ r'^import android\.test\.suitebuilder\.annotation\..*;',
+ input_api.re.MULTILINE)
+ sources = lambda x: input_api.FilterSourceFile(
+ x, files_to_check=[r'.*\.java$'], files_to_skip=None)
+ errors = []
+ for f in input_api.AffectedFiles(file_filter=sources):
+ for line_num, line in f.ChangedContents():
+ if deprecated_annotation_import_pattern.search(line):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'Annotations in android.test.suitebuilder.annotation have been'
+ ' deprecated since API level 24. Please use androidx.test.filters'
+ ' from //third_party/androidx:androidx_test_runner_java instead.'
+ ' Contact yolandyan@chromium.org if you have any questions.',
+ errors))
+ return results
+
+
+def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
+ """Checks if MDPI assets are placed in a correct directory."""
+ def file_filter(f):
+ sep = input_api.os_path.sep
+ return (f.LocalPath().endswith('.png')
+ and ('/res/drawable/'.replace('/', sep) in f.LocalPath()
+ or '/res/drawable-ldrtl/'.replace('/', sep) in f.LocalPath()))
+ errors = []
+ for f in input_api.AffectedFiles(include_deletes=False,
+ file_filter=file_filter):
+ errors.append(' %s' % f.LocalPath())
+
+ results = []
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'MDPI assets should be placed in /res/drawable-mdpi/ or '
+ '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and '
+ '/res/drawable-ldrtl/.\n'
+ 'Contact newt@chromium.org if you have questions.', errors))
+ return results
+
+
+def _CheckAndroidWebkitImports(input_api, output_api):
+ """Checks that code uses org.chromium.base.Callback instead of
+ android.webview.ValueCallback except in the WebView glue layer
+ and WebLayer.
+ """
+ valuecallback_import_pattern = input_api.re.compile(
+ r'^import android\.webkit\.ValueCallback;$')
+
+ errors = []
+
+ sources = lambda affected_file: input_api.FilterSourceFile(
+ affected_file,
+ files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
+ DEFAULT_FILES_TO_SKIP + (
+ r'^android_webview/glue/.*',
+ r'^weblayer/.*',
+ )),
+ files_to_check=[r'.*\.java$'])
+
+ for f in input_api.AffectedSourceFiles(sources):
+ for line_num, line in f.ChangedContents():
+ if valuecallback_import_pattern.search(line):
+ errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+ results = []
+
+ if errors:
+ results.append(
+ output_api.PresubmitError(
+ 'android.webkit.ValueCallback usage is detected outside of the glue'
+ ' layer. To stay compatible with the support library, android.webkit.*'
+ ' classes should only be used inside the glue layer and'
+ ' org.chromium.base.Callback should be used instead.', errors))
+
+ return results
+
+
+def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
+ """Checks Android XML styles """
+
+ # Return early if no relevant files were modified.
+ if not any(
+ _IsXmlOrGrdFile(input_api, f.LocalPath())
+ for f in input_api.AffectedFiles(include_deletes=False)):
+ return []
+
+ import sys
+ original_sys_path = sys.path
+ try:
+ sys.path = sys.path + [
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
+ 'android', 'checkxmlstyle')
+ ]
+ import checkxmlstyle
+ finally:
+ # Restore sys.path to what it was before.
+ sys.path = original_sys_path
+
+ if is_check_on_upload:
+ return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
+ else:
+ return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
+
+
+def _CheckAndroidInfoBarDeprecation(input_api, output_api):
+ """Checks Android Infobar Deprecation """
+
+ import sys
+ original_sys_path = sys.path
+ try:
+ sys.path = sys.path + [
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
+ 'android', 'infobar_deprecation')
+ ]
+ import infobar_deprecation
+ finally:
+ # Restore sys.path to what it was before.
+ sys.path = original_sys_path
+
+ return infobar_deprecation.CheckDeprecationOnUpload(input_api, output_api)
+
+
+class _PydepsCheckerResult:
+ def __init__(self, cmd, pydeps_path, process, old_contents):
+ self._cmd = cmd
+ self._pydeps_path = pydeps_path
+ self._process = process
+ self._old_contents = old_contents
+
+ def GetError(self):
+ """Returns an error message, or None."""
+ import difflib
+ new_contents = self._process.stdout.read().splitlines()[2:]
+ if self._process.wait() != 0:
+ # STDERR should already be printed.
+ return 'Command failed: ' + self._cmd
+ if self._old_contents != new_contents:
+ diff = '\n'.join(
+ difflib.context_diff(self._old_contents, new_contents))
+ return ('File is stale: {}\n'
+ 'Diff (apply to fix):\n'
+ '{}\n'
+ 'To regenerate, run:\n\n'
+ ' {}').format(self._pydeps_path, diff, self._cmd)
+ return None
+
+
+class PydepsChecker:
+ def __init__(self, input_api, pydeps_files):
+ self._file_cache = {}
+ self._input_api = input_api
+ self._pydeps_files = pydeps_files
+
+ def _LoadFile(self, path):
+ """Returns the list of paths within a .pydeps file relative to //."""
+ if path not in self._file_cache:
+ with open(path, encoding='utf-8') as f:
+ self._file_cache[path] = f.read()
+ return self._file_cache[path]
+
+ def _ComputeNormalizedPydepsEntries(self, pydeps_path):
+ """Returns an iterable of paths within the .pydep, relativized to //."""
+ pydeps_data = self._LoadFile(pydeps_path)
+ uses_gn_paths = '--gn-paths' in pydeps_data
+ entries = (l for l in pydeps_data.splitlines()
+ if not l.startswith('#'))
+ if uses_gn_paths:
+ # Paths look like: //foo/bar/baz
+ return (e[2:] for e in entries)
+ else:
+ # Paths look like: path/relative/to/file.pydeps
+ os_path = self._input_api.os_path
+ pydeps_dir = os_path.dirname(pydeps_path)
+ return (os_path.normpath(os_path.join(pydeps_dir, e))
+ for e in entries)
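+ # Illustrative example (hypothetical entries): for base/foo.pydeps, a
+ # GN-style entry '//base/bar.py' normalizes to 'base/bar.py', while a
+ # relative entry '../bar.py' is joined with the pydeps directory and
+ # normalized to 'bar.py'.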
+
+ def _CreateFilesToPydepsMap(self):
+ """Returns a map of local_path -> list_of_pydeps."""
+ ret = {}
+ for pydep_local_path in self._pydeps_files:
+ for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
+ ret.setdefault(path, []).append(pydep_local_path)
+ return ret
+
+ def ComputeAffectedPydeps(self):
+ """Returns an iterable of .pydeps files that might need regenerating."""
+ affected_pydeps = set()
+ file_to_pydeps_map = None
+ for f in self._input_api.AffectedFiles(include_deletes=True):
+ local_path = f.LocalPath()
+ # Changes to DEPS can lead to .pydeps changes if any .py files are in
+ # subrepositories. We can't figure out which files change, so re-check
+ # all files.
+ # Changes to print_python_deps.py affect all .pydeps.
+ if local_path in ('DEPS', 'PRESUBMIT.py'
+ ) or local_path.endswith('print_python_deps.py'):
+ return self._pydeps_files
+ elif local_path.endswith('.pydeps'):
+ if local_path in self._pydeps_files:
+ affected_pydeps.add(local_path)
+ elif local_path.endswith('.py'):
+ if file_to_pydeps_map is None:
+ file_to_pydeps_map = self._CreateFilesToPydepsMap()
+ affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
+ return affected_pydeps
+
+ def DetermineIfStaleAsync(self, pydeps_path):
+ """Runs print_python_deps.py to see if the files is stale."""
+ import os
+
+ old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
+ if old_pydeps_data:
+ cmd = old_pydeps_data[1][1:].strip()
+ if '--output' not in cmd:
+ cmd += ' --output ' + pydeps_path
+ old_contents = old_pydeps_data[2:]
+ else:
+ # A default cmd that should work in most cases (as long as the pydeps
+ # filename matches the script name) so that PRESUBMIT.py does not
+ # crash if the pydeps file is empty or new.
+ cmd = 'build/print_python_deps.py {} --root={} --output={}'.format(
+ pydeps_path[:-4], os.path.dirname(pydeps_path), pydeps_path)
+ old_contents = []
+ env = dict(os.environ)
+ env['PYTHONDONTWRITEBYTECODE'] = '1'
+ process = self._input_api.subprocess.Popen(
+ cmd + ' --output ""',
+ shell=True,
+ env=env,
+ stdout=self._input_api.subprocess.PIPE,
+ encoding='utf-8')
+ return _PydepsCheckerResult(cmd, pydeps_path, process, old_contents)
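+ # A typical .pydeps file starts with a header like (illustrative):
+ #     # Generated by running:
+ #     #   build/print_python_deps.py --root build --output build/foo.pydeps build/foo.py
+ # The command is recovered from the second line (minus the leading '#')
+ # and re-run with a trailing --output "" (the last flag wins) so the
+ # fresh list goes to stdout for comparison against the cached contents.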
+
+
+def _ParseGclientArgs():
+ args = {}
+ with open('build/config/gclient_args.gni', 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ attribute, value = line.split('=', 1)
+ args[attribute.strip()] = value.strip()
+ return args
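+# For example, a gclient_args.gni line such as
+#     checkout_android = true
+# yields args['checkout_android'] == 'true' (values stay as strings, which
+# is why the caller below compares against the string 'true').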
+
+
+def CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
+ """Checks if a .pydeps file needs to be regenerated."""
+ # This check is for Python dependency lists (.pydeps files), and involves
+ # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
+ # doesn't work on Windows and Mac, so run it only on Linux.
+ if not input_api.platform.startswith('linux'):
+ return []
+
+ results = []
+ # First, check for new / deleted .pydeps.
+ for f in input_api.AffectedFiles(include_deletes=True):
+ # Check whether we are running the presubmit check for a file in src.
+ # f.LocalPath() is relative to the repo (src, or an internal repo),
+ # while os_path.exists() resolves relative to the src checkout (the
+ # working directory). So if os_path.exists() returns True, f.LocalPath()
+ # is relative to src and the pydeps file lives in src.
+ if f.LocalPath().endswith('.pydeps'):
+ if input_api.os_path.exists(f.LocalPath()):
+ if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
+ results.append(
+ output_api.PresubmitError(
+ 'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
+ 'remove %s' % f.LocalPath()))
+ elif f.Action() != 'D' and f.LocalPath(
+ ) not in _ALL_PYDEPS_FILES:
+ results.append(
+ output_api.PresubmitError(
+ 'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
+ 'include %s' % f.LocalPath()))
+
+ if results:
+ return results
+
+ is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
+ checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
+ affected_pydeps = set(checker.ComputeAffectedPydeps())
+ affected_android_pydeps = affected_pydeps.intersection(
+ set(_ANDROID_SPECIFIC_PYDEPS_FILES))
+ if affected_android_pydeps and not is_android:
+ results.append(
+ output_api.PresubmitPromptOrNotify(
+ 'You have changed python files that may affect pydeps for android\n'
+ 'specific scripts. However, the relevant presubmit check cannot be\n'
+ 'run because you are not using an Android checkout. To validate that\n'
+ 'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
+ 'use the android-internal-presubmit optional trybot.\n'
+ 'Possibly stale pydeps files:\n{}'.format(
+ '\n'.join(affected_android_pydeps))))
+
+ all_pydeps = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
+ pydeps_to_check = affected_pydeps.intersection(all_pydeps)
+ # Process these concurrently, as each one takes 1-2 seconds.
+ pydep_results = [checker.DetermineIfStaleAsync(p) for p in pydeps_to_check]
+ for result in pydep_results:
+ error_msg = result.GetError()
+ if error_msg:
+ results.append(output_api.PresubmitError(error_msg))
+
+ return results
+
+
+def CheckSingletonInHeaders(input_api, output_api):
+ """Checks to make sure no header files have |Singleton<|."""
+
+ def FileFilter(affected_file):
+ # It's ok for base/memory/singleton.h to have |Singleton<|.
+ files_to_skip = (_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP +
+ (r"^base/memory/singleton\.h$",
+ r"^net/quic/platform/impl/quic_singleton_impl\.h$"))
+ return input_api.FilterSourceFile(affected_file,
+ files_to_skip=files_to_skip)
+
+ pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
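+ # The negative lookbehind lets friend declarations such as
+ #     friend class base::Singleton<Foo>;
+ # pass, while flagging uses like 'base::Singleton<Foo>::get()'.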
+ files = []
+ for f in input_api.AffectedSourceFiles(FileFilter):
+ if f.LocalPath().endswith(('.h', '.hxx', '.hpp', '.inl')):
+ contents = input_api.ReadFile(f)
+ for line in contents.splitlines(False):
+ # Skip lines that are entirely a C++ comment.
+ if (not line.lstrip().startswith('//')
+ and pattern.search(line)):
+ files.append(f)
+ break
+
+ if files:
+ return [
+ output_api.PresubmitError(
+ 'Found base::Singleton<T> in the following header files.\n' +
+ 'Please move them to an appropriate source file so that the ' +
+ 'template gets instantiated in a single compilation unit.',
+ files)
+ ]