1 # Copyright 2012 The Chromium Authors
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts/
8 for more details about the presubmit API built into depot_tools.
11 from typing import Callable
12 from typing import Optional
13 from typing import Sequence
14 from dataclasses import dataclass
# Opt in to the v2 presubmit API from depot_tools (per-check functions are
# discovered and run individually instead of via CheckChangeOnUpload/Commit).
PRESUBMIT_VERSION = '2.0.0'
21 (r"chrome/android/webapk/shell_apk/src/org/chromium"
22 r"/webapk/lib/runtime_library/IWebApkApi.java"),
23 # File needs to write to stdout to emulate a tool it's replacing.
24 r"chrome/updater/mac/keystone/ksadmin.mm",
26 (r"^components/variations/proto/devtools/"
27 r"client_variations.js"),
28 # These are video files, not typescript.
29 r"^media/test/data/.*.ts",
30 r"^native_client_sdksrc/build_tools/make_rules.py",
31 r"^native_client_sdk/src/build_tools/make_simple.py",
32 r"^native_client_sdk/src/tools/.*.mk",
33 r"^net/tools/spdyshark/.*",
35 r"^third_party/blink/.*",
36 r"^third_party/breakpad/.*",
37 # sqlite is an imported third party dependency.
38 r"^third_party/sqlite/.*",
42 r".+_pb2(_grpc)?\.py$",
44 r"^gpu/config/.*_list_json\.cc$",
45 r"tools/md_browser/.*\.css$",
46 # Test pages for Maps telemetry tests.
47 r"tools/perf/page_sets/maps_perf_test.*",
48 # Test pages for WebRTC telemetry tests.
49 r"tools/perf/page_sets/webrtc_cases.*",
50 # Test file compared with generated output.
51 r"tools/polymer/tests/html_to_wrapper/.*.html.ts$",
54 _EXCLUDED_SET_NO_PARENT_PATHS = (
55 # It's for historical reasons that blink isn't a top level directory, where
56 # it would be allowed to have "set noparent" to avoid top level owners
57 # accidentally +1ing changes.
58 'third_party/blink/OWNERS',
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files. Anchored at end-of-string so it matches only the
# file extension, not a substring of the path.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Fragment of a regular expression that matches C++ and Objective-C++
# header files. Anchored at end-of-string so it matches only the extension.
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
72 # Paths with sources that don't use //base.
73 _NON_BASE_DEPENDENT_PATHS = (
74 r"^chrome/browser/browser_switcher/bho/",
79 # Regular expression that matches code only used for test binaries
81 _TEST_CODE_EXCLUDED_PATHS = (
82 r'.*/(fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
83 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
84 # Test suite files, like:
86 # bar_unittest_mac.cc (suffix)
87 # baz_unittests.cc (plural)
88 r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(s)?(_[a-z]+)?%s' %
89 _IMPLEMENTATION_EXTENSIONS,
90 r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
91 r'.+sync_service_impl_harness%s' % _IMPLEMENTATION_EXTENSIONS,
92 r'.*/(test|tool(s)?)/.*',
93 # content_shell is used for running content_browsertests.
96 r'content/web_test/.*',
97 # Non-production example code.
99 # Launcher for running iOS tests on the simulator.
100 r'testing/iossim/iossim\.mm$',
101 # EarlGrey app side code for tests.
102 r'ios/.*_app_interface\.mm$',
103 # Views Examples code
104 r'ui/views/examples/.*',
# Matches paths under third_party/ EXCEPT third_party/blink/ (via negative
# lookahead). Used below as an excluded-paths entry so most third-party code
# is exempt from the banned-function checks while Blink is still covered.
_THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
111 _TEST_ONLY_WARNING = (
112 'You might be calling functions intended only for testing from\n'
113 'production code. If you are doing this from inside another method\n'
114 'named as *ForTesting(), then consider exposing things to have tests\n'
115 'make that same call directly.\n'
116 'If that is not possible, you may put a comment on the same line with\n'
118 'to tell the PRESUBMIT script that the code is inside a *ForTesting()\n'
119 'method and can be ignored. Do not do this inside production code.\n'
120 'The android-binary-size trybot will block if the method exists in the\n'
126 # String pattern. If the pattern begins with a slash, the pattern will be
127 # treated as a regular expression instead.
129 # Explanation as a sequence of strings. Each string in the sequence will be
130 # printed on its own line.
131 explanation: Sequence[str]
132 # Whether or not to treat this ban as a fatal error. If unspecified,
134 treat_as_error: Optional[bool] = None
135 # Paths that should be excluded from the ban check. Each string is a regular
136 # expression that will be matched against the path of the file being checked
137 # relative to the root of the source tree.
138 excluded_paths: Optional[Sequence[str]] = None
141 _BANNED_JAVA_IMPORTS : Sequence[BanRule] = (
143 'import java.net.URI;',
145 'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
148 (r'net/android/javatests/src/org/chromium/net/'
149 'AndroidProxySelectorTest\.java'),
150 r'components/cronet/',
151 r'third_party/robolectric/local/',
155 'import android.annotation.TargetApi;',
157 'Do not use TargetApi, use @androidx.annotation.RequiresApi instead. '
158 'RequiresApi ensures that any calls are guarded by the appropriate '
159 'SDK_INT check. See https://crbug.com/1116486.',
163 'import androidx.test.rule.UiThreadTestRule;',
165 'Do not use UiThreadTestRule, just use '
166 '@org.chromium.base.test.UiThreadTest on test methods that should run '
167 'on the UI thread. See https://crbug.com/1111893.',
171 'import androidx.test.annotation.UiThreadTest;',
172 ('Do not use androidx.test.annotation.UiThreadTest, use '
173 'org.chromium.base.test.UiThreadTest instead. See '
174 'https://crbug.com/1111893.',
178 'import androidx.test.rule.ActivityTestRule;',
180 'Do not use ActivityTestRule, use '
181 'org.chromium.base.test.BaseActivityTestRule instead.',
184 'components/cronet/',
188 'import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat;',
190 'Do not use VectorDrawableCompat, use getResources().getDrawable() to '
191 'avoid extra indirections. Please also add trace event as the call '
192 'might take more than 20 ms to complete.',
197 _BANNED_JAVA_FUNCTIONS : Sequence[BanRule] = (
199 'StrictMode.allowThreadDiskReads()',
201 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
207 'StrictMode.allowThreadDiskWrites()',
209 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
215 '.waitForIdleSync()',
217 'Do not use waitForIdleSync as it masks underlying issues. There is '
218 'almost always something else you should wait on instead.',
223 r'/(?<!\bsuper\.)(?<!\bIntent )\bregisterReceiver\(',
225 'Do not call android.content.Context.registerReceiver (or an override) '
226 'directly. Use one of the wrapper methods defined in '
227 'org.chromium.base.ContextUtils, such as '
228 'registerProtectedBroadcastReceiver, '
229 'registerExportedBroadcastReceiver, or '
230 'registerNonExportedBroadcastReceiver. See their documentation for '
237 'base/android/java/src/org/chromium/base/ContextUtils.java',
238 'chromecast/browser/android/apk/src/org/chromium/chromecast/shell/BroadcastReceiverScope.java',
242 r'/(?:extends|new)\s*(?:android.util.)?Property<[A-Za-z.]+,\s*(?:Integer|Float)>',
244 'Do not use Property<..., Integer|Float>, but use FloatProperty or '
245 'IntProperty because it will avoid unnecessary autoboxing of '
252 'Layouts can be expensive. Prefer using ViewUtils.requestLayout(), '
253 'which emits a trace event with additional information to help with '
254 'scroll jank investigations. See http://crbug.com/1354176.',
258 'ui/android/java/src/org/chromium/ui/base/ViewUtils.java',
262 'Profile.getLastUsedRegularProfile()',
264 'Prefer passing in the Profile reference instead of relying on the '
265 'static getLastUsedRegularProfile() call. Only top level entry points '
266 '(e.g. Activities) should call this method. Otherwise, the Profile '
267 'should either be passed in explicitly or retreived from an existing '
268 'entity with a reference to the Profile (e.g. WebContents).',
272 r'.*Test[A-Z]?.*\.java',
276 r'/(ResourcesCompat|getResources\(\))\.getDrawable\(\)',
278 'getDrawable() can be expensive. If you have a lot of calls to '
279 'GetDrawable() or your code may introduce janks, please put your calls '
284 r'.*Test[A-Z]?.*\.java',
288 r'/RecordHistogram\.getHistogram(ValueCount|TotalCount|Samples)ForTesting\(',
290 'Raw histogram counts are easy to misuse; for example they don\'t reset '
291 'between batched tests. Use HistogramWatcher to check histogram records '
296 'base/android/javatests/src/org/chromium/base/metrics/RecordHistogramTest.java',
297 'base/test/android/javatests/src/org/chromium/base/test/util/HistogramWatcher.java',
302 _BANNED_JAVASCRIPT_FUNCTIONS : Sequence [BanRule] = (
304 r'/\bchrome\.send\b',
306 'The use of chrome.send is disallowed in Chrome (context: https://chromium.googlesource.com/chromium/src/+/refs/heads/main/docs/security/handling-messages-from-web-content.md).',
307 'Please use mojo instead for new webuis. https://docs.google.com/document/d/1RF-GSUoveYa37eoyZ9EhwMtaIwoW7Z88pIgNZ9YzQi4/edit#heading=h.gkk22wgk6wff',
311 r'^(?!ash\/webui).+',
312 # TODO(crbug.com/1385601): pre-existing violations still need to be
314 'ash/webui/common/resources/cr.m.js',
315 'ash/webui/common/resources/multidevice_setup/multidevice_setup_browser_proxy.js',
316 'ash/webui/common/resources/quick_unlock/lock_screen_constants.ts',
317 'ash/webui/common/resources/smb_shares/smb_browser_proxy.js',
318 'ash/webui/connectivity_diagnostics/resources/connectivity_diagnostics.js',
319 'ash/webui/diagnostics_ui/resources/diagnostics_browser_proxy.ts',
320 'ash/webui/multidevice_debug/resources/logs.js',
321 'ash/webui/multidevice_debug/resources/webui.js',
322 'ash/webui/projector_app/resources/annotator/trusted/annotator_browser_proxy.js',
323 'ash/webui/projector_app/resources/app/trusted/projector_browser_proxy.js',
324 # TODO(b/301634378): Remove violation exception once Scanning App
325 # migrated off usage of `chrome.send`.
326 'ash/webui/scanning/resources/scanning_browser_proxy.ts',
331 _BANNED_OBJC_FUNCTIONS : Sequence[BanRule] = (
335 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
336 'prohibited. Please use CrTrackingArea instead.',
337 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
342 r'/NSTrackingArea\W',
344 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
346 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
351 'convertPointFromBase:',
353 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
354 'Please use |convertPoint:(point) fromView:nil| instead.',
355 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
360 'convertPointToBase:',
362 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
363 'Please use |convertPoint:(point) toView:nil| instead.',
364 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
369 'convertRectFromBase:',
371 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
372 'Please use |convertRect:(point) fromView:nil| instead.',
373 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
378 'convertRectToBase:',
380 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
381 'Please use |convertRect:(point) toView:nil| instead.',
382 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
387 'convertSizeFromBase:',
389 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
390 'Please use |convertSize:(point) fromView:nil| instead.',
391 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
396 'convertSizeToBase:',
398 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
399 'Please use |convertSize:(point) toView:nil| instead.',
400 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
405 r"/\s+UTF8String\s*]",
407 'The use of -[NSString UTF8String] is dangerous as it can return null',
408 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
409 'Please use |SysNSStringToUTF8| instead.',
413 '^third_party/ocmock/OCMock/',
417 r'__unsafe_unretained',
419 'The use of __unsafe_unretained is almost certainly wrong, unless',
420 'when interacting with NSFastEnumeration or NSInvocation.',
421 'Please use __weak in files build with ARC, nothing otherwise.',
428 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
429 'Foundation types is prohibited.',
434 'This file requires ARC support.',
436 'ARC compilation is default in Chromium; do not add boilerplate to ',
437 'files that require ARC.',
443 _BANNED_IOS_OBJC_FUNCTIONS = (
447 'TEST() macro should not be used in Objective-C++ code as it does not ',
448 'drain the autorelease pool at the end of the test. Use TEST_F() ',
449 'macro instead with a fixture inheriting from PlatformTest (or a ',
455 r'/\btesting::Test\b',
457 'testing::Test should not be used in Objective-C++ code as it does ',
458 'not drain the autorelease pool at the end of the test. Use ',
459 'PlatformTest instead.'
464 ' systemImageNamed:',
466 '+[UIImage systemImageNamed:] should not be used to create symbols.',
467 'Instead use a wrapper defined in:',
468 'ios/chrome/browser/ui/icons/symbol_helpers.h'
472 'ios/chrome/browser/shared/ui/symbols/symbol_helpers.mm',
473 'ios/chrome/search_widget_extension/',
478 _BANNED_IOS_EGTEST_FUNCTIONS : Sequence[BanRule] = (
480 r'/\bEXPECT_OCMOCK_VERIFY\b',
482 'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
483 'it is meant for GTests. Use [mock verify] instead.'
489 _BANNED_CPP_FUNCTIONS : Sequence[BanRule] = (
493 'Zero-padded values that use "#" to add prefixes don\'t exhibit ',
494 'consistent behavior, since the prefix is not prepended for zero ',
495 'values. Use "0x%0..." instead.',
498 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
501 r'/\busing namespace ',
503 'Using directives ("using namespace x") are banned by the Google Style',
504 'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
505 'Explicitly qualify symbols or use using declarations ("using x::foo").',
508 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
510 # Make sure that gtest's FRIEND_TEST() macro is not used; the
511 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
512 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
516 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
517 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
521 "base/gtest_prod_util.h",
522 "base/allocator/partition_allocator/src/partition_alloc/partition_alloc_base/gtest_prod_util.h",
528 'Overriding setMatrixClip() is prohibited; ',
529 'the base function is deprecated. ',
537 'The use of SkRefPtr is prohibited. ',
538 'Please use sk_sp<> instead.'
546 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
547 'Please use sk_sp<> instead.'
555 'The use of SkAutoTUnref is dangerous because it implicitly ',
556 'converts to a raw pointer. Please use sk_sp<> instead.'
564 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
565 'because it implicitly converts to a raw pointer. ',
566 'Please use sk_sp<> instead.'
572 r'/HANDLE_EINTR\(.*close',
574 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
575 'descriptor will be closed, and it is incorrect to retry the close.',
576 'Either call close directly and ignore its return value, or wrap close',
577 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
583 r'/IGNORE_EINTR\((?!.*close)',
585 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
586 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
590 # Files that #define IGNORE_EINTR.
591 r'^base/posix/eintr_wrapper\.h$',
592 r'^ppapi/tests/test_broker\.cc$',
598 'Do not introduce new v8::Extensions into the code base, use',
599 'gin::Wrappable instead. See http://crbug.com/334679',
603 r'extensions/renderer/safe_builtins\.*',
607 '#pragma comment(lib,',
609 'Specify libraries to link with in build files and not in the source.',
613 r'^base/third_party/symbolize/.*',
614 r'^third_party/abseil-cpp/.*',
618 r'/base::SequenceChecker\b',
620 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
626 r'/base::ThreadChecker\b',
628 'Consider using THREAD_CHECKER macros instead of the class directly.',
634 r'/\b(?!(Sequenced|SingleThread))\w*TaskRunner::(GetCurrentDefault|CurrentDefaultHandle)',
636 'It is not allowed to call these methods from the subclasses ',
637 'of Sequenced or SingleThread task runners.',
643 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
645 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
646 'deprecated (http://crbug.com/634507). Please avoid converting away',
647 'from the Time types in Chromium code, especially if any math is',
648 'being done on time values. For interfacing with platform/library',
649 'APIs, use base::Time::(From,To)DeltaSinceWindowsEpoch() or',
650 'base::{TimeDelta::In}Microseconds(), or one of the other type',
651 'converter methods instead. For faking TimeXXX values (for unit',
652 'testing only), use TimeXXX() + Microseconds(N). For',
653 'other use cases, please contact base/time/OWNERS.',
658 "base/allocator/partition_allocator/src/partition_alloc/partition_alloc_base/time/time.h",
662 'CallJavascriptFunctionUnsafe',
664 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
665 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
666 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
670 r'^content/browser/webui/web_ui_impl\.(cc|h)$',
671 r'^content/public/browser/web_ui\.h$',
672 r'^content/public/test/test_web_ui\.(cc|h)$',
678 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
679 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
680 "Chrome's tracing, making their memory usage visible.",
684 r'^third_party/leveldatabase/.*\.(cc|h)$',
688 'leveldb::NewMemEnv',
690 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
691 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
692 "to Chrome's tracing, making their memory usage visible.",
696 r'^third_party/leveldatabase/.*\.(cc|h)$',
700 'RunLoop::QuitCurrent',
702 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
703 'methods of a specific RunLoop instance instead.',
709 'base::ScopedMockTimeMessageLoopTaskRunner',
711 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
712 'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
713 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
714 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
715 'with gab@ first if you think you need it)',
723 'Using std::regex adds unnecessary binary size to Chrome. Please use',
724 're2::RE2 instead (crbug.com/755321)',
728 # Abseil's benchmarks never linked into chrome.
729 'third_party/abseil-cpp/.*_benchmark.cc',
733 r'/\bstd::sto(i|l|ul|ll|ull)\b',
735 'std::sto{i,l,ul,ll,ull}() use exceptions to communicate results. ',
736 'Use base::StringTo[U]Int[64]() instead.',
739 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
742 r'/\bstd::sto(f|d|ld)\b',
744 'std::sto{f,d,ld}() use exceptions to communicate results. ',
745 'For locale-independent values, e.g. reading numbers from disk',
746 'profiles, use base::StringToDouble().',
747 'For user-visible values, parse using ICU.',
750 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
753 r'/\bstd::to_string\b',
755 'std::to_string() is locale dependent and slower than alternatives.',
756 'For locale-independent strings, e.g. writing numbers to disk',
757 'profiles, use base::NumberToString().',
758 'For user-visible strings, use base::FormatNumber() and',
759 'the related functions in base/i18n/number_formatting.h.',
761 False, # Only a warning since it is already used.
762 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
765 r'/#include <(cctype|ctype\.h|cwctype|wctype.h)>',
767 '<cctype>/<ctype.h>/<cwctype>/<wctype.h> are banned. Use',
768 '"third_party/abseil-cpp/absl/strings/ascii.h" instead.',
771 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
774 r'/\bstd::shared_ptr\b',
776 'std::shared_ptr is banned. Use scoped_refptr instead.',
780 # Needed for interop with third-party library.
781 '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
782 'array_buffer_contents\.(cc|h)',
783 '^third_party/blink/renderer/bindings/core/v8/' +
784 'v8_wasm_response_extensions.cc',
785 '^gin/array_buffer\.(cc|h)',
786 '^gin/per_isolate_data\.(cc|h)',
787 '^chrome/services/sharing/nearby/',
788 # Needed for interop with third-party library libunwindstack.
789 '^base/profiler/libunwindstack_unwinder_android\.(cc|h)',
790 '^base/profiler/native_unwinder_android_memory_regions_map_impl.(cc|h)',
791 # Needed for interop with third-party boringssl cert verifier
792 '^third_party/boringssl/',
794 '^net/tools/cert_verify_tool/',
795 '^services/cert_verifier/',
796 '^components/certificate_transparency/',
797 '^components/media_router/common/providers/cast/certificate/',
798 # gRPC provides some C++ libraries that use std::shared_ptr<>.
799 '^chromeos/ash/services/libassistant/grpc/',
800 '^chromecast/cast_core/grpc',
801 '^chromecast/cast_core/runtime/browser',
802 '^ios/chrome/test/earl_grey/chrome_egtest_plugin_client\.(mm|h)',
803 # Fuchsia provides C++ libraries that use std::shared_ptr<>.
804 '^base/fuchsia/.*\.(cc|h)',
805 '.*fuchsia.*test\.(cc|h)',
806 # Clang plugins have different build config.
807 '^tools/clang/plugins/',
808 _THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
811 r'/\bstd::weak_ptr\b',
813 'std::weak_ptr is banned. Use base::WeakPtr instead.',
816 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
821 'long long is banned. Use [u]int64_t instead.',
823 False, # Only a warning since it is already used.
824 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
827 r'/\b(absl|std)::any\b',
829 '{absl,std}::any are banned due to incompatibility with the component ',
833 # Not an error in third party folders, though it probably should be :)
834 [_THIRD_PARTY_EXCEPT_BLINK],
839 'std::bind() is banned because of lifetime risks. Use ',
840 'base::Bind{Once,Repeating}() instead.',
843 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
848 r'linear_congruential_engine|mersenne_twister_engine|'
849 r'subtract_with_carry_engine|discard_block_engine|'
850 r'independent_bits_engine|shuffle_order_engine|'
851 r'minstd_rand0?|mt19937(_64)?|ranlux(24|48)(_base)?|knuth_b|'
852 r'default_random_engine|'
858 'STL random number engines and generators are banned. Use the ',
859 'helpers in base/rand_util.h instead, e.g. base::RandBytes() or ',
860 'base::RandomBitGenerator.'
862 'Please reach out to cxx@chromium.org if the base APIs are ',
863 'insufficient for your needs.',
867 # Not an error in third_party folders.
868 _THIRD_PARTY_EXCEPT_BLINK,
869 # Various tools which build outside of Chrome.
870 r'testing/libfuzzer',
871 r'tools/android/io_benchmark/',
872 # Fuzzers are allowed to use standard library random number generators
873 # since fuzzing speed + reproducibility is important.
874 r'tools/ipc_fuzzer/',
876 r'.+_fuzzertest\.cc$',
877 # TODO(https://crbug.com/1380528): These are all unsanctioned uses of
878 # the standard library's random number generators, and should be
879 # migrated to the //base equivalent.
880 r'ash/ambient/model/ambient_topic_queue\.cc',
881 r'base/allocator/partition_allocator/src/partition_alloc/partition_alloc_unittest\.cc',
882 r'base/ranges/algorithm_unittest\.cc',
883 r'base/test/launcher/test_launcher\.cc',
884 r'cc/metrics/video_playback_roughness_reporter_unittest\.cc',
885 r'chrome/browser/apps/app_service/metrics/website_metrics\.cc',
886 r'chrome/browser/ash/power/auto_screen_brightness/monotone_cubic_spline_unittest\.cc',
887 r'chrome/browser/ash/printing/zeroconf_printer_detector_unittest\.cc',
888 r'chrome/browser/nearby_sharing/contacts/nearby_share_contact_manager_impl_unittest\.cc',
889 r'chrome/browser/nearby_sharing/contacts/nearby_share_contacts_sorter_unittest\.cc',
890 r'chrome/browser/privacy_budget/mesa_distribution_unittest\.cc',
891 r'chrome/browser/web_applications/test/web_app_test_utils\.cc',
892 r'chrome/browser/web_applications/test/web_app_test_utils\.cc',
893 r'chrome/browser/win/conflicts/module_blocklist_cache_util_unittest\.cc',
894 r'chrome/chrome_cleaner/logging/detailed_info_sampler\.cc',
895 r'chromeos/ash/components/memory/userspace_swap/swap_storage_unittest\.cc',
896 r'chromeos/ash/components/memory/userspace_swap/userspace_swap\.cc',
897 r'components/metrics/metrics_state_manager\.cc',
898 r'components/omnibox/browser/history_quick_provider_performance_unittest\.cc',
899 r'components/zucchini/disassembler_elf_unittest\.cc',
900 r'content/browser/webid/federated_auth_request_impl\.cc',
901 r'content/browser/webid/federated_auth_request_impl\.cc',
902 r'media/cast/test/utility/udp_proxy\.h',
903 r'sql/recover_module/module_unittest\.cc',
904 r'components/search_engines/template_url_prepopulate_data.cc',
905 # Do not add new entries to this list. If you have a use case which is
906 # not satisfied by the current APIs (i.e. you need an explicitly-seeded
907 # sequence, or stability of some sort is required), please contact
912 r'/\b(absl,std)::bind_front\b',
914 '{absl,std}::bind_front() are banned. Use base::Bind{Once,Repeating}() '
918 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
923 'ABSL_FLAG is banned. Use base::CommandLine instead.',
926 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
931 'Abseil container utilities are banned. Use base/ranges/algorithm.h ',
935 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
938 r'/\babsl::FixedArray\b',
940 'absl::FixedArray is banned. Use base::FixedArray instead.',
944 # base::FixedArray provides canonical access.
945 r'^base/types/fixed_array.h',
946 # Not an error in third_party folders.
947 _THIRD_PARTY_EXCEPT_BLINK,
951 r'/\babsl::FunctionRef\b',
953 'absl::FunctionRef is banned. Use base::FunctionRef instead.',
957 # base::Bind{Once,Repeating} references absl::FunctionRef to disallow
959 r'^base/functional/bind_internal\.h',
960 # base::FunctionRef is implemented on top of absl::FunctionRef.
961 r'^base/functional/function_ref.*\..+',
962 # Not an error in third_party folders.
963 _THIRD_PARTY_EXCEPT_BLINK,
967 r'/\babsl::(Insecure)?BitGen\b',
969 'absl random number generators are banned. Use the helpers in '
970 'base/rand_util.h instead, e.g. base::RandBytes() or ',
971 'base::RandomBitGenerator.'
974 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
977 r'/(\babsl::Span\b|#include <span>)',
979 'absl::Span is banned and <span> is not allowed yet ',
980 '(https://crbug.com/1414652). Use base::span instead.',
984 # Needed to use QUICHE API.
985 r'services/network/web_transport\.cc',
986 r'chrome/browser/ip_protection/.*',
987 # Not an error in third_party folders.
988 _THIRD_PARTY_EXCEPT_BLINK
992 r'/\babsl::StatusOr\b',
994 'absl::StatusOr is banned. Use base::expected instead.',
998 # Needed to use liburlpattern API.
999 r'third_party/blink/renderer/core/url_pattern/.*',
1000 r'third_party/blink/renderer/modules/manifest/manifest_parser\.cc',
1001 # Needed to use QUICHE API.
1002 r'chrome/browser/ip_protection/.*',
1003 # Needed to use MediaPipe API.
1004 r'components/media_effects/.*\.cc',
1005 # Not an error in third_party folders.
1006 _THIRD_PARTY_EXCEPT_BLINK
1010 r'/\babsl::StrFormat\b',
1012 'absl::StrFormat() is not allowed yet (https://crbug.com/1371963). ',
1013 'Use base::StringPrintf() instead.',
1016 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1019 r'/\babsl::(StrSplit|StrJoin|StrCat|StrAppend|Substitute|StrContains)\b',
1021 'Abseil string utilities are banned. Use base/strings instead.',
1024 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1027 r'/\babsl::(Mutex|CondVar|Notification|Barrier|BlockingCounter)\b',
1029 'Abseil synchronization primitives are banned. Use',
1030 'base/synchronization instead.',
1033 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1036 r'/\babsl::(Duration|Time|TimeZone|CivilDay)\b',
1038 'Abseil\'s time library is banned. Use base/time instead.',
1042 # Needed to use QUICHE API.
1043 r'chrome/browser/ip_protection/.*',
1044 r'services/network/web_transport.*',
1045 _THIRD_PARTY_EXCEPT_BLINK # Not an error in third_party folders.
1049 r'/#include <chrono>',
1051 '<chrono> is banned. Use base/time instead.',
1055 # Not an error in third_party folders:
1056 _THIRD_PARTY_EXCEPT_BLINK,
1057 # PartitionAlloc's starscan, doesn't depend on base/. It can't use
1058 # base::ConditionalVariable::TimedWait(..).
1059 "base/allocator/partition_allocator/src/partition_alloc/starscan/pcscan_internal.cc",
1063 r'/#include <exception>',
1065 'Exceptions are banned and disabled in Chromium.',
1068 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1071 r'/\bstd::function\b',
1073 'std::function is banned. Use base::{Once,Repeating}Callback instead.',
1077 # Has tests that template trait helpers don't unintentionally match
1079 r'base/functional/callback_helpers_unittest\.cc',
1080 # Required to implement interfaces from the third-party perfetto
1082 r'base/tracing/perfetto_task_runner\.cc',
1083 r'base/tracing/perfetto_task_runner\.h',
1084 # Needed for interop with the third-party nearby library type
1085 # location::nearby::connections::ResultCallback.
1086 'chrome/services/sharing/nearby/nearby_connections_conversions\.cc'
1087 # Needed for interop with the internal libassistant library.
1088 'chromeos/ash/services/libassistant/callback_utils\.h',
1089 # Needed for interop with Fuchsia fidl APIs.
1090 'fuchsia_web/webengine/browser/context_impl_browsertest\.cc',
1091 'fuchsia_web/webengine/browser/cookie_manager_impl_unittest\.cc',
1092 'fuchsia_web/webengine/browser/media_player_impl_unittest\.cc',
1093 # Required to interop with interfaces from the third-party perfetto
1095 'services/tracing/public/cpp/perfetto/custom_event_recorder\.cc',
1096 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.cc',
1097 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.h',
1098 'services/tracing/public/cpp/perfetto/perfetto_tracing_backend\.cc',
1099 'services/tracing/public/cpp/perfetto/producer_client\.cc',
1100 'services/tracing/public/cpp/perfetto/producer_client\.h',
1101 'services/tracing/public/cpp/perfetto/producer_test_utils\.cc',
1102 'services/tracing/public/cpp/perfetto/producer_test_utils\.h',
1103 # Required for interop with the third-party webrtc library.
1104 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.cc',
1105 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.h',
1106 # This code is in the process of being extracted into a third-party library.
1107 # See https://crbug.com/1322914
1108 '^net/cert/pki/path_builder_unittest\.cc',
1109 # TODO(https://crbug.com/1364577): Various uses that should be
1110 # migrated to something else.
1111 # Should use base::OnceCallback or base::RepeatingCallback.
1112 'base/allocator/dispatcher/initializer_unittest\.cc',
1113 'chrome/browser/ash/accessibility/speech_monitor\.cc',
1114 'chrome/browser/ash/accessibility/speech_monitor\.h',
1115 'chrome/browser/ash/login/ash_hud_login_browsertest\.cc',
1116 'chromecast/base/observer_unittest\.cc',
1117 'chromecast/browser/cast_web_view\.h',
1118 'chromecast/public/cast_media_shlib\.h',
1119 'device/bluetooth/floss/exported_callback_manager\.h',
1120 'device/bluetooth/floss/floss_dbus_client\.h',
1121 'device/fido/cable/v2_handshake_unittest\.cc',
1122 'device/fido/pin\.cc',
1123 'services/tracing/perfetto/test_utils\.h',
1124 # Should use base::FunctionRef.
1125 'chrome/browser/media/webrtc/test_stats_dictionary\.cc',
1126 'chrome/browser/media/webrtc/test_stats_dictionary\.h',
1127 'chromeos/ash/services/libassistant/device_settings_controller\.cc',
1128 'components/browser_ui/client_certificate/android/ssl_client_certificate_request\.cc',
1129 'components/gwp_asan/client/sampling_malloc_shims_unittest\.cc',
1130 'content/browser/font_unique_name_lookup/font_unique_name_lookup_unittest\.cc',
1131 # Does not need std::function at all.
1132 'components/omnibox/browser/autocomplete_result\.cc',
1133 'device/fido/win/webauthn_api\.cc',
1134 'media/audio/alsa/alsa_util\.cc',
1135 'media/remoting/stream_provider\.h',
1136 'sql/vfs_wrapper\.cc',
1137 # TODO(https://crbug.com/1364585): Remove usage and exception list
1139 'extensions/renderer/api/automation/automation_internal_custom_bindings\.cc',
1140 'extensions/renderer/api/automation/automation_internal_custom_bindings\.h',
1141 # TODO(https://crbug.com/1364579): Remove usage and exception list
1143 'ui/views/controls/focus_ring\.h',
1145 # Various pre-existing uses in //tools that is low-priority to fix.
1146 'tools/binary_size/libsupersize/viewer/caspian/diff\.cc',
1147 'tools/binary_size/libsupersize/viewer/caspian/model\.cc',
1148 'tools/binary_size/libsupersize/viewer/caspian/model\.h',
1149 'tools/binary_size/libsupersize/viewer/caspian/tree_builder\.h',
1150 'tools/clang/base_bind_rewriters/BaseBindRewriters\.cpp',
1152 # Not an error in third_party folders.
1153 _THIRD_PARTY_EXCEPT_BLINK
1159 'Do not use Xlib. Use xproto (from //ui/gfx/x:xproto) instead.',
1162 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1167 'std::ratio is banned by the Google Style Guide.',
1170 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1173 r'/\bstd::aligned_alloc\b',
1175 'std::aligned_alloc() is not yet allowed (crbug.com/1412818). Use ',
1176 'base::AlignedAlloc() instead.',
1179 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1182 r'/#include <(barrier|latch|semaphore|stop_token)>',
1184 'The thread support library is banned. Use base/synchronization '
1188 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1191 r'/\bstd::(c8rtomb|mbrtoc8)\b',
1193 'std::c8rtomb() and std::mbrtoc8() are banned.',
1196 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1199 r'/\bchar8_t|std::u8string\b',
1201 'char8_t and std::u8string are not yet allowed. Can you use [unsigned]',
1202 ' char and std::string instead?',
1206 # The demangler does not use this type but needs to know about it.
1207 'base/third_party/symbolize/demangle\.cc',
1208 # Don't warn in third_party folders.
1209 _THIRD_PARTY_EXCEPT_BLINK
1213 r'/(\b(co_await|co_return|co_yield)\b|#include <coroutine>)',
1215 'Coroutines are not yet allowed (https://crbug.com/1403840).',
1218 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1221 r'/^\s*(export\s|import\s+["<:\w]|module(;|\s+[:\w]))',
1223 'Modules are disallowed for now due to lack of toolchain support.',
1226 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1229 r'/\[\[(un)?likely\]\]',
1231 '[[likely]] and [[unlikely]] are not yet allowed ',
1232 '(https://crbug.com/1414620). Use [UN]LIKELY instead.',
1235 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1238 r'/#include <format>',
1240 '<format> is not yet allowed. Use base::StringPrintf() instead.',
1243 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1246 r'/#include <ranges>',
1248 '<ranges> is not yet allowed. Use base/ranges/algorithm.h instead.',
1251 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1254 r'/#include <source_location>',
1256 '<source_location> is not yet allowed. Use base/location.h instead.',
1259 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1262 r'/#include <syncstream>',
1264 '<syncstream> is banned.',
1267 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1270 r'/\bRunMessageLoop\b',
1272 'RunMessageLoop is deprecated, use RunLoop instead.',
1278 'RunAllPendingInMessageLoop()',
1280 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
1281 "if you're convinced you need this.",
1287 'RunAllPendingInMessageLoop(BrowserThread',
1289 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
1290 'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
1291 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
1292 'async events instead of flushing threads.',
1298 r'MessageLoopRunner',
1300 'MessageLoopRunner is deprecated, use RunLoop instead.',
1306 'GetDeferredQuitTaskForRunLoop',
1308 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
1309 "gab@ if you found a use case where this is the only solution.",
1315 'sqlite3_initialize(',
1317 'Instead of calling sqlite3_initialize(), depend on //sql, ',
1318 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
1322 r'^sql/initialization\.(cc|h)$',
1323 r'^third_party/sqlite/.*\.(c|cc|h)$',
1329 'SQL views are disabled in Chromium feature code',
1330 'https://chromium.googlesource.com/chromium/src/+/HEAD/sql#no-views',
1334 _THIRD_PARTY_EXCEPT_BLINK,
1335 # sql/ itself uses views when using memory-mapped IO.
1337 # Various performance tools that do not build as part of Chrome.
1344 'CREATE VIRTUAL TABLE',
1346 'SQL virtual tables are disabled in Chromium feature code',
1347 'https://chromium.googlesource.com/chromium/src/+/HEAD/sql#no-virtual-tables',
1351 _THIRD_PARTY_EXCEPT_BLINK,
1352 # sql/ itself uses virtual tables in the recovery module and tests.
1354 # TODO(https://crbug.com/695592): Remove once WebSQL is deprecated.
1355 r'third_party/blink/web_tests/storage/websql/.*'
1356 # Various performance tools that do not build as part of Chrome.
1362 'std::random_shuffle',
1364 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
1365 'base::RandomShuffle instead.'
1371 'ios/web/public/test/http_server',
1373 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
1381 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
1382 'implicated in a few leaks. ReleaseAndGetAddressOf() is safe but ',
1383 'operator& is generally recommended. So always use operator& instead. ',
1384 'See http://crbug.com/914910 for more conversion guidance.'
1392 'SHFileOperation was deprecated in Windows Vista, and there are less ',
1393 'complex functions to achieve the same goals. Use IFileOperation for ',
1394 'any esoteric actions instead.'
1402 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
1403 'Use base::win::WStringFromGUID instead.'
1407 r'/base/win/win_util_unittest.cc',
1413 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
1414 'Use base::win::WStringFromGUID instead.'
1418 r'/base/win/win_util_unittest.cc',
1424 'The use of kCFAllocatorNull with the NoCopy creation of ',
1425 'CoreFoundation types is prohibited.',
1433 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
1434 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
1435 'StringTraits if you would like to convert between custom types and',
1436 'the wire format of mojom types.'
1440 r'^fuchsia_web/webengine/browser/url_request_rewrite_rules_manager\.cc$',
1441 r'^fuchsia_web/webengine/url_request_rewrite_type_converters\.cc$',
1442 r'^third_party/blink/.*\.(cc|h)$',
1443 r'^content/renderer/.*\.(cc|h)$',
1447 'GetInterfaceProvider',
1449 'InterfaceProvider is deprecated.',
1450 'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
1451 'or Platform::GetBrowserInterfaceBroker.'
1459 'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
1460 'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
1467 r'/\b(IFACE|STD)METHOD_?\(',
1469 'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
1470 'Instead, always use IFACEMETHODIMP in the declaration.'
1473 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1476 'set_owned_by_client',
1478 'set_owned_by_client is deprecated.',
1479 'views::View already owns the child views by default. This introduces ',
1480 'a competing ownership model which makes the code difficult to reason ',
1481 'about. See http://crbug.com/1044687 for more details.'
1487 'RemoveAllChildViewsWithoutDeleting',
1489 'RemoveAllChildViewsWithoutDeleting is deprecated.',
1490 'This method is deemed dangerous as, unless raw pointers are re-added,',
1491 'calls to this method introduce memory leaks.'
1497 r'/\bTRACE_EVENT_ASYNC_',
1499 'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
1500 'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
1504 r'^base/trace_event/.*',
1505 r'^base/tracing/.*',
1509 r'/\bbase::debug::DumpWithoutCrashingUnthrottled[(][)]',
1511 'base::debug::DumpWithoutCrashingUnthrottled() does not throttle',
1512 'dumps and may spam crash reports. Consider if the throttled',
1513 'variants suffice instead.',
1521 'Improper use of [base::win]::RoInitialize() has been implicated in a ',
1522 'few COM initialization leaks. Use base::win::ScopedWinrtInitializer ',
1523 'instead. See http://crbug.com/1197722 for more information.'
1527 r'^base/win/scoped_winrt_initializer\.cc$',
1528 r'^third_party/abseil-cpp/absl/.*',
1534 'base::Watchdog is deprecated because it creates its own thread.',
1535 'Instead, manually start a timer on a SequencedTaskRunner.',
1543 'Do not use base::Passed. It is a legacy helper for capturing ',
1544 'move-only types with base::BindRepeating, but invoking the ',
1545 'resulting RepeatingCallback moves the captured value out of ',
1546 'the callback storage, and subsequent invocations may pass the ',
1547 'value in a valid but undefined state. Prefer base::BindOnce().',
1548 'See http://crbug.com/1326449 for context.'
1552 # False positive, but it is also fine to let bind internals reference
1554 r'^base[\\/]functional[\\/]bind\.h',
1555 r'^base[\\/]functional[\\/]bind_internal\.h',
1561 'Please use BASE_DECLARE_FEATURE() or BASE_FEATURE() instead of ',
1562 'directly declaring/defining features.'
1566 _THIRD_PARTY_EXCEPT_BLINK,
1572 'chartorune is not memory-safe, unless you can guarantee the input ',
1573 'string is always null-terminated. Otherwise, please use charntorune ',
1574 'from libphonenumber instead.'
1578 _THIRD_PARTY_EXCEPT_BLINK,
1579 # Exceptions to this rule should have a fuzzer.
1583 r'/\b#include "base/atomicops\.h"\b',
1585 'Do not use base::subtle atomics, but std::atomic, which are simpler '
1586 'to use, have better understood, clearer and richer semantics, and are '
1587 'harder to mis-use. See details in base/atomicops.h.',
1590 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1593 r'CrossThreadPersistent<',
1595 'Do not use blink::CrossThreadPersistent, but '
1596 'blink::CrossThreadHandle. It is harder to mis-use.',
1598 'https://docs.google.com/document/d/1GIT0ysdQ84sGhIo1r9EscF_fFt93lmNVM_q4vvHj2FQ/edit#heading=h.3e4d6y61tgs',
1599 'Please contact platform-architecture-dev@ before adding new instances.'
1605 r'CrossThreadWeakPersistent<',
1607 'Do not use blink::CrossThreadWeakPersistent, but '
1608 'blink::CrossThreadWeakHandle. It is harder to mis-use.',
1610 'https://docs.google.com/document/d/1GIT0ysdQ84sGhIo1r9EscF_fFt93lmNVM_q4vvHj2FQ/edit#heading=h.3e4d6y61tgs',
1611 'Please contact platform-architecture-dev@ before adding new instances.'
1619 'Do not include <objc/objc.h>. It defines away ARC lifetime '
1620 'annotations, and is thus dangerous.',
1621 'Please use the pimpl pattern; search for `ObjCStorage` for examples.',
1622 'For further reading on how to safely mix C++ and Obj-C, see',
1623 'https://chromium.googlesource.com/chromium/src/+/main/docs/mac/mixing_cpp_and_objc.md'
1629 r'/#include <filesystem>',
1631 'libc++ <filesystem> is banned per the Google C++ styleguide.',
1634 # This fuzzing framework is a standalone open source project and
1635 # cannot rely on Chromium base.
1636 (r'third_party/centipede'),
1641 'TopDocument() does not work correctly with out-of-process iframes. '
1642 'Please do not introduce new uses.',
1646 # TODO(crbug.com/617677): Remove all remaining uses.
1647 r'^third_party/blink/renderer/core/dom/document\.cc',
1648 r'^third_party/blink/renderer/core/dom/document\.h',
1649 r'^third_party/blink/renderer/core/dom/element\.cc',
1650 r'^third_party/blink/renderer/core/exported/web_disallow_transition_scope_test\.cc',
1651 r'^third_party/blink/renderer/core/exported/web_document_test\.cc',
1652 r'^third_party/blink/renderer/core/html/html_anchor_element\.cc',
1653 r'^third_party/blink/renderer/core/html/html_dialog_element\.cc',
1654 r'^third_party/blink/renderer/core/html/html_element\.cc',
1655 r'^third_party/blink/renderer/core/html/html_frame_owner_element\.cc',
1656 r'^third_party/blink/renderer/core/html/media/video_wake_lock\.cc',
1657 r'^third_party/blink/renderer/core/loader/anchor_element_interaction_tracker\.cc',
1658 r'^third_party/blink/renderer/core/page/scrolling/root_scroller_controller\.cc',
1659 r'^third_party/blink/renderer/core/page/scrolling/top_document_root_scroller_controller\.cc',
1660 r'^third_party/blink/renderer/core/page/scrolling/top_document_root_scroller_controller\.h',
1661 r'^third_party/blink/renderer/core/script/classic_pending_script\.cc',
1662 r'^third_party/blink/renderer/core/script/script_loader\.cc',
1666 pattern = r'base::raw_ptr<',
1668 'Do not use base::raw_ptr, use raw_ptr.',
1670 treat_as_error = True,
# NOTE(review): was r'base:raw_ref<' (single colon), which can never match
# the C++ spelling 'base::raw_ref<'; fixed to match the sibling
# base::raw_ptr rule above.
pattern = r'base::raw_ref<',
'Do not use base::raw_ref, use raw_ref.',
treat_as_error = True,
1688 pattern = r'/raw_ptr<[^;}]*\w{};',
1690 'Do not use {} for raw_ptr initialization, use = nullptr instead.',
1692 treat_as_error = True,
1699 pattern = r'/#include "base/allocator/.*/raw_'
1700 r'(ptr|ptr_cast|ptr_exclusion|ref).h"',
1702 'Please include the corresponding facade headers:',
1703 '- #include "base/memory/raw_ptr.h"',
1704 '- #include "base/memory/raw_ptr_cast.h"',
1705 '- #include "base/memory/raw_ptr_exclusion.h"',
1706 '- #include "base/memory/raw_ref.h"',
1708 treat_as_error = True,
1715 pattern = r'ContentSettingsType::COOKIES',
1717 'Do not use ContentSettingsType::COOKIES to check whether cookies are '
1718 'supported in the provided context. Instead rely on the '
1719 'content_settings::CookieSettings API. If you are using '
1720 'ContentSettingsType::COOKIES to check the user preference setting '
1721 'specifically, disregard this warning.',
1723 treat_as_error = False,
1725 '^chrome/browser/ui/content_settings/',
1726 '^components/content_settings/',
1727 '^services/network/cookie_settings.cc',
1732 pattern = r'\bg_signal_connect',
1734 'Use ScopedGSignal instead of g_signal_connect*()',
1736 treat_as_error = True,
1738 '^ui/base/glib/scoped_gsignal.h',
1742 pattern = r'features::kIsolatedWebApps',
1744 'Do not use `features::kIsolatedWebApps` directly to guard Isolated ',
1746 'Use `content::IsolatedWebAppsPolicy::AreIsolatedWebAppsEnabled()` in ',
1747 'the browser process or check the `kEnableIsolatedWebAppsInRenderer` ',
1748 'command line flag in the renderer process.',
1750 treat_as_error = True,
1751 excluded_paths = _TEST_CODE_EXCLUDED_PATHS + (
1752 '^chrome/browser/about_flags.cc',
1753 '^chrome/browser/chrome_content_browser_client.cc',
1754 '^chrome/browser/ui/startup/bad_flags_prompt.cc',
1755 '^content/shell/browser/shell_content_browser_client.cc'
1760 _BANNED_MOJOM_PATTERNS : Sequence[BanRule] = (
1762 'handle<shared_buffer>',
1764 'Please use one of the more specific shared memory types instead:',
1765 ' mojo_base.mojom.ReadOnlySharedMemoryRegion',
1766 ' mojo_base.mojom.WritableSharedMemoryRegion',
1767 ' mojo_base.mojom.UnsafeSharedMemoryRegion',
# Warning message emitted when a change uses the deprecated IPC_ENUM_TRAITS()
# macro; the linked page documents the security guidance.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/'
    'security-tips-for-ipc')
1778 _LONG_PATH_ERROR = (
1779 'Some files included in this CL have file names that are too long (> 200'
1780 ' characters). If committed, these files will cause issues on Windows. See'
1781 ' https://crbug.com/612667 for more details.'
1784 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1785 r".*/AppHooksImpl\.java",
1786 r".*/BuildHooksAndroidImpl\.java",
1787 r".*/LicenseContentProvider\.java",
1788 r".*/PlatformServiceBridgeImpl.java",
1789 r".*chrome/android/feed/dummy/.*\.java",
# List of image extensions that are used as resources in chromium. Values
# include the leading dot.
_IMAGE_EXTENSIONS = ['.svg', '.png', '.webp']
1795 # These paths contain test data and other known invalid JSON files.
1796 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
1798 r'testing/buildbot/',
1799 r'^components/policy/resources/policy_templates\.json$',
1800 r'^third_party/protobuf/',
1801 r'^third_party/blink/perf_tests/speedometer.*/resources/todomvc/learn\.json',
1802 r'^third_party/blink/renderer/devtools/protocol\.json$',
1803 r'^third_party/blink/web_tests/external/wpt/',
1805 r'^tools/traceline/svgui/startup-release.json',
1806 # vscode configuration files allow comments
1810 # These are not checked on the public chromium-presubmit trybot.
# Add files here that rely on .py files that exist only for target_os="android"
1813 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1814 'chrome/android/features/create_stripped_java_factory.pydeps',
1818 _GENERIC_PYDEPS_FILES = [
1819 'android_webview/test/components/run_webview_component_smoketest.pydeps',
1820 'android_webview/tools/run_cts.pydeps',
1821 'build/android/apk_operations.pydeps',
1822 'build/android/devil_chromium.pydeps',
1823 'build/android/gyp/aar.pydeps',
1824 'build/android/gyp/aidl.pydeps',
1825 'build/android/gyp/allot_native_libraries.pydeps',
1826 'build/android/gyp/apkbuilder.pydeps',
1827 'build/android/gyp/assert_static_initializers.pydeps',
1828 'build/android/gyp/binary_baseline_profile.pydeps',
1829 'build/android/gyp/bytecode_processor.pydeps',
1830 'build/android/gyp/bytecode_rewriter.pydeps',
1831 'build/android/gyp/check_flag_expectations.pydeps',
1832 'build/android/gyp/compile_java.pydeps',
1833 'build/android/gyp/compile_kt.pydeps',
1834 'build/android/gyp/compile_resources.pydeps',
1835 'build/android/gyp/copy_ex.pydeps',
1836 'build/android/gyp/create_apk_operations_script.pydeps',
1837 'build/android/gyp/create_app_bundle.pydeps',
1838 'build/android/gyp/create_app_bundle_apks.pydeps',
1839 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1840 'build/android/gyp/create_java_binary_script.pydeps',
1841 'build/android/gyp/create_r_java.pydeps',
1842 'build/android/gyp/create_r_txt.pydeps',
1843 'build/android/gyp/create_size_info_files.pydeps',
1844 'build/android/gyp/create_test_apk_wrapper_script.pydeps',
1845 'build/android/gyp/create_ui_locale_resources.pydeps',
1846 'build/android/gyp/dex.pydeps',
1847 'build/android/gyp/dist_aar.pydeps',
1848 'build/android/gyp/filter_zip.pydeps',
1849 'build/android/gyp/flatc_java.pydeps',
1850 'build/android/gyp/gcc_preprocess.pydeps',
1851 'build/android/gyp/generate_linker_version_script.pydeps',
1852 'build/android/gyp/ijar.pydeps',
1853 'build/android/gyp/jacoco_instr.pydeps',
1854 'build/android/gyp/java_cpp_enum.pydeps',
1855 'build/android/gyp/java_cpp_features.pydeps',
1856 'build/android/gyp/java_cpp_strings.pydeps',
1857 'build/android/gyp/java_google_api_keys.pydeps',
1858 'build/android/gyp/jinja_template.pydeps',
1859 'build/android/gyp/lint.pydeps',
1860 'build/android/gyp/merge_manifest.pydeps',
1861 'build/android/gyp/optimize_resources.pydeps',
1862 'build/android/gyp/prepare_resources.pydeps',
1863 'build/android/gyp/process_native_prebuilt.pydeps',
1864 'build/android/gyp/proguard.pydeps',
1865 'build/android/gyp/system_image_apks.pydeps',
1866 'build/android/gyp/trace_event_bytecode_rewriter.pydeps',
1867 'build/android/gyp/turbine.pydeps',
1868 'build/android/gyp/unused_resources.pydeps',
1869 'build/android/gyp/validate_static_library_dex_references.pydeps',
1870 'build/android/gyp/write_build_config.pydeps',
1871 'build/android/gyp/write_native_libraries_java.pydeps',
1872 'build/android/gyp/zip.pydeps',
1873 'build/android/incremental_install/generate_android_manifest.pydeps',
1874 'build/android/incremental_install/write_installer_json.pydeps',
1875 'build/android/pylib/results/presentation/test_results_presentation.pydeps',
1876 'build/android/resource_sizes.pydeps',
1877 'build/android/test_runner.pydeps',
1878 'build/android/test_wrapper/logdog_wrapper.pydeps',
1879 'build/lacros/lacros_resource_sizes.pydeps',
1880 'build/protoc_java.pydeps',
1881 'chrome/android/monochrome/scripts/monochrome_python_tests.pydeps',
1882 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1883 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1884 'chromecast/resource_sizes/chromecast_resource_sizes.pydeps',
1885 'components/cronet/tools/generate_javadoc.pydeps',
1886 'components/cronet/tools/jar_src.pydeps',
1887 'components/module_installer/android/module_desc_java.pydeps',
1888 'content/public/android/generate_child_service.pydeps',
1889 'net/tools/testserver/testserver.pydeps',
1890 'testing/scripts/run_isolated_script_test.pydeps',
1891 'testing/merge_scripts/standard_isolated_script_merge.pydeps',
1892 'testing/merge_scripts/standard_gtest_merge.pydeps',
1893 'testing/merge_scripts/code_coverage/merge_results.pydeps',
1894 'testing/merge_scripts/code_coverage/merge_steps.pydeps',
1895 'third_party/android_platform/development/scripts/stack.pydeps',
1896 'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
1897 'third_party/blink/renderer/bindings/scripts/check_generated_file_list.pydeps',
1898 'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
1899 'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
1900 'third_party/blink/renderer/bindings/scripts/generate_event_interface_names.pydeps',
1901 'third_party/blink/renderer/bindings/scripts/validate_web_idl.pydeps',
1902 'third_party/blink/tools/blinkpy/web_tests/merge_results.pydeps',
1903 'third_party/blink/tools/merge_web_test_results.pydeps',
1904 'tools/binary_size/sizes.pydeps',
1905 'tools/binary_size/supersize.pydeps',
1906 'tools/perf/process_perf_results.pydeps',
# All pydeps files, both Android-specific and generic.
_ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
# Bypass the AUTHORS check for these accounts.
# Built as '<account>@<domain>' for every (domain, accounts) pair below.
_KNOWN_ROBOTS = {
    '%s@%s' % (account, domain)
    for domain, accounts in (
        ('appspot.gserviceaccount.com', ('findit-for-me', )),
        ('developer.gserviceaccount.com', ('3su6n15k.default', )),
        ('chops-service-accounts.iam.gserviceaccount.com',
         ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
          'wpt-autoroller', 'chrome-weblayer-builder',
          'lacros-version-skew-roller', 'skylab-test-cros-roller',
          'infra-try-recipes-tester', 'lacros-tracking-roller',
          'lacros-sdk-version-roller', 'chrome-automated-expectation',
          'chromium-automated-expectation', 'chrome-branch-day',
          'chromium-autosharder')),
        ('skia-public.iam.gserviceaccount.com',
         ('chromium-autoroll', 'chromium-release-autoroll')),
        ('skia-corp.google.com.iam.gserviceaccount.com',
         ('chromium-internal-autoroll', )),
        ('owners-cleanup-prod.google.com.iam.gserviceaccount.com',
         ('swarming-tasks', )),
        ('fuchsia-infra.iam.gserviceaccount.com',
         ('global-integration-try-builder',
          'global-integration-ci-builder')),
        ('prod.google.com',
         ('chops-security-borg', 'chops-security-cronjobs-cpesuggest')),
    )
    for account in accounts
}
1938 _INVALID_GRD_FILE_LINE = [
1939 (r'<file lang=.* path=.*', 'Path should come before lang in GRD files.')
def _IsCPlusPlusFile(input_api, file_path):
    """Returns True if this file contains C++-like code (and not Python,
    Go, Java, MarkDown, ...)

    Args:
        input_api: presubmit InputApi; only its os_path helper is used here.
        file_path: path of the file to classify.
    """
    ext = input_api.os_path.splitext(file_path)[1]
    # This list is compatible with CppChecker.IsCppFile but we should
    # consider adding ".c" to it. If we do that we can use this function
    # at more places in the code.
def _IsCPlusPlusHeaderFile(input_api, file_path):
    """Returns True if |file_path| has a C/C++ header extension (.h)."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".h"
def _IsJavaFile(input_api, file_path):
    """Returns True if |file_path| names a Java source file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".java"
def _IsProtoFile(input_api, file_path):
    """Returns True if |file_path| names a protobuf definition file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".proto"
def _IsXmlOrGrdFile(input_api, file_path):
    """Returns True if |file_path| names a GRD or XML file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == '.grd' or extension == '.xml'
def _IsMojomFile(input_api, file_path):
    """Returns True if |file_path| names a Mojo interface definition file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".mojom"
def CheckNoUpstreamDepsOnClank(input_api, output_api):
    """Prevent additions of dependencies from the upstream repo on //clank."""
    # clank can depend on clank
    if input_api.change.RepositoryRoot().endswith('clank'):
    # NOTE(review): list contents appear truncated in this copy - verify.
    build_file_patterns = [
    excluded_files = [r'build[/\\]config[/\\]android[/\\]config\.gni']
    # Matches '//clank' occurring before any '#' comment on the line.
    bad_pattern = input_api.re.compile(r'^[^#]*//clank')
    error_message = 'Disallowed import on //clank in an upstream build file:'

    def FilterFile(affected_file):
        # Restrict the scan to build files, minus the explicit exclusions.
        return input_api.FilterSourceFile(affected_file,
                                          files_to_check=build_file_patterns,
                                          files_to_skip=excluded_files)

    for f in input_api.AffectedSourceFiles(FilterFile):
        local_path = f.LocalPath()
        for line_number, line in f.ChangedContents():
            if (bad_pattern.search(line)):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))

    # Prompt/notify rather than hard-fail.
    return [output_api.PresubmitPromptOrNotify(error_message, problems)]
def CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
    """Attempts to prevent use of functions intended only for testing in
    non-testing code. For now this is just a best-effort implementation
    that ignores header files and may have some false positives. A
    better implementation would probably need a proper C++ parser.
    """
    # We only scan .cc files and the like, as the declaration of
    # for-testing functions in header files are hard to distinguish from
    # calls to such functions without a proper C++ parser.
    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]

    # Matches 'test::Foo', 'FooForTest(s|ing)' and 'foo_for_test(s|ing)'.
    base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
    # A for-testing name immediately followed by '(' - a call or declaration.
    inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' %
                                             base_function_pattern)
    # A for-testing name inside a '//' comment is not flagged.
    comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
    # Explicit end-of-line suppression marker.
    allowlist_pattern = input_api.re.compile(r'// IN-TEST$')
    # Qualified references and definitions (name followed by a '{' body)
    # are not production-code calls and are not flagged.
    exclusion_pattern = input_api.re.compile(
        r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' %
        (base_function_pattern, base_function_pattern))
    # Avoid a false positive in this case, where the method name, the ::, and
    # the closing { are all on different lines due to line wrapping.
    # HelperClassForTesting::
    #     HelperClassForTesting(
    method_defn_pattern = input_api.re.compile(r'[A-Za-z0-9_]+::$')

    def FilterFile(affected_file):
        # Implementation files only, minus excluded paths and test code.
        files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                         input_api.DEFAULT_FILES_TO_SKIP)
        # NOTE(review): FilterSourceFile's positional file argument appears
        # missing here - verify against the depot_tools presubmit API.
        return input_api.FilterSourceFile(
            files_to_check=file_inclusion_pattern,
            files_to_skip=files_to_skip)

    for f in input_api.AffectedSourceFiles(FilterFile):
        local_path = f.LocalPath()
        in_method_defn = False
        for line_number, line in f.ChangedContents():
            if (inclusion_pattern.search(line)
                    and not comment_pattern.search(line)
                    and not exclusion_pattern.search(line)
                    and not allowlist_pattern.search(line)
                    and not in_method_defn):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))
            # Remember whether this line ended with 'Name::' so the next
            # line is treated as a wrapped method definition, not a call.
            in_method_defn = method_defn_pattern.search(line)

        output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
def CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
    """This is a simplified version of
    CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
    """
    # Javadoc block delimiters; matches inside /** ... */ are skipped.
    javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
    javadoc_end_re = input_api.re.compile(r'^\s*\*/')
    name_pattern = r'ForTest(s|ing)?'
    # Describes an occurrence of "ForTest*" inside a // comment.
    comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
    # Describes @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
    annotation_re = input_api.re.compile(r'@VisibleForTesting\(')
    inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
    # Ignore definitions. (Comments are ignored separately.)
    exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)
    allowlist_re = input_api.re.compile(r'// IN-TEST$')

    # Scan .java files, skipping anything test-related or under junit/.
    # NOTE(review): the lambda's positional file argument to FilterSourceFile
    # appears missing in this copy - verify.
    sources = lambda x: input_api.FilterSourceFile(
        files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\.java$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=sources):
        local_path = f.LocalPath()
        is_inside_javadoc = False
        for line_number, line in f.ChangedContents():
            # Track Javadoc state before evaluating the line itself.
            if is_inside_javadoc and javadoc_end_re.search(line):
                is_inside_javadoc = False
            if not is_inside_javadoc and javadoc_start_re.search(line):
                is_inside_javadoc = True
            if is_inside_javadoc:
            if (inclusion_re.search(line) and not comment_re.search(line)
                    and not annotation_re.search(line)
                    and not allowlist_re.search(line)
                    and not exclusion_re.search(line)):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))

        output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
def CheckNoIOStreamInHeaders(input_api, output_api):
    """Checks to make sure no .h files include <iostream>."""
    pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                   input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        # Only header files are of interest.
        if not f.LocalPath().endswith('.h'):
        contents = input_api.ReadFile(f)
        if pattern.search(contents):

        output_api.PresubmitError(
            'Do not #include <iostream> in header files, since it inserts static '
            'initialization into every file including the header. Instead, '
            '#include <ostream>. See http://crbug.com/94794', files)
def CheckNoStrCatRedefines(input_api, output_api):
    """Checks no windows headers with StrCat redefined are included directly."""
    # Scan both headers and implementation files, except paths that are
    # allowed to not depend on //base.
    files_to_check = (r'.+%s' % _HEADER_EXTENSIONS,
                      r'.+%s' % _IMPLEMENTATION_EXTENSIONS)
    files_to_skip = (input_api.DEFAULT_FILES_TO_SKIP +
                     _NON_BASE_DEPENDENT_PATHS)
    sources_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check, files_to_skip=files_to_skip)

    # Direct includes of Windows headers known to redefine StrCat.
    pattern_deny = input_api.re.compile(
        r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
        input_api.re.MULTILINE)
    # Including windows_defines.inc makes the direct include acceptable.
    pattern_allow = input_api.re.compile(
        r'^#include\s"base/win/windows_defines.inc"', input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(sources_filter):
        contents = input_api.ReadFile(f)
        if pattern_deny.search(
                contents) and not pattern_allow.search(contents):
            files.append(f.LocalPath())

        output_api.PresubmitError(
            'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
            'directly since they pollute code with StrCat macro. Instead, '
            'include matching header from base/win. See http://crbug.com/856536',
def _CheckNoUNIT_TESTInSourceFiles(input_api, f):
    """Returns problem strings for changed lines in |f| whose preprocessor
    conditionals reference the UNIT_TEST macro.

    Helper for CheckNoUNIT_TESTInSourceFiles; UNIT_TEST is only meant to be
    used in headers.
    """
    problems = []

    # Matches '#ifdef UNIT_TEST', '#ifndef UNIT_TEST' and
    # '#if defined(UNIT_TEST)' (with optional spaces/parens). The raw-string
    # prefix was missing, leaving invalid escape sequences such as '\s' in a
    # plain string literal (a SyntaxWarning on modern Python).
    unit_test_macro = input_api.re.compile(
        r'^\s*#.*(?:ifn?def\s+UNIT_TEST|defined\s*\(?\s*UNIT_TEST\s*\)?)(?:$|\s+)')
    for line_num, line in f.ChangedContents():
        if unit_test_macro.match(line):
            problems.append(' %s:%d' % (f.LocalPath(), line_num))

    return problems
def CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
    """Checks to make sure no source files use UNIT_TEST."""
    for f in input_api.AffectedFiles():
        # Only .cc and .mm implementation files are scanned.
        if (not f.LocalPath().endswith(('.cc', '.mm'))):
            _CheckNoUNIT_TESTInSourceFiles(input_api, f))

        output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
                                          '\n'.join(problems))
def CheckNoDISABLETypoInTests(input_api, output_api):
    """Checks to prevent attempts to disable tests with DISABLE_ prefix.

    This test warns if somebody tries to disable a test with the DISABLE_ prefix
    instead of DISABLED_. To filter false positives, reports are only generated
    if a corresponding MAYBE_ line exists.
    """
    problems = []

    # The following two patterns are looked for in tandem - is a test labeled
    # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED)
    maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
    disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')

    # This is for the case that a test is disabled on all platforms.
    full_disable_pattern = input_api.re.compile(
        r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
        input_api.re.MULTILINE)

    for f in input_api.AffectedFiles(False):
        if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
            continue

        # Search for MABYE_, DISABLE_ pairs.
        disable_lines = {}  # Maps of test name to line number.
        maybe_lines = {}
        for line_num, line in f.ChangedContents():
            disable_match = disable_pattern.search(line)
            if disable_match:
                disable_lines[disable_match.group(1)] = line_num
            maybe_match = maybe_pattern.search(line)
            if maybe_match:
                maybe_lines[maybe_match.group(1)] = line_num

        # Report DISABLE_ lines whose test name also appears with MAYBE_.
        disable_tests = set(disable_lines.keys())
        maybe_tests = set(maybe_lines.keys())
        for test in disable_tests.intersection(maybe_tests):
            problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))

        # Search for DISABLE_ occurrences within a TEST() macro.
        contents = input_api.ReadFile(f)
        full_disable_match = full_disable_pattern.search(contents)
        if full_disable_match:
            problems.append(' %s' % f.LocalPath())

    if not problems:
        return []
    return [
        output_api.PresubmitPromptWarning(
            'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
            '\n'.join(problems))
    ]
def CheckForgettingMAYBEInTests(input_api, output_api):
    """Checks to make sure tests disabled conditionally are not missing a
    corresponding MAYBE_ prefix.
    """
    # Expect at least a lowercase character in the test name. This helps rule out
    # false positives with macros wrapping the actual tests name.
    define_maybe_pattern = input_api.re.compile(
        r'^\#define MAYBE_(?P<test_name>\w*[a-z]\w*)')
    # The test_maybe_pattern needs to handle all of these forms. The standard:
    #   IN_PROC_TEST_F(SyncTest, MAYBE_Start) {
    # With a wrapper macro around the test name:
    #   IN_PROC_TEST_F(SyncTest, E2E_ENABLED(MAYBE_Start)) {
    # And the odd-ball NACL_BROWSER_TEST_f format:
    #   NACL_BROWSER_TEST_F(NaClBrowserTest, SimpleLoad, {
    # The optional E2E_ENABLED-style is handled with (\w*\()?
    # The NACL_BROWSER_TEST_F pattern is handled by allowing a trailing comma or
    # a closing paren after the test name.
    test_maybe_pattern = (
        r'^\s*\w*TEST[^(]*\(\s*\w+,\s*(\w*\()?MAYBE_{test_name}[\),]')
    suite_maybe_pattern = r'^\s*\w*TEST[^(]*\(\s*MAYBE_{test_name}[\),]'
    warnings = []

    # Read the entire files. We can't just read the affected lines, forgetting to
    # add MAYBE_ on a change would not show up otherwise.
    for f in input_api.AffectedFiles(False):
        if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
            continue
        contents = input_api.ReadFile(f)
        lines = contents.splitlines(True)
        current_position = 0
        warning_test_names = set()
        for line_num, line in enumerate(lines, start=1):
            current_position += len(line)
            maybe_match = define_maybe_pattern.search(line)
            if not maybe_match:
                continue
            test_name = maybe_match.group('test_name')
            # Do not warn twice for the same test.
            if test_name in warning_test_names:
                continue
            warning_test_names.add(test_name)

            # Attempt to find the corresponding MAYBE_ test or suite, starting
            # from the current position.
            test_match = input_api.re.compile(
                test_maybe_pattern.format(test_name=test_name),
                input_api.re.MULTILINE).search(contents, current_position)
            suite_match = input_api.re.compile(
                suite_maybe_pattern.format(test_name=test_name),
                input_api.re.MULTILINE).search(contents, current_position)
            if not test_match and not suite_match:
                warnings.append(
                    output_api.PresubmitPromptWarning(
                        '%s:%d found MAYBE_ defined without corresponding test %s'
                        % (f.LocalPath(), line_num, test_name)))
    return warnings
def CheckDCHECK_IS_ONHasBraces(input_api, output_api):
    """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
    errors = []
    # Negative lookahead: flag DCHECK_IS_ON not immediately followed by '()'.
    pattern = input_api.re.compile(r'\bDCHECK_IS_ON\b(?!\(\))',
                                   input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        if not f.LocalPath().endswith(('.cc', '.mm', '.h')):
            continue
        for lnum, line in f.ChangedContents():
            if input_api.re.search(pattern, line):
                errors.append(
                    output_api.PresubmitError((
                        '%s:%d: Use of DCHECK_IS_ON() must be written as "#if '
                        + 'DCHECK_IS_ON()", not forgetting the parentheses.') %
                        (f.LocalPath(), lnum)))
    return errors
2330 # TODO(crbug/1138055): Reimplement CheckUmaHistogramChangesOnUpload check in a
2331 # more reliable way. See
2332 # https://chromium-review.googlesource.com/c/chromium/src/+/2500269
def CheckFlakyTestUsage(input_api, output_api):
    """Check that FlakyTest annotation is our own instead of the android one."""
    pattern = input_api.re.compile(r'import android.test.FlakyTest;')
    files = []
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        if f.LocalPath().endswith('Test.java'):
            if pattern.search(input_api.ReadFile(f)):
                files.append(f)
    if files:
        return [
            output_api.PresubmitError(
                'Use org.chromium.base.test.util.FlakyTest instead of '
                'android.test.FlakyTest', files)
        ]
    return []
def CheckNoDEPSGIT(input_api, output_api):
    """Make sure .DEPS.git is never modified manually."""
    if any(f.LocalPath().endswith('.DEPS.git')
           for f in input_api.AffectedFiles()):
        return [
            output_api.PresubmitError(
                'Never commit changes to .DEPS.git. This file is maintained by an\n'
                'automated system based on what\'s in DEPS and your changes will be\n'
                'overwritten.\n'
                'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
                'get-the-code#Rolling_DEPS\n'
                'for more information')
        ]
    return []
def CheckCrosApiNeedBrowserTest(input_api, output_api):
    """Check new crosapi should add browser test.

    Warns when a new mojom file is added under chromeos/crosapi/mojom without
    any browser-test file being added or modified in the same change.
    """
    has_new_crosapi = False
    has_browser_test = False
    for f in input_api.AffectedFiles():
        path = f.LocalPath()
        # Only newly added ('A') mojom files under crosapi count.
        if (path.startswith('chromeos/crosapi/mojom') and
                _IsMojomFile(input_api, path) and f.Action() == 'A'):
            has_new_crosapi = True
        if path.endswith('browsertest.cc') or path.endswith('browser_test.cc'):
            has_browser_test = True
    if has_new_crosapi and not has_browser_test:
        return [
            output_api.PresubmitPromptWarning(
                'You are adding a new crosapi, but there is no file ends with '
                'browsertest.cc file being added or modified. It is important '
                'to add crosapi browser test coverage to avoid version '
                'skew issues.\n'
                'Check //docs/lacros/test_instructions.md for more information.'
            )
        ]
    return []
def CheckValidHostsInDEPSOnUpload(input_api, output_api):
    """Checks that DEPS file deps are from allowed_hosts."""
    # Run only if DEPS file has been modified to annoy fewer bystanders.
    if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
        return []
    # Outsource work to gclient verify
    try:
        gclient_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                              'third_party', 'depot_tools',
                                              'gclient.py')
        input_api.subprocess.check_output(
            [input_api.python3_executable, gclient_path, 'verify'],
            stderr=input_api.subprocess.STDOUT)
        return []
    except input_api.subprocess.CalledProcessError as error:
        return [
            output_api.PresubmitError(
                'DEPS file must have only git dependencies.',
                long_text=error.output)
        ]
2414 def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
2416 """Helper method for checking for banned constructs.
2418 Returns an string composed of the name of the file, the line number where the
2419 match has been found and the additional text passed as |message| in case the
2420 target type name matches the text inside the line passed as parameter.
2424 # Ignore comments about banned types.
2425 if input_api.re.search(r"^ *//", line):
2427 # A // nocheck comment will bypass this error.
2428 if line.endswith(" nocheck"):
2432 if ban_rule.pattern[0:1] == '/':
2433 regex = ban_rule.pattern[1:]
2434 if input_api.re.search(regex, line):
2436 elif ban_rule.pattern in line:
2440 result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
2441 for line in ban_rule.explanation:
2442 result.append(' %s' % line)
def CheckNoBannedFunctions(input_api, output_api):
    """Make sure that banned functions are not used."""
    warnings = []
    errors = []

    def IsExcludedFile(affected_file, excluded_paths):
        # Returns True if |affected_file| matches any regex in
        # |excluded_paths|.
        if not excluded_paths:
            return False

        local_path = affected_file.LocalPath()
        # Consistently use / as path separator to simplify the writing of regex
        # expressions.
        local_path = local_path.replace(input_api.os_path.sep, '/')
        for item in excluded_paths:
            if input_api.re.match(item, local_path):
                return True
        return False

    def IsIosObjcFile(affected_file):
        # Heuristic for iOS Objective-C(++) files: right extension plus an
        # 'ios' component in the basename or the path.
        local_path = affected_file.LocalPath()
        if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m',
                                                              '.h'):
            return False
        basename = input_api.os_path.basename(local_path)
        if 'ios' in basename.split('_'):
            return True
        for sep in (input_api.os_path.sep, input_api.os_path.altsep):
            if sep and 'ios' in local_path.split(sep):
                return True
        return False

    def CheckForMatch(affected_file, line_num: int, line: str,
                      ban_rule: BanRule):
        # Collects problems for |line| into errors/warnings per the rule's
        # severity.
        if IsExcludedFile(affected_file, ban_rule.excluded_paths):
            return

        problems = _GetMessageForMatchingType(input_api, affected_file,
                                              line_num, line, ban_rule)
        if problems:
            if ban_rule.treat_as_error is not None and ban_rule.treat_as_error:
                errors.extend(problems)
            else:
                warnings.extend(problems)

    file_filter = lambda f: f.LocalPath().endswith(('.java'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_JAVA_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    file_filter = lambda f: f.LocalPath().endswith(('.js', '.ts'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_JAVASCRIPT_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_OBJC_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_IOS_OBJC_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
    for f in input_api.AffectedFiles(file_filter=egtest_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_IOS_EGTEST_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_CPP_FUNCTIONS:
                CheckForMatch(f, line_num, line, ban_rule)

    file_filter = lambda f: f.LocalPath().endswith(('.mojom'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_MOJOM_PATTERNS:
                CheckForMatch(f, line_num, line, ban_rule)

    results = []
    if warnings:
        results.append(
            output_api.PresubmitPromptWarning('Banned functions were used.\n' +
                                              '\n'.join(warnings)))
    if errors:
        results.append(
            output_api.PresubmitError('Banned functions were used.\n' +
                                      '\n'.join(errors)))
    return results
def CheckNoLayoutCallsInTests(input_api, output_api):
    """Make sure there are no explicit calls to View::Layout() in tests."""
    warnings = []
    ban_rule = BanRule(
        r'/(\.|->)Layout\(\);',
        ('Direct calls to View::Layout() are not allowed in tests. '
         'If the view must be laid out here, use RunScheduledLayout(view). It '
         'is found in //ui/views/test/views_test_utils.h. '
         'See http://crbug.com/1350521 for more details.', ),
        False,
    )
    # Only look at unit/browser/ui test translation units.
    file_filter = lambda f: input_api.re.search(
        r'_(unittest|browsertest|ui_test).*\.(cc|mm)$', f.LocalPath())
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            problems = _GetMessageForMatchingType(input_api, f, line_num,
                                                  line, ban_rule)
            if problems:
                warnings.extend(problems)
    if warnings:
        return [
            output_api.PresubmitPromptWarning(
                'Banned call to View::Layout() in tests.\n\n'.join(warnings))
        ]
    return []
def _CheckAndroidNoBannedImports(input_api, output_api):
    """Make sure that banned java imports are not used."""
    errors = []

    file_filter = lambda f: f.LocalPath().endswith(('.java'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            for ban_rule in _BANNED_JAVA_IMPORTS:
                # Consider merging this into the above function. There is no
                # real difference anymore other than helping with a little
                # bit of boilerplate text. Doing so means things like
                # `treat_as_error` will also be uniformly handled.
                problems = _GetMessageForMatchingType(input_api, f, line_num,
                                                      line, ban_rule)
                if problems:
                    errors.extend(problems)
    results = []
    if errors:
        results.append(
            output_api.PresubmitError('Banned imports were used.\n' +
                                      '\n'.join(errors)))
    return results
def CheckNoPragmaOnce(input_api, output_api):
    """Make sure that #pragma once is not used in header files."""
    files = []
    pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        if not f.LocalPath().endswith('.h'):
            continue
        # This imported header is allowed to keep #pragma once.
        if f.LocalPath().endswith('com_imported_mstscax.h'):
            continue
        contents = input_api.ReadFile(f)
        if pattern.search(contents):
            files.append(f)

    if files:
        return [
            output_api.PresubmitError(
                'Do not use #pragma once in header files.\n'
                'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
                files)
        ]
    return []
def CheckNoTrinaryTrueFalse(input_api, output_api):
    """Checks to make sure we don't introduce use of foo ? true : false."""
    problems = []
    pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
    for f in input_api.AffectedFiles():
        if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
            continue

        for line_num, line in f.ChangedContents():
            if pattern.match(line):
                problems.append(' %s:%d' % (f.LocalPath(), line_num))

    if not problems:
        return []
    return [
        output_api.PresubmitPromptWarning(
            'Please consider avoiding the "? true : false" pattern if possible.\n'
            + '\n'.join(problems))
    ]
def CheckUnwantedDependencies(input_api, output_api):
    """Runs checkdeps on #include and import statements added in this
    change. Breaking - rules is an error, breaking ! rules is a
    warning.
    """
    # Return early if no relevant file types were modified.
    for f in input_api.AffectedFiles():
        path = f.LocalPath()
        if (_IsCPlusPlusFile(input_api, path) or _IsProtoFile(input_api, path)
                or _IsJavaFile(input_api, path)):
            break
    else:
        return []

    import sys
    # We need to wait until we have an input_api object and use this
    # roundabout construct to import checkdeps because this file is
    # eval-ed and thus doesn't have __file__.
    original_sys_path = sys.path
    try:
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(),
                                   'buildtools', 'checkdeps')
        ]
        import checkdeps
        from rules import Rule
    finally:
        # Restore sys.path to what it was before.
        sys.path = original_sys_path

    added_includes = []
    added_imports = []
    added_java_imports = []
    for f in input_api.AffectedFiles():
        if _IsCPlusPlusFile(input_api, f.LocalPath()):
            changed_lines = [line for _, line in f.ChangedContents()]
            added_includes.append([f.AbsoluteLocalPath(), changed_lines])
        elif _IsProtoFile(input_api, f.LocalPath()):
            changed_lines = [line for _, line in f.ChangedContents()]
            added_imports.append([f.AbsoluteLocalPath(), changed_lines])
        elif _IsJavaFile(input_api, f.LocalPath()):
            changed_lines = [line for _, line in f.ChangedContents()]
            added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])

    deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

    error_descriptions = []
    warning_descriptions = []
    error_subjects = set()
    warning_subjects = set()

    for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
            added_includes):
        path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
        description_with_path = '%s\n %s' % (path, rule_description)
        if rule_type == Rule.DISALLOW:
            error_descriptions.append(description_with_path)
            error_subjects.add("#includes")
        else:
            warning_descriptions.append(description_with_path)
            warning_subjects.add("#includes")

    for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
            added_imports):
        path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
        description_with_path = '%s\n %s' % (path, rule_description)
        if rule_type == Rule.DISALLOW:
            error_descriptions.append(description_with_path)
            error_subjects.add("imports")
        else:
            warning_descriptions.append(description_with_path)
            warning_subjects.add("imports")

    for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
            added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
        path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
        description_with_path = '%s\n %s' % (path, rule_description)
        if rule_type == Rule.DISALLOW:
            error_descriptions.append(description_with_path)
            error_subjects.add("imports")
        else:
            warning_descriptions.append(description_with_path)
            warning_subjects.add("imports")

    results = []
    if error_descriptions:
        results.append(
            output_api.PresubmitError(
                'You added one or more %s that violate checkdeps rules.' %
                " and ".join(error_subjects), error_descriptions))
    if warning_descriptions:
        results.append(
            output_api.PresubmitPromptOrNotify(
                'You added one or more %s of files that are temporarily\n'
                'allowed but being removed. Can you avoid introducing the\n'
                '%s? See relevant DEPS file(s) for details and contacts.' %
                (" and ".join(warning_subjects), "/".join(warning_subjects)),
                warning_descriptions))
    return results
def CheckFilePermissions(input_api, output_api):
    """Check that all files have their permissions properly set."""
    # File permissions are not meaningful on Windows checkouts.
    if input_api.platform == 'win32':
        return []
    checkperms_tool = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                             'tools', 'checkperms',
                                             'checkperms.py')
    args = [
        input_api.python3_executable, checkperms_tool, '--root',
        input_api.change.RepositoryRoot()
    ]
    with input_api.CreateTemporaryFile() as file_list:
        for f in input_api.AffectedFiles():
            # checkperms.py file/directory arguments must be relative to the
            # repository.
            file_list.write((f.LocalPath() + '\n').encode('utf8'))
        file_list.close()
        args += ['--file-list', file_list.name]
        try:
            input_api.subprocess.check_output(args)
            return []
        except input_api.subprocess.CalledProcessError as error:
            return [
                output_api.PresubmitError('checkperms.py failed:',
                                          long_text=error.output.decode(
                                              'utf-8', 'ignore'))
            ]
def CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
    """Makes sure we don't include ui/aura/window_property.h
    in header files.
    """
    pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
    errors = []
    for f in input_api.AffectedFiles():
        if not f.LocalPath().endswith('.h'):
            continue
        for line_num, line in f.ChangedContents():
            if pattern.match(line):
                errors.append(' %s:%d' % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'Header files should not include ui/aura/window_property.h',
                errors))
    return results
def CheckNoInternalHeapIncludes(input_api, output_api):
    """Makes sure we don't include any headers from
    third_party/blink/renderer/platform/heap/impl or
    third_party/blink/renderer/platform/heap/v8_wrapper from files outside of
    third_party/blink/renderer/platform/heap
    """
    impl_pattern = input_api.re.compile(
        r'^\s*#include\s*"third_party/blink/renderer/platform/heap/impl/.*"')
    v8_wrapper_pattern = input_api.re.compile(
        r'^\s*#include\s*"third_party/blink/renderer/platform/heap/v8_wrapper/.*"'
    )
    # Consistently use / as path separator to simplify the writing of regex
    # expressions.
    file_filter = lambda f: not input_api.re.match(
        r"^third_party/blink/renderer/platform/heap/.*",
        f.LocalPath().replace(input_api.os_path.sep, '/'))
    errors = []

    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            if impl_pattern.match(line) or v8_wrapper_pattern.match(line):
                errors.append(' %s:%d' % (f.LocalPath(), line_num))

    if errors:
        return [
            output_api.PresubmitError(
                'Do not include files from third_party/blink/renderer/platform/heap/impl'
                ' or third_party/blink/renderer/platform/heap/v8_wrapper. Use the '
                'relevant counterparts from third_party/blink/renderer/platform/heap',
                errors)
        ]
    return []
2827 def _CheckForVersionControlConflictsInFile(input_api, f):
2828 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
2830 for line_num, line in f.ChangedContents():
2831 if f.LocalPath().endswith(('.md', '.rst', '.txt')):
2832 # First-level headers in markdown look a lot like version control
2833 # conflict markers. http://daringfireball.net/projects/markdown/basics
2835 if pattern.match(line):
2836 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
def CheckForVersionControlConflicts(input_api, output_api):
    """Usually this is not intentional and will cause a compile failure."""
    errors = []
    for f in input_api.AffectedFiles():
        errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'Version control conflict markers found, please resolve.',
                errors))
    return results
def CheckGoogleSupportAnswerUrlOnUpload(input_api, output_api):
    """Warns when a Google support URL is addressed by answer number."""
    # Raw string: the original non-raw literal relied on invalid escape
    # sequences ('\.', '\/') that Python only tolerates with a warning.
    pattern = input_api.re.compile(r'support\.google\.com\/chrome.*/answer')
    errors = []
    for f in input_api.AffectedFiles():
        for line_num, line in f.ChangedContents():
            if pattern.search(line):
                errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))

    results = []
    if errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'Found Google support URL addressed by answer number. Please replace '
                'with a p= identifier instead. See crbug.com/679462\n',
                errors))
    return results
def CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
    """Warns about hardcoded Google service hostnames in low-level layers."""

    def FilterFile(affected_file):
        """Filter function for use with input_api.AffectedSourceFiles,
        below. This filters out everything except non-test files from
        top-level directories that generally speaking should not hard-code
        service URLs (e.g. src/android_webview/, src/content/ and others).
        """
        return input_api.FilterSourceFile(
            affected_file,
            files_to_check=[r'^(android_webview|base|content|net)/.*'],
            files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                           input_api.DEFAULT_FILES_TO_SKIP))

    # Matches a quoted string containing a Google-owned hostname.
    base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
                    r'\.(com|net)[^"]*"')
    # Lines where the URL only appears inside a // comment are allowed.
    comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
    pattern = input_api.re.compile(base_pattern)
    problems = []  # items are (filename, line_number, line)
    for f in input_api.AffectedSourceFiles(FilterFile):
        for line_num, line in f.ChangedContents():
            if not comment_pattern.search(line) and pattern.search(line):
                problems.append((f.LocalPath(), line_num, line))

    if problems:
        return [
            output_api.PresubmitPromptOrNotify(
                'Most layers below src/chrome/ should not hardcode service URLs.\n'
                'Are you sure this is correct?', [
                    ' %s:%d: %s' % (problem[0], problem[1], problem[2])
                    for problem in problems
                ])
        ]
    return []
def CheckChromeOsSyncedPrefRegistration(input_api, output_api):
    """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""

    def FileFilter(affected_file):
        """Includes directories known to be Chrome OS only."""
        return input_api.FilterSourceFile(
            affected_file,
            files_to_check=(
                '^ash/',
                '^chromeos/',  # Top-level src/chromeos.
                '.*/chromeos/',  # Any path component.
                '^components/arc',
                '^components/exo',
            ),
            files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))

    prefs = []
    priority_prefs = []
    for f in input_api.AffectedFiles(file_filter=FileFilter):
        for line_num, line in f.ChangedContents():
            if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF',
                                   line):
                prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
                prefs.append(' %s' % line)
            if input_api.re.search(
                    'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
                priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
                priority_prefs.append(' %s' % line)

    results = []
    if prefs:
        results.append(
            output_api.PresubmitPromptWarning(
                'Preferences were registered as SYNCABLE_PREF and will be controlled '
                'by browser sync settings. If these prefs should be controlled by OS '
                'sync settings use SYNCABLE_OS_PREF instead.\n' +
                '\n'.join(prefs)))
    if priority_prefs:
        # Bug fix: this warning previously joined |prefs| instead of
        # |priority_prefs|, so the priority-pref locations were never shown.
        results.append(
            output_api.PresubmitPromptWarning(
                'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
                'controlled by browser sync settings. If these prefs should be '
                'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
                'instead.\n' + '\n'.join(priority_prefs)))
    return results
def CheckNoAbbreviationInPngFileName(input_api, output_api):
    """Makes sure there are no abbreviations in the name of PNG files.
    The native_client_sdk directory is excluded because it has auto-generated PNG
    files for documentation.
    """
    errors = []
    files_to_check = [r'.*\.png$']
    files_to_skip = [
        r'^native_client_sdk/',
        r'^services/test/',
        r'^third_party/blink/web_tests/',
    ]
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check, files_to_skip=files_to_skip)
    # Flags names such as foo_h.png or foo_c_bar.png.
    abbreviation = input_api.re.compile(r'.+_[a-z]\.png|.+_[a-z]_.*\.png')
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        file_name = input_api.os_path.split(f.LocalPath())[1]
        if abbreviation.search(file_name):
            errors.append(' %s' % f.LocalPath())

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'The name of PNG files should not have abbreviations. \n'
                'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
                'Contact oshima@chromium.org if you have questions.', errors))
    return results
def CheckNoProductIconsAddedToPublicRepo(input_api, output_api):
    """Heuristically identifies product icons based on their file name and
    reminds contributors not to add them to the Chromium repository.
    """
    errors = []
    files_to_check = [r'.*google.*\.png$|.*google.*\.svg$|.*google.*\.icon$']
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check)
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append(' %s' % f.LocalPath())

    results = []
    if errors:
        # Give warnings instead of errors on presubmit --all and presubmit
        # --files runs, which have no diffs.
        message_type = (output_api.PresubmitNotifyResult if input_api.no_diffs
                        else output_api.PresubmitError)
        results.append(
            message_type(
                'Trademarked images should not be added to the public repo. '
                'See crbug.com/944754', errors))
    return results
3009 def _ExtractAddRulesFromParsedDeps(parsed_deps):
3010 """Extract the rules that add dependencies from a parsed DEPS file.
3013 parsed_deps: the locals dictionary from evaluating the DEPS file."""
3016 rule[1:] for rule in parsed_deps.get('include_rules', [])
3017 if rule.startswith('+') or rule.startswith('!')
3019 for _, rules in parsed_deps.get('specific_include_rules', {}).items():
3021 rule[1:] for rule in rules
3022 if rule.startswith('+') or rule.startswith('!')
3027 def _ParseDeps(contents):
3028 """Simple helper for parsing DEPS files."""
3030 # Stubs for handling special syntax in the root DEPS file.
3032 def __init__(self, local_scope):
3033 self._local_scope = local_scope
3035 def Lookup(self, var_name):
3036 """Implements the Var syntax."""
3038 return self._local_scope['vars'][var_name]
3040 raise Exception('Var is not defined: %s' % var_name)
3044 'Var': _VarImpl(local_scope).Lookup,
3048 exec(contents, global_scope, local_scope)
def _CalculateAddedDeps(os_path, old_contents, new_contents):
    """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
    a set of DEPS entries that we should look up.

    For a directory (rather than a specific filename) we fake a path to
    a specific filename by adding /DEPS. This is chosen as a file that
    will seldom or never be subject to per-file include_rules.
    """
    # We ignore deps entries on auto-generated directories.
    AUTO_GENERATED_DIRS = ['grit', 'jni']

    old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
    new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

    added_deps = new_deps.difference(old_deps)

    results = set()
    for added_dep in added_deps:
        if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
            continue
        # Assume that a rule that ends in .h is a rule for a specific file.
        if added_dep.endswith('.h'):
            results.add(added_dep)
        else:
            results.add(os_path.join(added_dep, 'DEPS'))
    return results
def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
    """When a dependency prefixed with + is added to a DEPS file, we
    want to make sure that the change is reviewed by an OWNER of the
    target file or directory, to avoid layering violations from being
    introduced. This check verifies that this happens.
    """
    # We rely on Gerrit's code-owners to check approvals.
    # input_api.gerrit is always set for Chromium, but other projects
    # might not use Gerrit.
    if not input_api.gerrit or input_api.no_diffs:
        return []
    if 'PRESUBMIT_SKIP_NETWORK' in input_api.environ:
        return []
    try:
        if (input_api.change.issue and
                input_api.gerrit.IsOwnersOverrideApproved(
                    input_api.change.issue)):
            # Skip OWNERS check when Owners-Override label is approved. This is
            # intended for global owners, trusted bots, and on-call sheriffs.
            # Review is still required for these changes.
            return []
    except Exception as e:
        return [output_api.PresubmitPromptWarning(
            'Failed to retrieve owner override status - %s' % str(e))]

    virtual_depended_on_files = set()

    # Consistently use / as path separator to simplify the writing of regex
    # expressions.
    file_filter = lambda f: not input_api.re.match(
        r"^third_party/blink/.*",
        f.LocalPath().replace(input_api.os_path.sep, '/'))
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        filename = input_api.os_path.basename(f.LocalPath())
        if filename == 'DEPS':
            virtual_depended_on_files.update(
                _CalculateAddedDeps(input_api.os_path,
                                    '\n'.join(f.OldContents()),
                                    '\n'.join(f.NewContents())))

    if not virtual_depended_on_files:
        return []

    if input_api.is_committing:
        if input_api.tbr:
            return [
                output_api.PresubmitNotifyResult(
                    '--tbr was specified, skipping OWNERS check for DEPS additions'
                )
            ]
        # TODO(dcheng): Make this generate an error on dry runs if the reviewer
        # is not added, to prevent review serialization.
        if input_api.dry_run:
            return [
                output_api.PresubmitNotifyResult(
                    'This is a dry run, skipping OWNERS check for DEPS additions'
                )
            ]
        if not input_api.change.issue:
            return [
                output_api.PresubmitError(
                    "DEPS approval by OWNERS check failed: this change has "
                    "no change number, so we can't check it for approvals.")
            ]
        output = output_api.PresubmitError
    else:
        output = output_api.PresubmitNotifyResult

    owner_email, reviewers = (
        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
            input_api, None, approval_needed=input_api.is_committing))

    owner_email = owner_email or input_api.change.author_email

    approval_status = input_api.owners_client.GetFilesApprovalStatus(
        virtual_depended_on_files, reviewers.union([owner_email]), [])
    missing_files = [
        f for f in virtual_depended_on_files
        if approval_status[f] != input_api.owners_client.APPROVED
    ]

    # We strip the /DEPS part that was added by
    # _FilesToCheckForIncomingDeps to fake a path to a file in a
    # directory.
    def StripDeps(path):
        start_deps = path.rfind('/DEPS')
        if start_deps != -1:
            return path[:start_deps]
        return path

    unapproved_dependencies = [
        "'+%s'," % StripDeps(path) for path in missing_files
    ]

    if unapproved_dependencies:
        output_list = [
            output(
                'You need LGTM from owners of depends-on paths in DEPS that were '
                'modified in this CL:\n %s' %
                '\n '.join(sorted(unapproved_dependencies)))
        ]
        suggested_owners = input_api.owners_client.SuggestOwners(
            missing_files, exclude=[owner_email])
        output_list.append(
            output('Suggested missing target path OWNERS:\n %s' %
                   '\n '.join(suggested_owners or [])))
        return output_list

    return []
# TODO: add unit tests.
def CheckSpamLogging(input_api, output_api):
    """Warns about changed lines that write noise to the console log.

    Scans changed C++/Objective-C++ implementation lines for
    LOG(INFO)/DLOG(INFO), LOG_IF(INFO, ...)/DLOG_IF(INFO, ...) and
    printf/fprintf(stdout|stderr), excluding files that legitimately write
    to the console (loggers, command-line tools, test shells, etc.).
    """
    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
    # Files below are allowed to log/print to the console.
        _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
        input_api.DEFAULT_FILES_TO_SKIP + (
            r"^base/fuchsia/scoped_fx_logger\.cc$",
            r"^base/logging\.h$",
            r"^base/logging\.cc$",
            r"^base/task/thread_pool/task_tracker\.cc$",
            r"^chrome/app/chrome_main_delegate\.cc$",
            r"^chrome/browser/ash/arc/enterprise/cert_store/arc_cert_installer\.cc$",
            r"^chrome/browser/ash/policy/remote_commands/user_command_arc_job\.cc$",
            r"^chrome/browser/chrome_browser_main\.cc$",
            r"^chrome/browser/ui/startup/startup_browser_creator\.cc$",
            r"^chrome/browser/browser_switcher/bho/.*",
            r"^chrome/browser/diagnostics/diagnostics_writer\.cc$",
            r"^chrome/chrome_cleaner/.*",
            r"^chrome/chrome_elf/dll_hash/dll_hash_main\.cc$",
            r"^chrome/installer/setup/.*",
            r"^components/cast",
            r"^components/media_control/renderer/media_playback_options\.cc$",
            r"^components/policy/core/common/policy_logger\.cc$",
            r"^components/viz/service/display/"
            r"overlay_strategy_underlay_cast\.cc$",
            r"^components/zucchini/.*",
            # TODO(peter): Remove exception. https://crbug.com/534537
            r"^content/browser/notifications/"
            r"notification_event_dispatcher_impl\.cc$",
            r"^content/common/gpu/client/gl_helper_benchmark\.cc$",
            r"^courgette/courgette_minimal_tool\.cc$",
            r"^courgette/courgette_tool\.cc$",
            r"^extensions/renderer/logging_native_handler\.cc$",
            r"^fuchsia_web/common/init_logging\.cc$",
            r"^fuchsia_web/runners/common/web_component\.cc$",
            r"^fuchsia_web/shell/.*\.cc$",
            r"^headless/app/headless_shell\.cc$",
            r"^ipc/ipc_logging\.cc$",
            r"^native_client_sdk/",
            r"^remoting/base/logging\.h$",
            r"^remoting/host/.*",
            r"^sandbox/linux/.*",
            r"^storage/browser/file_system/dump_file_system\.cc$",
            r"^ui/base/resource/data_pack\.cc$",
            r"^ui/aura/bench/bench_main\.cc$",
            r"^ui/ozone/platform/cast/",
            r"^ui/base/x/xwmstartupcheck/"
            r"xwmstartupcheck\.cc$"))
    source_file_filter = lambda x: input_api.FilterSourceFile(
        x, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)

    for f in input_api.AffectedSourceFiles(source_file_filter):
        for _, line in f.ChangedContents():
            # LOG(INFO) / DLOG(INFO), and the LOG_IF/DLOG_IF(INFO, ...) forms.
            if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
                log_info.add(f.LocalPath())
            elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
                log_info.add(f.LocalPath())

            # Raw printf-family writes to the console.
            if input_api.re.search(r"\bprintf\(", line):
                printf.add(f.LocalPath())
            elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
                printf.add(f.LocalPath())

        output_api.PresubmitError(
            'These files spam the console log with LOG(INFO):',
        output_api.PresubmitError(
            'These files spam the console log with printf/fprintf:',
def CheckForAnonymousVariables(input_api, output_api):
    """These types are all expected to hold locks while in scope and
    so should never be anonymous (which causes them to be immediately
    destroyed instead of living until the end of the scope).
    """
    # Skia scoped-helper types that must be bound to a named variable.
    they_who_must_be_named = [
        'SkAutoAlphaRestore',
        'SkAutoBitmapShaderInstall',
        'SkAutoBlitterChoose',
        'SkAutoBounderCommit',
        'SkAutoCanvasRestore',
        'SkAutoCommentBlock',
        'SkAutoDisableDirectionCheck',
        'SkAutoDisableOvalCheck',
        'SkAutoMaskFreeImage',
        'SkAutoMutexAcquire',
        'SkAutoPathBoundsUpdate',
        'SkAutoRasterClipValidate',
    # Matches e.g. "SkAutoMutexAcquire(" or "SkAutoMutexAcquire{" with no
    # variable name between the type and the opening bracket.
    anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
    # bad: base::AutoLock(lock.get());
    # not bad: base::AutoLock lock(lock.get());
    bad_pattern = input_api.re.compile(anonymous)
    # good: new base::AutoLock(lock.get())
    good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)

    for f in input_api.AffectedFiles():
        if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
        for linenum, line in f.ChangedContents():
            if bad_pattern.search(line) and not good_pattern.search(line):
                errors.append('%s:%d' % (f.LocalPath(), linenum))

        output_api.PresubmitError(
            'These lines create anonymous variables that need to be named:',
def CheckUniquePtrOnUpload(input_api, output_api):
    """Warns/errors on std::unique_ptr usages that should be simplified.

    Flags explicit std::unique_ptr<T>(foo) construction (use std::make_unique)
    and std::unique_ptr<T>() (use nullptr) in changed implementation lines.
    """
    # Returns whether |template_str| is of the form <T, U...> for some types T
    # and U. Assumes that |template_str| is already in the form <...>.
    def HasMoreThanOneArg(template_str):
        # Level of <...> nesting.
        for c in template_str:
            elif c == ',' and nesting == 1:

    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=file_inclusion_pattern)

    # Pattern to capture a single "<...>" block of template arguments. It can
    # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
    # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
    # latter would likely require counting that < and > match, which is not
    # expressible in regular languages. Should the need arise, one can introduce
    # limited counting (matching up to a total number of nesting depth), which
    # should cover all practical cases for already a low nesting limit.
    template_arg_pattern = (
        r'<[^>]*'  # Opening block of <.
        r'>([^<]*>)?')  # Closing block of >.
    # Prefix expressing that whatever follows is not already inside a <...>
    not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
    null_construct_pattern = input_api.re.compile(
        not_inside_template_arg_pattern + r'\bstd::unique_ptr' +
        template_arg_pattern + r'\(\)')

    # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
    template_arg_no_array_pattern = (
        r'<[^>]*[^]]'  # Opening block of <.
        r'>([^(<]*[^]]>)?')  # Closing block of >.
    # Prefix saying that what follows is the start of an expression.
    start_of_expr_pattern = r'(=|\breturn|^)\s*'
    # Suffix saying that what follows are call parentheses with a non-empty list
    nonempty_arg_list_pattern = r'\(([^)]|$)'
    # Put the template argument into a capture group for deeper examination later.
    return_construct_pattern = input_api.re.compile(
        start_of_expr_pattern + r'std::unique_ptr' + '(?P<template_arg>' +
        template_arg_no_array_pattern + ')' + nonempty_arg_list_pattern)

    problems_constructor = []
    problems_nullptr = []
    for f in input_api.AffectedSourceFiles(sources):
        for line_number, line in f.ChangedContents():
            # return std::unique_ptr<T>(foo);
            # bar = std::unique_ptr<T>(foo);
            # return std::unique_ptr<T[]>(foo);
            # bar = std::unique_ptr<T[]>(foo);
            # And also allow cases when the second template argument is present. Those
            # cases cannot be handled by std::make_unique:
            # return std::unique_ptr<T, U>(foo);
            # bar = std::unique_ptr<T, U>(foo);
            local_path = f.LocalPath()
            return_construct_result = return_construct_pattern.search(line)
            if return_construct_result and not HasMoreThanOneArg(
                    return_construct_result.group('template_arg')):
                problems_constructor.append(
                    '%s:%d\n    %s' % (local_path, line_number, line.strip()))
            # std::unique_ptr<T>()
            if null_construct_pattern.search(line):
                problems_nullptr.append(
                    '%s:%d\n    %s' % (local_path, line_number, line.strip()))

    if problems_nullptr:
        output_api.PresubmitPromptWarning(
            'The following files use std::unique_ptr<T>(). Use nullptr instead.',
    if problems_constructor:
        output_api.PresubmitError(
            'The following files use explicit std::unique_ptr constructor. '
            'Use std::make_unique<T>() instead, or use base::WrapUnique if '
            'std::make_unique is not an option.', problems_constructor))
def CheckUserActionUpdate(input_api, output_api):
    """Checks if any new user action has been added."""
    if any('actions.xml' == input_api.os_path.basename(f)
           for f in input_api.LocalPaths()):
        # If actions.xml is already included in the changelist, the PRESUBMIT
        # for actions.xml will do a more complete presubmit check.

    file_inclusion_pattern = [r'.*\.(cc|mm)$']
    files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP)
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)

    # Matches UserMetricsAction("Name" and captures the action name.
    action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
    current_actions = None
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            match = input_api.re.search(action_re, line)
            # Loads contents in tools/metrics/actions/actions.xml to memory. It's
            if not current_actions:
                with open('tools/metrics/actions/actions.xml',
                          encoding='utf-8') as actions_f:
                    current_actions = actions_f.read()
            # Search for the matched user action name in |current_actions|.
            for action_name in match.groups():
                action = 'name="{0}"'.format(action_name)
                if action not in current_actions:
                    output_api.PresubmitPromptWarning(
                        'File %s line %d: %s is missing in '
                        'tools/metrics/actions/actions.xml. Please run '
                        'tools/metrics/actions/extract_actions.py to update.'
                        % (f.LocalPath(), line_num, action_name))
def _ImportJSONCommentEater(input_api):
    """Imports and returns the json_comment_eater module from
    //tools/json_comment_eater by temporarily extending sys.path."""
    sys.path = sys.path + [
        input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                               'json_comment_eater')
    import json_comment_eater
    return json_comment_eater
def _GetJSONParseError(input_api, filename, eat_comments=True):
    """Parses |filename| as JSON to detect syntax errors.

    When |eat_comments| is true, "//" comments are stripped first via
    json_comment_eater, since most Chromium JSON files allow them.
    """
    contents = input_api.ReadFile(filename)
    json_comment_eater = _ImportJSONCommentEater(input_api)
    contents = json_comment_eater.Nom(contents)
    # Parse for side effect only; a ValueError indicates malformed JSON.
    input_api.json.loads(contents)
    except ValueError as e:
def _GetIDLParseError(input_api, filename):
    """Runs the json_schema_compiler IDL parser over |filename| and returns
    its stderr output (or None when the file parses cleanly)."""
    contents = input_api.ReadFile(filename)
    # IDL files must be pure ASCII; report the first offending character.
    for i, char in enumerate(contents):
        if not char.isascii():
            'Non-ascii character "%s" (ord %d) found at offset %d.' %
            (char, ord(char), i))
    idl_schema = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                        'tools', 'json_schema_compiler',
    # Feed the file contents to the schema tool over stdin; any stderr
    # output is treated as a parse error.
    process = input_api.subprocess.Popen(
        [input_api.python3_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
    except ValueError as e:
def CheckParseErrors(input_api, output_api):
    """Check that IDL and JSON files do not contain syntax errors."""
    # Maps file extension to the parse-error checker to run for it.
        '.idl': _GetIDLParseError,
        '.json': _GetJSONParseError,
    # Most JSON files are preprocessed and support comments, but these do not.
    json_no_comments_patterns = [
    # Only run IDL checker on files in these directories.
    idl_included_patterns = [
        r'^chrome/common/extensions/api/',
        r'^extensions/common/api/',

    def get_action(affected_file):
        # Picks the checker for the file based on its extension, if any.
        filename = affected_file.LocalPath()
        return actions.get(input_api.os_path.splitext(filename)[1])

    def FilterFile(affected_file):
        # Only check files that have a parser and are not known-invalid
        # test data.
        action = get_action(affected_file)
        path = affected_file.LocalPath()

        if _MatchesFile(input_api,
                        _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS, path):

        if (action == _GetIDLParseError
                and not _MatchesFile(input_api, idl_included_patterns, path)):

    for affected_file in input_api.AffectedFiles(file_filter=FilterFile,
                                                 include_deletes=False):
        action = get_action(affected_file)
        if (action == _GetJSONParseError
                and _MatchesFile(input_api, json_no_comments_patterns,
                                 affected_file.LocalPath())):
            kwargs['eat_comments'] = False
        parse_error = action(input_api, affected_file.AbsoluteLocalPath(),
            output_api.PresubmitError(
                '%s could not be parsed: %s' %
                (affected_file.LocalPath(), parse_error)))
def CheckJavaStyle(input_api, output_api):
    """Runs checkstyle on changed java files and returns errors if any exist."""
    # Return early if no java files were modified.
        _IsJavaFile(input_api, f.LocalPath())
        for f in input_api.AffectedFiles()):

    # Temporarily extend sys.path so //tools/android/checkstyle is importable.
    original_sys_path = sys.path
    sys.path = sys.path + [
        input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                               'android', 'checkstyle')
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

    return checkstyle.run_presubmit(
        files_to_skip=_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP)
def CheckPythonDevilInit(input_api, output_api):
    """Checks to make sure devil is initialized correctly in python scripts."""
    # Direct devil initialization calls that should be replaced by
    # devil_chromium.Initialize().
    script_common_initialize_pattern = input_api.re.compile(
        r'script_common\.InitializeEnvironment\(')
    devil_env_config_initialize = input_api.re.compile(
        r'devil_env\.config\.Initialize\(')

    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP + (
            r'^build/android/devil_chromium\.py',
        files_to_check=[r'.*\.py$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if (script_common_initialize_pattern.search(line)
                    or devil_env_config_initialize.search(line)):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitError(
            'Devil initialization should always be done using '
            'devil_chromium.Initialize() in the chromium project, to use better '
            'defaults for dependencies (ex. up-to-date version of adb).',
3631 def _MatchesFile(input_api, patterns, path):
3632 # Consistently use / as path separator to simplify the writing of regex
3634 path = path.replace(input_api.os_path.sep, '/')
3635 for pattern in patterns:
3636 if input_api.re.search(pattern, path):
def _ChangeHasSecurityReviewer(input_api, owners_file):
    """Returns True iff the CL has a reviewer from SECURITY_OWNERS.

    Args:
      input_api: The presubmit input API.
      owners_file: OWNERS file with required reviewers. Typically, this is
        something like ipc/SECURITY_OWNERS.

    Note: if the presubmit is running for commit rather than for upload, this
    only returns True if a security reviewer has also approved the CL.
    """
    # Owners-Override should bypass all additional OWNERS enforcement checks.
    # A CR+1 vote will still be required to land this change.
    if (input_api.change.issue and input_api.gerrit.IsOwnersOverrideApproved(
            input_api.change.issue)):

    owner_email, reviewers = (
        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
            approval_needed=input_api.is_committing and not input_api.dry_run))

    security_owners = input_api.owners_client.ListOwners(owners_file)
    return any(owner in reviewers for owner in security_owners)
3669 class _SecurityProblemWithItems:
3671 items: Sequence[str]
@dataclass
class _MissingSecurityOwnersResult:
    """Result of scanning a change for missing security OWNERS coverage
    (produced by _FindMissingSecurityOwners)."""
    # Problems for OWNERS files that are missing required per-file rules.
    owners_file_problems: Sequence[_SecurityProblemWithItems]
    # True if the change touches any security-sensitive file at all.
    has_security_sensitive_files: bool
    # Problem describing a missing security reviewer, if any.
    missing_reviewer_problem: Optional[_SecurityProblemWithItems]
def _FindMissingSecurityOwners(input_api,
                               file_patterns: Sequence[str],
                               excluded_patterns: Sequence[str],
                               required_owners_file: str,
                               custom_rule_function: Optional[Callable] = None
                               ) -> _MissingSecurityOwnersResult:
    """Find OWNERS files missing per-file rules for security-sensitive files.

    Args:
      input_api: the PRESUBMIT input API object.
      output_api: the PRESUBMIT output API object.
      file_patterns: basename patterns that require a corresponding per-file
        security restriction.
      excluded_patterns: path patterns that should be exempted from
        requiring a security restriction.
      required_owners_file: path to the required OWNERS file, e.g.
      cc_alias: If not None, email that will be CCed automatically if the
        change contains security-sensitive files, as determined by
        `file_patterns` and `excluded_patterns`.
      custom_rule_function: If not None, will be called with `input_api` and
        the current file under consideration. Returning True will add an
        exact match per-file rule check for the current file.
    """
    # `to_check` is a mapping of an OWNERS file path to Patterns.
    #
    # Patterns is a dictionary mapping glob patterns (suitable for use in
    # per-file rules) to a PatternEntry.
    #
    # PatternEntry is a dictionary with two keys:
    # - 'files': the files that are matched by this pattern
    # - 'rules': the per-file rules needed for this pattern
    #
    # For example, if we expect OWNERS file to contain rules for *.mojom and
    # *_struct_traits*.*, Patterns might look like this:
    #         'per-file *.mojom=set noparent',
    #         'per-file *.mojom=file://ipc/SECURITY_OWNERS',
    #     '*_struct_traits*.*': {
    #         'per-file *_struct_traits*.*=set noparent',
    #         'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
    files_to_review = []

    def AddPatternToCheck(file, pattern):
        # Registers |pattern| as needing per-file security rules in the
        # OWNERS file of the directory containing |file|.
        owners_file = input_api.os_path.join(
            input_api.os_path.dirname(file.LocalPath()), 'OWNERS')
        if owners_file not in to_check:
            to_check[owners_file] = {}
        if pattern not in to_check[owners_file]:
            to_check[owners_file][pattern] = {
                    f'per-file {pattern}=set noparent',
                    f'per-file {pattern}=file://{required_owners_file}',
        to_check[owners_file][pattern]['files'].append(file.LocalPath())
        files_to_review.append(file.LocalPath())

    # Only enforce security OWNERS rules for a directory if that directory has a
    # file that matches `file_patterns`. For example, if a directory only
    # contains *.mojom files and no *_messages*.h files, the check should only
    # ensure that rules for *.mojom files are present.
    for file in input_api.AffectedFiles(include_deletes=False):
        file_basename = input_api.os_path.basename(file.LocalPath())
        if custom_rule_function is not None and custom_rule_function(
            AddPatternToCheck(file, file_basename)

            input_api.fnmatch.fnmatch(file.LocalPath(), pattern)
            for pattern in excluded_patterns):

        for pattern in file_patterns:
            # Unlike `excluded_patterns`, `file_patterns` is checked only against the
            if input_api.fnmatch.fnmatch(file_basename, pattern):
                AddPatternToCheck(file, pattern)

    has_security_sensitive_files = bool(to_check)

    # Check if any newly added lines in OWNERS files intersect with required
    # per-file OWNERS lines. If so, ensure that a security reviewer is included.
    # This is a hack, but is needed because the OWNERS check (by design) ignores
    # new OWNERS entries; otherwise, a non-owner could add someone as a new
    # OWNER and have that newly-added OWNER self-approve their own addition.
    newly_covered_files = []
    for file in input_api.AffectedFiles(include_deletes=False):
        if not file.LocalPath() in to_check:
        for _, line in file.ChangedContents():
            for _, entry in to_check[file.LocalPath()].items():
                if line in entry['rules']:
                    newly_covered_files.extend(entry['files'])

    missing_reviewer_problems = None
    if newly_covered_files and not _ChangeHasSecurityReviewer(
            input_api, required_owners_file):
        missing_reviewer_problems = _SecurityProblemWithItems(
            f'Review from an owner in {required_owners_file} is required for '
            'the following newly-added files:',
            [f'{file}' for file in sorted(set(newly_covered_files))])

    # Go through the OWNERS files to check, filtering out rules that are already
    # present in that OWNERS file.
    for owners_file, patterns in to_check.items():
            input_api.os_path.join(input_api.change.RepositoryRoot(),
                                   owners_file)).splitlines())
        for entry in patterns.values():
                rule for rule in entry['rules'] if rule not in lines
            # No OWNERS file, so all the rules are definitely missing.

    # All the remaining lines weren't found in OWNERS files, so emit an error.
    owners_file_problems = []

    for owners_file, patterns in to_check.items():
        for _, entry in patterns.items():
            files.extend(entry['files'])
            missing_lines.extend(entry['rules'])

        joined_missing_lines = '\n'.join(line for line in missing_lines)
        owners_file_problems.append(
            _SecurityProblemWithItems(
                'Found missing OWNERS lines for security-sensitive files. '
                f'Please add the following lines to {owners_file}:\n'
                f'{joined_missing_lines}\n\nTo ensure security review for:',

    return _MissingSecurityOwnersResult(owners_file_problems,
                                        has_security_sensitive_files,
                                        missing_reviewer_problems)
def _CheckChangeForIpcSecurityOwners(input_api, output_api):
    """Finds OWNERS files missing ipc/SECURITY_OWNERS rules for IPC files."""
    # Whether or not a file affects IPC is (mostly) determined by a simple list
    # of filename patterns.
        '*_param_traits*.*',
        '*_mojom_traits*.*',
        '*_type_converter*.*',
        # Android native IPC:

    excluded_patterns = [
        # These third_party directories do not contain IPCs, but contain files
        # matching the above patterns, which trigger false positives.
        'third_party/crashpad/*',
        'third_party/blink/renderer/platform/bindings/*',
        'third_party/protobuf/benchmarks/python/*',
        'third_party/win_build_output/*',
        # Enum-only mojoms used for web metrics, so no security review needed.
        'third_party/blink/public/mojom/use_counter/metrics/*',
        # These files are just used to communicate between class loaders running
        # in the same process.
        'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
        'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',

    def IsMojoServiceManifestFile(input_api, file):
        # Custom rule: identifies C++ service manifest files by name and
        # content, excluding test manifests.
        manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
        test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
        if not manifest_pattern.search(file.LocalPath()):
        if test_manifest_pattern.search(file.LocalPath()):
        # All actual service manifest files should contain at least one
        # qualified reference to service_manager::Manifest.
        return any('service_manager::Manifest' in line
                   for line in file.NewContents())

    return _FindMissingSecurityOwners(
        'ipc/SECURITY_OWNERS',
        custom_rule_function=IsMojoServiceManifestFile)
def _CheckChangeForFuchsiaSecurityOwners(input_api, output_api):
    """Finds OWNERS files missing build/fuchsia/SECURITY_OWNERS rules for
    Fuchsia component/protocol specification files."""
        # Component specifications.
        '*.cml',  # Component Framework v2.
        '*.cmx',  # Component Framework v1.

        # Fuchsia IDL protocol specifications.

    # Don't check for owners files for changes in these directories.
    excluded_patterns = [
        'third_party/crashpad/*',

    return _FindMissingSecurityOwners(input_api, output_api, file_patterns,
                                      'build/fuchsia/SECURITY_OWNERS')
def CheckSecurityOwners(input_api, output_api):
    """Checks that various security-sensitive files have an IPC OWNERS rule."""
    ipc_results = _CheckChangeForIpcSecurityOwners(input_api, output_api)
    fuchsia_results = _CheckChangeForFuchsiaSecurityOwners(
        input_api, output_api)

    # CC the IPC security review list whenever IPC-sensitive files change.
    if ipc_results.has_security_sensitive_files:
        output_api.AppendCC('ipc-security-reviews@chromium.org')

    missing_reviewer_problems = []
    if ipc_results.missing_reviewer_problem:
        missing_reviewer_problems.append(ipc_results.missing_reviewer_problem)
    if fuchsia_results.missing_reviewer_problem:
        missing_reviewer_problems.append(
            fuchsia_results.missing_reviewer_problem)

    # Missing reviewers are an error unless there's no issue number
    # associated with this branch; in that case, the presubmit is being run
    # with --all or --files.
    #
    # Note that upload should never be an error; otherwise, it would be
    # impossible to upload changes at all.
    if input_api.is_committing and input_api.change.issue:
        make_presubmit_message = output_api.PresubmitError
        make_presubmit_message = output_api.PresubmitNotifyResult
    for problem in missing_reviewer_problems:
            make_presubmit_message(problem.problem, items=problem.items))

    owners_file_problems = []
    owners_file_problems.extend(ipc_results.owners_file_problems)
    owners_file_problems.extend(fuchsia_results.owners_file_problems)

    for problem in owners_file_problems:
        # Missing per-file rules are always an error. While swarming and caching
        # means that uploading a patchset with updated OWNERS files and sending
        # it to the CQ again should not have a large incremental cost, it is
        # still frustrating to discover the error only after the change has
        # already been uploaded.
            output_api.PresubmitError(problem.problem, items=problem.items))
def _GetFilesUsingSecurityCriticalFunctions(input_api):
    """Checks affected files for changes to security-critical calls. This
    function checks the full change diff, to catch both additions/changes
    and removals.

    Returns a dict keyed by file name, and the value is a set of detected
    function names.
    """
    # Map of function pretty name (displayed in an error) to the pattern to
    _PATTERNS_TO_CHECK = {
        'content::GetServiceSandboxType<>()': 'GetServiceSandboxType\\<'
    # Pre-compile the patterns for use in the per-line scan below.
    _PATTERNS_TO_CHECK = {
        k: input_api.re.compile(v)
        for k, v in _PATTERNS_TO_CHECK.items()

    # We don't want to trigger on strings within this file.
    def presubmit_file_filter(f):
        return 'PRESUBMIT.py' != input_api.os_path.split(f.LocalPath())[1]

    # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
    files_to_functions = {}
    for f in input_api.AffectedFiles(file_filter=presubmit_file_filter):
        diff = f.GenerateScmDiff()
        for line in diff.split('\n'):
            # Not using just RightHandSideLines() because removing a
            # call to a security-critical function can be just as important
            # as adding or changing the arguments.
            if line.startswith('-') or (line.startswith('+')
                                        and not line.startswith('++')):
                for name, pattern in _PATTERNS_TO_CHECK.items():
                    if pattern.search(line):
                        path = f.LocalPath()
                        if not path in files_to_functions:
                            files_to_functions[path] = set()
                        files_to_functions[path].add(name)
    return files_to_functions
def CheckSecurityChanges(input_api, output_api):
    """Checks that changes involving security-critical functions are reviewed
    by the security team.
    """
    files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
    if not len(files_to_functions):

    # A reviewer from ipc/SECURITY_OWNERS satisfies the check.
    owners_file = 'ipc/SECURITY_OWNERS'
    if _ChangeHasSecurityReviewer(input_api, owners_file):

    # Build a message listing each affected file and the functions it touches.
    msg = 'The following files change calls to security-sensitive functions\n' \
          'that need to be reviewed by {}.\n'.format(owners_file)
    for path, names in files_to_functions.items():
        msg += ' {}\n'.format(path)
        msg += ' {}\n'.format(name)

    # Error when committing; only a notification on upload.
    if input_api.is_committing:
        output = output_api.PresubmitError
        output = output_api.PresubmitNotifyResult
    return [output(msg)]
def CheckSetNoParent(input_api, output_api):
    """Checks that set noparent is only used together with an OWNERS file in
    //build/OWNERS.setnoparent (see also
    //docs/code_reviews.md#owners-files-details)
    """
    # Return early if no OWNERS files were modified.
    if not any(f.LocalPath().endswith('OWNERS')
               for f in input_api.AffectedFiles(include_deletes=False)):

    # Load the allowlist of OWNERS files that "set noparent" may point at.
    allowed_owners_files_file = 'build/OWNERS.setnoparent'
    allowed_owners_files = set()
    with open(allowed_owners_files_file, 'r', encoding='utf-8') as f:
        if not line or line.startswith('#'):
        allowed_owners_files.add(line)

    per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')

    for f in input_api.AffectedFiles(include_deletes=False):
        if not f.LocalPath().endswith('OWNERS'):

        found_owners_files = set()
        found_set_noparent_lines = dict()

        # Parse the OWNERS file.
        for lineno, line in enumerate(f.NewContents(), 1):
            if line.startswith('set noparent'):
                found_set_noparent_lines[''] = lineno
            if line.startswith('file://'):
                if line in allowed_owners_files:
                    found_owners_files.add('')
            if line.startswith('per-file'):
                match = per_file_pattern.match(line)
                glob = match.group(1).strip()
                directive = match.group(2).strip()
                if directive == 'set noparent':
                    found_set_noparent_lines[glob] = lineno
                if directive.startswith('file://'):
                    if directive in allowed_owners_files:
                        found_owners_files.add(glob)

        # Check that every set noparent line has a corresponding file:// line
        # listed in build/OWNERS.setnoparent. An exception is made for top level
        # directories since src/OWNERS shouldn't review them.
        linux_path = f.LocalPath().replace(input_api.os_path.sep, '/')
        if (linux_path.count('/') != 1
                and (not linux_path in _EXCLUDED_SET_NO_PARENT_PATHS)):
            for set_noparent_line in found_set_noparent_lines:
                if set_noparent_line in found_owners_files:
                errors.append(' %s:%d' %
                              found_set_noparent_lines[set_noparent_line]))

    # Error when committing; prompt warning on upload.
    if input_api.is_committing:
        output = output_api.PresubmitError
        output = output_api.PresubmitPromptWarning
        'Found the following "set noparent" restrictions in OWNERS files that '
        'do not include owners from build/OWNERS.setnoparent:',
        long_text='\n\n'.join(errors)))
def CheckUselessForwardDeclarations(input_api, output_api):
    """Checks that added or removed lines in non third party affected
    header files do not lead to new useless class or struct forward
    declarations.
    """
    class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
                                         input_api.re.MULTILINE)
    struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
                                          input_api.re.MULTILINE)
    for f in input_api.AffectedFiles(include_deletes=False):
        # Skip third_party, except for blink (which is first-party code).
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):

        if not f.LocalPath().endswith('.h'):

        contents = input_api.ReadFile(f)
        fwd_decls = input_api.re.findall(class_pattern, contents)
        fwd_decls.extend(input_api.re.findall(struct_pattern, contents))

        useless_fwd_decls = []
        for decl in fwd_decls:
            # Count occurrences of the declared name; if the forward
            # declaration itself is the only one, the declaration is useless.
            count = sum(1 for _ in input_api.re.finditer(
                r'\b%s\b' % input_api.re.escape(decl), contents))
                useless_fwd_decls.append(decl)

        if not useless_fwd_decls:

        # Only warn when this CL's diff actually touched the declaration.
        for line in f.GenerateScmDiff().splitlines():
            if (line.startswith('-') and not line.startswith('--')
                    or line.startswith('+') and not line.startswith('++')):
                for decl in useless_fwd_decls:
                    if input_api.re.search(r'\b%s\b' % decl, line[1:]):
                        output_api.PresubmitPromptWarning(
                            '%s: %s forward declaration is no longer needed'
                            % (f.LocalPath(), decl)))
                        useless_fwd_decls.remove(decl)
def _CheckAndroidDebuggableBuild(input_api, output_api):
    """Checks that code uses BuildInfo.isDebugAndroid() instead of
    Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
    this is a debuggable build of Android.
    """
    build_type_check_pattern = input_api.re.compile(
        r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')

    sources = lambda affected_file: input_api.FilterSourceFile(
        _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
        DEFAULT_FILES_TO_SKIP + (
            r"^android_webview/support_library/boundary_interfaces/",
            r"^chrome/android/webapk/.*",
            r"tools/android/customtabs_benchmark/.*",
            r"webview/chromium/License.*",
        files_to_check=[r'.*\.java$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if build_type_check_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitPromptWarning(
            'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
            ' Please use BuildInfo.isDebugAndroid() instead.', errors))
4190 # TODO: add unit tests
def _CheckAndroidToastUsage(input_api, output_api):
    """Checks that code uses org.chromium.ui.widget.Toast instead of
    android.widget.Toast (Chromium Toast doesn't force hardware
    acceleration on low-end devices, saving memory).
    """
    toast_import_pattern = input_api.re.compile(
        r'^import android\.widget\.Toast;$')

    def java_sources(affected_file):
        return input_api.FilterSourceFile(
            affected_file,
            files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                           input_api.DEFAULT_FILES_TO_SKIP +
                           (r'^chromecast/.*', r'^remoting/.*')),
            files_to_check=[r'.*\.java$'])

    errors = []
    for f in input_api.AffectedSourceFiles(java_sources):
        for line_num, line in f.ChangedContents():
            if toast_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'android.widget.Toast usage is detected. Android toasts use hardware'
                ' acceleration, and can be\ncostly on low-end devices. Please use'
                ' org.chromium.ui.widget.Toast instead.\n'
                'Contact dskiba@chromium.org if you have any questions.',
                errors))
    return results
def _CheckAndroidCrLogUsage(input_api, output_api):
    """Checks that new logs using org.chromium.base.Log:
      - Are using 'TAG' as variable name for the tags (warn)
      - Are using a tag that is shorter than 20 characters (error)
    """

    # Do not check format of logs in the given files
    cr_log_check_excluded_paths = [
        # //chrome/android/webapk cannot depend on //base
        r"^chrome/android/webapk/.*",
        # WebView license viewer code cannot depend on //base; used in stub APK.
        r"^android_webview/glue/java/src/com/android/"
        r"webview/chromium/License.*",
        # The customtabs_benchmark is a small app that does not depend on
        # Chromium java pieces.
        r"tools/android/customtabs_benchmark/.*",
    ]

    cr_log_import_pattern = input_api.re.compile(
        r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
    class_in_base_pattern = input_api.re.compile(
        r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
    has_some_log_import_pattern = input_api.re.compile(
        r'^import .*\.Log;$', input_api.re.MULTILINE)
    # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
    log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
    log_decl_pattern = input_api.re.compile(
        r'static final String TAG = "(?P<name>(.*))"')
    rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')

    REF_MSG = ('See docs/android_logging.md for more info.')

    def sources(x):
        return input_api.FilterSourceFile(
            x,
            files_to_check=[r'.*\.java$'],
            files_to_skip=cr_log_check_excluded_paths)

    tag_decl_errors = []      # No `static final String TAG = ...` declaration.
    tag_length_errors = []    # Declared tag exceeds the 20-character limit.
    tag_errors = []           # Log call does not use the TAG variable.
    tag_with_dot_errors = []  # Declared tag contains a '.'.
    util_log_errors = []      # Log calls not using org.chromium.base.Log.

    for f in input_api.AffectedSourceFiles(sources):
        file_content = input_api.ReadFile(f)
        has_modified_logs = False
        # A file is considered to "use cr log" if it imports it, or if it
        # lives in org.chromium.base itself and has no other Log import.
        if (cr_log_import_pattern.search(file_content)
                or (class_in_base_pattern.search(file_content)
                    and not has_some_log_import_pattern.search(file_content))):
            # Checks to run for files using cr log
            for line_num, line in f.ChangedContents():
                if rough_log_decl_pattern.search(line):
                    has_modified_logs = True

                # Check if the new line is doing some logging
                match = log_call_pattern.search(line)
                if match:
                    has_modified_logs = True

                    # Make sure it uses "TAG"
                    if not match.group('tag') == 'TAG':
                        tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
        else:
            # Report non cr Log function calls in changed lines
            for line_num, line in f.ChangedContents():
                if log_call_pattern.search(line):
                    util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))

        # Per-file check: validate the declared tag itself.
        if has_modified_logs:
            # Make sure the tag is using the "cr" prefix and is not too long
            match = log_decl_pattern.search(file_content)
            tag_name = match.group('name') if match else None
            if not tag_name:
                tag_decl_errors.append(f.LocalPath())
            elif len(tag_name) > 20:
                tag_length_errors.append(f.LocalPath())
            elif '.' in tag_name:
                tag_with_dot_errors.append(f.LocalPath())

    results = []
    if tag_decl_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'Please define your tags using the suggested format: .\n'
                '"private static final String TAG = "<package tag>".\n'
                'They will be prepended with "cr_" automatically.\n' + REF_MSG,
                tag_decl_errors))

    if tag_length_errors:
        results.append(
            output_api.PresubmitError(
                'The tag length is restricted by the system to be at most '
                '20 characters.\n' + REF_MSG, tag_length_errors))

    if tag_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'Please use a variable named "TAG" for your log tags.\n' +
                REF_MSG, tag_errors))

    if util_log_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
                util_log_errors))

    if tag_with_dot_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'Dot in log tags cause them to be elided in crash reports.\n' +
                REF_MSG, tag_with_dot_errors))

    return results
def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
    """Checks that junit.framework.* is no longer used."""
    deprecated_junit_framework_pattern = input_api.re.compile(
        r'^import junit\.framework\..*;', input_api.re.MULTILINE)

    def java_sources(x):
        return input_api.FilterSourceFile(x,
                                          files_to_check=[r'.*\.java$'],
                                          files_to_skip=None)

    errors = []
    for f in input_api.AffectedFiles(file_filter=java_sources):
        for line_num, line in f.ChangedContents():
            if deprecated_junit_framework_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
                '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
                ' if you have any question.', errors))
    return results
def _CheckAndroidTestJUnitInheritance(input_api, output_api):
    """Checks that if new Java test classes have inheritance.
    Either the new test class is JUnit3 test or it is a JUnit4 test class
    with a base class, either case is undesirable.
    """
    class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')

    def test_sources(x):
        return input_api.FilterSourceFile(x,
                                          files_to_check=[r'.*Test\.java$'],
                                          files_to_skip=None)

    errors = []
    for f in input_api.AffectedFiles(file_filter=test_sources):
        # Only inspect brand-new files (no previous contents).
        if not f.OldContents():
            class_declaration_start_flag = False
            for line_num, line in f.ChangedContents():
                if class_declaration_pattern.search(line):
                    class_declaration_start_flag = True
                if class_declaration_start_flag and ' extends ' in line:
                    errors.append('%s:%d' % (f.LocalPath(), line_num))
                # The class declaration ends at the opening brace.
                if '{' in line:
                    class_declaration_start_flag = False

    results = []
    if errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'The newly created files include Test classes that inherits from base'
                ' class. Please do not use inheritance in JUnit4 tests or add new'
                ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
                ' questions.', errors))
    return results
def _CheckAndroidTestAnnotationUsage(input_api, output_api):
    """Checks that android.test.suitebuilder.annotation.* is no longer used."""
    deprecated_annotation_import_pattern = input_api.re.compile(
        r'^import android\.test\.suitebuilder\.annotation\..*;',
        input_api.re.MULTILINE)

    def java_sources(x):
        return input_api.FilterSourceFile(x,
                                          files_to_check=[r'.*\.java$'],
                                          files_to_skip=None)

    errors = []
    for f in input_api.AffectedFiles(file_filter=java_sources):
        for line_num, line in f.ChangedContents():
            if deprecated_annotation_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'Annotations in android.test.suitebuilder.annotation have been'
                ' deprecated since API level 24. Please use androidx.test.filters'
                ' from //third_party/androidx:androidx_test_runner_java instead.'
                ' Contact yolandyan@chromium.org if you have any questions.',
                errors))
    return results
def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
    """Checks if MDPI assets are placed in a correct directory."""
    # Hoist the path-separator normalization out of the per-file filter.
    sep = input_api.os_path.sep
    drawable_dir = '/res/drawable/'.replace('/', sep)
    drawable_ldrtl_dir = '/res/drawable-ldrtl/'.replace('/', sep)

    def file_filter(f):
        return (f.LocalPath().endswith('.png')
                and (drawable_dir in f.LocalPath()
                     or drawable_ldrtl_dir in f.LocalPath()))

    errors = []
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append('    %s' % f.LocalPath())

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'MDPI assets should be placed in /res/drawable-mdpi/ or '
                '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
                '/res/drawable-ldrtl/.\n'
                'Contact newt@chromium.org if you have questions.', errors))
    return results
def _CheckAndroidWebkitImports(input_api, output_api):
    """Checks that code uses org.chromium.base.Callback instead of
    android.webview.ValueCallback except in the WebView glue layer
    and WebLayer.
    """
    valuecallback_import_pattern = input_api.re.compile(
        r'^import android\.webkit\.ValueCallback;$')

    def java_sources(affected_file):
        # The glue layer (and code shipping with it) legitimately talks to
        # android.webkit.* directly.
        return input_api.FilterSourceFile(
            affected_file,
            files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                           input_api.DEFAULT_FILES_TO_SKIP + (
                               r'^android_webview/glue/.*',
                               r'^android_webview/support_library/.*',
                               r'^weblayer/.*',
                           )),
            files_to_check=[r'.*\.java$'])

    errors = []
    for f in input_api.AffectedSourceFiles(java_sources):
        for line_num, line in f.ChangedContents():
            if valuecallback_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'android.webkit.ValueCallback usage is detected outside of the glue'
                ' layer. To stay compatible with the support library, android.webkit.*'
                ' classes should only be used inside the glue layer and'
                ' org.chromium.base.Callback should be used instead.', errors))
    return results
def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
    """Checks Android XML styles """

    # Return early if no relevant files were modified.
    if not any(
            _IsXmlOrGrdFile(input_api, f.LocalPath())
            for f in input_api.AffectedFiles(include_deletes=False)):
        return []

    original_sys_path = sys.path
    try:
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'checkxmlstyle')
        ]
        import checkxmlstyle
    finally:
        # Restore sys.path even if the import raises, so a broken tool does
        # not leave the presubmit environment polluted.
        sys.path = original_sys_path

    if is_check_on_upload:
        return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
    else:
        return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
def _CheckAndroidInfoBarDeprecation(input_api, output_api):
    """Checks Android Infobar Deprecation """

    original_sys_path = sys.path
    try:
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'infobar_deprecation')
        ]
        import infobar_deprecation
    finally:
        # Restore sys.path even on import failure.
        sys.path = original_sys_path

    return infobar_deprecation.CheckDeprecationOnUpload(input_api, output_api)
4526 class _PydepsCheckerResult:
4527 def __init__(self, cmd, pydeps_path, process, old_contents):
4529 self._pydeps_path = pydeps_path
4530 self._process = process
4531 self._old_contents = old_contents
4534 """Returns an error message, or None."""
4536 new_contents = self._process.stdout.read().splitlines()[2:]
4537 if self._process.wait() != 0:
4538 # STDERR should already be printed.
4539 return 'Command failed: ' + self._cmd
4540 if self._old_contents != new_contents:
4542 difflib.context_diff(self._old_contents, new_contents))
4543 return ('File is stale: {}\n'
4544 'Diff (apply to fix):\n'
4546 'To regenerate, run:\n\n'
4547 ' {}').format(self._pydeps_path, diff, self._cmd)
class PydepsChecker:
    """Figures out which .pydeps files a change affects and launches
    print_python_deps.py to verify they are not stale."""

    def __init__(self, input_api, pydeps_files):
        self._file_cache = {}
        self._input_api = input_api
        self._pydeps_files = pydeps_files

    def _LoadFile(self, path):
        """Returns the contents of `path`, memoized per path."""
        if path not in self._file_cache:
            with open(path, encoding='utf-8') as f:
                self._file_cache[path] = f.read()
        return self._file_cache[path]

    def _ComputeNormalizedPydepsEntries(self, pydeps_path):
        """Returns an iterable of paths within the .pydep, relativized to //."""
        pydeps_data = self._LoadFile(pydeps_path)
        uses_gn_paths = '--gn-paths' in pydeps_data
        entries = (line for line in pydeps_data.splitlines()
                   if not line.startswith('#'))
        if uses_gn_paths:
            # Paths look like: //foo/bar/baz
            return (e[2:] for e in entries)
        # Paths look like: path/relative/to/file.pydeps
        os_path = self._input_api.os_path
        pydeps_dir = os_path.dirname(pydeps_path)
        return (os_path.normpath(os_path.join(pydeps_dir, e))
                for e in entries)

    def _CreateFilesToPydepsMap(self):
        """Returns a map of local_path -> list_of_pydeps."""
        ret = {}
        for pydep_local_path in self._pydeps_files:
            for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
                ret.setdefault(path, []).append(pydep_local_path)
        return ret

    def ComputeAffectedPydeps(self):
        """Returns an iterable of .pydeps files that might need regenerating."""
        affected_pydeps = set()
        file_to_pydeps_map = None
        for f in self._input_api.AffectedFiles(include_deletes=True):
            local_path = f.LocalPath()
            # Changes to DEPS can lead to .pydeps changes if any .py files are in
            # subrepositories. We can't figure out which files change, so re-check
            # all files.
            # Changes to print_python_deps.py affect all .pydeps.
            if local_path in ('DEPS', 'PRESUBMIT.py'
                              ) or local_path.endswith('print_python_deps.py'):
                return self._pydeps_files
            elif local_path.endswith('.pydeps'):
                if local_path in self._pydeps_files:
                    affected_pydeps.add(local_path)
            elif local_path.endswith('.py'):
                # Build the reverse map lazily; it requires reading every
                # tracked .pydeps file.
                if file_to_pydeps_map is None:
                    file_to_pydeps_map = self._CreateFilesToPydepsMap()
                affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
        return affected_pydeps

    def DetermineIfStaleAsync(self, pydeps_path):
        """Runs print_python_deps.py to see if the files is stale."""
        old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
        if old_pydeps_data:
            # Line 2 of a .pydeps file records the command that generated it.
            cmd = old_pydeps_data[1][1:].strip()
            if '--output' not in cmd:
                cmd += ' --output ' + pydeps_path
            old_contents = old_pydeps_data[2:]
        else:
            # A default cmd that should work in most cases (as long as pydeps filename
            # matches the script name) so that PRESUBMIT.py does not crash if pydeps
            # file is empty/new.
            cmd = 'build/print_python_deps.py {} --root={} --output={}'.format(
                pydeps_path[:-4], os.path.dirname(pydeps_path), pydeps_path)
            old_contents = []
        env = dict(os.environ)
        env['PYTHONDONTWRITEBYTECODE'] = '1'
        process = self._input_api.subprocess.Popen(
            cmd + ' --output ""',
            shell=True,
            env=env,
            stdout=self._input_api.subprocess.PIPE,
            encoding='utf-8')
        return _PydepsCheckerResult(cmd, pydeps_path, process, old_contents)
4638 def _ParseGclientArgs():
4640 with open('build/config/gclient_args.gni', 'r') as f:
4643 if not line or line.startswith('#'):
4645 attribute, value = line.split('=')
4646 args[attribute.strip()] = value.strip()
def CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
    """Checks if a .pydeps file needs to be regenerated."""
    # This check is for Python dependency lists (.pydeps files), and involves
    # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
    # doesn't work on Windows and Mac, so skip it on other platforms.
    if not input_api.platform.startswith('linux'):
        return []

    results = []
    # First, check for new / deleted .pydeps.
    for f in input_api.AffectedFiles(include_deletes=True):
        # Check whether we are running the presubmit check for a file in src.
        # f.LocalPath is relative to repo (src, or internal repo).
        # os_path.exists is relative to src repo.
        # Therefore if os_path.exists is true, it means f.LocalPath is relative
        # to src and we can conclude that the pydeps is in src.
        if f.LocalPath().endswith('.pydeps'):
            if input_api.os_path.exists(f.LocalPath()):
                if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
                    results.append(
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'remove %s' % f.LocalPath()))
                elif f.Action() != 'D' and f.LocalPath(
                ) not in _ALL_PYDEPS_FILES:
                    results.append(
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'include %s' % f.LocalPath()))

    # The list being out of sync makes the staleness check meaningless.
    if results:
        return results

    is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
    checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
    affected_pydeps = set(checker.ComputeAffectedPydeps())
    affected_android_pydeps = affected_pydeps.intersection(
        set(_ANDROID_SPECIFIC_PYDEPS_FILES))
    if affected_android_pydeps and not is_android:
        results.append(
            output_api.PresubmitPromptOrNotify(
                'You have changed python files that may affect pydeps for android\n'
                'specific scripts. However, the relevant presubmit check cannot be\n'
                'run because you are not using an Android checkout. To validate that\n'
                'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
                'use the android-internal-presubmit optional trybot.\n'
                'Possibly stale pydeps files:\n{}'.format(
                    '\n'.join(affected_android_pydeps))))

    all_pydeps = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
    pydeps_to_check = affected_pydeps.intersection(all_pydeps)
    # Process these concurrently, as each one takes 1-2 seconds.
    pydep_results = [checker.DetermineIfStaleAsync(p) for p in pydeps_to_check]
    for result in pydep_results:
        error_msg = result.GetError()
        if error_msg:
            results.append(output_api.PresubmitError(error_msg))

    return results
def CheckSingletonInHeaders(input_api, output_api):
    """Checks to make sure no header files have |Singleton<|."""

    def FileFilter(affected_file):
        # It's ok for base/memory/singleton.h to have |Singleton<|.
        files_to_skip = (_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP +
                         (r"^base/memory/singleton\.h$",
                          r"^net/quic/platform/impl/quic_singleton_impl\.h$"))
        return input_api.FilterSourceFile(affected_file,
                                          files_to_skip=files_to_skip)

    pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
    files = []
    for f in input_api.AffectedSourceFiles(FileFilter):
        if f.LocalPath().endswith(('.h', '.hxx', '.hpp', '.inl')):
            contents = input_api.ReadFile(f)
            for line in contents.splitlines(False):
                # Strip C++ comment.
                if (not line.lstrip().startswith('//')
                        and pattern.search(line)):
                    files.append(f)
                    break

    if files:
        return [
            output_api.PresubmitError(
                'Found base::Singleton<T> in the following header files.\n' +
                'Please move them to an appropriate source file so that the ' +
                'template gets instantiated in a single compilation unit.',
                files)
        ]
    return []
4749 ( "-webkit-box", "flex" ),
4750 ( "-webkit-inline-box", "inline-flex" ),
4751 ( "-webkit-flex", "flex" ),
4752 ( "-webkit-inline-flex", "inline-flex" ),
4753 ( "-webkit-min-content", "min-content" ),
4754 ( "-webkit-max-content", "max-content" ),
4757 ( "-webkit-background-clip", "background-clip" ),
4758 ( "-webkit-background-origin", "background-origin" ),
4759 ( "-webkit-background-size", "background-size" ),
4760 ( "-webkit-box-shadow", "box-shadow" ),
4761 ( "-webkit-user-select", "user-select" ),
4764 ( "-webkit-gradient", "gradient" ),
4765 ( "-webkit-repeating-gradient", "repeating-gradient" ),
4766 ( "-webkit-linear-gradient", "linear-gradient" ),
4767 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
4768 ( "-webkit-radial-gradient", "radial-gradient" ),
4769 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
4773 # TODO: add unit tests
def CheckNoDeprecatedCss(input_api, output_api):
    """ Make sure that we don't use deprecated CSS
        properties, functions or values. Our external
        documentation and iOS CSS for dom distiller
        (reader mode) are ignored by the hooks as it
        needs to be consumed by WebKit. """
    results = []
    file_inclusion_pattern = [r".+\.css$"]
    files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP +
                     (r"^chrome/common/extensions/docs", r"^chrome/docs",
                      r"^native_client_sdk"))

    def file_filter(f):
        return input_api.FilterSourceFile(
            f,
            files_to_check=file_inclusion_pattern,
            files_to_skip=files_to_skip)

    for fpath in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in fpath.ChangedContents():
            for (deprecated_value, value) in _DEPRECATED_CSS:
                if deprecated_value in line:
                    results.append(
                        output_api.PresubmitError(
                            "%s:%d: Use of deprecated CSS %s, use %s instead" %
                            (fpath.LocalPath(), line_num, deprecated_value,
                             value)))
    return results
def CheckForRelativeIncludes(input_api, output_api):
    """Flags added C++ #include lines that use relative ("../") paths."""
    bad_files = {}
    for f in input_api.AffectedFiles(include_deletes=False):
        # Third-party code (other than Blink) is exempt.
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
            continue

        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
            continue

        relative_includes = [
            line for _, line in f.ChangedContents()
            if "#include" in line and "../" in line
        ]
        if not relative_includes:
            continue
        bad_files[f.LocalPath()] = relative_includes

    if not bad_files:
        return []

    error_descriptions = []
    for file_path, bad_lines in bad_files.items():
        error_description = file_path
        for line in bad_lines:
            error_description += '\n    ' + line
        error_descriptions.append(error_description)

    results = []
    results.append(
        output_api.PresubmitError(
            'You added one or more relative #include paths (including "../").\n'
            'These shouldn\'t be used because they can be used to include headers\n'
            'from code that\'s not correctly specified as a dependency in the\n'
            'relevant BUILD.gn file(s).', error_descriptions))

    return results
def CheckForCcIncludes(input_api, output_api):
    """Check that nobody tries to include a cc file. It's a relatively
    common error which results in duplicate symbols in object
    files. This may not always break the build until someone later gets
    very confusing linking errors."""
    results = []
    for f in input_api.AffectedFiles(include_deletes=False):
        # We let third_party code do whatever it wants
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
            continue

        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
            continue

        for _, line in f.ChangedContents():
            if line.startswith('#include "'):
                included_file = line.split('"')[1]
                if _IsCPlusPlusFile(input_api, included_file):
                    # The most common naming for external files with C++ code,
                    # apart from standard headers, is to call them foo.inc, but
                    # Chromium sometimes uses foo-inc.cc so allow that as well.
                    if not included_file.endswith(('.h', '-inc.cc')):
                        results.append(
                            output_api.PresubmitError(
                                'Only header files or .inc files should be included in other\n'
                                'C++ files. Compiling the contents of a cc file more than once\n'
                                'will cause duplicate information in the build which may later\n'
                                'result in strange link_errors.\n' +
                                f.LocalPath() + ':\n    ' + line))

    return results
4875 def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
4876 if not isinstance(key, ast.Str):
4877 return 'Key at line %d must be a string literal' % key.lineno
4878 if not isinstance(value, ast.Dict):
4879 return 'Value at line %d must be a dict' % value.lineno
4880 if len(value.keys) != 1:
4881 return 'Dict at line %d must have single entry' % value.lineno
4882 if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
4884 'Entry at line %d must have a string literal \'filepath\' as key' %
4889 def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
4890 if not isinstance(key, ast.Str):
4891 return 'Key at line %d must be a string literal' % key.lineno
4892 if not isinstance(value, ast.List):
4893 return 'Value at line %d must be a list' % value.lineno
4894 for element in value.elts:
4895 if not isinstance(element, ast.Str):
4896 return 'Watchlist elements on line %d is not a string' % key.lineno
4897 if not email_regex.match(element.s):
4898 return ('Watchlist element on line %d doesn\'t look like a valid '
4899 + 'email: %s') % (key.lineno, element.s)
def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
    """Walks the WATCHLIST_DEFINITIONS and WATCHLISTS dicts in lockstep.

    Returns an error string for the first problem found (missing/extra
    entry, bad entry syntax, key mismatch, or unsorted keys), or None.
    Key text is read via ast.Constant's `.value` rather than the
    deprecated `.s` alias.
    """
    mismatch_template = (
        'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
        'entry (%s)')

    email_regex = input_api.re.compile(
        r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")

    ast = input_api.ast
    i = 0
    last_key = ''
    while True:
        if i >= len(wd_dict.keys):
            if i >= len(w_dict.keys):
                return None
            return mismatch_template % ('missing',
                                        'line %d' % w_dict.keys[i].lineno)
        elif i >= len(w_dict.keys):
            return (mismatch_template %
                    ('line %d' % wd_dict.keys[i].lineno, 'missing'))

        wd_key = wd_dict.keys[i]
        w_key = w_dict.keys[i]

        result = _CheckWatchlistDefinitionsEntrySyntax(wd_key,
                                                       wd_dict.values[i], ast)
        if result is not None:
            return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result

        result = _CheckWatchlistsEntrySyntax(w_key, w_dict.values[i], ast,
                                             email_regex)
        if result is not None:
            return 'Bad entry in WATCHLISTS dict: %s' % result

        if wd_key.value != w_key.value:
            return mismatch_template % ('%s at line %d' %
                                        (wd_key.value, wd_key.lineno),
                                        '%s at line %d' %
                                        (w_key.value, w_key.lineno))

        if wd_key.value < last_key:
            return (
                'WATCHLISTS dict is not sorted lexicographically at line %d and %d'
                % (wd_key.lineno, w_key.lineno))
        last_key = wd_key.value

        i = i + 1
def _CheckWATCHLISTSSyntax(expression, input_api):
    """Validates the overall shape of the parsed WATCHLISTS expression.

    Returns an error string, or None. Uses ast.Constant/.value rather
    than the deprecated ast.Str/.s.
    """
    ast = input_api.ast
    if not isinstance(expression, ast.Expression):
        return 'WATCHLISTS file must contain a valid expression'
    dictionary = expression.body
    if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
        return 'WATCHLISTS file must have single dict with exactly two entries'

    first_key = dictionary.keys[0]
    first_value = dictionary.values[0]
    second_key = dictionary.keys[1]
    second_value = dictionary.values[1]

    if (not isinstance(first_key, ast.Constant)
            or first_key.value != 'WATCHLIST_DEFINITIONS'
            or not isinstance(first_value, ast.Dict)):
        return ('The first entry of the dict in WATCHLISTS file must be '
                'WATCHLIST_DEFINITIONS dict')

    if (not isinstance(second_key, ast.Constant)
            or second_key.value != 'WATCHLISTS'
            or not isinstance(second_value, ast.Dict)):
        return ('The second entry of the dict in WATCHLISTS file must be '
                'WATCHLISTS dict')

    return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
def CheckWATCHLISTS(input_api, output_api):
    """Validates the //WATCHLISTS file whenever the change touches it."""
    for f in input_api.AffectedFiles(include_deletes=False):
        if f.LocalPath() == 'WATCHLISTS':
            contents = input_api.ReadFile(f, 'r')

            try:
                # First, make sure that it can be evaluated.
                input_api.ast.literal_eval(contents)
                # Get an AST tree for it and scan the tree for detailed style checking.
                expression = input_api.ast.parse(contents,
                                                 filename='WATCHLISTS',
                                                 mode='eval')
            # The three failure modes were previously three duplicated
            # except blocks with identical bodies; one clause suffices.
            except (ValueError, SyntaxError, TypeError) as e:
                return [
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
                                              long_text=repr(e))
                ]

            result = _CheckWATCHLISTSSyntax(expression, input_api)
            if result is not None:
                return [output_api.PresubmitError(result)]
            break

    return []
def CheckGnRebasePath(input_api, output_api):
    """Checks that target_gen_dir is not used with "//" in rebase_path().

    Developers should use root_build_dir instead of "//" when using
    target_gen_dir because Chromium is sometimes built outside of the source
    tree.
    """

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))

    rebase_path_regex = input_api.re.compile(
        r'rebase_path\(("\$target_gen_dir"|target_gen_dir), ("/"|"//")\)')

    problems = []
    for f in input_api.AffectedSourceFiles(gn_files):
        for line_num, line in f.ChangedContents():
            if rebase_path_regex.search(line):
                problems.append('Absolute path in rebase_path() in %s:%d' %
                                (f.LocalPath(), line_num))

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Using an absolute path in rebase_path()',
                items=sorted(problems),
                # Implicit string concatenation (no commas): trailing commas
                # here made long_text a tuple of strings instead of one
                # message string.
                long_text=(
                    'rebase_path() should use root_build_dir instead of "/" '
                    'since builds can be initiated from outside of the source '
                    'root.'))
        ]
    return []
def CheckGnGlobForward(input_api, output_api):
    """Checks that forward_variables_from(invoker, "*") follows best practices.

    As documented at //build/docs/writing_gn_templates.md
    """

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gni', ))

    problems = []
    for f in input_api.AffectedSourceFiles(gn_files):
        for line_num, line in f.ChangedContents():
            if 'forward_variables_from(invoker, "*")' in line:
                problems.append(
                    'Bare forward_variables_from(invoker, "*") in %s:%d' %
                    (f.LocalPath(), line_num))

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'forward_variables_from("*") without exclusions',
                items=sorted(problems),
                long_text=(
                    'The variables "visibility" and "test_only" should be '
                    'explicitly listed in forward_variables_from(). For more '
                    'details, see:\n'
                    'https://chromium.googlesource.com/chromium/src/+/HEAD/'
                    'build/docs/writing_gn_templates.md'
                    '#Using-forward_variables_from'))
        ]
    return []
def CheckNewHeaderWithoutGnChangeOnUpload(input_api, output_api):
    """Checks that newly added header files have corresponding GN changes.
    Note that this is only a heuristic. To be precise, run script:
    build/check_gn_headers.py.
    """

    def headers(f):
        return input_api.FilterSourceFile(
            f, files_to_check=(r'.+%s' % _HEADER_EXTENSIONS, ))

    new_headers = []
    for f in input_api.AffectedSourceFiles(headers):
        if f.Action() != 'A':
            continue
        new_headers.append(f.LocalPath())

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))

    # Accumulate changed GN lines via join rather than repeated string
    # concatenation (which is quadratic in the number of changed lines).
    changed_gn_lines = []
    for f in input_api.AffectedSourceFiles(gn_files):
        for _, line in f.ChangedContents():
            changed_gn_lines.append(line)
    all_gn_changed_contents = ''.join(changed_gn_lines)

    problems = []
    for header in new_headers:
        basename = input_api.os_path.basename(header)
        if basename not in all_gn_changed_contents:
            problems.append(header)

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Missing GN changes for new header files',
                items=sorted(problems),
                long_text=
                'Please double check whether newly added header files need '
                'corresponding changes in gn or gni files.\nThis checking is only a '
                'heuristic. Run build/check_gn_headers.py to be precise.\n'
                'Read https://crbug.com/661774 for more info.')
        ]
    return []
def CheckCorrectProductNameInMessages(input_api, output_api):
    """Check that Chromium-branded strings don't include "Chrome" or vice versa.

    This assumes we won't intentionally reference one product from the other
    product.
    """
    all_problems = []
    test_cases = [{
        "filename_postfix": "google_chrome_strings.grd",
        "correct_name": "Chrome",
        "incorrect_name": "Chromium",
    }, {
        "filename_postfix": "google_chrome_strings.grd",
        "correct_name": "Chrome",
        "incorrect_name": "Chrome for Testing",
    }, {
        "filename_postfix": "chromium_strings.grd",
        "correct_name": "Chromium",
        "incorrect_name": "Chrome",
    }]

    for test_case in test_cases:
        problems = []
        filename_filter = lambda x: x.LocalPath().endswith(test_case[
            "filename_postfix"])

        # Check each new line. Can yield false positives in multiline comments, but
        # easier than trying to parse the XML because messages can have nested
        # children, and associating message elements with affected lines is hard.
        for f in input_api.AffectedSourceFiles(filename_filter):
            for line_num, line in f.ChangedContents():
                if "<message" in line or "<!--" in line or "-->" in line:
                    continue
                if test_case["incorrect_name"] in line:
                    # Chrome for Testing is a special edge case: https://goo.gle/chrome-for-testing#bookmark=id.n1rat320av91
                    if (test_case["correct_name"] == "Chromium" and
                            line.count("Chrome") ==
                            line.count("Chrome for Testing")):
                        continue
                    problems.append("Incorrect product name in %s:%d" %
                                    (f.LocalPath(), line_num))

        if problems:
            message = (
                "Strings in %s-branded string files should reference \"%s\", not \"%s\""
                % (test_case["correct_name"], test_case["correct_name"],
                   test_case["incorrect_name"]))
            all_problems.append(
                output_api.PresubmitPromptWarning(message, items=problems))

    return all_problems
def CheckForTooLargeFiles(input_api, output_api):
    """Avoid large files, especially binary files, in the repository since
    git doesn't scale well for those. They will be in everyone's repo
    clones forever, forever making Chromium slower to clone and work
    with."""
    # Uploading files to cloud storage is not trivial so we don't want
    # to set the limit too low, but the upper limit for "normal" large
    # files seems to be 1-2 MB, with a handful around 5-8 MB, so
    # anything over 20 MB is exceptional.
    TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024

    too_large_files = []
    for f in input_api.AffectedFiles():
        # Check both added and modified files (but not deleted files).
        if f.Action() in ('A', 'M'):
            size = input_api.os_path.getsize(f.AbsoluteLocalPath())
            if size > TOO_LARGE_FILE_SIZE_LIMIT:
                too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))

    if too_large_files:
        message = (
            'Do not commit large files to git since git scales badly for those.\n'
            'Instead put the large files in cloud storage and use DEPS to\n' +
            'fetch them.\n' + '\n'.join(too_large_files))
        return [
            output_api.PresubmitError('Too large files found in commit',
                                      long_text=message + '\n')
        ]
    return []
# NOTE(review): this block is an elided extraction — each line carries a stray
# original-line-number prefix, and interior lines are missing (the tail of
# EXPORTED_SYMBOLS after 5211, `continue` after 5229, the `return []` and the
# message/long_text assembly around 5236-5251). Code is preserved byte-for-byte;
# only comments were added. Purpose (from visible code): warn when a fuzz-target
# source defines optional libFuzzer entry points without including
# testing/libfuzzer/libfuzzer_exports.h.
5206 def CheckFuzzTargetsOnUpload(input_api, output_api):
5207 """Checks specific for fuzz target sources."""
5208 EXPORTED_SYMBOLS = [
5209 'LLVMFuzzerInitialize',
5210 'LLVMFuzzerCustomMutator',
5211 'LLVMFuzzerCustomCrossOver',
5215 REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'
5217 def FilterFile(affected_file):
5218 """Ignore libFuzzer source code."""
5219 files_to_check = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
5220 files_to_skip = r"^third_party/libFuzzer"
5222 return input_api.FilterSourceFile(affected_file,
5223 files_to_check=[files_to_check],
5224 files_to_skip=[files_to_skip])
5226 files_with_missing_header = []
# Scans the full file contents (not just the diff) for the required include.
5227 for f in input_api.AffectedSourceFiles(FilterFile):
5228 contents = input_api.ReadFile(f, 'r')
5229 if REQUIRED_HEADER in contents:
5232 if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
5233 files_with_missing_header.append(f.LocalPath())
5235 if not files_with_missing_header:
5239 'If you define any of the libFuzzer optional functions (%s), it is '
5240 'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
5241 'work incorrectly on Mac (crbug.com/687076).\nNote that '
5242 'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
5243 'to access command line arguments passed to the fuzzer. Instead, prefer '
5244 'static initialization and shared resources as documented in '
5245 'https://chromium.googlesource.com/chromium/src/+/main/testing/'
5246 'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n'
5247 % (', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER))
5250 output_api.PresubmitPromptWarning(message="Missing '%s' in:" %
5252 items=files_with_missing_header,
5253 long_text=long_text)
def _CheckNewImagesWarning(input_api, output_api):
    """
    Warns authors who add images into the repo to make sure their images are
    optimized before committing.
    """
    images_added = False
    image_paths = []
    errors = []
    filter_lambda = lambda x: input_api.FilterSourceFile(
        x,
        files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\/(drawable|mipmap)'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=filter_lambda):
        local_path = f.LocalPath().lower()
        # _IMAGE_EXTENSIONS is a module-level tuple of image suffixes.
        if any(local_path.endswith(extension)
               for extension in _IMAGE_EXTENSIONS):
            images_added = True
            image_paths.append(f)
    if images_added:
        errors.append(
            output_api.PresubmitPromptWarning(
                'It looks like you are trying to commit some images. If these are '
                'non-test-only images, please make sure to read and apply the tips in '
                'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
                'binary_size/optimization_advice.md#optimizing-images\nThis check is '
                'FYI only and will not block your CL on the CQ.', image_paths))
    return errors
def ChecksAndroidSpecificOnUpload(input_api, output_api):
    """Groups upload checks that target android code."""
    results = []
    results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
    results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
    results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
    results.extend(_CheckAndroidToastUsage(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitFrameworkImport(
        input_api, output_api))
    results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
    results.extend(_CheckAndroidWebkitImports(input_api, output_api))
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
    results.extend(_CheckNewImagesWarning(input_api, output_api))
    results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
    results.extend(_CheckAndroidInfoBarDeprecation(input_api, output_api))
    return results
def ChecksAndroidSpecificOnCommit(input_api, output_api):
    """Groups commit checks that target android code."""
    results = []
    # Commit-time runs the XML style check in non-upload mode (False).
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
    return results
# Path regexes identifying accessibility-related code; used by
# CheckAccessibilityRelnotesField to decide whether a change needs an
# AX-Relnotes field.
# NOTE(review): elided extraction — the tuple's closing paren (original line
# 5329) is missing from this view; entries preserved byte-for-byte.
5314 # TODO(chrishall): could we additionally match on any path owned by
5315 # ui/accessibility/OWNERS ?
5316 _ACCESSIBILITY_PATHS = (
5317 r"^chrome/browser.*/accessibility/",
5318 r"^chrome/browser/extensions/api/automation.*/",
5319 r"^chrome/renderer/extensions/accessibility_.*",
5320 r"^chrome/tests/data/accessibility/",
5321 r"^components/services/screen_ai/",
5322 r"^content/browser/accessibility/",
5323 r"^content/renderer/accessibility/",
5324 r"^content/tests/data/accessibility/",
5325 r"^extensions/renderer/api/automation/",
5326 r"^services/accessibility/",
5327 r"^ui/accessibility/",
5328 r"^ui/views/accessibility/",
def CheckAccessibilityRelnotesField(input_api, output_api):
    """Checks that commits to accessibility code contain an AX-Relnotes field in
    their commit message."""

    def FileFilter(affected_file):
        paths = _ACCESSIBILITY_PATHS
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes affecting accessibility paths.
    if not any(input_api.AffectedFiles(file_filter=FileFilter)):
        return []

    # AX-Relnotes can appear in either the description or the footer.
    # When searching the description, require 'AX-Relnotes:' to appear at the
    # beginning of a line.
    ax_regex = input_api.re.compile('ax-relnotes[:=]')
    description_has_relnotes = any(
        ax_regex.match(line)
        for line in input_api.change.DescriptionText().lower().splitlines())

    footer_relnotes = input_api.change.GitFootersFromDescription().get(
        'AX-Relnotes', [])
    if description_has_relnotes or footer_relnotes:
        return []

    # TODO(chrishall): link to Relnotes documentation in message.
    message = (
        "Missing 'AX-Relnotes:' field required for accessibility changes"
        "\n please add 'AX-Relnotes: [release notes].' to describe any "
        "user-facing changes"
        "\n otherwise add 'AX-Relnotes: n/a.' if this change has no "
        "user-facing effects"
        "\n if this is confusing or annoying then please contact members "
        "of ui/accessibility/OWNERS.")

    # Notify-level only: never blocks the CL.
    return [output_api.PresubmitNotifyResult(message)]
# Path regexes for accessibility dump-test data and the Android test files
# that must be updated in lockstep; consumed by the two
# CheckAccessibility*TestsAreIncludedForAndroid checks below.
# NOTE(review): elided extraction — each tuple's closing paren is missing from
# this view; entries preserved byte-for-byte.
5369 _ACCESSIBILITY_EVENTS_TEST_PATH = (
5370 r"^content/test/data/accessibility/event/.*\.html",
5373 _ACCESSIBILITY_TREE_TEST_PATH = (
5374 r"^content/test/data/accessibility/accname/"
5375 ".*-expected-(mac|win|uia-win|auralinux).txt",
5376 r"^content/test/data/accessibility/aria/"
5377 ".*-expected-(mac|win|uia-win|auralinux).txt",
5378 r"^content/test/data/accessibility/css/"
5379 ".*-expected-(mac|win|uia-win|auralinux).txt",
5380 r"^content/test/data/accessibility/event/"
5381 ".*-expected-(mac|win|uia-win|auralinux).txt",
5382 r"^content/test/data/accessibility/html/"
5383 ".*-expected-(mac|win|uia-win|auralinux).txt",
5386 _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH = (
5387 r"^.*/WebContentsAccessibilityEventsTest\.java",
5390 _ACCESSIBILITY_ANDROID_TREE_TEST_PATH = (
5391 r"^.*/WebContentsAccessibilityTreeTest\.java",
def CheckAccessibilityEventsTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityEventsTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_EVENTS_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the events test data path with html type.
    if not any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=FilePathFilter)):
        return []

    # If the commit contains any change to the Android test file, ignore.
    if any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=AndroidFilePathFilter)):
        return []

    # Only consider changes that are adding/renaming or deleting a file
    message = []
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        if f.Action() == 'A':
            message = (
                "It appears that you are adding platform expectations for a"
                "\ndump_accessibility_events* test, but have not included"
                "\na corresponding change for Android."
                "\nPlease include the test from:"
                "\n content/public/android/javatests/src/org/chromium/"
                "content/browser/accessibility/"
                "WebContentsAccessibilityEventsTest.java"
                "\nIf this message is confusing or annoying, please contact"
                "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):
        return []

    return [output_api.PresubmitPromptWarning(message)]
def CheckAccessibilityTreeTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityTreeTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_TREE_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_TREE_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the various tree test data paths with html type.
    if not any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=FilePathFilter)):
        return []

    # If the commit contains any change to the Android test file, ignore.
    if any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=AndroidFilePathFilter)):
        return []

    # Only consider changes that are adding/renaming or deleting a file
    message = []
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        if f.Action() == 'A':
            message = (
                "It appears that you are adding platform expectations for a"
                "\ndump_accessibility_tree* test, but have not included"
                "\na corresponding change for Android."
                "\nPlease include (or remove) the test from:"
                "\n content/public/android/javatests/src/org/chromium/"
                "content/browser/accessibility/"
                "WebContentsAccessibilityTreeTest.java"
                "\nIf this message is confusing or annoying, please contact"
                "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):
        return []

    return [output_api.PresubmitPromptWarning(message)]
def CheckEsLintConfigChanges(input_api, output_api):
    """Suggest using "git cl presubmit --files" when .eslintrc.js files are
    modified. This is important because enabling an error in .eslintrc.js can
    trigger errors in any .js or .ts files in its directory, leading to hidden
    presubmit errors."""
    results = []
    eslint_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=[r'.*\.eslintrc\.js$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=eslint_filter):
        local_dir = input_api.os_path.dirname(f.LocalPath())
        # Use / characters so that the commands printed work on any OS.
        local_dir = local_dir.replace(input_api.os_path.sep, '/')
        # A config at the repo root has an empty dirname; only non-empty
        # directories need the trailing slash for the printed glob.
        if local_dir:
            local_dir += '/'
        results.append(
            output_api.PresubmitNotifyResult(
                '%(file)s modified. Consider running \'git cl presubmit --files '
                '"%(dir)s*.js;%(dir)s*.ts"\' in order to check and fix the affected '
                'files before landing this change.' %
                { 'file' : f.LocalPath(), 'dir' : local_dir}))
    return results
# Terms flagged by the inclusive-language canned check (passed to
# CheckInclusiveLanguage in ChecksCommon below).
# NOTE(review): elided extraction — the inner tuple's open/close parens and the
# error-flag element described by the comment are missing from this view;
# visible lines preserved byte-for-byte.
5514 # string pattern, sequence of strings to show when pattern matches,
5515 # error flag. True if match is a presubmit error, otherwise it's a warning.
5516 _NON_INCLUSIVE_TERMS = (
5518 # Note that \b pattern in python re is pretty particular. In this
5519 # regexp, 'class WhiteList ...' will match, but 'class FooWhiteList
5520 # ...' will not. This may require some tweaking to catch these cases
5521 # without triggering a lot of false positives. Leaving it naive and
5522 # less matchy for now.
5523 r'/\b(?i)((black|white)list|master|slave)\b', # nocheck
5525 'Please don\'t use blacklist, whitelist, ' # nocheck
5526 'or slave in your', # nocheck
5527 'code and make every effort to use other terms. Using "// nocheck"',
5528 '"# nocheck" or "<!-- nocheck -->"',
5529 'at the end of the offending line will bypass this PRESUBMIT error',
5530 'but avoid using this whenever possible. Reach out to',
5531 'community@chromium.org if you have questions'),
# NOTE(review): elided extraction — the `results = []` initializer, the many
# `results.extend(` wrapper lines, several argument lines (e.g. for
# CheckChangeHasNoTabs, CheckInclusiveLanguage, RunUnitTestsInDirectory), the
# `continue` after 5587 and the final `return results` are missing from this
# view. Visible code preserved byte-for-byte; only comments added.
5534 def ChecksCommon(input_api, output_api):
5535 """Checks common to both upload and commit."""
5538 input_api.canned_checks.PanProjectChecks(
5539 input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
# Robot authors (e.g. autoroller accounts) skip the authorized-author check.
5541 author = input_api.change.author_email
5542 if author and author not in _KNOWN_ROBOTS:
5544 input_api.canned_checks.CheckAuthorizedAuthor(
5545 input_api, output_api))
5548 input_api.canned_checks.CheckChangeHasNoTabs(
5551 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
5554 input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
# dirmd ships with depot_tools; Windows needs the .bat shim.
5556 dirmd = 'dirmd.bat' if input_api.is_windows else 'dirmd'
5557 dirmd_bin = input_api.os_path.join(input_api.PresubmitLocalPath(),
5558 'third_party', 'depot_tools', dirmd)
5561 input_api.canned_checks.CheckDirMetadataFormat(
5562 input_api, output_api, dirmd_bin)))
5564 input_api.canned_checks.CheckOwnersDirMetadataExclusive(
5565 input_api, output_api))
5567 input_api.canned_checks.CheckNoNewMetadataInOwners(
5568 input_api, output_api))
5570 input_api.canned_checks.CheckInclusiveLanguage(
5573 excluded_directories_relative_path=[
5574 'infra', 'inclusive_language_presubmit_exempt_dirs.txt'
5576 non_inclusive_terms=_NON_INCLUSIVE_TERMS))
# For every modified PRESUBMIT.py in the tree, run its sibling
# PRESUBMIT_test.py if it still exists.
5578 presubmit_py_filter = lambda f: input_api.FilterSourceFile(
5579 f, files_to_check=[r'.*PRESUBMIT\.py$'])
5580 for f in input_api.AffectedFiles(include_deletes=False,
5581 file_filter=presubmit_py_filter):
5582 full_path = input_api.os_path.dirname(f.AbsoluteLocalPath())
5583 test_file = input_api.os_path.join(full_path, 'PRESUBMIT_test.py')
5584 # The PRESUBMIT.py file (and the directory containing it) might have
5585 # been affected by being moved or removed, so only try to run the tests
5586 # if they still exist.
5587 if not input_api.os_path.exists(test_file):
5591 input_api.canned_checks.RunUnitTestsInDirectory(
5595 files_to_check=[r'^PRESUBMIT_test\.py$']))
def CheckPatchFiles(input_api, output_api):
    """Block stray patch-tool leftovers (.orig/.rej) from being committed."""
    problems = [
        f.LocalPath() for f in input_api.AffectedFiles()
        if f.LocalPath().endswith(('.orig', '.rej'))
    ]
    # Cargo.toml.orig files are part of third-party crates downloaded from
    # crates.io and should be included.
    problems = [f for f in problems if not f.endswith('Cargo.toml.orig')]
    if problems:
        return [
            output_api.PresubmitError("Don't commit .rej and .orig files.",
                                      problems)
        ]
    return []
def CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
    """Warns when build-config macros (COMPILER_*, ARCH_CPU_*, WCHAR_T_IS_*)
    are tested in a file that does not include build/build_config.h first."""
    # Excludes OS_CHROMEOS, which is not defined in build_config.h.
    macro_re = input_api.re.compile(
        r'^\s*#(el)?if.*\bdefined\(((COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
    include_re = input_api.re.compile(r'^#include\s+"build/build_config.h"',
                                      input_api.re.MULTILINE)
    extension_re = input_api.re.compile(r'\.[a-z]+$')
    errors = []
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedFiles(include_deletes=False):
        # The build-config macros are allowed to be used in build_config.h
        # without including itself.
        if f.LocalPath() == config_h_file:
            continue
        if not f.LocalPath().endswith(
            ('.h', '.c', '.cc', '.cpp', '.m', '.mm')):
            continue

        # Find the first use of a build-config macro in the file.
        found_line_number = None
        found_macro = None
        all_lines = input_api.ReadFile(f, 'r').splitlines()
        for line_num, line in enumerate(all_lines):
            match = macro_re.search(line)
            if match:
                found_line_number = line_num
                found_macro = match.group(2)
                break
        if not found_line_number:
            continue

        # OK if build_config.h is included before the first macro use.
        found_include_line = -1
        for line_num, line in enumerate(all_lines):
            if include_re.search(line):
                found_include_line = line_num
                break
        if found_include_line >= 0 and found_include_line < found_line_number:
            continue

        # For implementation files, also accept the include appearing in the
        # primary header (foo.cc -> foo.h).
        if not f.LocalPath().endswith('.h'):
            primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
            try:
                content = input_api.ReadFile(primary_header_path, 'r')
                if include_re.search(content):
                    continue
            except IOError:
                pass
        errors.append('%s:%d %s macro is used without first including build/'
                      'build_config.h.' %
                      (f.LocalPath(), found_line_number, found_macro))
    if errors:
        return [output_api.PresubmitPromptWarning('\n'.join(errors))]
    return []
# NOTE(review): elided extraction — the STL header alternation of the regex
# (original lines 5671-5683), several `continue`/`break` statements, the
# `errors = []` initializer and the `errors.append(`/`f.LocalPath()` lines
# around 5706-5708 are missing from this view. Visible code preserved
# byte-for-byte; only comments added. Purpose (from visible code): warn when a
# C++ header includes STL headers but never references `std::`.
5669 def CheckForSuperfluousStlIncludesInHeaders(input_api, output_api):
5670 stl_include_re = input_api.re.compile(r'^#include\s+<('
5684 std_namespace_re = input_api.re.compile(r'std::')
5686 for f in input_api.AffectedFiles():
5687 if not _IsCPlusPlusHeaderFile(input_api, f.LocalPath()):
5690 uses_std_namespace = False
5691 has_stl_include = False
5692 for line in f.NewContents():
# Both facts established: nothing more to learn from this file.
5693 if has_stl_include and uses_std_namespace:
5696 if not has_stl_include and stl_include_re.search(line):
5697 has_stl_include = True
# The pch-file escape hatch counts as "uses std" to suppress the warning.
5700 if not uses_std_namespace and (std_namespace_re.search(line)
5701 or 'no-std-usage-because-pch-file' in line):
5702 uses_std_namespace = True
5705 if has_stl_include and not uses_std_namespace:
5707 '%s: Includes STL header(s) but does not reference std::' %
5710 return [output_api.PresubmitPromptWarning('\n'.join(errors))]
def _CheckForDeprecatedOSMacrosInFile(input_api, f):
    """Check for sensible looking, totally invalid OS macros.

    Returns a list of '<path>:<line>: defined(OS_X) -> BUILDFLAG(IS_X)'
    strings for each deprecated OS_* macro found on a changed preprocessor
    line of |f|.
    """
    preprocessor_statement = input_api.re.compile(r'^\s*#')
    os_macro = input_api.re.compile(r'defined\(OS_([^)]+)\)')
    results = []
    for lnum, line in f.ChangedContents():
        # Only preprocessor lines can meaningfully use defined(...).
        if preprocessor_statement.search(line):
            for match in os_macro.finditer(line):
                results.append(
                    '    %s:%d: %s' %
                    (f.LocalPath(), lnum, 'defined(OS_' + match.group(1) +
                     ') -> BUILDFLAG(IS_' + match.group(1) + ')'))
    return results
def CheckForDeprecatedOSMacros(input_api, output_api):
    """Check all affected files for invalid OS macros."""
    bad_macros = []
    # The OS_ macros are allowed to be used in build/build_config.h.
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedSourceFiles(None):
        if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')) \
                and f.LocalPath() != config_h_file:
            bad_macros.extend(_CheckForDeprecatedOSMacrosInFile(input_api, f))

    if not bad_macros:
        return []

    return [
        output_api.PresubmitError(
            'OS macros have been deprecated. Please use BUILDFLAGs instead (still '
            'defined in build_config.h):', bad_macros)
    ]
# NOTE(review): elided extraction — most entries of ALWAYS_DEFINED_MACROS
# (original lines 5752-5759 and 5762-5765), the tuple close, the `results = []`
# initializer and the `results.append(` line around 5775-5776 are missing from
# this view. Visible code preserved byte-for-byte; only comments added.
# Purpose (from visible code): flag `#ifdef X` / `defined(X)` on macros that
# are always defined (e.g. TARGET_* on Apple platforms), where `#if X` was
# almost certainly intended.
5749 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
5750 """Check all affected files for invalid "if defined" macros."""
5751 ALWAYS_DEFINED_MACROS = (
5760 "TARGET_IPHONE_SIMULATOR",
5761 "TARGET_OS_EMBEDDED",
5767 ifdef_macro = input_api.re.compile(
5768 r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
5770 for lnum, line in f.ChangedContents():
5771 for match in ifdef_macro.finditer(line):
5772 if match.group(1) in ALWAYS_DEFINED_MACROS:
5773 always_defined = ' %s is always defined. ' % match.group(1)
5774 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
5777 (f.LocalPath(), lnum, always_defined, did_you_mean))
def CheckForInvalidIfDefinedMacros(input_api, output_api):
    """Check all affected files for invalid "if defined" macros."""
    bad_macros = []
    # Imported third-party code keeps its own conventions.
    skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
    for f in input_api.AffectedFiles():
        if any([f.LocalPath().startswith(path) for path in skipped_paths]):
            continue
        if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
            bad_macros.extend(
                _CheckForInvalidIfDefinedMacrosInFile(input_api, f))

    if not bad_macros:
        return []

    return [
        output_api.PresubmitError(
            'Found ifdef check on always-defined macro[s]. Please fix your code\n'
            'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
            bad_macros)
    ]
def CheckForIPCRules(input_api, output_api):
    """Check for same IPC rules described in
    http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
    """
    base_pattern = r'IPC_ENUM_TRAITS\('
    inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
    # A match inside a // comment does not count as a use.
    comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

    problems = []
    for f in input_api.AffectedSourceFiles(None):
        local_path = f.LocalPath()
        if not local_path.endswith('.h'):
            continue
        for line_number, line in f.ChangedContents():
            if inclusion_pattern.search(
                    line) and not comment_pattern.search(line):
                problems.append('%s:%d\n    %s' %
                                (local_path, line_number, line.strip()))

    if problems:
        return [
            output_api.PresubmitPromptWarning(_IPC_ENUM_TRAITS_DEPRECATED,
                                              problems)
        ]
    return []
def CheckForLongPathnames(input_api, output_api):
    """Check to make sure no files being submitted have long paths.
    This causes issues on Windows.
    """
    problems = []
    for f in input_api.AffectedTestableFiles():
        local_path = f.LocalPath()
        # Windows has a path limit of 260 characters. Limit path length to 200 so
        # that we have some extra for the prefix on dev machines and the bots.
        if len(local_path) > 200:
            problems.append(local_path)

    if problems:
        return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
    return []
# NOTE(review): elided extraction — interior lines are missing throughout this
# function (the docstring close, `errors = []`, `guard_name = None`, the guard
# suffix patterns at 5899-5903 and list close, `break`/`if match:` lines,
# `errors.append(` wrappers, and the trailing `return errors`/`if errors:`).
# Visible code preserved byte-for-byte; only comments added. Purpose (from
# visible code): verify Chromium-controlled .h files carry a well-formed
# #ifndef/#define/#endif include guard covering the whole file.
5849 def CheckForIncludeGuards(input_api, output_api):
5850 """Check that header files have proper guards against multiple inclusion.
5851 If a file should not have such guards (and it probably should) then it
5852 should include the string "no-include-guard-because-multiply-included" or
5853 "no-include-guard-because-pch-file".
5856 def is_chromium_header_file(f):
5857 # We only check header files under the control of the Chromium
5858 # project. That is, those outside third_party apart from
5859 # third_party/blink.
5860 # We also exclude *_message_generator.h headers as they use
5861 # include guards in a special, non-typical way.
5862 file_with_path = input_api.os_path.normpath(f.LocalPath())
5863 return (file_with_path.endswith('.h')
5864 and not file_with_path.endswith('_message_generator.h')
5865 and not file_with_path.endswith('com_imported_mstscax.h')
5866 and (not file_with_path.startswith('third_party')
5867 or file_with_path.startswith(
5868 input_api.os_path.join('third_party', 'blink'))))
# Maps path separators and other specials to '_' to build guard names.
5870 def replace_special_with_underscore(string):
5871 return input_api.re.sub(r'[+\\/.-]', '_', string)
5875 for f in input_api.AffectedSourceFiles(is_chromium_header_file):
5877 guard_line_number = None
5878 seen_guard_end = False
5880 file_with_path = input_api.os_path.normpath(f.LocalPath())
5881 base_file_name = input_api.os_path.splitext(
5882 input_api.os_path.basename(file_with_path))[0]
5883 upper_base_file_name = base_file_name.upper()
# Canonical guard: full path, uppercased, specials -> '_', trailing '_'.
5885 expected_guard = replace_special_with_underscore(
5886 file_with_path.upper() + '_')
5888 # For "path/elem/file_name.h" we should really only accept
5889 # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
5890 # are too many (1000+) files with slight deviations from the
5891 # coding style. The most important part is that the include guard
5892 # is there, and that it's unique, not the name so this check is
5893 # forgiving for existing files.
5895 # As code becomes more uniform, this could be made stricter.
5897 guard_name_pattern_list = [
5898 # Anything with the right suffix (maybe with an extra _).
5901 # To cover include guards with old Blink style.
5904 # Anything including the uppercase name of the file.
5905 r'\w*' + input_api.re.escape(
5906 replace_special_with_underscore(upper_base_file_name)) +
5909 guard_name_pattern = '|'.join(guard_name_pattern_list)
5910 guard_pattern = input_api.re.compile(r'#ifndef\s+(' +
5911 guard_name_pattern + ')')
5913 for line_number, line in enumerate(f.NewContents()):
5914 if ('no-include-guard-because-multiply-included' in line
5915 or 'no-include-guard-because-pch-file' in line):
5916 guard_name = 'DUMMY' # To not trigger check outside the loop.
5919 if guard_name is None:
5920 match = guard_pattern.match(line)
5922 guard_name = match.group(1)
5923 guard_line_number = line_number
5925 # We allow existing files to use include guards whose names
5926 # don't match the chromium style guide, but new files should
5928 if guard_name != expected_guard:
5929 if f.Action() == 'A': # If file was just 'A'dded
5931 output_api.PresubmitPromptWarning(
5932 'Header using the wrong include guard name %s'
5935 (f.LocalPath(), line_number + 1)
5936 ], 'Expected: %r\nFound: %r' %
5937 (expected_guard, guard_name)))
5939 # The line after #ifndef should have a #define of the same name.
5940 if line_number == guard_line_number + 1:
5941 expected_line = '#define %s' % guard_name
5942 if line != expected_line:
5944 output_api.PresubmitPromptWarning(
5945 'Missing "%s" for include guard' %
5947 ['%s:%d' % (f.LocalPath(), line_number + 1)],
5948 'Expected: %r\nGot: %r' %
5949 (expected_line, line)))
5951 if not seen_guard_end and line == '#endif // %s' % guard_name:
5952 seen_guard_end = True
5953 elif seen_guard_end:
5954 if line.strip() != '':
5956 output_api.PresubmitPromptWarning(
5957 'Include guard %s not covering the whole file'
5958 % (guard_name), [f.LocalPath()]))
5959 break # Nothing else to check and enough to warn once.
# No #ifndef matched and no opt-out string found anywhere in the file.
5961 if guard_name is None:
5963 output_api.PresubmitPromptWarning(
5964 'Missing include guard in %s\n'
5965 'Recommended name: %s\n'
5966 'This check can be disabled by having the string\n'
5967 '"no-include-guard-because-multiply-included" or\n'
5968 '"no-include-guard-because-pch-file" in the header.'
5969 % (f.LocalPath(), expected_guard)))
def CheckForWindowsLineEndings(input_api, output_api):
    """Check source code and known ascii text files for Windows style line
    endings.
    """
    known_text_files = r'.*\.(txt|html|htm|py|gyp|gypi|gn|isolate|icon)$'

    file_inclusion_pattern = (known_text_files,
                              r'.+%s' % _IMPLEMENTATION_EXTENSIONS,
                              r'.+%s' % _HEADER_EXTENSIONS)

    problems = []
    source_file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=None)
    for f in input_api.AffectedSourceFiles(source_file_filter):
        # Ignore test files that contain crlf intentionally.
        if f.LocalPath().endswith('crlf.txt'):
            continue
        include_file = False
        # splitlines(True) keeps line terminators so CRLF is detectable.
        for line in input_api.ReadFile(f, 'r').splitlines(True):
            if line.endswith('\r\n'):
                include_file = True
        if include_file:
            problems.append(f.LocalPath())

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Are you sure that you want '
                'these files to contain Windows style line endings?\n' +
                '\n'.join(problems))
        ]

    return []
def CheckIconFilesForLicenseHeaders(input_api, output_api):
    """Check that .icon files (which are fragments of C++) have license headers.
    """
    icon_files = (r'.*\.icon$', )
    icons = lambda x: input_api.FilterSourceFile(x, files_to_check=icon_files)
    return input_api.canned_checks.CheckLicense(input_api,
                                                output_api,
                                                source_file_filter=icons)
def CheckForUseOfChromeAppsDeprecations(input_api, output_api):
    """Check source code for use of Chrome App technologies being
    deprecated.
    """

    def _CheckForDeprecatedTech(input_api,
                                output_api,
                                detection_list,
                                files_to_check=None,
                                files_to_skip=None):
        """Returns affected file paths whose diff matches any detection string."""

        if (files_to_check or files_to_skip):
            source_file_filter = lambda f: input_api.FilterSourceFile(
                f, files_to_check=files_to_check, files_to_skip=files_to_skip)
        else:
            source_file_filter = None

        problems = []

        for f in input_api.AffectedSourceFiles(source_file_filter):
            if f.Action() == 'D':
                continue
            for _, line in f.ChangedContents():
                if any(detect in line for detect in detection_list):
                    problems.append(f.LocalPath())

        return problems

    # to avoid this presubmit script triggering warnings
    files_to_skip = ['PRESUBMIT.py', 'PRESUBMIT_test.py']

    problems = []

    # NMF: any files with extensions .nmf or NMF
    _NMF_FILES = r'\.(nmf|NMF)$'
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=[''],  # any change to the file will trigger warning
        files_to_check=[r'.+%s' % _NMF_FILES])

    # MANIFEST: any manifest.json that in its diff includes "app":
    _MANIFEST_FILES = r'(manifest\.json)$'
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['"app":'],
        files_to_check=[r'.*%s' % _MANIFEST_FILES])

    # NaCl / PNaCl: any file that in its diff contains the strings in the list
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['config=nacl', 'enable-nacl', 'cpu=pnacl', 'nacl_io'],
        files_to_skip=files_to_skip + [r"^native_client_sdk/"])

    # PPAPI: any C/C++ file that in its diff includes a ppapi library
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['#include "ppapi', '#include <ppapi'],
        files_to_check=(r'.+%s' % _HEADER_EXTENSIONS,
                        r'.+%s' % _IMPLEMENTATION_EXTENSIONS),
        files_to_skip=[r"^ppapi/"])

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'You are adding/modifying code'
                'related to technologies which will soon be deprecated (Chrome Apps, NaCl,'
                ' PNaCl, PPAPI). See this blog post for more details:\n'
                'https://blog.chromium.org/2020/08/changes-to-chrome-app-support-timeline.html\n'
                'and this documentation for options to replace these technologies:\n'
                'https://developer.chrome.com/docs/apps/migration/\n' +
                '\n'.join(problems))
        ]

    return []
def CheckSyslogUseWarningOnUpload(input_api, output_api, src_file_filter=None):
    """Checks that all source files use SYSLOG properly."""
    syslog_files = []
    for f in input_api.AffectedSourceFiles(src_file_filter):
        for line_number, line in f.ChangedContents():
            if 'SYSLOG' in line:
                syslog_files.append(f.LocalPath() + ':' + str(line_number))

    if syslog_files:
        return [
            output_api.PresubmitPromptWarning(
                'Please make sure there are no privacy sensitive bits of data in SYSLOG'
                ' calls.\nFiles to check:\n',
                items=syslog_files)
        ]
    return []
def CheckChangeOnUpload(input_api, output_api):
    """Top-level upload entry point required by the presubmit framework."""
    # This file uses the 2.0.0 presubmit API; older runners cannot execute it.
    if input_api.version < [2, 0, 0]:
        return [
            output_api.PresubmitError(
                "Your depot_tools is out of date. "
                "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
                "but your version is %d.%d.%d" % tuple(input_api.version))
        ]
    results = []
    results.extend(
        input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    return results
def CheckChangeOnCommit(input_api, output_api):
    """Top-level commit entry point required by the presubmit framework."""
    # This file uses the 2.0.0 presubmit API; older runners cannot execute it.
    if input_api.version < [2, 0, 0]:
        return [
            output_api.PresubmitError(
                "Your depot_tools is out of date. "
                "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
                "but your version is %d.%d.%d" % tuple(input_api.version))
        ]
    results = []
    # Make sure the tree is 'open'.
    results.extend(
        input_api.canned_checks.CheckTreeIsOpen(
            input_api,
            output_api,
            json_url='http://chromium-status.appspot.com/current?format=json'))

    results.extend(
        input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckChangeHasNoUnwantedTags(
            input_api, output_api))
    return results
# Presubmit check over .grd/.grdp translation files: validates ICU message
# syntax of added/modified strings and enforces the screenshot (.png.sha1)
# workflow for translators. NOTE(review): original numbering is gapped
# throughout this function; several initializers and `return`/`if` lines are
# elided from this listing — code below is reproduced verbatim.
6160 def CheckStrings(input_api, output_api):
6161     """Check string ICU syntax validity and if translation screenshots exist."""
6162     # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
6163     # footer is set to true.
6164     git_footers = input_api.change.GitFootersFromDescription()
6165     skip_screenshot_check_footer = [
6166         footer.lower() for footer in git_footers.get(
6167             u'Skip-Translation-Screenshots-Check', [])
6169     run_screenshot_check = u'true' not in skip_screenshot_check_footer
6174     from io import StringIO
# Added ('A') and modified ('M') paths; deletions tracked separately below.
6176     new_or_added_paths = set(f.LocalPath() for f in input_api.AffectedFiles()
6177                              if (f.Action() == 'A' or f.Action() == 'M'))
6178     removed_paths = set(f.LocalPath()
6179                         for f in input_api.AffectedFiles(include_deletes=True)
6180                         if f.Action() == 'D')
6183         f for f in input_api.AffectedFiles()
6184         if f.LocalPath().endswith(('.grd', '.grdp'))
# grd files under testdata are fixtures, not real translation sources.
6187         f for f in affected_grds if not 'testdata' in f.LocalPath()
6189     if not affected_grds:
6192     affected_png_paths = [
6193         f.AbsoluteLocalPath() for f in input_api.AffectedFiles()
6194         if (f.LocalPath().endswith('.png'))
6197     # Check for screenshots. Developers can upload screenshots using
6198     # tools/translation/upload_screenshots.py which finds and uploads
6199     # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
6200     # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
6201     # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
6203     # The logic here is as follows:
6205     # - If the CL has a .png file under the screenshots directory for a grd
6206     #   file, warn the developer. Actual images should never be checked into the
6209     # - If the CL contains modified or new messages in grd files and doesn't
6210     #   contain the corresponding .sha1 files, warn the developer to add images
6211     #   and upload them via tools/translation/upload_screenshots.py.
6213     # - If the CL contains modified or new messages in grd files and the
6214     #   corresponding .sha1 files, everything looks good.
6216     # - If the CL contains removed messages in grd files but the corresponding
6217     #   .sha1 files aren't removed, warn the developer to remove them.
6218     unnecessary_screenshots = []
6221     missing_sha1_modified = []
6222     unnecessary_sha1_files = []
6224     # This checks verifies that the ICU syntax of messages this CL touched is
6225     # valid, and reports any found syntax errors.
6226     # Without this presubmit check, ICU syntax errors in Chromium strings can land
6227     # without developers being aware of them. Later on, such ICU syntax errors
6228     # break message extraction for translation, hence would block Chromium
6229     # translations until they are fixed.
6230     icu_syntax_errors = []
# A .png.sha1 file must contain exactly one 40-hex-digit SHA-1.
6231     sha1_pattern = input_api.re.compile(r'^[a-fA-F0-9]{40}$',
6232                                         input_api.re.MULTILINE)
# Helper: a newly added message must come with an uploaded, valid .png.sha1.
6234     def _CheckScreenshotAdded(screenshots_dir, message_id):
6235         sha1_path = input_api.os_path.join(screenshots_dir,
6236                                            message_id + '.png.sha1')
6237         if sha1_path not in new_or_added_paths:
6238             missing_sha1.append(sha1_path)
6239         elif not _CheckValidSha1(sha1_path):
6240             invalid_sha1.append(sha1_path)
# Helper: same requirement for messages whose content/meaning changed.
6242     def _CheckScreenshotModified(screenshots_dir, message_id):
6243         sha1_path = input_api.os_path.join(screenshots_dir,
6244                                            message_id + '.png.sha1')
6245         if sha1_path not in new_or_added_paths:
6246             missing_sha1_modified.append(sha1_path)
6247         elif not _CheckValidSha1(sha1_path):
6248             invalid_sha1.append(sha1_path)
# Helper: reads the .sha1 file's new contents from the CL and validates it.
6250     def _CheckValidSha1(sha1_path):
6251         return sha1_pattern.search(
6252             next("\n".join(f.NewContents()) for f in input_api.AffectedFiles()
6253                  if f.LocalPath() == sha1_path))
# Helper: a removed message's stale .sha1 file must also be deleted.
6255     def _CheckScreenshotRemoved(screenshots_dir, message_id):
6256         sha1_path = input_api.os_path.join(screenshots_dir,
6257                                            message_id + '.png.sha1')
6258         if input_api.os_path.exists(
6259                 sha1_path) and sha1_path not in removed_paths:
6260             unnecessary_sha1_files.append(sha1_path)
# Recursive ICU plural/select/selectordinal syntax validator. Returns None
# when the text is not ICU-like or is valid; otherwise a (message, start,
# end) tuple describing the first problem found.
6262     def _ValidateIcuSyntax(text, level, signatures):
6263         """Validates ICU syntax of a text string.
6265         Check if text looks similar to ICU and checks for ICU syntax correctness
6266         in this case. Reports various issues with ICU syntax and values of
6267         variants. Supports checking of nested messages. Accumulate information of
6268         each ICU messages found in the text for further checking.
6271           text: a string to check.
6272           level: a number of current nesting level.
6273           signatures: an accumulator, a list of tuple of (level, variable,
6277           None if a string is not ICU or no issue detected.
6278           A tuple of (message, start index, end index) if an issue detected.
# For each ICU type: (known variant keywords, required variant keywords).
6281             'plural': (frozenset(
6282                 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
6283                  'other']), frozenset(['=1', 'other'])),
6284             'selectordinal': (frozenset(
6285                 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
6286                  'other']), frozenset(['one', 'other'])),
# 'select' accepts arbitrary variant names but must include 'other'.
6287             'select': (frozenset(), frozenset(['other'])),
6290         # Check if the message looks like an attempt to use ICU
6291         # plural. If yes - check if its syntax strictly matches ICU format.
6292         like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b',
6295             signatures.append((level, None, None, None))
6298         # Check for valid prefix and suffix
6300             r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
6301             r'(plural|selectordinal|select),\s*'
6302             r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
6304             return (('This message looks like an ICU plural, '
6305                      'but does not follow ICU syntax.'), like.start(),
6307         starting, variable, kind, variant_pairs = m.groups()
6308         variants, depth, last_pos = _ParseIcuVariants(variant_pairs,
# depth != 0 after parsing means an unmatched '{' somewhere in the variants.
6311             return ('Invalid ICU format. Unbalanced opening bracket', last_pos,
6314         ending = text[last_pos:]
6316             return ('Invalid ICU format. No initial opening bracket',
6317                     last_pos - 1, last_pos)
6318         if not ending or '}' not in ending:
6319             return ('Invalid ICU format. No final closing bracket',
6320                     last_pos - 1, last_pos)
6323                 'Invalid ICU format. Extra characters at the start of a complex '
6324                 'message (go/icu-message-migration): "%s"') % starting, 0,
6328                 'Invalid ICU format. Extra characters at the end of a complex '
6329                 'message (go/icu-message-migration): "%s"') % ending,
6330                     last_pos - 1, len(text) - 1)
6331         if kind not in valid_types:
6332             return (('Unknown ICU message type %s. '
6333                      'Valid types are: plural, select, selectordinal') % kind,
6335         known, required = valid_types[kind]
6336         defined_variants = set()
6337         for variant, variant_range, value, value_range in variants:
6338             start, end = variant_range
6339             if variant in defined_variants:
6340                 return ('Variant "%s" is defined more than once' % variant,
6342             elif known and variant not in known:
6343                 return ('Variant "%s" is not valid for %s message' %
6344                         (variant, kind), start, end)
6345             defined_variants.add(variant)
6346             # Check for nested structure
# Strip the variant's outer braces and recurse; offsets are re-based into
# the parent string when an error bubbles up.
6347             res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
6349                 return (res[0], res[1] + value_range[0] + 1,
6350                         res[2] + value_range[0] + 1)
6351         missing = required - defined_variants
6353             return ('Required variants missing: %s' % ', '.join(missing), 0,
6355         signatures.append((level, variable, kind, defined_variants))
# Brace-matching parser for the `key {value} key {value} ...` variant list.
# Returns (variants, final brace depth, absolute position after last value).
6357     def _ParseIcuVariants(text, offset=0):
6358         """Parse variants part of ICU complex message.
6360         Builds a tuple of variant names and values, as well as
6361         their offsets in the input string.
6364           text: a string to parse
6365           offset: additional offset to add to positions in the text to get correct
6366             position in the complete ICU string.
6369           List of tuples, each tuple consist of four fields: variant name,
6370           variant name span (tuple of two integers), variant value, value
6371           span (tuple of two integers).
6373         depth, start, end = 0, -1, -1
6376         for idx, char in enumerate(text):
6380                     chunk = text[end + 1:start]
6382                     pos = offset + end + 1 + chunk.find(key)
6383                     span = (pos, pos + len(key))
6387                     return variants, depth, offset + idx
6391                 variants.append((key, span, text[start:end + 1],
6392                                  (offset + start, offset + end + 1)))
6393         return variants, depth, offset + end + 1
# Temporarily extend sys.path to import the translation grd_helper, then
# restore it so later checks see an unmodified path.
6396     old_sys_path = sys.path
6397     sys.path = sys.path + [
6398         input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
6401         from helper import grd_helper
6403         sys.path = old_sys_path
6405     for f in affected_grds:
6406         file_path = f.LocalPath()
6407         old_id_to_msg_map = {}
6408         new_id_to_msg_map = {}
6409         # Note that this code doesn't check if the file has been deleted. This is
6410         # OK because it only uses the old and new file contents and doesn't load
6411         # the file via its path.
6412         # It's also possible that a file's content refers to a renamed or deleted
6413         # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
6414         # is OK as well, because grd_helper ignores <part> tags when loading .grd or
6416         if file_path.endswith('.grdp'):
6418             old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
6419                 '\n'.join(f.OldContents()))
6421             new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
6422                 '\n'.join(f.NewContents()))
6424             file_dir = input_api.os_path.dirname(file_path) or '.'
6426             old_id_to_msg_map = grd_helper.GetGrdMessages(
6427                 StringIO('\n'.join(f.OldContents())), file_dir)
6429             new_id_to_msg_map = grd_helper.GetGrdMessages(
6430                 StringIO('\n'.join(f.NewContents())), file_dir)
# Screenshot dir convention: foo.grd -> foo_grd/ next to the grd file.
6432         grd_name, ext = input_api.os_path.splitext(
6433             input_api.os_path.basename(file_path))
6434         screenshots_dir = input_api.os_path.join(
6435             input_api.os_path.dirname(file_path),
6436             grd_name + ext.replace('.', '_'))
6438         # Compute added, removed and modified message IDs.
6439         old_ids = set(old_id_to_msg_map)
6440         new_ids = set(new_id_to_msg_map)
6441         added_ids = new_ids - old_ids
6442         removed_ids = old_ids - new_ids
6443         modified_ids = set([])
6444         for key in old_ids.intersection(new_ids):
6445             if (old_id_to_msg_map[key].ContentsAsXml('', True) !=
6446                     new_id_to_msg_map[key].ContentsAsXml('', True)):
6447                 # The message content itself changed. Require an updated screenshot.
6448                 modified_ids.add(key)
6449             elif old_id_to_msg_map[key].attrs['meaning'] != \
6450                     new_id_to_msg_map[key].attrs['meaning']:
6451                 # The message meaning changed. We later check for a screenshot.
6452                 modified_ids.add(key)
6454         if run_screenshot_check:
6455             # Check the screenshot directory for .png files. Warn if there is any.
6456             for png_path in affected_png_paths:
6457                 if png_path.startswith(screenshots_dir):
6458                     unnecessary_screenshots.append(png_path)
6460             for added_id in added_ids:
6461                 _CheckScreenshotAdded(screenshots_dir, added_id)
6463             for modified_id in modified_ids:
6464                 _CheckScreenshotModified(screenshots_dir, modified_id)
6466             for removed_id in removed_ids:
6467                 _CheckScreenshotRemoved(screenshots_dir, removed_id)
6469         # Check new and changed strings for ICU syntax errors.
6470         for key in added_ids.union(modified_ids):
6471             msg = new_id_to_msg_map[key].ContentsAsXml('', True)
6472             err = _ValidateIcuSyntax(msg, 0, [])
6474                 icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
# Convert accumulated findings into presubmit results.
6477     if run_screenshot_check:
6478         if unnecessary_screenshots:
6480                 output_api.PresubmitError(
6481                     'Do not include actual screenshots in the changelist. Run '
6482                     'tools/translate/upload_screenshots.py to upload them instead:',
6483                     sorted(unnecessary_screenshots)))
6487                 output_api.PresubmitError(
6488                     'You are adding UI strings.\n'
6489                     'To ensure the best translations, take screenshots of the relevant UI '
6490                     '(https://g.co/chrome/translation) and add these files to your '
6491                     'changelist:', sorted(missing_sha1)))
6495                 output_api.PresubmitError(
6496                     'The following files do not seem to contain valid sha1 hashes. '
6497                     'Make sure they contain hashes created by '
6498                     'tools/translate/upload_screenshots.py:', sorted(invalid_sha1)))
6500         if missing_sha1_modified:
6502                 output_api.PresubmitError(
6503                     'You are modifying UI strings or their meanings.\n'
6504                     'To ensure the best translations, take screenshots of the relevant UI '
6505                     '(https://g.co/chrome/translation) and add these files to your '
6506                     'changelist:', sorted(missing_sha1_modified)))
6508         if unnecessary_sha1_files:
6510                 output_api.PresubmitError(
6511                     'You removed strings associated with these files. Remove:',
6512                     sorted(unnecessary_sha1_files)))
6515             output_api.PresubmitPromptOrNotify('Skipping translation '
6516                                                'screenshots check.'))
6518     if icu_syntax_errors:
6520             output_api.PresubmitPromptWarning(
6521                 'ICU syntax errors were found in the following strings (problems or '
6522                 'feedback? Contact rainhard@chromium.org):',
6523                 items=icu_syntax_errors))
# Presubmit check: when grd/grdp files change, verifies that
# tools/gritsettings/translation_expectations.pyl still parses and covers
# the repository's translatable grd files.
6528 def CheckTranslationExpectations(input_api, output_api,
6530                                  translation_expectations_path=None,
6534         f for f in input_api.AffectedFiles()
6535         if (f.LocalPath().endswith('.grd') or f.LocalPath().endswith('.grdp'))
# Nothing to do when no translation source files were touched.
6537     if not affected_grds:
# Temporarily extend sys.path for the translation helpers, then restore.
6541     old_sys_path = sys.path
6542     sys.path = sys.path + [
6543         input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
6546         from helper import git_helper
6547         from helper import translation_helper
6549         sys.path = old_sys_path
6551     # Check that translation expectations can be parsed and we can get a list of
6552     # translatable grd files. |repo_root| and |translation_expectations_path| are
6553     # only passed by tests.
6555         repo_root = input_api.PresubmitLocalPath()
6556     if not translation_expectations_path:
6557         translation_expectations_path = input_api.os_path.join(
6558             repo_root, 'tools', 'gritsettings', 'translation_expectations.pyl')
6560     grd_files = git_helper.list_grds_in_repository(repo_root)
6562     # Ignore bogus grd files used only for testing
6563     # ui/webui/resources/tools/generate_grd.py.
6564     ignore_path = input_api.os_path.join('ui', 'webui', 'resources', 'tools',
6566     grd_files = [p for p in grd_files if ignore_path not in p]
# Parsing failure (any exception) is surfaced as a notify-level result.
6569         translation_helper.get_translatable_grds(
6570             repo_root, grd_files, translation_expectations_path)
6571     except Exception as e:
6573             output_api.PresubmitNotifyResult(
6574                 'Failed to get a list of translatable grd files. This happens when:\n'
6575                 ' - One of the modified grd or grdp files cannot be parsed or\n'
6576                 ' - %s is not updated.\n'
6577                 'Stack:\n%s' % (translation_expectations_path, str(e)))
# Presubmit check: feeds old/new contents of every changed .mojom to
# check_stable_mojom_compatibility.py, which errors when a [Stable] type is
# changed in a backward-incompatible way.
6582 def CheckStableMojomChanges(input_api, output_api):
6583     """Changes to [Stable] mojom types must preserve backward-compatibility."""
6584     changed_mojoms = input_api.AffectedFiles(
6585         include_deletes=True,
6586         file_filter=lambda f: f.LocalPath().endswith(('.mojom')))
# no_diffs: presubmit dry-run mode with no real diffs to analyze.
6588     if not changed_mojoms or input_api.no_diffs:
6592     for mojom in changed_mojoms:
# 'old'/'new' are None for added/deleted files ('' joins to falsy).
6594             'filename': mojom.LocalPath(),
6595             'old': '\n'.join(mojom.OldContents()) or None,
6596             'new': '\n'.join(mojom.NewContents()) or None,
# The delta is passed to the checker tool as JSON on stdin.
6599         process = input_api.subprocess.Popen([
6600             input_api.python3_executable,
6601             input_api.os_path.join(
6602                 input_api.PresubmitLocalPath(), 'mojo', 'public', 'tools', 'mojom',
6603                 'check_stable_mojom_compatibility.py'), '--src-root',
6604             input_api.PresubmitLocalPath()
6606             stdin=input_api.subprocess.PIPE,
6607             stdout=input_api.subprocess.PIPE,
6608             stderr=input_api.subprocess.PIPE,
6609             universal_newlines=True)
6610         (x, error) = process.communicate(input=input_api.json.dumps(delta))
# Non-zero exit from the tool means a compatibility violation was found.
6611         if process.returncode:
6613                 output_api.PresubmitError(
6614                     'One or more [Stable] mojom definitions appears to have been changed '
6615                     'in a way that is not backward-compatible.',
# Presubmit check: a CL that deletes a pref registration (Register*Pref call)
# should also touch the MigrateObsolete*Prefs() sections of
# chrome/browser/prefs/browser_prefs.cc; otherwise warn about missing
# deprecation.
6620 def CheckDeprecationOfPreferences(input_api, output_api):
6621     """Removing a preference should come with a deprecation."""
# File filter: C++/ObjC++ implementation files, minus test code and defaults.
6623     def FilterFile(affected_file):
6624         """Accept only .cc files and the like."""
6625         file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
6626         files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
6627                          input_api.DEFAULT_FILES_TO_SKIP)
6628         return input_api.FilterSourceFile(
6630             files_to_check=file_inclusion_pattern,
6631             files_to_skip=files_to_skip)
6633     def ModifiedLines(affected_file):
6634         """Returns a list of tuples (line number, line text) of added and removed
6637         Deleted lines share the same line number as the previous line.
6639         This relies on the scm diff output describing each changed code section
6640         with a line of the form
6642         ^@@ <old line num>,<old size> <new line num>,<new size> @@$
6646         for line in affected_file.GenerateScmDiff().splitlines():
6647             # Extract <new line num> of the patch fragment (see format above).
6648             m = input_api.re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@',
6651                 line_num = int(m.groups(1)[0])
# Keep '+'/'-' diff lines but not the '+++'/'---' file headers.
6653             if ((line.startswith('+') and not line.startswith('++'))
6654                     or (line.startswith('-') and not line.startswith('--'))):
6655                 modified_lines.append((line_num, line))
# Deleted lines don't advance the new-file line counter.
6657             if not line.startswith('-'):
6659         return modified_lines
6661     def FindLineWith(lines, needle):
6662         """Returns the line number (i.e. index + 1) in `lines` containing `needle`.
6664         If 0 or >1 lines contain `needle`, -1 is returned.
6666         matching_line_numbers = [
6667             # + 1 for 1-based counting of line numbers.
6668             i + 1 for i, line in enumerate(lines) if needle in line
6670         return matching_line_numbers[0] if len(
6671             matching_line_numbers) == 1 else -1
6673     def ModifiedPrefMigration(affected_file):
6674         """Returns whether the MigrateObsolete.*Pref functions were modified."""
6675         # Determine first and last lines of MigrateObsolete.*Pref functions.
6676         new_contents = affected_file.NewContents()
6677         range_1 = (FindLineWith(new_contents,
6678                                 'BEGIN_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'),
6679                    FindLineWith(new_contents,
6680                                 'END_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'))
6681         range_2 = (FindLineWith(new_contents,
6682                                 'BEGIN_MIGRATE_OBSOLETE_PROFILE_PREFS'),
6683                    FindLineWith(new_contents,
6684                                 'END_MIGRATE_OBSOLETE_PROFILE_PREFS'))
# -1 means a marker was missing or duplicated — browser_prefs.cc is broken.
6685         if (-1 in range_1 + range_2):
6687                 'Broken .*MIGRATE_OBSOLETE_.*_PREFS markers in browser_prefs.cc.'
6690         # Check whether any of the modified lines are part of the
6691         # MigrateObsolete.*Pref functions.
6692         for line_nr, line in ModifiedLines(affected_file):
6693             if (range_1[0] <= line_nr <= range_1[1]
6694                     or range_2[0] <= line_nr <= range_2[1]):
6698     register_pref_pattern = input_api.re.compile(r'Register.+Pref')
6699     browser_prefs_file_pattern = input_api.re.compile(
6700         r'chrome/browser/prefs/browser_prefs.cc')
6702     changes = input_api.AffectedFiles(include_deletes=True,
6703                                       file_filter=FilterFile)
6704     potential_problems = []
6706         for line in f.GenerateScmDiff().splitlines():
6707             # Check deleted lines for pref registrations.
6708             if (line.startswith('-') and not line.startswith('--')
6709                     and register_pref_pattern.search(line)):
6710                 potential_problems.append('%s: %s' % (f.LocalPath(), line))
6712         if browser_prefs_file_pattern.search(f.LocalPath()):
6713             # If the developer modified the MigrateObsolete.*Prefs() functions, we
6714             # assume that they knew that they have to deprecate preferences and don't
6717             if ModifiedPrefMigration(f):
# ModifiedPrefMigration raises on broken markers; surface as an error.
6719     except Exception as e:
6720         return [output_api.PresubmitError(str(e))]
6722     if potential_problems:
6724             output_api.PresubmitPromptWarning(
6725                 'Discovered possible removal of preference registrations.\n\n'
6726                 'Please make sure to properly deprecate preferences by clearing their\n'
6727                 'value for a couple of milestones before finally removing the code.\n'
6728                 'Otherwise data may stay in the preferences files forever. See\n'
6729                 'Migrate*Prefs() in chrome/browser/prefs/browser_prefs.cc and\n'
6730                 'chrome/browser/prefs/README.md for examples.\n'
6731                 'This may be a false positive warning (e.g. if you move preference\n'
6732                 'registrations to a different place).\n', potential_problems)
# Presubmit check: scans the new contents of changed .grd files against the
# _INVALID_GRD_FILE_LINE (regex, message) pairs and errors on any match.
6737 def CheckConsistentGrdChanges(input_api, output_api):
6738     """Changes to GRD files must be consistent for tools to read them."""
6739     changed_grds = input_api.AffectedFiles(
6740         include_deletes=False,
6741         file_filter=lambda f: f.LocalPath().endswith(('.grd')))
# Compile each (pattern, message) pair once, up front.
6743     invalid_file_regexes = [(input_api.re.compile(matcher), msg)
6744                             for matcher, msg in _INVALID_GRD_FILE_LINE]
6745     for grd in changed_grds:
6746         for i, line in enumerate(grd.NewContents()):
6747             for matcher, msg in invalid_file_regexes:
6748                 if matcher.search(line):
6750                         output_api.PresubmitError(
# i is 0-based; report 1-based line numbers to the user.
6751                             'Problem on {grd}:{i} - {msg}'.format(
6752                                 grd=grd.LocalPath(), i=i + 1, msg=msg)))
# Presubmit check: BUILD.gn files in ash/ directories must contain
# `assert(is_chromeos_ash` so they fail loudly when built for other targets.
6756 def CheckAssertAshOnlyCode(input_api, output_api):
6757     """Errors if a BUILD.gn file in an ash/ directory doesn't include
6758     assert(is_chromeos_ash).
6761     def FileFilter(affected_file):
6762         """Includes directories known to be Ash only."""
6763         return input_api.FilterSourceFile(
6766                 r'^ash/.*BUILD\.gn',  # Top-level src/ash/.
6767                 r'.*/ash/.*BUILD\.gn'),  # Any path component.
6768             files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))
# Deliberately unanchored and without closing paren: matches the assert
# anywhere in the file, regardless of its full argument list.
6771     pattern = input_api.re.compile(r'assert\(is_chromeos_ash')
6772     for f in input_api.AffectedFiles(include_deletes=False,
6773                                      file_filter=FileFilter):
# Reads the whole current file, not just the diff.
6774         if (not pattern.search(input_api.ReadFile(f))):
6776                 output_api.PresubmitError(
6777                     'Please add assert(is_chromeos_ash) to %s. If that\'s not '
6778                     'possible, please create and issue and add a comment such '
6779                     'as:\n  # TODO(https://crbug.com/XXX): add '
6780                     'assert(is_chromeos_ash) when ...' % f.LocalPath()))
# Predicate used by CheckRawPtrUsage's file filter: returns whether raw_ptr<T>
# must NOT be used in the given file (selected Blink renderer directories and
# Blink's public/web API). NOTE(review): numbering gaps (6787-6788, 6794-6795,
# 6800-6801) suggest the `return` statements are elided from this listing.
6784 def _IsMiraclePtrDisallowed(input_api, affected_file):
6785     path = affected_file.LocalPath()
# Non-C++ files are never subject to the raw_ptr rule.
6786     if not _IsCPlusPlusFile(input_api, path):
6789     # Renderer code is generally allowed to use MiraclePtr.
6790     # These directories, however, are specifically disallowed.
6791     if ("third_party/blink/renderer/core/" in path
6792             or "third_party/blink/renderer/platform/heap/" in path
6793             or "third_party/blink/renderer/platform/wtf/" in path):
6796     # Blink's public/web API is only used/included by Renderer-only code. Note
6797     # that public/platform API may be used in non-Renderer processes (e.g. there
6798     # are some includes in code used by Utility, PDF, or Plugin processes).
6799     if "/blink/public/web/" in path:
6802     # We assume that everything else may be used outside of Renderer processes.
6805 # TODO(https://crbug.com/1273182): Remove these checks, once they are replaced
6806 # by the Chromium Clang Plugin (which will be preferable because it will
6807 # 1) report errors earlier - at compile-time and 2) cover more rules).
# Presubmit check: errors on new raw_ptr<T> uses in files where
# _IsMiraclePtrDisallowed says the type is forbidden.
6808 def CheckRawPtrUsage(input_api, output_api):
6809     """Rough checks that raw_ptr<T> usage guidelines are followed."""
6811     # The regex below matches "raw_ptr<" following a word boundary, but not in a
# `((?!//).)*` ensures no '//' appears before the match on the line,
# i.e. the occurrence is not inside a line comment.
6813     raw_ptr_matcher = input_api.re.compile(r'^((?!//).)*\braw_ptr<')
6814     file_filter = lambda f: _IsMiraclePtrDisallowed(input_api, f)
# RightHandSideLines: only lines added/changed by this CL.
6815     for f, line_num, line in input_api.RightHandSideLines(file_filter):
6816         if raw_ptr_matcher.search(line):
6818                 output_api.PresubmitError(
6819                     'Problem on {path}:{line} - '\
6820                     'raw_ptr<T> should not be used in this renderer code '\
6821                     '(as documented in the "Pointers to unprotected memory" '\
6822                     'section in //base/memory/raw_ptr.md)'.format(
6823                         path=f.LocalPath(), line=line_num)))
# Presubmit check: warns whenever a CL's diff touches the
# ADVANCED_MEMORY_SAFETY_CHECKS() macro, which only the memory-safety team
# may add or remove.
6826 def CheckAdvancedMemorySafetyChecksUsage(input_api, output_api):
6827     """Checks that ADVANCED_MEMORY_SAFETY_CHECKS() macro is neither added nor
6828     removed as it is managed by the memory safety team internally.
6829     Do not add / remove it manually."""
6831     # The regex below matches "ADVANCED_MEMORY_SAFETY_CHECKS(" following a word
6832     # boundary, but not in a C++ comment.
6833     macro_matcher = input_api.re.compile(
6834         r'^((?!//).)*\bADVANCED_MEMORY_SAFETY_CHECKS\(', input_api.re.MULTILINE)
6835     for f in input_api.AffectedFiles():
6836         if not _IsCPlusPlusFile(input_api, f.LocalPath()):
# Searching the diff (not file contents) catches both additions and removals.
6838         if macro_matcher.search(f.GenerateScmDiff()):
6839             paths.add(f.LocalPath())
6842     return [output_api.PresubmitPromptWarning(
6843         'ADVANCED_MEMORY_SAFETY_CHECKS() macro is managed by ' \
6844         'the memory safety team (chrome-memory-safety@). ' \
6845         'Please contact us to add/delete the uses of the macro.',
# Presubmit check: errors when a changed .py file's first line hardcodes an
# interpreter path (#!/usr/bin/python...) instead of using #!/usr/bin/env.
6848 def CheckPythonShebang(input_api, output_api):
6849     """Checks that python scripts use #!/usr/bin/env instead of hardcoding a
# Third-party code (except Blink) and imported web tests keep their shebangs.
6853     sources = lambda affected_file: input_api.FilterSourceFile(
6855         files_to_skip=((_THIRD_PARTY_EXCEPT_BLINK,
6856                         r'third_party/blink/web_tests/external/') + input_api.
6857                        DEFAULT_FILES_TO_SKIP),
6858         files_to_check=[r'.*\.py$'])
6859     for f in input_api.AffectedSourceFiles(sources):
6860         for line_num, line in f.ChangedContents():
# Only the shebang line (line 1) is relevant.
6861             if line_num == 1 and line.startswith('#!/usr/bin/python'):
6862                 errors.append(f.LocalPath())
6868             output_api.PresubmitError(
6869                 "Please use '#!/usr/bin/env python/2/3' as the shebang of %s" %
# Presubmit check for Java instrumentation tests: each *Test.java must carry
# @Batch or @DoNotBatch; Robolectric/UiAutomator tests are exempt and warned
# if they carry the annotation anyway.
6874 def CheckBatchAnnotation(input_api, output_api):
6875     """Checks that tests have either @Batch or @DoNotBatch annotation. If this
6876     is not an instrumentation test, disregard."""
6878     batch_annotation = input_api.re.compile(r'^\s*@Batch')
6879     do_not_batch_annotation = input_api.re.compile(r'^\s*@DoNotBatch')
6880     robolectric_test = input_api.re.compile(r'[rR]obolectric')
6881     test_class_declaration = input_api.re.compile(r'^\s*public\sclass.*Test')
6882     uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')
6883     test_annotation_declaration = input_api.re.compile(r'^\s*public\s@interface\s.*{')
6885     missing_annotation_errors = []
6886     extra_annotation_errors = []
6888     def _FilterFile(affected_file):
6889         return input_api.FilterSourceFile(
6891             files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
6892             files_to_check=[r'.*Test\.java$'])
6894     for f in input_api.AffectedSourceFiles(_FilterFile):
6895         batch_matched = None
6896         do_not_batch_matched = None
# Assume instrumentation test until a Robolectric/UiAutomator marker is seen.
6897         is_instrumentation_test = True
6898         test_annotation_declaration_matched = None
6899         for line in f.NewContents():
6900             if robolectric_test.search(line) or uiautomator_test.search(line):
6901                 # Skip Robolectric and UiAutomator tests.
6902                 is_instrumentation_test = False
6904             if not batch_matched:
6905                 batch_matched = batch_annotation.search(line)
6906             if not do_not_batch_matched:
6907                 do_not_batch_matched = do_not_batch_annotation.search(line)
6908             test_class_declaration_matched = test_class_declaration.search(
6910             test_annotation_declaration_matched = test_annotation_declaration.search(line)
# Stop scanning once the class or @interface declaration is reached:
# annotations always precede it.
6911             if test_class_declaration_matched or test_annotation_declaration_matched:
# Annotation declarations (@interface) are not tests; skip them entirely.
6913         if test_annotation_declaration_matched:
6915         if (is_instrumentation_test and
6916                 not batch_matched and
6917                 not do_not_batch_matched):
6918             missing_annotation_errors.append(str(f.LocalPath()))
6919         if (not is_instrumentation_test and
6921                 do_not_batch_matched)):
6922             extra_annotation_errors.append(str(f.LocalPath()))
6926     if missing_annotation_errors:
6928             output_api.PresubmitPromptWarning(
6930 A change was made to an on-device test that has neither been annotated with
6931 @Batch nor @DoNotBatch. If this is a new test, please add the annotation. If
6932 this is an existing test, please consider adding it if you are sufficiently
6933 familiar with the test (but do so as a separate change).
6935 See https://source.chromium.org/chromium/chromium/src/+/main:docs/testing/batching_instrumentation_tests.md
6936 """, missing_annotation_errors))
6937     if extra_annotation_errors:
6939             output_api.PresubmitPromptWarning(
6941 Robolectric tests do not need a @Batch or @DoNotBatch annotations.
6942 """, extra_annotation_errors))
# Presubmit check for Java tests: classes passed to Mockito.mock()/spy() must
# also appear on a field annotated @Mock/@Spy, since our Java optimizer (R8)
# only respects the annotated form. Heuristic, regex-based — see the caveat
# in the comment below.
6947 def CheckMockAnnotation(input_api, output_api):
6948     """Checks that we have annotated all Mockito.mock()-ed or Mockito.spy()-ed
6949     classes with @Mock or @Spy. If this is not an instrumentation test,
6952     # This is just trying to be approximately correct. We are not writing a
6953     # Java parser, so special cases like statically importing mock() then
6954     # calling an unrelated non-mockito spy() function will cause a false
6956     package_name = input_api.re.compile(r'^package\s+(\w+(?:\.\w+)+);')
6957     mock_static_import = input_api.re.compile(
6958         r'^import\s+static\s+org.mockito.Mockito.(?:mock|spy);')
6959     import_class = input_api.re.compile(r'import\s+((?:\w+\.)+)(\w+);')
6960     mock_annotation = input_api.re.compile(r'^\s*@(?:Mock|Spy)')
6961     field_type = input_api.re.compile(r'(\w+)(?:<\w+>)?\s+\w+\s*(?:;|=)')
6962     mock_or_spy_function_call = r'(?:mock|spy)\(\s*(?:new\s*)?(\w+)(?:\.class|\()'
6963     fully_qualified_mock_function = input_api.re.compile(
6964         r'Mockito\.' + mock_or_spy_function_call)
6965     statically_imported_mock_function = input_api.re.compile(
6966         r'\W' + mock_or_spy_function_call)
6967     robolectric_test = input_api.re.compile(r'[rR]obolectric')
6968     uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')
# Resolve a bare class name to its fully qualified name, preferring explicit
# imports, otherwise assuming the file's own package.
6970     def _DoClassLookup(class_name, class_name_map, package):
6971         found = class_name_map.get(class_name)
6972         if found is not None:
6975             return package + '.' + class_name
6977     def _FilterFile(affected_file):
6978         return input_api.FilterSourceFile(
6980             files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
6981             files_to_check=[r'.*Test\.java$'])
6983     mocked_by_function_classes = set()
6984     mocked_by_annotation_classes = set()
6985     class_to_filename = {}
6986     for f in input_api.AffectedSourceFiles(_FilterFile):
# Default to requiring the 'Mockito.' prefix until a static import is seen.
6987         mock_function_regex = fully_qualified_mock_function
6988         next_line_is_annotated = False
6989         fully_qualified_class_map = {}
6992         for line in f.NewContents():
6993             if robolectric_test.search(line) or uiautomator_test.search(line):
6994                 # Skip Robolectric and UiAutomator tests.
6997             m = package_name.search(line)
6999                 package = m.group(1)
7002             if mock_static_import.search(line):
7003                 mock_function_regex = statically_imported_mock_function
7006             m = import_class.search(line)
7008                 fully_qualified_class_map[m.group(2)] = m.group(1) + m.group(2)
# Previous line was a bare @Mock/@Spy annotation; this line holds the field.
7011             if next_line_is_annotated:
7012                 next_line_is_annotated = False
7013                 fully_qualified_class = _DoClassLookup(
7014                     field_type.search(line).group(1), fully_qualified_class_map,
7016                 mocked_by_annotation_classes.add(fully_qualified_class)
7019             if mock_annotation.search(line):
7020                 field_type_search = field_type.search(line)
7021                 if field_type_search:
7022                     fully_qualified_class = _DoClassLookup(
7023                         field_type_search.group(1), fully_qualified_class_map,
7025                     mocked_by_annotation_classes.add(fully_qualified_class)
# Annotation on its own line — the field type is on the next line.
7027                     next_line_is_annotated = True
7030             m = mock_function_regex.search(line)
7032                 fully_qualified_class = _DoClassLookup(m.group(1),
7033                     fully_qualified_class_map, package)
7034                 # Skipping builtin classes, since they don't get optimized.
7035                 if fully_qualified_class.startswith(
7036                         'android.') or fully_qualified_class.startswith(
7039                 class_to_filename[fully_qualified_class] = str(f.LocalPath())
7040                 mocked_by_function_classes.add(fully_qualified_class)
# Classes mocked via mock()/spy() but never via an annotated field.
7043     missed_classes = mocked_by_function_classes - mocked_by_annotation_classes
7045         error_locations = []
7046         for c in missed_classes:
7047             error_locations.append(c + ' in ' + class_to_filename[c])
7049             output_api.PresubmitPromptWarning(
7051 Mockito.mock()/spy() cause issues with our Java optimizer. You have 3 options:
7052 1) If the mocked variable can be a class member, annotate the member with
7054 2) If the mocked variable cannot be a class member, create a dummy member
7055    variable of that type, annotated with @Mock/@Spy. This dummy does not need
7056    to be used or initialized in any way.
7057 3) If the mocked type is definitely not going to be optimized, whether it's a
7058    builtin type which we don't ship, or a class you know R8 will treat
7059    specially, you can ignore this warning.
7060 """, error_locations))
# Presubmit check: discourages JavaScript in iOS code now that TypeScript is
# supported. Added .js files error; modified .js files warn; added files whose
# basename matches a deleted one are treated as moves and only warn.
7064 def CheckNoJsInIos(input_api, output_api):
7065     """Checks to make sure that JavaScript files are not used on iOS."""
7067     def _FilterFile(affected_file):
7068         return input_api.FilterSourceFile(
# Third-party and autofill form_util resources are exempt.
7070             files_to_skip=input_api.DEFAULT_FILES_TO_SKIP +
7071             (r'^ios/third_party/*', r'^ios/tools/*', r'^third_party/*',
7072              r'^components/autofill/ios/form_util/resources/*'),
7073             files_to_check=[r'^ios/.*\.js$', r'.*/ios/.*\.js$'])
7077     # Collect filenames of all removed JS files.
# First pass: deletions only, so the second pass can classify adds as moves.
7078     for f in input_api.AffectedSourceFiles(_FilterFile):
7079         local_path = f.LocalPath()
7081         if input_api.os_path.splitext(local_path)[1] == '.js' and f.Action() == 'D':
7082             deleted_files.append(input_api.os_path.basename(local_path))
7088     for f in input_api.AffectedSourceFiles(_FilterFile):
7089         local_path = f.LocalPath()
7091         if input_api.os_path.splitext(local_path)[1] == '.js':
7092             if f.Action() == 'A':
7093                 if input_api.os_path.basename(local_path) in deleted_files:
7094                     # This script was probably moved rather than newly created.
7095                     # Present a warning instead of an error for these cases.
7096                     moved_paths.append(local_path)
7098                     error_paths.append(local_path)
# Modified (neither added nor deleted) .js files only get a warning.
7099             elif f.Action() != 'D':
7100                 warning_paths.append(local_path)
7105         results.append(output_api.PresubmitPromptWarning(
7106             'TypeScript is now fully supported for iOS feature scripts. '
7107             'Consider converting JavaScript files to TypeScript. See '
7108             '//ios/web/public/js_messaging/README.md for more details.',
7112         results.append(output_api.PresubmitPromptWarning(
7113             'Do not use JavaScript on iOS for new files as TypeScript is '
7114             'fully supported. (If this is a moved file, you may leave the '
7115             'script unconverted.) See //ios/web/public/js_messaging/README.md '
7116             'for help using scripts on iOS.', moved_paths))
7119         results.append(output_api.PresubmitError(
7120             'Do not use JavaScript on iOS as TypeScript is fully supported. '
7121             'See //ios/web/public/js_messaging/README.md for help using '
7122             'scripts on iOS.', error_paths))
7126 def CheckLibcxxRevisionsMatch(input_api, output_api):
7127   """Check to make sure the libc++ version matches across deps files."""
7128   # Disable check for changes to sub-repositories.
7129   if input_api.PresubmitLocalPath() != input_api.change.RepositoryRoot():
  # The set of files that must agree on the libcxx_revision value.
7132   DEPS_FILES = [ 'DEPS', 'buildtools/deps_revisions.gni' ]
  # Normalize path separators so the membership test also works on Windows.
7134   file_filter = lambda f: f.LocalPath().replace(
7135       input_api.os_path.sep, '/') in DEPS_FILES
7136   changed_deps_files = input_api.AffectedFiles(file_filter=file_filter)
7137   if not changed_deps_files:
  # Extracts the libcxx_revision hash from a deps file. Assumes the marker
  # is always present; re.search returning None would raise AttributeError.
7140   def LibcxxRevision(file):
7141     file = input_api.os_path.join(input_api.PresubmitLocalPath(),
7143     return input_api.re.search(
7144         r'libcxx_revision.*[:=].*[\'"](\w+)[\'"]',
7145         input_api.ReadFile(file)).group(1)
  # If every file reports the same revision there is nothing to flag.
7147   if len(set([LibcxxRevision(f) for f in DEPS_FILES])) == 1:
7150   return [output_api.PresubmitError(
7151       'libcxx_revision not equal across %s' % ', '.join(DEPS_FILES),
7152       changed_deps_files)]
7155 def CheckDanglingUntriaged(input_api, output_api):
7156   """Warn developers adding DanglingUntriaged raw_ptr."""
7158   # Ignore during git presubmit --all.
7160   # This would be too costly, because this would check every lines of every
7161   # C++ files. Check from _BANNED_CPP_FUNCTIONS are also reading the whole
7162   # source code, but only once to apply every checks. It seems the bots like
7163   # `win-presubmit` are particularly sensitive to reading the files. Adding
7164   # this check caused the bot to run 2x longer. See https://crbug.com/1486612.
7165   if input_api.no_diffs:
  # Consider C/C++/Objective-C(++) sources and headers, excluding the
  # allocator implementation itself.
7168   def FilterFile(file):
7169     return input_api.FilterSourceFile(
7171         files_to_check=[r".*\.(h|cc|cpp|cxx|m|mm)$"],
7172         files_to_skip=[r"^base/allocator.*"],
  # Net change in the number of "DanglingUntriaged" occurrences across the
  # CL: old occurrences subtract, new ones add.
7176   for f in input_api.AffectedSourceFiles(FilterFile):
7177     count -= f.OldContents().count("DanglingUntriaged")
7178     count += f.NewContents().count("DanglingUntriaged")
7180   # Most likely, nothing changed:
7184   # Congrats developers for improving it:
  # A negative count means occurrences were removed; acknowledge instead of
  # warning.
7187       f"DanglingUntriaged pointers removed: {-count}",
7190     return [output_api.PresubmitNotifyResult(message)]
7192   # Check for 'DanglingUntriaged-notes' in the description:
  # Either form of the tag ("DanglingUntriaged-notes:" or "...=") opts the
  # CL out of the warning.
7193   notes_regex = input_api.re.compile("DanglingUntriaged-notes[:=]")
7195       notes_regex.match(line)
7196       for line in input_api.change.DescriptionText().splitlines()):
7199   # Check for DanglingUntriaged-notes in the git footer:
7200   if input_api.change.GitFootersFromDescription().get(
7201       "DanglingUntriaged-notes", []):
  # Otherwise, warn with pointers to the dangling-pointer documentation and
  # the opt-out mechanism.
7205       "Unexpected new occurrences of `DanglingUntriaged` detected. Please",
7206       "avoid adding new ones",
7208       "See documentation:",
7209       "https://chromium.googlesource.com/chromium/src/+/main/docs/dangling_ptr.md",
7211       "See also the guide to fix dangling pointers:",
7212       "https://chromium.googlesource.com/chromium/src/+/main/docs/dangling_ptr_guide.md",
7214       "To disable this warning, please add in the commit description:",
7215       "DanglingUntriaged-notes: <rational for new untriaged dangling "
7218   return [output_api.PresubmitPromptWarning(message)]
7220 def CheckInlineConstexprDefinitionsInHeaders(input_api, output_api):
7221 """Checks that non-static constexpr definitions in headers are inline."""
7222 # In a properly formatted file, constexpr definitions inside classes or
7223 # structs will have additional whitespace at the beginning of the line.
7224 # The pattern looks for variables initialized as constexpr kVar = ...; or
7225 # constexpr kVar{...};
7226 # The pattern does not match expressions that have braces in kVar to avoid
7227 # matching constexpr functions.
7228 pattern = input_api.re.compile(r'^constexpr (?!inline )[^\(\)]*[={]')
7229 attribute_pattern = input_api.re.compile(r'(\[\[[a-zA-Z_:]+\]\]|[A-Z]+[A-Z_]+) ')
7231 for f in input_api.AffectedFiles():
7232 if not _IsCPlusPlusHeaderFile(input_api, f.LocalPath()):
7235 for line_number, line in f.ChangedContents():
7236 line = attribute_pattern.sub('', line)
7237 if pattern.search(line):
7239 f"{f.LocalPath()}: {line_number}\n {line}")
7243 output_api.PresubmitPromptWarning(
7244 'Consider inlining constexpr variable definitions in headers '
7245 'outside of classes to avoid unnecessary copies of the '
7246 'constant. See https://abseil.io/tips/168 for more details.',