1 # Copyright 2012 The Chromium Authors
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts/
8 for more details about the presubmit API built into depot_tools.
11 from typing import Callable
12 from typing import Optional
13 from typing import Sequence
14 from dataclasses import dataclass
16 # Version of the depot_tools presubmit API this script is written against;
16 # read by the presubmit framework when executing the checks in this file.
16 PRESUBMIT_VERSION = '2.0.0'
18 # This line is 'magic' in that git-cl looks for it to decide whether to
19 # use Python3 instead of Python2 when running the code in this file.
24 (r"chrome/android/webapk/shell_apk/src/org/chromium"
25 r"/webapk/lib/runtime_library/IWebApkApi.java"),
26 # File needs to write to stdout to emulate a tool it's replacing.
27 r"chrome/updater/mac/keystone/ksadmin.mm",
29 (r"^components/variations/proto/devtools/"
30 r"client_variations.js"),
31 # These are video files, not typescript.
32 r"^media/test/data/.*.ts",
33 r"^native_client_sdksrc/build_tools/make_rules.py",
34 r"^native_client_sdk/src/build_tools/make_simple.py",
35 r"^native_client_sdk/src/tools/.*.mk",
36 r"^net/tools/spdyshark/.*",
38 r"^third_party/blink/.*",
39 r"^third_party/breakpad/.*",
40 # sqlite is an imported third party dependency.
41 r"^third_party/sqlite/.*",
45 r".+_pb2(_grpc)?\.py$",
47 r"^gpu/config/.*_list_json\.cc$",
48 r"tools/md_browser/.*\.css$",
49 # Test pages for Maps telemetry tests.
50 r"tools/perf/page_sets/maps_perf_test.*",
51 # Test pages for WebRTC telemetry tests.
52 r"tools/perf/page_sets/webrtc_cases.*",
53 # Test file compared with generated output.
54 r"tools/polymer/tests/html_to_wrapper/.*.html.ts$",
57 _EXCLUDED_SET_NO_PARENT_PATHS = (
58 # It's for historical reasons that blink isn't a top level directory, where
59 # it would be allowed to have "set noparent" to avoid top level owners
60 # accidentally +1ing changes.
61 'third_party/blink/OWNERS',
65 # Fragment of a regular expression that matches C++ and Objective-C++
66 # implementation files. Interpolated via '%s' into the path patterns
66 # below (e.g. _TEST_CODE_EXCLUDED_PATHS).
67 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
70 # Fragment of a regular expression that matches C++ and Objective-C++
70 # header files.
72 _HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
75 # Paths with sources that don't use //base.
76 _NON_BASE_DEPENDENT_PATHS = (
77 r"^chrome/browser/browser_switcher/bho/",
82 # Regular expression that matches code only used for test binaries
84 _TEST_CODE_EXCLUDED_PATHS = (
85 r'.*/(fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
86 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
87 # Test suite files, like:
89 # bar_unittest_mac.cc (suffix)
90 # baz_unittests.cc (plural)
91 r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(s)?(_[a-z]+)?%s' %
92 _IMPLEMENTATION_EXTENSIONS,
93 r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
94 r'.+sync_service_impl_harness%s' % _IMPLEMENTATION_EXTENSIONS,
95 r'.*/(test|tool(s)?)/.*',
96 # content_shell is used for running content_browsertests.
99 r'content/web_test/.*',
100 # Non-production example code.
102 # Launcher for running iOS tests on the simulator.
103 r'testing/iossim/iossim\.mm$',
104 # EarlGrey app side code for tests.
105 r'ios/.*_app_interface\.mm$',
106 # Views Examples code
107 r'ui/views/examples/.*',
112 # Regex fragment matching any third_party/ path except third_party/blink/
112 # (negative lookahead). Used in excluded_paths of ban rules so that checks
112 # still apply to Blink but not to other imported third-party code.
112 _THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
114 _TEST_ONLY_WARNING = (
115 'You might be calling functions intended only for testing from\n'
116 'production code. If you are doing this from inside another method\n'
117 'named as *ForTesting(), then consider exposing things to have tests\n'
118 'make that same call directly.\n'
119 'If that is not possible, you may put a comment on the same line with\n'
121 'to tell the PRESUBMIT script that the code is inside a *ForTesting()\n'
122 'method and can be ignored. Do not do this inside production code.\n'
123 'The android-binary-size trybot will block if the method exists in the\n'
129 # String pattern. If the pattern begins with a slash, the pattern will be
130 # treated as a regular expression instead.
132 # Explanation as a sequence of strings. Each string in the sequence will be
133 # printed on its own line.
134 explanation: Sequence[str]
135 # Whether or not to treat this ban as a fatal error. If unspecified,
137 treat_as_error: Optional[bool] = None
138 # Paths that should be excluded from the ban check. Each string is a regular
139 # expression that will be matched against the path of the file being checked
140 # relative to the root of the source tree.
141 excluded_paths: Optional[Sequence[str]] = None
144 _BANNED_JAVA_IMPORTS : Sequence[BanRule] = (
146 'import java.net.URI;',
148 'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
151 (r'net/android/javatests/src/org/chromium/net/'
152 'AndroidProxySelectorTest\.java'),
153 r'components/cronet/',
154 r'third_party/robolectric/local/',
158 'import android.annotation.TargetApi;',
160 'Do not use TargetApi, use @androidx.annotation.RequiresApi instead. '
161 'RequiresApi ensures that any calls are guarded by the appropriate '
162 'SDK_INT check. See https://crbug.com/1116486.',
166 'import androidx.test.rule.UiThreadTestRule;',
168 'Do not use UiThreadTestRule, just use '
169 '@org.chromium.base.test.UiThreadTest on test methods that should run '
170 'on the UI thread. See https://crbug.com/1111893.',
174 'import androidx.test.annotation.UiThreadTest;',
175 ('Do not use androidx.test.annotation.UiThreadTest, use '
176 'org.chromium.base.test.UiThreadTest instead. See '
177 'https://crbug.com/1111893.',
181 'import androidx.test.rule.ActivityTestRule;',
183 'Do not use ActivityTestRule, use '
184 'org.chromium.base.test.BaseActivityTestRule instead.',
187 'components/cronet/',
191 'import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat;',
193 'Do not use VectorDrawableCompat, use getResources().getDrawable() to '
194 'avoid extra indirections. Please also add trace event as the call '
195 'might take more than 20 ms to complete.',
200 _BANNED_JAVA_FUNCTIONS : Sequence[BanRule] = (
202 'StrictMode.allowThreadDiskReads()',
204 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
210 'StrictMode.allowThreadDiskWrites()',
212 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
218 '.waitForIdleSync()',
220 'Do not use waitForIdleSync as it masks underlying issues. There is '
221 'almost always something else you should wait on instead.',
226 r'/(?<!\bsuper\.)(?<!\bIntent )\bregisterReceiver\(',
228 'Do not call android.content.Context.registerReceiver (or an override) '
229 'directly. Use one of the wrapper methods defined in '
230 'org.chromium.base.ContextUtils, such as '
231 'registerProtectedBroadcastReceiver, '
232 'registerExportedBroadcastReceiver, or '
233 'registerNonExportedBroadcastReceiver. See their documentation for '
240 'base/android/java/src/org/chromium/base/ContextUtils.java',
241 'chromecast/browser/android/apk/src/org/chromium/chromecast/shell/BroadcastReceiverScope.java',
245 r'/(?:extends|new)\s*(?:android.util.)?Property<[A-Za-z.]+,\s*(?:Integer|Float)>',
247 'Do not use Property<..., Integer|Float>, but use FloatProperty or '
248 'IntProperty because it will avoid unnecessary autoboxing of '
255 'Layouts can be expensive. Prefer using ViewUtils.requestLayout(), '
256 'which emits a trace event with additional information to help with '
257 'scroll jank investigations. See http://crbug.com/1354176.',
261 'ui/android/java/src/org/chromium/ui/base/ViewUtils.java',
265 'Profile.getLastUsedRegularProfile()',
267 'Prefer passing in the Profile reference instead of relying on the '
268 'static getLastUsedRegularProfile() call. Only top level entry points '
269 '(e.g. Activities) should call this method. Otherwise, the Profile '
270 'should either be passed in explicitly or retreived from an existing '
271 'entity with a reference to the Profile (e.g. WebContents).',
275 r'.*Test[A-Z]?.*\.java',
279 r'/(ResourcesCompat|getResources\(\))\.getDrawable\(\)',
281 'getDrawable() can be expensive. If you have a lot of calls to '
282 'GetDrawable() or your code may introduce janks, please put your calls '
287 r'.*Test[A-Z]?.*\.java',
291 r'/RecordHistogram\.getHistogram(ValueCount|TotalCount|Samples)ForTesting\(',
293 'Raw histogram counts are easy to misuse; for example they don\'t reset '
294 'between batched tests. Use HistogramWatcher to check histogram records instead.',
298 'base/android/javatests/src/org/chromium/base/metrics/RecordHistogramTest.java',
299 'base/test/android/javatests/src/org/chromium/base/test/util/HistogramWatcher.java',
304 _BANNED_JAVASCRIPT_FUNCTIONS : Sequence [BanRule] = (
306 r'/\bchrome\.send\b',
308 'The use of chrome.send is disallowed in Chrome (context: https://chromium.googlesource.com/chromium/src/+/refs/heads/main/docs/security/handling-messages-from-web-content.md).',
309 'Please use mojo instead for new webuis. https://docs.google.com/document/d/1RF-GSUoveYa37eoyZ9EhwMtaIwoW7Z88pIgNZ9YzQi4/edit#heading=h.gkk22wgk6wff',
313 r'^(?!ash\/webui).+',
314 # TODO(crbug.com/1385601): pre-existing violations still need to be
316 'ash/webui/common/resources/cr.m.js',
317 'ash/webui/common/resources/multidevice_setup/multidevice_setup_browser_proxy.js',
318 'ash/webui/common/resources/quick_unlock/lock_screen_constants.js',
319 'ash/webui/common/resources/smb_shares/smb_browser_proxy.js',
320 'ash/webui/connectivity_diagnostics/resources/connectivity_diagnostics.js',
321 'ash/webui/diagnostics_ui/resources/diagnostics_browser_proxy.ts',
322 'ash/webui/multidevice_debug/resources/logs.js',
323 'ash/webui/multidevice_debug/resources/webui.js',
324 'ash/webui/projector_app/resources/annotator/trusted/annotator_browser_proxy.js',
325 'ash/webui/projector_app/resources/app/trusted/projector_browser_proxy.js',
326 'ash/webui/scanning/resources/scanning_browser_proxy.js',
331 _BANNED_OBJC_FUNCTIONS : Sequence[BanRule] = (
335 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
336 'prohibited. Please use CrTrackingArea instead.',
337 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
342 r'/NSTrackingArea\W',
344 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
346 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
351 'convertPointFromBase:',
353 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
354 'Please use |convertPoint:(point) fromView:nil| instead.',
355 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
360 'convertPointToBase:',
362 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
363 'Please use |convertPoint:(point) toView:nil| instead.',
364 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
369 'convertRectFromBase:',
371 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
372 'Please use |convertRect:(point) fromView:nil| instead.',
373 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
378 'convertRectToBase:',
380 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
381 'Please use |convertRect:(point) toView:nil| instead.',
382 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
387 'convertSizeFromBase:',
389 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
390 'Please use |convertSize:(point) fromView:nil| instead.',
391 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
396 'convertSizeToBase:',
398 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
399 'Please use |convertSize:(point) toView:nil| instead.',
400 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
405 r"/\s+UTF8String\s*]",
407 'The use of -[NSString UTF8String] is dangerous as it can return null',
408 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
409 'Please use |SysNSStringToUTF8| instead.',
414 r'__unsafe_unretained',
416 'The use of __unsafe_unretained is almost certainly wrong, unless',
417 'when interacting with NSFastEnumeration or NSInvocation.',
418 'Please use __weak in files build with ARC, nothing otherwise.',
425 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
426 'Foundation types is prohibited.',
432 _BANNED_IOS_OBJC_FUNCTIONS = (
436 'TEST() macro should not be used in Objective-C++ code as it does not ',
437 'drain the autorelease pool at the end of the test. Use TEST_F() ',
438 'macro instead with a fixture inheriting from PlatformTest (or a ',
444 r'/\btesting::Test\b',
446 'testing::Test should not be used in Objective-C++ code as it does ',
447 'not drain the autorelease pool at the end of the test. Use ',
448 'PlatformTest instead.'
453 ' systemImageNamed:',
455 '+[UIImage systemImageNamed:] should not be used to create symbols.',
456 'Instead use a wrapper defined in:',
457 'ios/chrome/browser/ui/icons/symbol_helpers.h'
461 'ios/chrome/browser/shared/ui/symbols/symbol_helpers.mm',
462 'ios/chrome/search_widget_extension/',
467 _BANNED_IOS_EGTEST_FUNCTIONS : Sequence[BanRule] = (
469 r'/\bEXPECT_OCMOCK_VERIFY\b',
471 'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
472 'it is meant for GTests. Use [mock verify] instead.'
478 _BANNED_CPP_FUNCTIONS : Sequence[BanRule] = (
480 r'/\busing namespace ',
482 'Using directives ("using namespace x") are banned by the Google Style',
483 'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
484 'Explicitly qualify symbols or use using declarations ("using x::foo").',
487 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
489 # Make sure that gtest's FRIEND_TEST() macro is not used; the
490 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
491 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
495 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
496 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
504 'Overriding setMatrixClip() is prohibited; ',
505 'the base function is deprecated. ',
513 'The use of SkRefPtr is prohibited. ',
514 'Please use sk_sp<> instead.'
522 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
523 'Please use sk_sp<> instead.'
531 'The use of SkAutoTUnref is dangerous because it implicitly ',
532 'converts to a raw pointer. Please use sk_sp<> instead.'
540 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
541 'because it implicitly converts to a raw pointer. ',
542 'Please use sk_sp<> instead.'
548 r'/HANDLE_EINTR\(.*close',
550 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
551 'descriptor will be closed, and it is incorrect to retry the close.',
552 'Either call close directly and ignore its return value, or wrap close',
553 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
559 r'/IGNORE_EINTR\((?!.*close)',
561 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
562 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
566 # Files that #define IGNORE_EINTR.
567 r'^base/posix/eintr_wrapper\.h$',
568 r'^ppapi/tests/test_broker\.cc$',
574 'Do not introduce new v8::Extensions into the code base, use',
575 'gin::Wrappable instead. See http://crbug.com/334679',
579 r'extensions/renderer/safe_builtins\.*',
583 '#pragma comment(lib,',
585 'Specify libraries to link with in build files and not in the source.',
589 r'^base/third_party/symbolize/.*',
590 r'^third_party/abseil-cpp/.*',
594 r'/base::SequenceChecker\b',
596 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
602 r'/base::ThreadChecker\b',
604 'Consider using THREAD_CHECKER macros instead of the class directly.',
610 r'/\b(?!(Sequenced|SingleThread))\w*TaskRunner::(GetCurrentDefault|CurrentDefaultHandle)',
612 'It is not allowed to call these methods from the subclasses ',
613 'of Sequenced or SingleThread task runners.',
619 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
621 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
622 'deprecated (http://crbug.com/634507). Please avoid converting away',
623 'from the Time types in Chromium code, especially if any math is',
624 'being done on time values. For interfacing with platform/library',
625 'APIs, use FromMicroseconds() or InMicroseconds(), or one of the other',
626 'type converter methods instead. For faking TimeXXX values (for unit',
627 'testing only), use TimeXXX() + Microseconds(N). For',
628 'other use cases, please contact base/time/OWNERS.',
634 'CallJavascriptFunctionUnsafe',
636 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
637 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
638 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
642 r'^content/browser/webui/web_ui_impl\.(cc|h)$',
643 r'^content/public/browser/web_ui\.h$',
644 r'^content/public/test/test_web_ui\.(cc|h)$',
650 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
651 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
652 "Chrome's tracing, making their memory usage visible.",
656 r'^third_party/leveldatabase/.*\.(cc|h)$',
660 'leveldb::NewMemEnv',
662 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
663 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
664 "to Chrome's tracing, making their memory usage visible.",
668 r'^third_party/leveldatabase/.*\.(cc|h)$',
672 'RunLoop::QuitCurrent',
674 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
675 'methods of a specific RunLoop instance instead.',
681 'base::ScopedMockTimeMessageLoopTaskRunner',
683 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
684 'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
685 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
686 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
687 'with gab@ first if you think you need it)',
695 'Using std::regex adds unnecessary binary size to Chrome. Please use',
696 're2::RE2 instead (crbug.com/755321)',
699 # Abseil's benchmarks never linked into chrome.
700 ['third_party/abseil-cpp/.*_benchmark.cc'],
703 r'/\bstd::sto(i|l|ul|ll|ull)\b',
705 'std::sto{i,l,ul,ll,ull}() use exceptions to communicate results. ',
706 'Use base::StringTo[U]Int[64]() instead.',
709 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
712 r'/\bstd::sto(f|d|ld)\b',
714 'std::sto{f,d,ld}() use exceptions to communicate results. ',
715 'For locale-independent values, e.g. reading numbers from disk',
716 'profiles, use base::StringToDouble().',
717 'For user-visible values, parse using ICU.',
720 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
723 r'/\bstd::to_string\b',
725 'std::to_string() is locale dependent and slower than alternatives.',
726 'For locale-independent strings, e.g. writing numbers to disk',
727 'profiles, use base::NumberToString().',
728 'For user-visible strings, use base::FormatNumber() and',
729 'the related functions in base/i18n/number_formatting.h.',
731 False, # Only a warning since it is already used.
732 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
735 r'/\bstd::shared_ptr\b',
737 'std::shared_ptr is banned. Use scoped_refptr instead.',
741 # Needed for interop with third-party library.
742 '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
743 'array_buffer_contents\.(cc|h)',
744 '^third_party/blink/renderer/bindings/core/v8/' +
745 'v8_wasm_response_extensions.cc',
746 '^gin/array_buffer\.(cc|h)',
747 '^chrome/services/sharing/nearby/',
748 # Needed for interop with third-party library libunwindstack.
749 '^base/profiler/libunwindstack_unwinder_android\.(cc|h)',
750 # Needed for interop with third-party boringssl cert verifier
751 '^third_party/boringssl/',
753 '^net/tools/cert_verify_tool/',
754 '^services/cert_verifier/',
755 '^components/certificate_transparency/',
756 '^components/media_router/common/providers/cast/certificate/',
757 # gRPC provides some C++ libraries that use std::shared_ptr<>.
758 '^chromeos/ash/services/libassistant/grpc/',
759 '^chromecast/cast_core/grpc',
760 '^chromecast/cast_core/runtime/browser',
761 '^ios/chrome/test/earl_grey/chrome_egtest_plugin_client\.(mm|h)',
762 # Fuchsia provides C++ libraries that use std::shared_ptr<>.
763 '^base/fuchsia/.*\.(cc|h)',
764 '.*fuchsia.*test\.(cc|h)',
765 # Needed for clang plugin tests
766 '^tools/clang/plugins/tests/',
767 _THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
770 r'/\bstd::weak_ptr\b',
772 'std::weak_ptr is banned. Use base::WeakPtr instead.',
775 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
780 'long long is banned. Use [u]int64_t instead.',
782 False, # Only a warning since it is already used.
783 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
786 r'/\b(absl|std)::any\b',
788 '{absl,std}::any are banned due to incompatibility with the component ',
792 # Not an error in third party folders, though it probably should be :)
793 [_THIRD_PARTY_EXCEPT_BLINK],
798 'std::bind() is banned because of lifetime risks. Use ',
799 'base::Bind{Once,Repeating}() instead.',
802 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
807 r'linear_congruential_engine|mersenne_twister_engine|'
808 r'subtract_with_carry_engine|discard_block_engine|'
809 r'independent_bits_engine|shuffle_order_engine|'
810 r'minstd_rand0?|mt19937(_64)?|ranlux(24|48)(_base)?|knuth_b|'
811 r'default_random_engine|'
817 'STL random number engines and generators are banned. Use the ',
818 'helpers in base/rand_util.h instead, e.g. base::RandBytes() or ',
819 'base::RandomBitGenerator.'
823 # Not an error in third_party folders.
824 _THIRD_PARTY_EXCEPT_BLINK,
825 # Various tools which build outside of Chrome.
826 r'testing/libfuzzer',
827 r'tools/android/io_benchmark/',
828 # Fuzzers are allowed to use standard library random number generators
829 # since fuzzing speed + reproducibility is important.
830 r'tools/ipc_fuzzer/',
832 r'.+_fuzzertest\.cc$',
833 # TODO(https://crbug.com/1380528): These are all unsanctioned uses of
834 # the standard library's random number generators, and should be
835 # migrated to the //base equivalent.
836 r'ash/ambient/model/ambient_topic_queue\.cc',
837 r'base/allocator/partition_allocator/partition_alloc_unittest\.cc',
838 r'base/ranges/algorithm_unittest\.cc',
839 r'base/test/launcher/test_launcher\.cc',
840 r'cc/metrics/video_playback_roughness_reporter_unittest\.cc',
841 r'chrome/browser/apps/app_service/metrics/website_metrics\.cc',
842 r'chrome/browser/ash/power/auto_screen_brightness/monotone_cubic_spline_unittest\.cc',
843 r'chrome/browser/ash/printing/zeroconf_printer_detector_unittest\.cc',
844 r'chrome/browser/nearby_sharing/contacts/nearby_share_contact_manager_impl_unittest\.cc',
845 r'chrome/browser/nearby_sharing/contacts/nearby_share_contacts_sorter_unittest\.cc',
846 r'chrome/browser/privacy_budget/mesa_distribution_unittest\.cc',
847 r'chrome/browser/web_applications/test/web_app_test_utils\.cc',
848 r'chrome/browser/web_applications/test/web_app_test_utils\.cc',
849 r'chrome/browser/win/conflicts/module_blocklist_cache_util_unittest\.cc',
850 r'chrome/chrome_cleaner/logging/detailed_info_sampler\.cc',
851 r'chromeos/ash/components/memory/userspace_swap/swap_storage_unittest\.cc',
852 r'chromeos/ash/components/memory/userspace_swap/userspace_swap\.cc',
853 r'components/metrics/metrics_state_manager\.cc',
854 r'components/omnibox/browser/history_quick_provider_performance_unittest\.cc',
855 r'components/zucchini/disassembler_elf_unittest\.cc',
856 r'content/browser/webid/federated_auth_request_impl\.cc',
857 r'content/browser/webid/federated_auth_request_impl\.cc',
858 r'media/cast/test/utility/udp_proxy\.h',
859 r'sql/recover_module/module_unittest\.cc',
863 r'/\b(absl,std)::bind_front\b',
865 '{absl,std}::bind_front() are banned. Use base::Bind{Once,Repeating}() '
869 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
874 'ABSL_FLAG is banned. Use base::CommandLine instead.',
877 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
882 'Abseil container utilities are banned. Use base/ranges/algorithm.h ',
886 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
889 r'/\babsl::FunctionRef\b',
891 'absl::FunctionRef is banned. Use base::FunctionRef instead.',
895 # base::Bind{Once,Repeating} references absl::FunctionRef to disallow
897 r'^base/functional/bind_internal\.h',
898 # base::FunctionRef is implemented on top of absl::FunctionRef.
899 r'^base/functional/function_ref.*\..+',
900 # Not an error in third_party folders.
901 _THIRD_PARTY_EXCEPT_BLINK,
905 r'/\babsl::(Insecure)?BitGen\b',
907 'absl random number generators are banned. Use the helpers in '
908 'base/rand_util.h instead, e.g. base::RandBytes() or ',
909 'base::RandomBitGenerator.'
912 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
915 r'/(\babsl::Span\b|#include <span>)',
917 'absl::Span is banned and <span> is not allowed yet ',
918 '(https://crbug.com/1414652). Use base::span instead.',
922 # Needed to use QUICHE API.
923 r'services/network/web_transport\.cc',
924 # Not an error in third_party folders.
925 _THIRD_PARTY_EXCEPT_BLINK
929 r'/\babsl::StatusOr\b',
931 'absl::StatusOr is banned. Use base::expected instead.',
935 # Needed to use liburlpattern API.
936 r'third_party/blink/renderer/core/url_pattern/.*',
937 r'third_party/blink/renderer/modules/manifest/manifest_parser\.cc',
938 # Not an error in third_party folders.
939 _THIRD_PARTY_EXCEPT_BLINK
943 r'/\babsl::StrFormat\b',
945 'absl::StrFormat() is not allowed yet (https://crbug.com/1371963). ',
946 'Use base::StringPrintf() instead.',
949 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
952 r'/\babsl::string_view\b',
954 'absl::string_view is a legacy spelling of std::string_view, which is ',
955 'not allowed yet (https://crbug.com/691162). Use base::StringPiece ',
956 'instead, unless std::string_view is needed to use with an external ',
960 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
963 r'/\bstd::(u16)?string_view\b',
965 'std::[u16]string_view is not yet allowed (crbug.com/691162). Use ',
966 'base::StringPiece[16] instead, unless std::[u16]string_view is ',
967 'needed to use an external API.',
971 # Needed to implement and test std::string_view interoperability.
972 r'base/strings/string_piece.*',
973 # Needed to use liburlpattern API.
974 r'third_party/blink/renderer/core/url_pattern/.*',
975 r'third_party/blink/renderer/modules/manifest/manifest_parser\.cc',
976 # Needed to use QUICHE API.
979 r'net/test/embedded_test_server/.*',
980 r'net/third_party/quiche/.*',
981 r'services/network/web_transport\.cc',
982 # This code is in the process of being extracted into an external
983 # library, where //base will be unavailable.
986 # Needed to use APIs from the above.
988 # Not an error in third_party folders.
989 _THIRD_PARTY_EXCEPT_BLINK
993 r'/\babsl::(StrSplit|StrJoin|StrCat|StrAppend|Substitute|StrContains)\b',
995 'Abseil string utilities are banned. Use base/strings instead.',
998 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1001 r'/\babsl::(Mutex|CondVar|Notification|Barrier|BlockingCounter)\b',
1003 'Abseil synchronization primitives are banned. Use',
1004 'base/synchronization instead.',
1007 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1010 r'/\babsl::(Duration|Time|TimeZone|CivilDay)\b',
1012 'Abseil\'s time library is banned. Use base/time instead.',
1015 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1018 r'/\bstd::optional\b',
1020 'std::optional is not allowed yet (https://crbug.com/1373619). Use ',
1021 'absl::optional instead.',
1024 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1027 r'/#include <chrono>',
1029 '<chrono> is banned. Use base/time instead.',
1032 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1035 r'/#include <exception>',
1037 'Exceptions are banned and disabled in Chromium.',
1040 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1043 r'/\bstd::function\b',
1045 'std::function is banned. Use base::{Once,Repeating}Callback instead.',
1049 # Has tests that template trait helpers don't unintentionally match
1051 r'base/functional/callback_helpers_unittest\.cc',
1052 # Required to implement interfaces from the third-party perfetto
1054 r'base/tracing/perfetto_task_runner\.cc',
1055 r'base/tracing/perfetto_task_runner\.h',
1056 # Needed for interop with the third-party nearby library type
1057 # location::nearby::connections::ResultCallback.
1058 'chrome/services/sharing/nearby/nearby_connections_conversions\.cc'
1059 # Needed for interop with the internal libassistant library.
1060 'chromeos/ash/services/libassistant/callback_utils\.h',
1061 # Needed for interop with Fuchsia fidl APIs.
1062 'fuchsia_web/webengine/browser/context_impl_browsertest\.cc',
1063 'fuchsia_web/webengine/browser/cookie_manager_impl_unittest\.cc',
1064 'fuchsia_web/webengine/browser/media_player_impl_unittest\.cc',
1065 # Required to interop with interfaces from the third-party perfetto
1067 'services/tracing/public/cpp/perfetto/custom_event_recorder\.cc',
1068 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.cc',
1069 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.h',
1070 'services/tracing/public/cpp/perfetto/perfetto_tracing_backend\.cc',
1071 'services/tracing/public/cpp/perfetto/producer_client\.cc',
1072 'services/tracing/public/cpp/perfetto/producer_client\.h',
1073 'services/tracing/public/cpp/perfetto/producer_test_utils\.cc',
1074 'services/tracing/public/cpp/perfetto/producer_test_utils\.h',
1075 # Required for interop with the third-party webrtc library.
1076 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.cc',
1077 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.h',
1078 # This code is in the process of being extracted into a third-party library.
1079 # See https://crbug.com/1322914
1080 '^net/cert/pki/path_builder_unittest\.cc',
1081 # TODO(https://crbug.com/1364577): Various uses that should be
1082 # migrated to something else.
1083 # Should use base::OnceCallback or base::RepeatingCallback.
1084 'base/allocator/dispatcher/initializer_unittest\.cc',
1085 'chrome/browser/ash/accessibility/speech_monitor\.cc',
1086 'chrome/browser/ash/accessibility/speech_monitor\.h',
1087 'chrome/browser/ash/login/ash_hud_login_browsertest\.cc',
1088 'chromecast/base/observer_unittest\.cc',
1089 'chromecast/browser/cast_web_view\.h',
1090 'chromecast/public/cast_media_shlib\.h',
1091 'device/bluetooth/floss/exported_callback_manager\.h',
1092 'device/bluetooth/floss/floss_dbus_client\.h',
1093 'device/fido/cable/v2_handshake_unittest\.cc',
1094 'device/fido/pin\.cc',
1095 'services/tracing/perfetto/test_utils\.h',
1096 # Should use base::FunctionRef.
1097 'chrome/browser/media/webrtc/test_stats_dictionary\.cc',
1098 'chrome/browser/media/webrtc/test_stats_dictionary\.h',
1099 'chromeos/ash/services/libassistant/device_settings_controller\.cc',
1100 'components/browser_ui/client_certificate/android/ssl_client_certificate_request\.cc',
1101 'components/gwp_asan/client/sampling_malloc_shims_unittest\.cc',
1102 'content/browser/font_unique_name_lookup/font_unique_name_lookup_unittest\.cc',
1103 # Does not need std::function at all.
1104 'components/omnibox/browser/autocomplete_result\.cc',
1105 'device/fido/win/webauthn_api\.cc',
1106 'media/audio/alsa/alsa_util\.cc',
1107 'media/remoting/stream_provider\.h',
1108 'sql/vfs_wrapper\.cc',
1109 # TODO(https://crbug.com/1364585): Remove usage and exception list
1111 'extensions/renderer/api/automation/automation_internal_custom_bindings\.cc',
1112 'extensions/renderer/api/automation/automation_internal_custom_bindings\.h',
1113 # TODO(https://crbug.com/1364579): Remove usage and exception list
1115 'ui/views/controls/focus_ring\.h',
1117 # Various pre-existing uses in //tools that is low-priority to fix.
1118 'tools/binary_size/libsupersize/viewer/caspian/diff\.cc',
1119 'tools/binary_size/libsupersize/viewer/caspian/model\.cc',
1120 'tools/binary_size/libsupersize/viewer/caspian/model\.h',
1121 'tools/binary_size/libsupersize/viewer/caspian/tree_builder\.h',
1122 'tools/clang/base_bind_rewriters/BaseBindRewriters\.cpp',
1124 # Not an error in third_party folders.
1125 _THIRD_PARTY_EXCEPT_BLINK
1131 'Do not use Xlib. Use xproto (from //ui/gfx/x:xproto) instead.',
1134 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1139 'std::ratio is banned by the Google Style Guide.',
1142 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1145 r'/\bstd::aligned_alloc\b',
1147 'std::aligned_alloc() is not yet allowed (crbug.com/1412818). Use ',
1148 'base::AlignedAlloc() instead.',
1151 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1154 r'/#include <(barrier|latch|semaphore|stop_token)>',
1156 'The thread support library is banned. Use base/synchronization '
1160 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1163 r'/\bstd::(c8rtomb|mbrtoc8)\b',
1165 'std::c8rtomb() and std::mbrtoc8() are banned.',
1168 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1171 r'/\bchar8_t|std::u8string\b',
1173 'char8_t and std::u8string are not yet allowed. Can you use [unsigned]',
1174 ' char and std::string instead?',
1178 # The demangler does not use this type but needs to know about it.
1179 'base/third_party/symbolize/demangle\.cc',
1180 # Don't warn in third_party folders.
1181 _THIRD_PARTY_EXCEPT_BLINK
1185 r'/(\b(co_await|co_return|co_yield)\b|#include <coroutine>)',
1187 'Coroutines are not yet allowed (https://crbug.com/1403840).',
1190 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1193 r'/^\s*(export\s|import\s+["<:\w]|module(;|\s+[:\w]))',
1195 'Modules are disallowed for now due to lack of toolchain support.',
1198 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1201 r'/\[\[(un)?likely\]\]',
1203 '[[likely]] and [[unlikely]] are not yet allowed ',
1204 '(https://crbug.com/1414620). Use [UN]LIKELY instead.',
1207 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1210 r'/#include <format>',
1212 '<format> is not yet allowed. Use base::StringPrintf() instead.',
1215 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1218 r'/#include <ranges>',
1220 '<ranges> is not yet allowed. Use base/ranges/algorithm.h instead.',
1223 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1226 r'/#include <source_location>',
1228 '<source_location> is not yet allowed. Use base/location.h instead.',
1231 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1234 r'/#include <syncstream>',
1236 '<syncstream> is banned.',
1239 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
1242 r'/\bRunMessageLoop\b',
1244 'RunMessageLoop is deprecated, use RunLoop instead.',
1250 'RunAllPendingInMessageLoop()',
1252 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
1253 "if you're convinced you need this.",
1259 'RunAllPendingInMessageLoop(BrowserThread',
1261 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
1262 'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
1263 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
1264 'async events instead of flushing threads.',
1270 r'MessageLoopRunner',
1272 'MessageLoopRunner is deprecated, use RunLoop instead.',
1278 'GetDeferredQuitTaskForRunLoop',
1280 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
1281 "gab@ if you found a use case where this is the only solution.",
1287 'sqlite3_initialize(',
1289 'Instead of calling sqlite3_initialize(), depend on //sql, ',
1290 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
1294 r'^sql/initialization\.(cc|h)$',
1295 r'^third_party/sqlite/.*\.(c|cc|h)$',
1301 'SQL views are disabled in Chromium feature code',
1302 'https://chromium.googlesource.com/chromium/src/+/HEAD/sql#no-views',
1306 _THIRD_PARTY_EXCEPT_BLINK,
1307 # sql/ itself uses views when using memory-mapped IO.
1309 # Various performance tools that do not build as part of Chrome.
1316 'CREATE VIRTUAL TABLE',
1318 'SQL virtual tables are disabled in Chromium feature code',
1319 'https://chromium.googlesource.com/chromium/src/+/HEAD/sql#no-virtual-tables',
1323 _THIRD_PARTY_EXCEPT_BLINK,
1324 # sql/ itself uses virtual tables in the recovery module and tests.
1326 # TODO(https://crbug.com/695592): Remove once WebSQL is deprecated.
1327 r'third_party/blink/web_tests/storage/websql/.*'
1328 # Various performance tools that do not build as part of Chrome.
1334 'std::random_shuffle',
1336 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
1337 'base::RandomShuffle instead.'
1343 'ios/web/public/test/http_server',
1345 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
1353 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
1354 'implicated in a few leaks. ReleaseAndGetAddressOf() is safe but ',
1355 'operator& is generally recommended. So always use operator& instead. ',
1356 'See http://crbug.com/914910 for more conversion guidance.'
1364 'SHFileOperation was deprecated in Windows Vista, and there are less ',
1365 'complex functions to achieve the same goals. Use IFileOperation for ',
1366 'any esoteric actions instead.'
1374 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
1375 'Use base::win::WStringFromGUID instead.'
1379 r'/base/win/win_util_unittest.cc',
1385 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
1386 'Use base::win::WStringFromGUID instead.'
1390 r'/base/win/win_util_unittest.cc',
1396 'The use of kCFAllocatorNull with the NoCopy creation of ',
1397 'CoreFoundation types is prohibited.',
1405 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
1406 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
1407 'StringTraits if you would like to convert between custom types and',
1408 'the wire format of mojom types.'
1412 r'^fuchsia_web/webengine/browser/url_request_rewrite_rules_manager\.cc$',
1413 r'^fuchsia_web/webengine/url_request_rewrite_type_converters\.cc$',
1414 r'^third_party/blink/.*\.(cc|h)$',
1415 r'^content/renderer/.*\.(cc|h)$',
1419 'GetInterfaceProvider',
1421 'InterfaceProvider is deprecated.',
1422 'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
1423 'or Platform::GetBrowserInterfaceBroker.'
1431 'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
1432 'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
1439 r'/\b(IFACE|STD)METHOD_?\(',
1441 'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
1442 'Instead, always use IFACEMETHODIMP in the declaration.'
1445 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1448 'set_owned_by_client',
1450 'set_owned_by_client is deprecated.',
1451 'views::View already owns the child views by default. This introduces ',
1452 'a competing ownership model which makes the code difficult to reason ',
1453 'about. See http://crbug.com/1044687 for more details.'
1459 'RemoveAllChildViewsWithoutDeleting',
1461 'RemoveAllChildViewsWithoutDeleting is deprecated.',
1462 'This method is deemed dangerous as, unless raw pointers are re-added,',
1463 'calls to this method introduce memory leaks.'
1469 r'/\bTRACE_EVENT_ASYNC_',
1471 'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
1472 'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
1476 r'^base/trace_event/.*',
1477 r'^base/tracing/.*',
1481 r'/\bbase::debug::DumpWithoutCrashingUnthrottled[(][)]',
1483 'base::debug::DumpWithoutCrashingUnthrottled() does not throttle',
1484 'dumps and may spam crash reports. Consider if the throttled',
1485 'variants suffice instead.',
1493 'Improper use of [base::win]::RoInitialize() has been implicated in a ',
1494 'few COM initialization leaks. Use base::win::ScopedWinrtInitializer ',
1495 'instead. See http://crbug.com/1197722 for more information.'
1499 r'^base/win/scoped_winrt_initializer\.cc$',
1505 'base::Watchdog is deprecated because it creates its own thread.',
1506 'Instead, manually start a timer on a SequencedTaskRunner.',
1514 'Do not use base::Passed. It is a legacy helper for capturing ',
1515 'move-only types with base::BindRepeating, but invoking the ',
1516 'resulting RepeatingCallback moves the captured value out of ',
1517 'the callback storage, and subsequent invocations may pass the ',
1518 'value in a valid but undefined state. Prefer base::BindOnce().',
1519 'See http://crbug.com/1326449 for context.'
1523 # False positive, but it is also fine to let bind internals reference
1525 r'^base[\\/]functional[\\/]bind\.h',
1526 r'^base[\\/]functional[\\/]bind_internal\.h',
1532 'Please use BASE_DECLARE_FEATURE() or BASE_FEATURE() instead of ',
1533 'directly declaring/defining features.'
1537 _THIRD_PARTY_EXCEPT_BLINK,
1543 'chartorune is not memory-safe, unless you can guarantee the input ',
1544 'string is always null-terminated. Otherwise, please use charntorune ',
1545 'from libphonenumber instead.'
1549 _THIRD_PARTY_EXCEPT_BLINK,
1550 # Exceptions to this rule should have a fuzzer.
1554 r'/\b#include "base/atomicops\.h"\b',
1556 'Do not use base::subtle atomics, but std::atomic, which are simpler '
1557 'to use, have better understood, clearer and richer semantics, and are '
1558 'harder to mis-use. See details in base/atomicops.h.',
1561 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1564 r'CrossThreadPersistent<',
1566 'Do not use blink::CrossThreadPersistent, but '
1567 'blink::CrossThreadHandle. It is harder to mis-use.',
1569 'https://docs.google.com/document/d/1GIT0ysdQ84sGhIo1r9EscF_fFt93lmNVM_q4vvHj2FQ/edit#heading=h.3e4d6y61tgs',
1570 'Please contact platform-architecture-dev@ before adding new instances.'
1576 r'CrossThreadWeakPersistent<',
1578 'Do not use blink::CrossThreadWeakPersistent, but '
1579 'blink::CrossThreadWeakHandle. It is harder to mis-use.',
1581 'https://docs.google.com/document/d/1GIT0ysdQ84sGhIo1r9EscF_fFt93lmNVM_q4vvHj2FQ/edit#heading=h.3e4d6y61tgs',
1582 'Please contact platform-architecture-dev@ before adding new instances.'
1590 'Do not include <objc/objc.h>. It defines away ARC lifetime '
1591 'annotations, and is thus dangerous.',
1592 'Please use the pimpl pattern; search for `ObjCStorage` for examples.',
1593 'For further reading on how to safely mix C++ and Obj-C, see',
1594 'https://chromium.googlesource.com/chromium/src/+/main/docs/mac/mixing_cpp_and_objc.md'
1601 _BANNED_MOJOM_PATTERNS : Sequence[BanRule] = (
1603 'handle<shared_buffer>',
1605 'Please use one of the more specific shared memory types instead:',
1606 ' mojo_base.mojom.ReadOnlySharedMemoryRegion',
1607 ' mojo_base.mojom.WritableSharedMemoryRegion',
1608 ' mojo_base.mojom.UnsafeSharedMemoryRegion',
# Message emitted when a CL still uses the deprecated IPC_ENUM_TRAITS() macro.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/'
    'security-tips-for-ipc'
)
1619 _LONG_PATH_ERROR = (
1620 'Some files included in this CL have file names that are too long (> 200'
1621 ' characters). If committed, these files will cause issues on Windows. See'
1622 ' https://crbug.com/612667 for more details.'
1625 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1626 r".*/AppHooksImpl\.java",
1627 r".*/BuildHooksAndroidImpl\.java",
1628 r".*/LicenseContentProvider\.java",
1629 r".*/PlatformServiceBridgeImpl.java",
1630 r".*chrome/android/feed/dummy/.*\.java",
# List of image extensions that are used as resources in chromium.
_IMAGE_EXTENSIONS = [
    '.svg',
    '.png',
    '.webp',
]
1636 # These paths contain test data and other known invalid JSON files.
1637 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
1639 r'testing/buildbot/',
1640 r'^components/policy/resources/policy_templates\.json$',
1641 r'^third_party/protobuf/',
1642 r'^third_party/blink/perf_tests/speedometer/resources/todomvc/learn.json',
1643 r'^third_party/blink/renderer/devtools/protocol\.json$',
1644 r'^third_party/blink/web_tests/external/wpt/',
1646 r'^tools/traceline/svgui/startup-release.json',
1647 # vscode configuration files allow comments
1651 # These are not checked on the public chromium-presubmit trybot.
1652 # Add files here that rely on .py files that exists only for target_os="android"
1654 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1655 'chrome/android/features/create_stripped_java_factory.pydeps',
1659 _GENERIC_PYDEPS_FILES = [
1660 'android_webview/test/components/run_webview_component_smoketest.pydeps',
1661 'android_webview/tools/run_cts.pydeps',
1662 'base/android/jni_generator/jni_generator.pydeps',
1663 'base/android/jni_generator/jni_registration_generator.pydeps',
1664 'build/android/apk_operations.pydeps',
1665 'build/android/devil_chromium.pydeps',
1666 'build/android/gyp/aar.pydeps',
1667 'build/android/gyp/aidl.pydeps',
1668 'build/android/gyp/allot_native_libraries.pydeps',
1669 'build/android/gyp/apkbuilder.pydeps',
1670 'build/android/gyp/assert_static_initializers.pydeps',
1671 'build/android/gyp/binary_baseline_profile.pydeps',
1672 'build/android/gyp/bytecode_processor.pydeps',
1673 'build/android/gyp/bytecode_rewriter.pydeps',
1674 'build/android/gyp/check_flag_expectations.pydeps',
1675 'build/android/gyp/compile_java.pydeps',
1676 'build/android/gyp/compile_kt.pydeps',
1677 'build/android/gyp/compile_resources.pydeps',
1678 'build/android/gyp/copy_ex.pydeps',
1679 'build/android/gyp/create_apk_operations_script.pydeps',
1680 'build/android/gyp/create_app_bundle.pydeps',
1681 'build/android/gyp/create_app_bundle_apks.pydeps',
1682 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1683 'build/android/gyp/create_java_binary_script.pydeps',
1684 'build/android/gyp/create_r_java.pydeps',
1685 'build/android/gyp/create_r_txt.pydeps',
1686 'build/android/gyp/create_size_info_files.pydeps',
1687 'build/android/gyp/create_test_apk_wrapper_script.pydeps',
1688 'build/android/gyp/create_ui_locale_resources.pydeps',
1689 'build/android/gyp/dex.pydeps',
1690 'build/android/gyp/dist_aar.pydeps',
1691 'build/android/gyp/filter_zip.pydeps',
1692 'build/android/gyp/flatc_java.pydeps',
1693 'build/android/gyp/gcc_preprocess.pydeps',
1694 'build/android/gyp/generate_linker_version_script.pydeps',
1695 'build/android/gyp/ijar.pydeps',
1696 'build/android/gyp/jacoco_instr.pydeps',
1697 'build/android/gyp/java_cpp_enum.pydeps',
1698 'build/android/gyp/java_cpp_features.pydeps',
1699 'build/android/gyp/java_cpp_strings.pydeps',
1700 'build/android/gyp/java_google_api_keys.pydeps',
1701 'build/android/gyp/jinja_template.pydeps',
1702 'build/android/gyp/lint.pydeps',
1703 'build/android/gyp/merge_manifest.pydeps',
1704 'build/android/gyp/optimize_resources.pydeps',
1705 'build/android/gyp/prepare_resources.pydeps',
1706 'build/android/gyp/process_native_prebuilt.pydeps',
1707 'build/android/gyp/proguard.pydeps',
1708 'build/android/gyp/system_image_apks.pydeps',
1709 'build/android/gyp/trace_event_bytecode_rewriter.pydeps',
1710 'build/android/gyp/turbine.pydeps',
1711 'build/android/gyp/unused_resources.pydeps',
1712 'build/android/gyp/validate_static_library_dex_references.pydeps',
1713 'build/android/gyp/write_build_config.pydeps',
1714 'build/android/gyp/write_native_libraries_java.pydeps',
1715 'build/android/gyp/zip.pydeps',
1716 'build/android/incremental_install/generate_android_manifest.pydeps',
1717 'build/android/incremental_install/write_installer_json.pydeps',
1718 'build/android/pylib/results/presentation/test_results_presentation.pydeps',
1719 'build/android/resource_sizes.pydeps',
1720 'build/android/test_runner.pydeps',
1721 'build/android/test_wrapper/logdog_wrapper.pydeps',
1722 'build/lacros/lacros_resource_sizes.pydeps',
1723 'build/protoc_java.pydeps',
1724 'chrome/android/monochrome/scripts/monochrome_python_tests.pydeps',
1725 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1726 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1727 'chromecast/resource_sizes/chromecast_resource_sizes.pydeps',
1728 'components/cronet/tools/generate_javadoc.pydeps',
1729 'components/cronet/tools/jar_src.pydeps',
1730 'components/module_installer/android/module_desc_java.pydeps',
1731 'content/public/android/generate_child_service.pydeps',
1732 'net/tools/testserver/testserver.pydeps',
1733 'testing/scripts/run_isolated_script_test.pydeps',
1734 'testing/merge_scripts/standard_isolated_script_merge.pydeps',
1735 'testing/merge_scripts/standard_gtest_merge.pydeps',
1736 'testing/merge_scripts/code_coverage/merge_results.pydeps',
1737 'testing/merge_scripts/code_coverage/merge_steps.pydeps',
1738 'third_party/android_platform/development/scripts/stack.pydeps',
1739 'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
1740 'third_party/blink/renderer/bindings/scripts/check_generated_file_list.pydeps',
1741 'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
1742 'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
1743 'third_party/blink/renderer/bindings/scripts/validate_web_idl.pydeps',
1744 'third_party/blink/tools/blinkpy/web_tests/merge_results.pydeps',
1745 'third_party/blink/tools/merge_web_test_results.pydeps',
1746 'tools/binary_size/sizes.pydeps',
1747 'tools/binary_size/supersize.pydeps',
1748 'tools/perf/process_perf_results.pydeps',
# Union of every .pydeps file this presubmit keeps in sync, across all OSes.
_ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
# Bypass the AUTHORS check for these accounts.
# Built as one flat set of "account@domain" strings from per-domain tuples.
_KNOWN_ROBOTS = {
    '%s@%s' % (account, domain)
    for domain, accounts in (
        ('appspot.gserviceaccount.com', ('findit-for-me', )),
        ('developer.gserviceaccount.com', ('3su6n15k.default', )),
        ('chops-service-accounts.iam.gserviceaccount.com',
         ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
          'wpt-autoroller', 'chrome-weblayer-builder',
          'lacros-version-skew-roller', 'skylab-test-cros-roller',
          'infra-try-recipes-tester', 'lacros-tracking-roller',
          'lacros-sdk-version-roller', 'chrome-automated-expectation',
          'chromium-automated-expectation', 'chrome-branch-day')),
        ('skia-public.iam.gserviceaccount.com',
         ('chromium-autoroll', 'chromium-release-autoroll')),
        ('skia-corp.google.com.iam.gserviceaccount.com',
         ('chromium-internal-autoroll', )),
        ('owners-cleanup-prod.google.com.iam.gserviceaccount.com',
         ('swarming-tasks', )),
        ('fuchsia-infra.iam.gserviceaccount.com',
         ('global-integration-try-builder',
          'global-integration-ci-builder')),
    )
    for account in accounts
}
1776 _INVALID_GRD_FILE_LINE = [
1777 (r'<file lang=.* path=.*', 'Path should come before lang in GRD files.')
def _IsCPlusPlusFile(input_api, file_path):
    """Returns True if this file contains C++-like code (and not Python,
    Go, Java, MarkDown, ...)"""
    ext = input_api.os_path.splitext(file_path)[1]
    # NOTE: the decision is based purely on the file extension; the file
    # contents are never read.
    # This list is compatible with CppChecker.IsCppFile but we should
    # consider adding ".c" to it. If we do that we can use this function
    # at more places in the code.
def _IsCPlusPlusHeaderFile(input_api, file_path):
    """Returns True if |file_path| names a C++ header (".h") file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".h"
def _IsJavaFile(input_api, file_path):
    """Returns True if |file_path| names a Java source (".java") file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".java"
def _IsProtoFile(input_api, file_path):
    """Returns True if |file_path| names a protobuf (".proto") schema file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".proto"
def _IsXmlOrGrdFile(input_api, file_path):
    """Returns True if |file_path| names a GRD (".grd") or XML (".xml") file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension in ('.grd', '.xml')
def _IsMojomFile(input_api, file_path):
    """Returns True if |file_path| names a Mojo interface (".mojom") file."""
    _, extension = input_api.os_path.splitext(file_path)
    return extension == ".mojom"
def CheckNoUpstreamDepsOnClank(input_api, output_api):
    """Prevent additions of dependencies from the upstream repo on //clank."""
    # clank can depend on clank
    if input_api.change.RepositoryRoot().endswith('clank'):
    build_file_patterns = [
    excluded_files = [r'build[/\\]config[/\\]android[/\\]config\.gni']
    # Any line that mentions //clank outside of a "#" comment is disallowed.
    bad_pattern = input_api.re.compile(r'^[^#]*//clank')
    error_message = 'Disallowed import on //clank in an upstream build file:'

    def FilterFile(affected_file):
        # Restrict the scan to build files, minus the explicit exclusions.
        return input_api.FilterSourceFile(affected_file,
                                          files_to_check=build_file_patterns,
                                          files_to_skip=excluded_files)

    for f in input_api.AffectedSourceFiles(FilterFile):
        local_path = f.LocalPath()
        for line_number, line in f.ChangedContents():
            if (bad_pattern.search(line)):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))
    # Prompt/notify (not a hard error) so intentional exceptions can proceed.
    return [output_api.PresubmitPromptOrNotify(error_message, problems)]
def CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
    """Attempts to prevent use of functions intended only for testing in
    non-testing code. For now this is just a best-effort implementation
    that ignores header files and may have some false positives. A
    better implementation would probably need a proper C++ parser.
    """
    # We only scan .cc files and the like, as the declaration of
    # for-testing functions in header files are hard to distinguish from
    # calls to such functions without a proper C++ parser.
    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]

    # Matches names like test::Foo, FooForTesting, foo_for_testing.
    base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
    inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' %
                                             base_function_pattern)
    # A match inside a // comment is not a real call.
    comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
    # Lines explicitly annotated with a trailing "// IN-TEST" are allowed.
    allowlist_pattern = input_api.re.compile(r'// IN-TEST$')
    # Ignore qualified names and definitions (pattern followed by "{").
    exclusion_pattern = input_api.re.compile(
        r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' %
        (base_function_pattern, base_function_pattern))
    # Avoid a false positive in this case, where the method name, the ::, and
    # the closing { are all on different lines due to line wrapping.
    # HelperClassForTesting::
    # HelperClassForTesting(
    method_defn_pattern = input_api.re.compile(r'[A-Za-z0-9_]+::$')

    def FilterFile(affected_file):
        files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                         input_api.DEFAULT_FILES_TO_SKIP)
        return input_api.FilterSourceFile(
            files_to_check=file_inclusion_pattern,
            files_to_skip=files_to_skip)

    for f in input_api.AffectedSourceFiles(FilterFile):
        local_path = f.LocalPath()
        in_method_defn = False
        for line_number, line in f.ChangedContents():
            if (inclusion_pattern.search(line)
                    and not comment_pattern.search(line)
                    and not exclusion_pattern.search(line)
                    and not allowlist_pattern.search(line)
                    and not in_method_defn):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))
            # Remember whether this line ends in "Name::" so the wrapped
            # definition on the next line is not flagged.
            in_method_defn = method_defn_pattern.search(line)

        output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
def CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
    """This is a simplified version of
    CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
    """
    javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
    javadoc_end_re = input_api.re.compile(r'^\s*\*/')
    name_pattern = r'ForTest(s|ing)?'
    # Describes an occurrence of "ForTest*" inside a // comment.
    comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
    # Describes @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
    annotation_re = input_api.re.compile(r'@VisibleForTesting\(')
    inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
    # Ignore definitions. (Comments are ignored separately.)
    exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)

    # Only .java files, skipping test and junit directories.
    sources = lambda x: input_api.FilterSourceFile(
        files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\.java$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=sources):
        local_path = f.LocalPath()
        is_inside_javadoc = False
        for line_number, line in f.ChangedContents():
            # Track javadoc blocks; matches inside them are not real calls.
            if is_inside_javadoc and javadoc_end_re.search(line):
                is_inside_javadoc = False
            if not is_inside_javadoc and javadoc_start_re.search(line):
                is_inside_javadoc = True
            if is_inside_javadoc:
            if (inclusion_re.search(line) and not comment_re.search(line)
                    and not annotation_re.search(line)
                    and not exclusion_re.search(line)):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))

        output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
def CheckNoIOStreamInHeaders(input_api, output_api):
    """Checks to make sure no .h files include <iostream>."""
    pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                   input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        if not f.LocalPath().endswith('.h'):
        # Search the full file contents, not just the changed lines.
        contents = input_api.ReadFile(f)
        if pattern.search(contents):
        output_api.PresubmitError(
            'Do not #include <iostream> in header files, since it inserts static '
            'initialization into every file including the header. Instead, '
            '#include <ostream>. See http://crbug.com/94794', files)
def CheckNoStrCatRedefines(input_api, output_api):
    """Checks no windows headers with StrCat redefined are included directly."""
    files_to_check = (r'.+%s' % _HEADER_EXTENSIONS,
                      r'.+%s' % _IMPLEMENTATION_EXTENSIONS)
    files_to_skip = (input_api.DEFAULT_FILES_TO_SKIP +
                     _NON_BASE_DEPENDENT_PATHS)
    sources_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check, files_to_skip=files_to_skip)

    # Windows headers that redefine StrCat when included directly.
    pattern_deny = input_api.re.compile(
        r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
        input_api.re.MULTILINE)
    # Including base/win/windows_defines.inc makes direct inclusion OK.
    pattern_allow = input_api.re.compile(
        r'^#include\s"base/win/windows_defines.inc"', input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(sources_filter):
        contents = input_api.ReadFile(f)
        if pattern_deny.search(
                contents) and not pattern_allow.search(contents):
            files.append(f.LocalPath())

        output_api.PresubmitError(
            'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
            'directly since they pollute code with StrCat macro. Instead, '
            'include matching header from base/win. See http://crbug.com/856536',
def CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
    """Checks to make sure no source files use UNIT_TEST."""
    for f in input_api.AffectedFiles():
        # UNIT_TEST belongs in headers; only scan .cc/.mm sources.
        if (not f.LocalPath().endswith(('.cc', '.mm'))):

        for line_num, line in f.ChangedContents():
            # Match "UNIT_TEST " mid-line or "UNIT_TEST" at end of line.
            if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
                problems.append(' %s:%d' % (f.LocalPath(), line_num))

        output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
                                          '\n'.join(problems))
def CheckNoDISABLETypoInTests(input_api, output_api):
    """Checks to prevent attempts to disable tests with DISABLE_ prefix.

    This test warns if somebody tries to disable a test with the DISABLE_ prefix
    instead of DISABLED_. To filter false positives, reports are only generated
    if a corresponding MAYBE_ line exists.
    """

    # The following two patterns are looked for in tandem - is a test labeled
    # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED)
    maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
    disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')

    # This is for the case that a test is disabled on all platforms.
    full_disable_pattern = input_api.re.compile(
        r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
        input_api.re.MULTILINE)

    for f in input_api.AffectedFiles(False):
        # Only C++ test files are of interest here.
        if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):

        # Search for MABYE_, DISABLE_ pairs.
        disable_lines = {}  # Maps of test name to line number.
        for line_num, line in f.ChangedContents():
            disable_match = disable_pattern.search(line)
                disable_lines[disable_match.group(1)] = line_num
            maybe_match = maybe_pattern.search(line)
                maybe_lines[maybe_match.group(1)] = line_num

        # Search for DISABLE_ occurrences within a TEST() macro.
        disable_tests = set(disable_lines.keys())
        maybe_tests = set(maybe_lines.keys())
        # A test name seen with both prefixes is almost certainly a typo.
        for test in disable_tests.intersection(maybe_tests):
            problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))

        # Also scan the whole file for a DISABLE_ used directly in TEST().
        contents = input_api.ReadFile(f)
        full_disable_match = full_disable_pattern.search(contents)
        if full_disable_match:
            problems.append(' %s' % f.LocalPath())

        output_api.PresubmitPromptWarning(
            'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
            '\n'.join(problems))
def CheckForgettingMAYBEInTests(input_api, output_api):
    """Checks to make sure tests disabled conditionally are not missing a
    corresponding MAYBE_ prefix.
    """
    # Expect at least a lowercase character in the test name. This helps rule out
    # false positives with macros wrapping the actual tests name.
    define_maybe_pattern = input_api.re.compile(
        r'^\#define MAYBE_(?P<test_name>\w*[a-z]\w*)')
    # The test_maybe_pattern needs to handle all of these forms. The standard:
    # IN_PROC_TEST_F(SyncTest, MAYBE_Start) {
    # With a wrapper macro around the test name:
    # IN_PROC_TEST_F(SyncTest, E2E_ENABLED(MAYBE_Start)) {
    # And the odd-ball NACL_BROWSER_TEST_f format:
    # NACL_BROWSER_TEST_F(NaClBrowserTest, SimpleLoad, {
    # The optional E2E_ENABLED-style is handled with (\w*\()?
    # The NACL_BROWSER_TEST_F pattern is handled by allowing a trailing comma or
    test_maybe_pattern = (
        r'^\s*\w*TEST[^(]*\(\s*\w+,\s*(\w*\()?MAYBE_{test_name}[\),]')
    suite_maybe_pattern = r'^\s*\w*TEST[^(]*\(\s*MAYBE_{test_name}[\),]'

    # Read the entire files. We can't just read the affected lines, forgetting to
    # add MAYBE_ on a change would not show up otherwise.
    for f in input_api.AffectedFiles(False):
        if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
        contents = input_api.ReadFile(f)
        lines = contents.splitlines(True)
        current_position = 0
        warning_test_names = set()
        for line_num, line in enumerate(lines, start=1):
            # Track the byte offset so the searches below can skip everything
            # before the #define.
            current_position += len(line)
            maybe_match = define_maybe_pattern.search(line)
                test_name = maybe_match.group('test_name')
                # Do not warn twice for the same test.
                if (test_name in warning_test_names):
                warning_test_names.add(test_name)

                # Attempt to find the corresponding MAYBE_ test or suite, starting from
                # the current position.
                test_match = input_api.re.compile(
                    test_maybe_pattern.format(test_name=test_name),
                    input_api.re.MULTILINE).search(contents, current_position)
                suite_match = input_api.re.compile(
                    suite_maybe_pattern.format(test_name=test_name),
                    input_api.re.MULTILINE).search(contents, current_position)
                if not test_match and not suite_match:
                    output_api.PresubmitPromptWarning(
                        '%s:%d found MAYBE_ defined without corresponding test %s'
                        % (f.LocalPath(), line_num, test_name)))
def CheckDCHECK_IS_ONHasBraces(input_api, output_api):
    """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
    # Negative lookahead: DCHECK_IS_ON not immediately followed by "()".
    pattern = input_api.re.compile(r'\bDCHECK_IS_ON\b(?!\(\))',
                                   input_api.re.MULTILINE)
    for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
        # DCHECK_IS_ON() is a C++/Objective-C++ concern only.
        if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
        for lnum, line in f.ChangedContents():
            if input_api.re.search(pattern, line):
                output_api.PresubmitError((
                    '%s:%d: Use of DCHECK_IS_ON() must be written as "#if '
                    + 'DCHECK_IS_ON()", not forgetting the parentheses.') %
                    (f.LocalPath(), lnum)))
2156 # TODO(crbug/1138055): Reimplement CheckUmaHistogramChangesOnUpload check in a
2157 # more reliable way. See
2158 # https://chromium-review.googlesource.com/c/chromium/src/+/2500269
# NOTE(review): lines 2164, 2168-2170 are missing from this listing --
# presumably a `files` accumulator and its append; confirm against the
# full file.
2161 def CheckFlakyTestUsage(input_api, output_api):
2162 """Check that FlakyTest annotation is our own instead of the android one"""
# Whole-file search: the banned import may be far from the changed lines.
2163 pattern = input_api.re.compile(r'import android.test.FlakyTest;')
2165 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
2166 if f.LocalPath().endswith('Test.java'):
2167 if pattern.search(input_api.ReadFile(f)):
2171 output_api.PresubmitError(
2172 'Use org.chromium.base.test.util.FlakyTest instead of '
2173 'android.test.FlakyTest', files)
# NOTE(review): lines 2182 and 2186 are missing from this listing -- the
# error-result wrapper and part of the message text; confirm against the
# full file.
2178 def CheckNoDEPSGIT(input_api, output_api):
2179 """Make sure .DEPS.git is never modified manually."""
2180 if any(f.LocalPath().endswith('.DEPS.git')
2181 for f in input_api.AffectedFiles()):
2183 output_api.PresubmitError(
2184 'Never commit changes to .DEPS.git. This file is maintained by an\n'
2185 'automated system based on what\'s in DEPS and your changes will be\n'
2187 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
2188 'get-the-code#Rolling_DEPS\n'
2189 'for more information')
# NOTE(review): lines 2206, 2211, 2213+ are missing from this listing
# (result wrapper / message tail / return); confirm against the full file.
2194 def CheckCrosApiNeedBrowserTest(input_api, output_api):
2195 """Check new crosapi should add browser test."""
2196 has_new_crosapi = False
2197 has_browser_test = False
2198 for f in input_api.AffectedFiles():
2199 path = f.LocalPath()
# Only newly ADDED (f.Action() == 'A') mojom files under
# chromeos/crosapi/mojom count as a new crosapi.
2200 if (path.startswith('chromeos/crosapi/mojom') and
2201 _IsMojomFile(input_api, path) and f.Action() == 'A'):
2202 has_new_crosapi = True
# Any touched *browsertest.cc / *browser_test.cc satisfies the check.
2203 if path.endswith('browsertest.cc') or path.endswith('browser_test.cc'):
2204 has_browser_test = True
2205 if has_new_crosapi and not has_browser_test:
2207 output_api.PresubmitPromptWarning(
2208 'You are adding a new crosapi, but there is no file ends with '
2209 'browsertest.cc file being added or modified. It is important '
2210 'to add crosapi browser test coverage to avoid version '
2212 'Check //docs/lacros/test_instructions.md for more information.'
# NOTE(review): lines 2222, 2224, 2227, 2231, 2233 are missing from this
# listing (early return, try:, gclient.py path component, success return);
# confirm against the full file.
2218 def CheckValidHostsInDEPSOnUpload(input_api, output_api):
2219 """Checks that DEPS file deps are from allowed_hosts."""
2220 # Run only if DEPS file has been modified to annoy fewer bystanders.
2221 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
2223 # Outsource work to gclient verify
2225 gclient_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
2226 'third_party', 'depot_tools',
# Runs `gclient verify` in a subprocess; stderr is folded into stdout so
# the CalledProcessError carries the full tool output.
2228 input_api.subprocess.check_output(
2229 [input_api.python3_executable, gclient_path, 'verify'],
2230 stderr=input_api.subprocess.STDOUT)
2232 except input_api.subprocess.CalledProcessError as error:
2234 output_api.PresubmitError(
2235 'DEPS file must have only git dependencies.',
2236 long_text=error.output)
# NOTE(review): this listing is truncated -- the parameter list continues
# on missing line 2241 (presumably a `ban_rule` parameter), the docstring's
# closing quotes (around 2248) are absent, and early returns / the `result`
# initialization are not visible. Confirm against the full file. Matching
# logic: a pattern beginning with '/' is treated as a regex (leading slash
# stripped), otherwise as a plain substring.
2240 def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
2242 """Helper method for checking for banned constructs.
2244 Returns an string composed of the name of the file, the line number where the
2245 match has been found and the additional text passed as |message| in case the
2246 target type name matches the text inside the line passed as parameter.
2250 # Ignore comments about banned types.
2251 if input_api.re.search(r"^ *//", line):
2253 # A // nocheck comment will bypass this error.
2254 if line.endswith(" nocheck"):
2258 if ban_rule.pattern[0:1] == '/':
2259 regex = ban_rule.pattern[1:]
2260 if input_api.re.search(regex, line):
2262 elif ban_rule.pattern in line:
2266 result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
2267 for line in ban_rule.explanation:
2268 result.append(' %s' % line)
# NOTE(review): many lines are missing from this listing (returns inside
# the helpers, the errors/warnings initialization, and the result-list
# assembly at the end); confirm against the full file before editing.
2273 def CheckNoBannedFunctions(input_api, output_api):
2274 """Make sure that banned functions are not used."""
# Helper: True if |affected_file| matches any regex in |excluded_paths|.
2278 def IsExcludedFile(affected_file, excluded_paths):
2279 if not excluded_paths:
2282 local_path = affected_file.LocalPath()
2283 # Consistently use / as path separator to simplify the writing of regex
2285 local_path = local_path.replace(input_api.os_path.sep, '/')
2286 for item in excluded_paths:
2287 if input_api.re.match(item, local_path):
# Helper: heuristically identifies iOS Objective-C files -- an Objective-C
# extension plus 'ios' as a path or file-name component.
2291 def IsIosObjcFile(affected_file):
2292 local_path = affected_file.LocalPath()
2293 if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m',
2296 basename = input_api.os_path.basename(local_path)
2297 if 'ios' in basename.split('_'):
2299 for sep in (input_api.os_path.sep, input_api.os_path.altsep):
2300 if sep and 'ios' in local_path.split(sep):
# Helper: applies one ban rule to one changed line, routing the resulting
# messages to errors or warnings per ban_rule.treat_as_error.
2304 def CheckForMatch(affected_file, line_num: int, line: str,
2306 if IsExcludedFile(affected_file, ban_rule.excluded_paths):
2309 problems = _GetMessageForMatchingType(input_api, f, line_num, line,
2312 if ban_rule.treat_as_error is not None and ban_rule.treat_as_error:
2313 errors.extend(problems)
2315 warnings.extend(problems)
# Each file class (Java, JS/TS, Objective-C, iOS Objective-C, iOS egtests,
# C++, mojom) is scanned against its own ban-rule table.
2317 file_filter = lambda f: f.LocalPath().endswith(('.java'))
2318 for f in input_api.AffectedFiles(file_filter=file_filter):
2319 for line_num, line in f.ChangedContents():
2320 for ban_rule in _BANNED_JAVA_FUNCTIONS:
2321 CheckForMatch(f, line_num, line, ban_rule)
2323 file_filter = lambda f: f.LocalPath().endswith(('.js', '.ts'))
2324 for f in input_api.AffectedFiles(file_filter=file_filter):
2325 for line_num, line in f.ChangedContents():
2326 for ban_rule in _BANNED_JAVASCRIPT_FUNCTIONS:
2327 CheckForMatch(f, line_num, line, ban_rule)
2329 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
2330 for f in input_api.AffectedFiles(file_filter=file_filter):
2331 for line_num, line in f.ChangedContents():
2332 for ban_rule in _BANNED_OBJC_FUNCTIONS:
2333 CheckForMatch(f, line_num, line, ban_rule)
2335 for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
2336 for line_num, line in f.ChangedContents():
2337 for ban_rule in _BANNED_IOS_OBJC_FUNCTIONS:
2338 CheckForMatch(f, line_num, line, ban_rule)
2340 egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
2341 for f in input_api.AffectedFiles(file_filter=egtest_filter):
2342 for line_num, line in f.ChangedContents():
2343 for ban_rule in _BANNED_IOS_EGTEST_FUNCTIONS:
2344 CheckForMatch(f, line_num, line, ban_rule)
2346 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
2347 for f in input_api.AffectedFiles(file_filter=file_filter):
2348 for line_num, line in f.ChangedContents():
2349 for ban_rule in _BANNED_CPP_FUNCTIONS:
2350 CheckForMatch(f, line_num, line, ban_rule)
2352 file_filter = lambda f: f.LocalPath().endswith(('.mojom'))
2353 for f in input_api.AffectedFiles(file_filter=file_filter):
2354 for line_num, line in f.ChangedContents():
2355 for ban_rule in _BANNED_MOJOM_PATTERNS:
2356 CheckForMatch(f, line_num, line, ban_rule)
# Warnings produce a prompt; errors block the CL.
2362 output_api.PresubmitPromptWarning('Banned functions were used.\n' +
2363 '\n'.join(warnings)))
2366 output_api.PresubmitError('Banned functions were used.\n' +
# NOTE(review): lines 2372-2373, 2375, 2380-2382 and 2388-2390 are missing
# from this listing -- presumably the BanRule construction around the
# '/(\.|->)Layout\(\);' pattern and the warnings initialization; confirm
# against the full file.
2370 def CheckNoLayoutCallsInTests(input_api, output_api):
2371 """Make sure there are no explicit calls to View::Layout() in tests"""
# Leading '/' marks this as a regex pattern for _GetMessageForMatchingType.
2374 r'/(\.|->)Layout\(\);',
2376 'Direct calls to View::Layout() are not allowed in tests. '
2377 'If the view must be laid out here, use RunScheduledLayout(view). It '
2378 'is found in //ui/views/test/views_test_utils.h. '
2379 'See http://crbug.com/1350521 for more details.',
# Only test sources: *_unittest*, *_browsertest*, *_ui_test* (.cc/.mm).
2383 file_filter = lambda f: input_api.re.search(
2384 r'_(unittest|browsertest|ui_test).*\.(cc|mm)$', f.LocalPath())
2385 for f in input_api.AffectedFiles(file_filter = file_filter):
2386 for line_num, line in f.ChangedContents():
2387 problems = _GetMessageForMatchingType(input_api, f,
2391 warnings.extend(problems)
2395 output_api.PresubmitPromptWarning(
2396 'Banned call to View::Layout() in tests.\n\n'.join(warnings)))
# NOTE(review): lines 2401-2402, 2412-2413 and 2415-2417 are missing from
# this listing (errors initialization, _GetMessageForMatchingType argument
# tail, result assembly); confirm against the full file.
2399 def _CheckAndroidNoBannedImports(input_api, output_api):
2400 """Make sure that banned java imports are not used."""
2403 file_filter = lambda f: f.LocalPath().endswith(('.java'))
2404 for f in input_api.AffectedFiles(file_filter=file_filter):
2405 for line_num, line in f.ChangedContents():
2406 for ban_rule in _BANNED_JAVA_IMPORTS:
2407 # Consider merging this into the above function. There is no
2408 # real difference anymore other than helping with a little
2409 # bit of boilerplate text. Doing so means things like
2410 # `treat_as_error` will also be uniformly handled.
2411 problems = _GetMessageForMatchingType(input_api, f, line_num,
2414 errors.extend(problems)
2418 output_api.PresubmitError('Banned imports were used.\n' +
# NOTE(review): lines 2429, 2431, 2434-2437 are missing from this listing
# (continue statements and the files accumulator); confirm against the
# full file.
2423 def CheckNoPragmaOnce(input_api, output_api):
# Original docstring looked copy-pasted from the banned-functions check;
# this check is specifically about `#pragma once` in headers.
2424 """Make sure that #pragma once is not used in header files."""
2426 pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
2427 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
2428 if not f.LocalPath().endswith('.h'):
# com_imported_mstscax.h is exempted -- presumably a generated/imported
# header; verify against the full file's comment.
2430 if f.LocalPath().endswith('com_imported_mstscax.h'):
2432 contents = input_api.ReadFile(f)
2433 if pattern.search(contents):
2438 output_api.PresubmitError(
2439 'Do not use #pragma once in header files.\n'
2440 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
# NOTE(review): lines 2448, 2452-2453, 2457-2460 are missing from this
# listing (problems initialization, continue, result assembly); confirm
# against the full file.
2446 def CheckNoTrinaryTrueFalse(input_api, output_api):
2447 """Checks to make sure we don't introduce use of foo ? true : false."""
2449 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
2450 for f in input_api.AffectedFiles():
2451 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
2454 for line_num, line in f.ChangedContents():
# pattern.match anchors at the start of the line, so only lines BEGINNING
# with the ternary are flagged -- presumably intentional; verify.
2455 if pattern.match(line):
2456 problems.append(' %s:%d' % (f.LocalPath(), line_num))
2461 output_api.PresubmitPromptWarning(
2462 'Please consider avoiding the "? true : false" pattern if possible.\n'
2463 + '\n'.join(problems))
# NOTE(review): this listing skips lines (e.g. 2477-2481, the
# `import checkdeps` around 2490-2491, the try/finally around sys.path,
# results initialization, and CheckAddedCppIncludes/ProtoImports argument
# lines); confirm against the full file before editing.
2467 def CheckUnwantedDependencies(input_api, output_api):
2468 """Runs checkdeps on #include and import statements added in this
2469 change. Breaking - rules is an error, breaking ! rules is a
2472 # Return early if no relevant file types were modified.
2473 for f in input_api.AffectedFiles():
2474 path = f.LocalPath()
2475 if (_IsCPlusPlusFile(input_api, path) or _IsProtoFile(input_api, path)
2476 or _IsJavaFile(input_api, path)):
2482 # We need to wait until we have an input_api object and use this
2483 # roundabout construct to import checkdeps because this file is
2484 # eval-ed and thus doesn't have __file__.
2485 original_sys_path = sys.path
2487 sys.path = sys.path + [
2488 input_api.os_path.join(input_api.PresubmitLocalPath(),
2489 'buildtools', 'checkdeps')
2492 from rules import Rule
2494 # Restore sys.path to what it was before.
2495 sys.path = original_sys_path
# Collect [abs_path, changed_lines] pairs per language for checkdeps.
2499 added_java_imports = []
2500 for f in input_api.AffectedFiles():
2501 if _IsCPlusPlusFile(input_api, f.LocalPath()):
2502 changed_lines = [line for _, line in f.ChangedContents()]
2503 added_includes.append([f.AbsoluteLocalPath(), changed_lines])
2504 elif _IsProtoFile(input_api, f.LocalPath()):
2505 changed_lines = [line for _, line in f.ChangedContents()]
2506 added_imports.append([f.AbsoluteLocalPath(), changed_lines])
2507 elif _IsJavaFile(input_api, f.LocalPath()):
2508 changed_lines = [line for _, line in f.ChangedContents()]
2509 added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
2511 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
2513 error_descriptions = []
2514 warning_descriptions = []
2515 error_subjects = set()
2516 warning_subjects = set()
# DISALLOW rule breaks are errors; other rule types (temporarily allowed)
# become warnings. The same triage is applied to C++ includes, proto
# imports, and Java imports below.
2518 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
2520 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2521 description_with_path = '%s\n %s' % (path, rule_description)
2522 if rule_type == Rule.DISALLOW:
2523 error_descriptions.append(description_with_path)
2524 error_subjects.add("#includes")
2526 warning_descriptions.append(description_with_path)
2527 warning_subjects.add("#includes")
2529 for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
2531 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2532 description_with_path = '%s\n %s' % (path, rule_description)
2533 if rule_type == Rule.DISALLOW:
2534 error_descriptions.append(description_with_path)
2535 error_subjects.add("imports")
2537 warning_descriptions.append(description_with_path)
2538 warning_subjects.add("imports")
2540 for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
2541 added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
2542 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2543 description_with_path = '%s\n %s' % (path, rule_description)
2544 if rule_type == Rule.DISALLOW:
2545 error_descriptions.append(description_with_path)
2546 error_subjects.add("imports")
2548 warning_descriptions.append(description_with_path)
2549 warning_subjects.add("imports")
2552 if error_descriptions:
2554 output_api.PresubmitError(
2555 'You added one or more %s that violate checkdeps rules.' %
2556 " and ".join(error_subjects), error_descriptions))
2557 if warning_descriptions:
2559 output_api.PresubmitPromptOrNotify(
2560 'You added one or more %s of files that are temporarily\n'
2561 'allowed but being removed. Can you avoid introducing the\n'
2562 '%s? See relevant DEPS file(s) for details and contacts.' %
2563 (" and ".join(warning_subjects), "/".join(warning_subjects)),
2564 warning_descriptions))
# NOTE(review): lines 2571, 2574-2575, 2578, 2582, 2584, 2586, 2588 and
# 2590+ are missing from this listing (the win32 early return, the args
# list construction, try:, and the result wrapper); confirm against the
# full file.
2568 def CheckFilePermissions(input_api, output_api):
2569 """Check that all files have their permissions properly set."""
# Permission bits are not meaningful on Windows.
2570 if input_api.platform == 'win32':
2572 checkperms_tool = input_api.os_path.join(input_api.PresubmitLocalPath(),
2573 'tools', 'checkperms',
2576 input_api.python3_executable, checkperms_tool, '--root',
2577 input_api.change.RepositoryRoot()
# Affected paths are handed to checkperms.py via a temp file to avoid
# command-line length limits.
2579 with input_api.CreateTemporaryFile() as file_list:
2580 for f in input_api.AffectedFiles():
2581 # checkperms.py file/directory arguments must be relative to the
2583 file_list.write((f.LocalPath() + '\n').encode('utf8'))
2585 args += ['--file-list', file_list.name]
2587 input_api.subprocess.check_output(args)
2589 except input_api.subprocess.CalledProcessError as error:
2591 output_api.PresubmitError('checkperms.py failed:',
2592 long_text=error.output.decode(
# NOTE(review): lines 2599-2600, 2602, 2605, 2609-2612 and 2615+ are
# missing from this listing (docstring tail, errors initialization,
# continue, result assembly); confirm against the full file.
2597 def CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
2598 """Makes sure we don't include ui/aura/window_property.h
2601 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
2603 for f in input_api.AffectedFiles():
# Only header files are disallowed from including window_property.h.
2604 if not f.LocalPath().endswith('.h'):
2606 for line_num, line in f.ChangedContents():
2607 if pattern.match(line):
2608 errors.append(' %s:%d' % (f.LocalPath(), line_num))
2613 output_api.PresubmitError(
2614 'Header files should not include ui/aura/window_property.h',
# NOTE(review): lines 2624, 2629, 2631, 2635-2636 and 2641-2644 are
# missing from this listing (docstring close, pattern close, errors
# initialization, result assembly); confirm against the full file.
2619 def CheckNoInternalHeapIncludes(input_api, output_api):
2620 """Makes sure we don't include any headers from
2621 third_party/blink/renderer/platform/heap/impl or
2622 third_party/blink/renderer/platform/heap/v8_wrapper from files outside of
2623 third_party/blink/renderer/platform/heap
2625 impl_pattern = input_api.re.compile(
2626 r'^\s*#include\s*"third_party/blink/renderer/platform/heap/impl/.*"')
2627 v8_wrapper_pattern = input_api.re.compile(
2628 r'^\s*#include\s*"third_party/blink/renderer/platform/heap/v8_wrapper/.*"'
2630 # Consistently use / as path separator to simplify the writing of regex
# Files inside platform/heap itself are exempt -- they may include their
# own internals.
2632 file_filter = lambda f: not input_api.re.match(
2633 r"^third_party/blink/renderer/platform/heap/.*",
2634 f.LocalPath().replace(input_api.os_path.sep, '/'))
2637 for f in input_api.AffectedFiles(file_filter=file_filter):
2638 for line_num, line in f.ChangedContents():
2639 if impl_pattern.match(line) or v8_wrapper_pattern.match(line):
2640 errors.append(' %s:%d' % (f.LocalPath(), line_num))
2645 output_api.PresubmitError(
2646 'Do not include files from third_party/blink/renderer/platform/heap/impl'
2647 ' or third_party/blink/renderer/platform/heap/v8_wrapper. Use the '
2648 'relevant counterparts from third_party/blink/renderer/platform/heap',
# Returns (presumably -- the `errors = []` line 2655, the `continue` for
# markdown files at 2660, and the `return errors` around 2663 are missing
# from this listing; confirm against the full file) a list of
# "path:line text" strings for changed lines containing VCS conflict
# markers (<<<<<<<, >>>>>>>, or a bare ======= line).
2653 def _CheckForVersionControlConflictsInFile(input_api, f):
2654 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
2656 for line_num, line in f.ChangedContents():
2657 if f.LocalPath().endswith(('.md', '.rst', '.txt')):
2658 # First-level headers in markdown look a lot like version control
2659 # conflict markers. http://daringfireball.net/projects/markdown/basics
2661 if pattern.match(line):
2662 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
# NOTE(review): lines 2668, 2671-2674 and 2677+ are missing from this
# listing (errors initialization and result-list assembly); confirm
# against the full file.
2666 def CheckForVersionControlConflicts(input_api, output_api):
2667 """Usually this is not intentional and will cause a compile failure."""
2669 for f in input_api.AffectedFiles():
# Delegates per-file scanning to _CheckForVersionControlConflictsInFile.
2670 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
2675 output_api.PresubmitError(
2676 'Version control conflict markers found, please resolve.',
# Warns when a changed line links a Google support article by answer
# number ("/answer") instead of the stabler p= identifier.
# NOTE(review): lines 2683, 2688-2691 and 2695+ are missing from this
# listing (errors initialization, result assembly); confirm against the
# full file.
2681 def CheckGoogleSupportAnswerUrlOnUpload(input_api, output_api):
2682 pattern = input_api.re.compile('support\.google\.com\/chrome.*/answer')
2684 for f in input_api.AffectedFiles():
2685 for line_num, line in f.ChangedContents():
2686 if pattern.search(line):
2687 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
2692 output_api.PresubmitPromptWarning(
2693 'Found Google support URL addressed by answer number. Please replace '
2694 'with a p= identifier instead. See crbug.com/679462\n',
# NOTE(review): lines 2705, 2707, 2711, 2721-2723 and 2729+ are missing
# from this listing (docstring close, FilterSourceFile argument head,
# result assembly); confirm against the full file.
2699 def CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
2700 def FilterFile(affected_file):
2701 """Filter function for use with input_api.AffectedSourceFiles,
2702 below. This filters out everything except non-test files from
2703 top-level directories that generally speaking should not hard-code
2704 service URLs (e.g. src/android_webview/, src/content/ and others).
2706 return input_api.FilterSourceFile(
2708 files_to_check=[r'^(android_webview|base|content|net)/.*'],
2709 files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
2710 input_api.DEFAULT_FILES_TO_SKIP))
# Matches a quoted string containing a Google-owned host; the separate
# comment_pattern exempts matches on // comment lines.
2712 base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
2713 '\.(com|net)[^"]*"')
2714 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
2715 pattern = input_api.re.compile(base_pattern)
2716 problems = [] # items are (filename, line_number, line)
2717 for f in input_api.AffectedSourceFiles(FilterFile):
2718 for line_num, line in f.ChangedContents():
2719 if not comment_pattern.search(line) and pattern.search(line):
2720 problems.append((f.LocalPath(), line_num, line))
2724 output_api.PresubmitPromptOrNotify(
2725 'Most layers below src/chrome/ should not hardcode service URLs.\n'
2726 'Are you sure this is correct?', [
2727 ' %s:%d: %s' % (problem[0], problem[1], problem[2])
2728 for problem in problems
# NOTE(review): many lines are missing from this listing (files_to_check
# head around 2741-2743, prefs/priority_prefs initialization around
# 2750-2751, result-list assembly). Also note line 2777 joins `prefs`
# inside the SYNCABLE_PRIORITY_PREF warning where `priority_prefs` would
# be expected -- may be a defect or an artifact of the missing lines;
# confirm against the full file.
2735 def CheckChromeOsSyncedPrefRegistration(input_api, output_api):
2736 """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""
2738 def FileFilter(affected_file):
2739 """Includes directories known to be Chrome OS only."""
2740 return input_api.FilterSourceFile(
2744 '^chromeos/', # Top-level src/chromeos.
2745 '.*/chromeos/', # Any path component.
2748 files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))
2752 for f in input_api.AffectedFiles(file_filter=FileFilter):
2753 for line_num, line in f.ChangedContents():
# Location line and offending source line are appended as separate
# entries, so each hit produces two output lines.
2754 if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF',
2756 prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
2757 prefs.append(' %s' % line)
2758 if input_api.re.search(
2759 'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
2760 priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
2761 priority_prefs.append(' %s' % line)
2766 output_api.PresubmitPromptWarning(
2767 'Preferences were registered as SYNCABLE_PREF and will be controlled '
2768 'by browser sync settings. If these prefs should be controlled by OS '
2769 'sync settings use SYNCABLE_OS_PREF instead.\n' +
2771 if (priority_prefs):
2773 output_api.PresubmitPromptWarning(
2774 'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
2775 'controlled by browser sync settings. If these prefs should be '
2776 'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
2777 'instead.\n' + '\n'.join(prefs)))
# NOTE(review): lines 2785-2786, 2789, 2791, 2800-2803 are missing from
# this listing (docstring close, errors initialization, one skip entry,
# result assembly); confirm against the full file.
2781 def CheckNoAbbreviationInPngFileName(input_api, output_api):
2782 """Makes sure there are no abbreviations in the name of PNG files.
2783 The native_client_sdk directory is excluded because it has auto-generated PNG
2784 files for documentation.
2787 files_to_check = [r'.*\.png$']
2788 files_to_skip = [r'^native_client_sdk/',
2790 r'^third_party/blink/web_tests/',
2792 file_filter = lambda f: input_api.FilterSourceFile(
2793 f, files_to_check=files_to_check, files_to_skip=files_to_skip)
# Flags names ending in _<single letter>.png or containing _<letter>_,
# e.g. foo_h.png or foo_c_small.png.
2794 abbreviation = input_api.re.compile('.+_[a-z]\.png|.+_[a-z]_.*\.png')
2795 for f in input_api.AffectedFiles(include_deletes=False,
2796 file_filter=file_filter):
2797 file_name = input_api.os_path.split(f.LocalPath())[1]
2798 if abbreviation.search(file_name):
2799 errors.append(' %s' % f.LocalPath())
2804 output_api.PresubmitError(
2805 'The name of PNG files should not have abbreviations. \n'
2806 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
2807 'Contact oshima@chromium.org if you have questions.', errors))
# NOTE(review): lines 2813-2814, 2821-2823, 2825, 2828-2829 are missing
# from this listing (docstring close, errors initialization, early return,
# result assembly); confirm against the full file.
2810 def CheckNoProductIconsAddedToPublicRepo(input_api, output_api):
2811 """Heuristically identifies product icons based on their file name and reminds
2812 contributors not to add them to the Chromium repository.
# Any image file with "google" in its name is treated as a product icon.
2815 files_to_check = [r'.*google.*\.png$|.*google.*\.svg$|.*google.*\.icon$']
2816 file_filter = lambda f: input_api.FilterSourceFile(
2817 f, files_to_check=files_to_check)
2818 for f in input_api.AffectedFiles(include_deletes=False,
2819 file_filter=file_filter):
2820 errors.append(' %s' % f.LocalPath())
2824 # Give warnings instead of errors on presubmit --all and presubmit
2826 message_type = (output_api.PresubmitNotifyResult if input_api.no_diffs
2827 else output_api.PresubmitError)
2830 'Trademarked images should not be added to the public repo. '
2831 'See crbug.com/944754', errors))
# NOTE(review): lines 2837-2838, 2840-2841, 2844, 2846, 2849-2852 are
# missing from this listing (docstring close, the set construction around
# both comprehensions, and the return); confirm against the full file.
2835 def _ExtractAddRulesFromParsedDeps(parsed_deps):
2836 """Extract the rules that add dependencies from a parsed DEPS file.
2839 parsed_deps: the locals dictionary from evaluating the DEPS file."""
# Both '+' (allow) and '!' (temporarily allow) rules count as additions;
# the leading sigil is stripped via rule[1:].
2842 rule[1:] for rule in parsed_deps.get('include_rules', [])
2843 if rule.startswith('+') or rule.startswith('!')
# specific_include_rules maps file patterns to rule lists; the pattern
# key itself is ignored here.
2845 for _, rules in parsed_deps.get('specific_include_rules', {}).items():
2847 rule[1:] for rule in rules
2848 if rule.startswith('+') or rule.startswith('!')
# NOTE(review): lines 2855, 2857, 2860, 2863, 2865, 2867-2869, 2871-2873,
# 2875-2877 are missing from this listing (the _VarImpl class header,
# local_scope/global_scope dict construction, and the return); confirm
# against the full file.
2853 def _ParseDeps(contents):
2854 """Simple helper for parsing DEPS files."""
2856 # Stubs for handling special syntax in the root DEPS file.
2858 def __init__(self, local_scope):
2859 self._local_scope = local_scope
2861 def Lookup(self, var_name):
2862 """Implements the Var syntax."""
# Looks up var_name in the DEPS file's own 'vars' dict.
2864 return self._local_scope['vars'][var_name]
2866 raise Exception('Var is not defined: %s' % var_name)
2870 'Var': _VarImpl(local_scope).Lookup,
# SECURITY NOTE: exec() evaluates the DEPS file contents as Python.
# DEPS comes from the repository itself (not untrusted network input),
# but keep that assumption in mind if this helper is ever reused.
2874 exec(contents, global_scope, local_scope)
# NOTE(review): lines 2881, 2885, 2888, 2891, 2893-2894, 2897, 2901,
# 2903-2905 are missing from this listing (docstring close, results
# initialization, continue, else branch, return); confirm against the
# full file.
2878 def _CalculateAddedDeps(os_path, old_contents, new_contents):
2879 """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
2880 a set of DEPS entries that we should look up.
2882 For a directory (rather than a specific filename) we fake a path to
2883 a specific filename by adding /DEPS. This is chosen as a file that
2884 will seldom or never be subject to per-file include_rules.
2886 # We ignore deps entries on auto-generated directories.
2887 AUTO_GENERATED_DIRS = ['grit', 'jni']
# Only rules newly introduced by this change are of interest.
2889 old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
2890 new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))
2892 added_deps = new_deps.difference(old_deps)
2895 for added_dep in added_deps:
2896 if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
2898 # Assume that a rule that ends in .h is a rule for a specific file.
2899 if added_dep.endswith('.h'):
2900 results.add(added_dep)
2902 results.add(os_path.join(added_dep, 'DEPS'))
# NOTE(review): this listing skips lines throughout (early returns around
# 2916/2918-2919, try: around the Gerrit call, `return []` fast paths,
# the TBR branch head around 2951-2952, missing_files construction around
# 2983, the else for StripDeps, and the final results assembly/return);
# confirm against the full file before editing.
2906 def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
2907 """When a dependency prefixed with + is added to a DEPS file, we
2908 want to make sure that the change is reviewed by an OWNER of the
2909 target file or directory, to avoid layering violations from being
2910 introduced. This check verifies that this happens.
2912 # We rely on Gerrit's code-owners to check approvals.
2913 # input_api.gerrit is always set for Chromium, but other projects
2914 # might not use Gerrit.
2915 if not input_api.gerrit or input_api.no_diffs:
2917 if 'PRESUBMIT_SKIP_NETWORK' in input_api.environ:
2920 if (input_api.change.issue and
2921 input_api.gerrit.IsOwnersOverrideApproved(
2922 input_api.change.issue)):
2923 # Skip OWNERS check when Owners-Override label is approved. This is
2924 # intended for global owners, trusted bots, and on-call sheriffs.
2925 # Review is still required for these changes.
2927 except Exception as e:
# Network/Gerrit failures degrade to a warning rather than blocking.
2928 return [output_api.PresubmitPromptWarning(
2929 'Failed to retrieve owner override status - %s' % str(e))]
2931 virtual_depended_on_files = set()
2933 # Consistently use / as path separator to simplify the writing of regex
# Blink DEPS files are excluded from this check.
2935 file_filter = lambda f: not input_api.re.match(
2936 r"^third_party/blink/.*",
2937 f.LocalPath().replace(input_api.os_path.sep, '/'))
2938 for f in input_api.AffectedFiles(include_deletes=False,
2939 file_filter=file_filter):
2940 filename = input_api.os_path.basename(f.LocalPath())
2941 if filename == 'DEPS':
# Diff old vs new DEPS contents to find newly-added '+' rules.
2942 virtual_depended_on_files.update(
2943 _CalculateAddedDeps(input_api.os_path,
2944 '\n'.join(f.OldContents()),
2945 '\n'.join(f.NewContents())))
2947 if not virtual_depended_on_files:
2950 if input_api.is_committing:
2953 output_api.PresubmitNotifyResult(
2954 '--tbr was specified, skipping OWNERS check for DEPS additions'
2957 # TODO(dcheng): Make this generate an error on dry runs if the reviewer
2958 # is not added, to prevent review serialization.
2959 if input_api.dry_run:
2961 output_api.PresubmitNotifyResult(
2962 'This is a dry run, skipping OWNERS check for DEPS additions'
2965 if not input_api.change.issue:
2967 output_api.PresubmitError(
2968 "DEPS approval by OWNERS check failed: this change has "
2969 "no change number, so we can't check it for approvals.")
# Severity depends on commit vs upload (error when committing, notify
# on upload).
2971 output = output_api.PresubmitError
2973 output = output_api.PresubmitNotifyResult
2975 owner_email, reviewers = (
2976 input_api.canned_checks.GetCodereviewOwnerAndReviewers(
2977 input_api, None, approval_needed=input_api.is_committing))
# If the issue has no recorded owner yet, fall back to the CL author.
2979 owner_email = owner_email or input_api.change.author_email
2981 approval_status = input_api.owners_client.GetFilesApprovalStatus(
2982 virtual_depended_on_files, reviewers.union([owner_email]), [])
2984 f for f in virtual_depended_on_files
2985 if approval_status[f] != input_api.owners_client.APPROVED
2988 # We strip the /DEPS part that was added by
2989 # _FilesToCheckForIncomingDeps to fake a path to a file in a
2991 def StripDeps(path):
2992 start_deps = path.rfind('/DEPS')
2993 if start_deps != -1:
2994 return path[:start_deps]
2998 unapproved_dependencies = [
2999 "'+%s'," % StripDeps(path) for path in missing_files
3002 if unapproved_dependencies:
3005 'You need LGTM from owners of depends-on paths in DEPS that were '
3006 'modified in this CL:\n %s' %
3007 '\n '.join(sorted(unapproved_dependencies)))
3009 suggested_owners = input_api.owners_client.SuggestOwners(
3010 missing_files, exclude=[owner_email])
3012 output('Suggested missing target path OWNERS:\n %s' %
3013 '\n '.join(suggested_owners or [])))
3019 # TODO: add unit tests.
# NOTE(review): lines 3022, 3039, 3062, 3070-3073, 3080, 3085-3087,
# 3090-3093 and the tail are missing from this listing (files_to_skip
# head, two skip entries, log_info/printf set initialization, and the
# result assembly); confirm against the full file.
3020 def CheckSpamLogging(input_api, output_api):
# Only implementation files (per _IMPLEMENTATION_EXTENSIONS) are scanned.
3021 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
# The skip list enumerates files that legitimately log to the console:
# loggers themselves, command-line tools, and test/sample code.
3023 _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
3024 input_api.DEFAULT_FILES_TO_SKIP + (
3025 r"^base/fuchsia/scoped_fx_logger\.cc$",
3026 r"^base/logging\.h$",
3027 r"^base/logging\.cc$",
3028 r"^base/task/thread_pool/task_tracker\.cc$",
3029 r"^chrome/app/chrome_main_delegate\.cc$",
3030 r"^chrome/browser/ash/arc/enterprise/cert_store/arc_cert_installer\.cc$",
3031 r"^chrome/browser/ash/policy/remote_commands/user_command_arc_job\.cc$",
3032 r"^chrome/browser/chrome_browser_main\.cc$",
3033 r"^chrome/browser/ui/startup/startup_browser_creator\.cc$",
3034 r"^chrome/browser/browser_switcher/bho/.*",
3035 r"^chrome/browser/diagnostics/diagnostics_writer\.cc$",
3036 r"^chrome/chrome_cleaner/.*",
3037 r"^chrome/chrome_elf/dll_hash/dll_hash_main\.cc$",
3038 r"^chrome/installer/setup/.*",
3040 r"^components/media_control/renderer/media_playback_options\.cc$",
3041 r"^components/policy/core/common/policy_logger\.cc$",
3042 r"^components/viz/service/display/"
3043 r"overlay_strategy_underlay_cast\.cc$",
3044 r"^components/zucchini/.*",
3045 # TODO(peter): Remove exception. https://crbug.com/534537
3046 r"^content/browser/notifications/"
3047 r"notification_event_dispatcher_impl\.cc$",
3048 r"^content/common/gpu/client/gl_helper_benchmark\.cc$",
3049 r"^courgette/courgette_minimal_tool\.cc$",
3050 r"^courgette/courgette_tool\.cc$",
3051 r"^extensions/renderer/logging_native_handler\.cc$",
3052 r"^fuchsia_web/common/init_logging\.cc$",
3053 r"^fuchsia_web/runners/common/web_component\.cc$",
3054 r"^fuchsia_web/shell/.*\.cc$",
3055 r"^headless/app/headless_shell\.cc$",
3056 r"^ipc/ipc_logging\.cc$",
3057 r"^native_client_sdk/",
3058 r"^remoting/base/logging\.h$",
3059 r"^remoting/host/.*",
3060 r"^sandbox/linux/.*",
3061 r"^storage/browser/file_system/dump_file_system\.cc$",
3063 r"^ui/base/resource/data_pack\.cc$",
3064 r"^ui/aura/bench/bench_main\.cc$",
3065 r"^ui/ozone/platform/cast/",
3066 r"^ui/base/x/xwmstartupcheck/"
3067 r"xwmstartupcheck\.cc$"))
3068 source_file_filter = lambda x: input_api.FilterSourceFile(
3069 x, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
3074 for f in input_api.AffectedSourceFiles(source_file_filter):
3075 for _, line in f.ChangedContents():
# LOG(INFO)/DLOG(INFO) and their LOG_IF variants are flagged separately
# from raw printf/fprintf-to-console usage.
3076 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
3077 log_info.add(f.LocalPath())
3078 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
3079 log_info.add(f.LocalPath())
3081 if input_api.re.search(r"\bprintf\(", line):
3082 printf.add(f.LocalPath())
3083 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
3084 printf.add(f.LocalPath())
3088 output_api.PresubmitError(
3089 'These files spam the console log with LOG(INFO):',
3094 output_api.PresubmitError(
3095 'These files spam the console log with printf/fprintf:',
# NOTE(review): lines are missing throughout this listing (docstring
# close around 3104, several entries of the type-name list, the errors
# initialization, continue, and the result assembly); confirm against
# the full file.
3101 def CheckForAnonymousVariables(input_api, output_api):
3102 """These types are all expected to hold locks while in scope and
3103 so should never be anonymous (which causes them to be immediately
# RAII-style scoped types (mostly Skia SkAuto* helpers) that must be
# bound to a named variable so their scope outlives the statement.
3105 they_who_must_be_named = [
3109 'SkAutoAlphaRestore',
3110 'SkAutoBitmapShaderInstall',
3111 'SkAutoBlitterChoose',
3112 'SkAutoBounderCommit',
3114 'SkAutoCanvasRestore',
3115 'SkAutoCommentBlock',
3117 'SkAutoDisableDirectionCheck',
3118 'SkAutoDisableOvalCheck',
3125 'SkAutoMaskFreeImage',
3126 'SkAutoMutexAcquire',
3127 'SkAutoPathBoundsUpdate',
3129 'SkAutoRasterClipValidate',
3135 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
3136 # bad: base::AutoLock(lock.get());
3137 # not bad: base::AutoLock lock(lock.get());
3138 bad_pattern = input_api.re.compile(anonymous)
3139 # good: new base::AutoLock(lock.get())
3140 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
3143 for f in input_api.AffectedFiles():
3144 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
3146 for linenum, line in f.ChangedContents():
# A line is flagged only when the anonymous form appears WITHOUT a
# preceding `new` (heap allocation is legitimately nameless).
3147 if bad_pattern.search(line) and not good_pattern.search(line):
3148 errors.append('%s:%d' % (f.LocalPath(), linenum))
3152 output_api.PresubmitError(
3153 'These lines create anonymous variables that need to be named:',
# Warns on std::unique_ptr<T>() (use nullptr) and errors on explicit
# std::unique_ptr<T>(foo) construction (use std::make_unique<T>()).
# NOTE(review): interior lines appear elided in this excerpt (gaps in the
# original numbering).
3159 def CheckUniquePtrOnUpload(input_api, output_api):
3160 # Returns whether |template_str| is of the form <T, U...> for some types T
3161 # and U. Assumes that |template_str| is already in the form <...>.
3162 def HasMoreThanOneArg(template_str):
3163 # Level of <...> nesting.
3165 for c in template_str:
# A top-level comma (nesting == 1) separates template arguments.
3170 elif c == ',' and nesting == 1:
3174 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
3175 sources = lambda affected_file: input_api.FilterSourceFile(
3177 files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
3178 DEFAULT_FILES_TO_SKIP),
3179 files_to_check=file_inclusion_pattern)
3181 # Pattern to capture a single "<...>" block of template arguments. It can
3182 # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
3183 # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
3184 # latter would likely require counting that < and > match, which is not
3185 # expressible in regular languages. Should the need arise, one can introduce
3186 # limited counting (matching up to a total number of nesting depth), which
3187 # should cover all practical cases for already a low nesting limit.
3188 template_arg_pattern = (
3189 r'<[^>]*' # Opening block of <.
3190 r'>([^<]*>)?') # Closing block of >.
3191 # Prefix expressing that whatever follows is not already inside a <...>
3193 not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
3194 null_construct_pattern = input_api.re.compile(
3195 not_inside_template_arg_pattern + r'\bstd::unique_ptr' +
3196 template_arg_pattern + r'\(\)')
3198 # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
3199 template_arg_no_array_pattern = (
3200 r'<[^>]*[^]]' # Opening block of <.
3201 r'>([^(<]*[^]]>)?') # Closing block of >.
3202 # Prefix saying that what follows is the start of an expression.
3203 start_of_expr_pattern = r'(=|\breturn|^)\s*'
3204 # Suffix saying that what follows are call parentheses with a non-empty list
3206 nonempty_arg_list_pattern = r'\(([^)]|$)'
3207 # Put the template argument into a capture group for deeper examination later.
3208 return_construct_pattern = input_api.re.compile(
3209 start_of_expr_pattern + r'std::unique_ptr' + '(?P<template_arg>' +
3210 template_arg_no_array_pattern + ')' + nonempty_arg_list_pattern)
3212 problems_constructor = []
3213 problems_nullptr = []
3214 for f in input_api.AffectedSourceFiles(sources):
3215 for line_number, line in f.ChangedContents():
3217 # return std::unique_ptr<T>(foo);
3218 # bar = std::unique_ptr<T>(foo);
3220 # return std::unique_ptr<T[]>(foo);
3221 # bar = std::unique_ptr<T[]>(foo);
3222 # And also allow cases when the second template argument is present. Those
3223 # cases cannot be handled by std::make_unique:
3224 # return std::unique_ptr<T, U>(foo);
3225 # bar = std::unique_ptr<T, U>(foo);
3226 local_path = f.LocalPath()
3227 return_construct_result = return_construct_pattern.search(line)
# <T, U> forms are allowed: make_unique cannot express a custom deleter.
3228 if return_construct_result and not HasMoreThanOneArg(
3229 return_construct_result.group('template_arg')):
3230 problems_constructor.append(
3231 '%s:%d\n %s' % (local_path, line_number, line.strip()))
3233 # std::unique_ptr<T>()
3234 if null_construct_pattern.search(line):
3235 problems_nullptr.append(
3236 '%s:%d\n %s' % (local_path, line_number, line.strip()))
3239 if problems_nullptr:
3241 output_api.PresubmitPromptWarning(
3242 'The following files use std::unique_ptr<T>(). Use nullptr instead.',
3244 if problems_constructor:
3246 output_api.PresubmitError(
3247 'The following files use explicit std::unique_ptr constructor. '
3248 'Use std::make_unique<T>() instead, or use base::WrapUnique if '
3249 'std::make_unique is not an option.', problems_constructor))
3253 def CheckUserActionUpdate(input_api, output_api):
3254 """Checks if any new user action has been added."""
# NOTE(review): some interior lines are elided in this excerpt.
# If actions.xml itself is in the CL, defer to its own PRESUBMIT.
3255 if any('actions.xml' == input_api.os_path.basename(f)
3256 for f in input_api.LocalPaths()):
3257 # If actions.xml is already included in the changelist, the PRESUBMIT
3258 # for actions.xml will do a more complete presubmit check.
3261 file_inclusion_pattern = [r'.*\.(cc|mm)$']
3262 files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
3263 input_api.DEFAULT_FILES_TO_SKIP)
3264 file_filter = lambda f: input_api.FilterSourceFile(
3265 f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
# Captures the action name from UserMetricsAction("...") call sites.
3267 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
3268 current_actions = None
3269 for f in input_api.AffectedFiles(file_filter=file_filter):
3270 for line_num, line in f.ChangedContents():
3271 match = input_api.re.search(action_re, line)
3273 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
3275 if not current_actions:
3276 with open('tools/metrics/actions/actions.xml',
3277 encoding='utf-8') as actions_f:
3278 current_actions = actions_f.read()
3279 # Search for the matched user action name in |current_actions|.
3280 for action_name in match.groups():
3281 action = 'name="{0}"'.format(action_name)
3282 if action not in current_actions:
3284 output_api.PresubmitPromptWarning(
3285 'File %s line %d: %s is missing in '
3286 'tools/metrics/actions/actions.xml. Please run '
3287 'tools/metrics/actions/extract_actions.py to update.'
3288 % (f.LocalPath(), line_num, action_name))
3293 def _ImportJSONCommentEater(input_api):
# Makes tools/json_comment_eater importable, imports it, and returns the
# module. NOTE(review): the surrounding sys.path save/restore lines appear
# elided in this excerpt — confirm against the full file.
3295 sys.path = sys.path + [
3296 input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
3297 'json_comment_eater')
3299 import json_comment_eater
3300 return json_comment_eater
3303 def _GetJSONParseError(input_api, filename, eat_comments=True):
# Parses |filename| as JSON (optionally stripping // comments first via
# json_comment_eater) and reports a parse failure, if any.
# NOTE(review): the try/return lines appear elided in this excerpt.
3305 contents = input_api.ReadFile(filename)
3307 json_comment_eater = _ImportJSONCommentEater(input_api)
3308 contents = json_comment_eater.Nom(contents)
# Parse only to validate; the parsed value is discarded.
3310 input_api.json.loads(contents)
3311 except ValueError as e:
3316 def _GetIDLParseError(input_api, filename):
# Validates an extension IDL file by feeding it to the json_schema_compiler
# IDL parser in a subprocess; returns the parser's stderr on failure, else
# None. NOTE(review): some interior lines are elided in this excerpt.
3318 contents = input_api.ReadFile(filename)
# The IDL toolchain only accepts ASCII input; reject early with a clear
# message pointing at the offending character.
3319 for i, char in enumerate(contents):
3320 if not char.isascii():
3322 'Non-ascii character "%s" (ord %d) found at offset %d.' %
3323 (char, ord(char), i))
3324 idl_schema = input_api.os_path.join(input_api.PresubmitLocalPath(),
3325 'tools', 'json_schema_compiler',
3327 process = input_api.subprocess.Popen(
3328 [input_api.python3_executable, idl_schema],
3329 stdin=input_api.subprocess.PIPE,
3330 stdout=input_api.subprocess.PIPE,
3331 stderr=input_api.subprocess.PIPE,
3332 universal_newlines=True)
# The file contents are piped to the parser on stdin; any stderr output
# is treated as the parse error.
3333 (_, error) = process.communicate(input=contents)
3334 return error or None
3335 except ValueError as e:
3339 def CheckParseErrors(input_api, output_api):
3340 """Check that IDL and JSON files do not contain syntax errors."""
# NOTE(review): some interior lines are elided in this excerpt.
# Maps file extension -> validator function.
3342 '.idl': _GetIDLParseError,
3343 '.json': _GetJSONParseError,
3345 # Most JSON files are preprocessed and support comments, but these do not.
3346 json_no_comments_patterns = [
3349 # Only run IDL checker on files in these directories.
3350 idl_included_patterns = [
3351 r'^chrome/common/extensions/api/',
3352 r'^extensions/common/api/',
# Returns the validator for a file based on its extension, or None.
3355 def get_action(affected_file):
3356 filename = affected_file.LocalPath()
3357 return actions.get(input_api.os_path.splitext(filename)[1])
3359 def FilterFile(affected_file):
3360 action = get_action(affected_file)
3363 path = affected_file.LocalPath()
# Known-invalid test data is deliberately unparseable; skip it.
3365 if _MatchesFile(input_api,
3366 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS, path):
3369 if (action == _GetIDLParseError
3370 and not _MatchesFile(input_api, idl_included_patterns, path)):
3375 for affected_file in input_api.AffectedFiles(file_filter=FilterFile,
3376 include_deletes=False):
3377 action = get_action(affected_file)
3379 if (action == _GetJSONParseError
3380 and _MatchesFile(input_api, json_no_comments_patterns,
3381 affected_file.LocalPath())):
3382 kwargs['eat_comments'] = False
3383 parse_error = action(input_api, affected_file.AbsoluteLocalPath(),
3387 output_api.PresubmitError(
3388 '%s could not be parsed: %s' %
3389 (affected_file.LocalPath(), parse_error)))
3393 def CheckJavaStyle(input_api, output_api):
3394 """Runs checkstyle on changed java files and returns errors if any exist."""
# NOTE(review): the import of the checkstyle module and the try/finally
# structure appear elided in this excerpt.
3396 # Return early if no java files were modified.
3398 _IsJavaFile(input_api, f.LocalPath())
3399 for f in input_api.AffectedFiles()):
# sys.path is temporarily extended so tools/android/checkstyle can be
# imported, then restored below.
3403 original_sys_path = sys.path
3405 sys.path = sys.path + [
3406 input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
3407 'android', 'checkstyle')
3411 # Restore sys.path to what it was before.
3412 sys.path = original_sys_path
3414 return checkstyle.run_presubmit(
3417 files_to_skip=_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP)
3420 def CheckPythonDevilInit(input_api, output_api):
3421 """Checks to make sure devil is initialized correctly in python scripts."""
# NOTE(review): some interior lines are elided in this excerpt.
# Direct devil initialization entry points that scripts must not call;
# devil_chromium.Initialize() is the sanctioned wrapper.
3422 script_common_initialize_pattern = input_api.re.compile(
3423 r'script_common\.InitializeEnvironment\(')
3424 devil_env_config_initialize = input_api.re.compile(
3425 r'devil_env\.config\.Initialize\(')
3429 sources = lambda affected_file: input_api.FilterSourceFile(
# devil_chromium.py itself legitimately calls the raw initializers.
3431 files_to_skip=(_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP + (
3432 r'^build/android/devil_chromium\.py',
3435 files_to_check=[r'.*\.py$'])
3437 for f in input_api.AffectedSourceFiles(sources):
3438 for line_num, line in f.ChangedContents():
3439 if (script_common_initialize_pattern.search(line)
3440 or devil_env_config_initialize.search(line)):
3441 errors.append("%s:%d" % (f.LocalPath(), line_num))
3447 output_api.PresubmitError(
3448 'Devil initialization should always be done using '
3449 'devil_chromium.Initialize() in the chromium project, to use better '
3450 'defaults for dependencies (ex. up-to-date version of adb).',
3456 def _MatchesFile(input_api, patterns, path):
# Returns whether |path| matches any regex in |patterns|.
# NOTE(review): the return statements appear elided in this excerpt.
3457 # Consistently use / as path separator to simplify the writing of regex
3459 path = path.replace(input_api.os_path.sep, '/')
3460 for pattern in patterns:
3461 if input_api.re.search(pattern, path):
3466 def _ChangeHasSecurityReviewer(input_api, owners_file):
3467 """Returns True iff the CL has a reviewer from SECURITY_OWNERS.
3470 input_api: The presubmit input API.
3471 owners_file: OWNERS file with required reviewers. Typically, this is
3472 something like ipc/SECURITY_OWNERS.
3474 Note: if the presubmit is running for commit rather than for upload, this
3475 only returns True if a security reviewer has also approved the CL.
# NOTE(review): some interior lines are elided in this excerpt.
3477 # Owners-Override should bypass all additional OWNERS enforcement checks.
3478 # A CR+1 vote will still be required to land this change.
3479 if (input_api.change.issue and input_api.gerrit.IsOwnersOverrideApproved(
3480 input_api.change.issue)):
3483 owner_email, reviewers = (
3484 input_api.canned_checks.GetCodereviewOwnerAndReviewers(
# When committing (and not a dry run), only approved reviewers count.
3487 approval_needed=input_api.is_committing and not input_api.dry_run))
3489 security_owners = input_api.owners_client.ListOwners(owners_file)
3490 return any(owner in reviewers for owner in security_owners)
3494 class _SecurityProblemWithItems:
# A presubmit problem message together with the items (e.g. file paths or
# missing OWNERS rules) it applies to. NOTE(review): the dataclass
# decorator and a `problem` field appear elided in this excerpt.
3496 items: Sequence[str]
3500 class _MissingSecurityOwnersResult:
# Aggregate result returned by _FindMissingSecurityOwners.
# NOTE(review): the dataclass decorator appears elided in this excerpt.
3501 owners_file_problems: Sequence[_SecurityProblemWithItems]
3502 has_security_sensitive_files: bool
3503 missing_reviewer_problem: Optional[_SecurityProblemWithItems]
3506 def _FindMissingSecurityOwners(input_api,
3508 file_patterns: Sequence[str],
3509 excluded_patterns: Sequence[str],
3510 required_owners_file: str,
3511 custom_rule_function: Optional[Callable] = None
3512 ) -> _MissingSecurityOwnersResult:
3513 """Find OWNERS files missing per-file rules for security-sensitive files.
3516 input_api: the PRESUBMIT input API object.
3517 output_api: the PRESUBMIT output API object.
3518 file_patterns: basename patterns that require a corresponding per-file
3519 security restriction.
3520 excluded_patterns: path patterns that should be exempted from
3521 requiring a security restriction.
3522 required_owners_file: path to the required OWNERS file, e.g.
3524 cc_alias: If not None, email that will be CCed automatically if the
3525 change contains security-sensitive files, as determined by
3526 `file_patterns` and `excluded_patterns`.
3527 custom_rule_function: If not None, will be called with `input_api` and
3528 the current file under consideration. Returning True will add an
3529 exact match per-file rule check for the current file.
# NOTE(review): some interior lines of this function are elided in this
# excerpt (gaps in the original numbering).
3532 # `to_check` is a mapping of an OWNERS file path to Patterns.
3534 # Patterns is a dictionary mapping glob patterns (suitable for use in
3535 # per-file rules) to a PatternEntry.
3537 # PatternEntry is a dictionary with two keys:
3538 # - 'files': the files that are matched by this pattern
3539 # - 'rules': the per-file rules needed for this pattern
3541 # For example, if we expect OWNERS file to contain rules for *.mojom and
3542 # *_struct_traits*.*, Patterns might look like this:
3547 # 'per-file *.mojom=set noparent',
3548 # 'per-file *.mojom=file://ipc/SECURITY_OWNERS',
3551 # '*_struct_traits*.*': {
3554 # 'per-file *_struct_traits*.*=set noparent',
3555 # 'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
3560 files_to_review = []
# Records that |file|'s sibling OWNERS file must carry the two per-file
# security rules for |pattern|.
3562 def AddPatternToCheck(file, pattern):
3563 owners_file = input_api.os_path.join(
3564 input_api.os_path.dirname(file.LocalPath()), 'OWNERS')
3565 if owners_file not in to_check:
3566 to_check[owners_file] = {}
3567 if pattern not in to_check[owners_file]:
3568 to_check[owners_file][pattern] = {
3571 f'per-file {pattern}=set noparent',
3572 f'per-file {pattern}=file://{required_owners_file}',
3575 to_check[owners_file][pattern]['files'].append(file.LocalPath())
3576 files_to_review.append(file.LocalPath())
3578 # Only enforce security OWNERS rules for a directory if that directory has a
3579 # file that matches `file_patterns`. For example, if a directory only
3580 # contains *.mojom files and no *_messages*.h files, the check should only
3581 # ensure that rules for *.mojom files are present.
3582 for file in input_api.AffectedFiles(include_deletes=False):
3583 file_basename = input_api.os_path.basename(file.LocalPath())
3584 if custom_rule_function is not None and custom_rule_function(
3586 AddPatternToCheck(file, file_basename)
# Full-path exclusions take precedence over basename matches.
3590 input_api.fnmatch.fnmatch(file.LocalPath(), pattern)
3591 for pattern in excluded_patterns):
3594 for pattern in file_patterns:
3595 # Unlike `excluded_patterns`, `file_patterns` is checked only against the
3597 if input_api.fnmatch.fnmatch(file_basename, pattern):
3598 AddPatternToCheck(file, pattern)
3601 has_security_sensitive_files = bool(to_check)
3603 # Check if any newly added lines in OWNERS files intersect with required
3604 # per-file OWNERS lines. If so, ensure that a security reviewer is included.
3605 # This is a hack, but is needed because the OWNERS check (by design) ignores
3606 # new OWNERS entries; otherwise, a non-owner could add someone as a new
3607 # OWNER and have that newly-added OWNER self-approve their own addition.
3608 newly_covered_files = []
3609 for file in input_api.AffectedFiles(include_deletes=False):
3610 if not file.LocalPath() in to_check:
3612 for _, line in file.ChangedContents():
3613 for _, entry in to_check[file.LocalPath()].items():
3614 if line in entry['rules']:
3615 newly_covered_files.extend(entry['files'])
3617 missing_reviewer_problems = None
3618 if newly_covered_files and not _ChangeHasSecurityReviewer(
3619 input_api, required_owners_file):
3620 missing_reviewer_problems = _SecurityProblemWithItems(
3621 f'Review from an owner in {required_owners_file} is required for '
3622 'the following newly-added files:',
3623 [f'{file}' for file in sorted(set(newly_covered_files))])
3625 # Go through the OWNERS files to check, filtering out rules that are already
3626 # present in that OWNERS file.
3627 for owners_file, patterns in to_check.items():
3631 input_api.os_path.join(input_api.change.RepositoryRoot(),
3632 owners_file)).splitlines())
3633 for entry in patterns.values():
3635 rule for rule in entry['rules'] if rule not in lines
3638 # No OWNERS file, so all the rules are definitely missing.
3641 # All the remaining lines weren't found in OWNERS files, so emit an error.
3642 owners_file_problems = []
3644 for owners_file, patterns in to_check.items():
3647 for _, entry in patterns.items():
3648 files.extend(entry['files'])
3649 missing_lines.extend(entry['rules'])
3651 joined_missing_lines = '\n'.join(line for line in missing_lines)
3652 owners_file_problems.append(
3653 _SecurityProblemWithItems(
3654 'Found missing OWNERS lines for security-sensitive files. '
3655 f'Please add the following lines to {owners_file}:\n'
3656 f'{joined_missing_lines}\n\nTo ensure security review for:',
3659 return _MissingSecurityOwnersResult(owners_file_problems,
3660 has_security_sensitive_files,
3661 missing_reviewer_problems)
3664 def _CheckChangeForIpcSecurityOwners(input_api, output_api):
3665 # Whether or not a file affects IPC is (mostly) determined by a simple list
3666 # of filename patterns.
3671 '*_param_traits*.*',
3674 '*_mojom_traits*.*',
3675 '*_type_converter*.*',
3676 # Android native IPC:
3680 excluded_patterns = [
3681 # These third_party directories do not contain IPCs, but contain files
3682 # matching the above patterns, which trigger false positives.
3683 'third_party/crashpad/*',
3684 'third_party/blink/renderer/platform/bindings/*',
3685 'third_party/protobuf/benchmarks/python/*',
3686 'third_party/win_build_output/*',
3687 # Enum-only mojoms used for web metrics, so no security review needed.
3688 'third_party/blink/public/mojom/use_counter/metrics/*',
3689 # These files are just used to communicate between class loaders running
3690 # in the same process.
3691 'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
3692 'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',
3695 def IsMojoServiceManifestFile(input_api, file):
3696 manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
3697 test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
3698 if not manifest_pattern.search(file.LocalPath()):
3701 if test_manifest_pattern.search(file.LocalPath()):
3704 # All actual service manifest files should contain at least one
3705 # qualified reference to service_manager::Manifest.
3706 return any('service_manager::Manifest' in line
3707 for line in file.NewContents())
3709 return _FindMissingSecurityOwners(
3714 'ipc/SECURITY_OWNERS',
3715 custom_rule_function=IsMojoServiceManifestFile)
3718 def _CheckChangeForFuchsiaSecurityOwners(input_api, output_api):
# Applies the missing-security-owners check to Fuchsia component and FIDL
# files, enforcing build/fuchsia/SECURITY_OWNERS review.
# NOTE(review): some interior lines are elided in this excerpt.
3720 # Component specifications.
3721 '*.cml', # Component Framework v2.
3722 '*.cmx', # Component Framework v1.
3724 # Fuchsia IDL protocol specifications.
3728 # Don't check for owners files for changes in these directories.
3729 excluded_patterns = [
3730 'third_party/crashpad/*',
3733 return _FindMissingSecurityOwners(input_api, output_api, file_patterns,
3735 'build/fuchsia/SECURITY_OWNERS')
3738 def CheckSecurityOwners(input_api, output_api):
3739 """Checks that various security-sensitive files have an IPC OWNERS rule."""
# NOTE(review): some interior lines are elided in this excerpt.
3740 ipc_results = _CheckChangeForIpcSecurityOwners(input_api, output_api)
3741 fuchsia_results = _CheckChangeForFuchsiaSecurityOwners(
3742 input_api, output_api)
# Auto-CC the IPC security review list whenever the CL touches
# security-sensitive IPC files.
3744 if ipc_results.has_security_sensitive_files:
3745 output_api.AppendCC('ipc-security-reviews@chromium.org')
3749 missing_reviewer_problems = []
3750 if ipc_results.missing_reviewer_problem:
3751 missing_reviewer_problems.append(ipc_results.missing_reviewer_problem)
3752 if fuchsia_results.missing_reviewer_problem:
3753 missing_reviewer_problems.append(
3754 fuchsia_results.missing_reviewer_problem)
3756 # Missing reviewers are an error unless there's no issue number
3757 # associated with this branch; in that case, the presubmit is being run
3758 # with --all or --files.
3760 # Note that upload should never be an error; otherwise, it would be
3761 # impossible to upload changes at all.
3762 if input_api.is_committing and input_api.change.issue:
3763 make_presubmit_message = output_api.PresubmitError
3765 make_presubmit_message = output_api.PresubmitNotifyResult
3766 for problem in missing_reviewer_problems:
3768 make_presubmit_message(problem.problem, items=problem.items))
3770 owners_file_problems = []
3771 owners_file_problems.extend(ipc_results.owners_file_problems)
3772 owners_file_problems.extend(fuchsia_results.owners_file_problems)
3774 for problem in owners_file_problems:
3775 # Missing per-file rules are always an error. While swarming and caching
3776 # means that uploading a patchset with updated OWNERS files and sending
3777 # it to the CQ again should not have a large incremental cost, it is
3778 # still frustrating to discover the error only after the change has
3779 # already been uploaded.
3781 output_api.PresubmitError(problem.problem, items=problem.items))
3786 def _GetFilesUsingSecurityCriticalFunctions(input_api):
3787 """Checks affected files for changes to security-critical calls. This
3788 function checks the full change diff, to catch both additions/changes
3791 Returns a dict keyed by file name, and the value is a set of detected
# NOTE(review): some interior lines are elided in this excerpt.
3794 # Map of function pretty name (displayed in an error) to the pattern to
3796 _PATTERNS_TO_CHECK = {
3797 'content::GetServiceSandboxType<>()': 'GetServiceSandboxType\\<'
# Pre-compile each pattern once; keys stay the human-readable names.
3799 _PATTERNS_TO_CHECK = {
3800 k: input_api.re.compile(v)
3801 for k, v in _PATTERNS_TO_CHECK.items()
3804 # We don't want to trigger on strings within this file.
3805 def presubmit_file_filter(f):
3806 return 'PRESUBMIT.py' != input_api.os_path.split(f.LocalPath())[1]
3808 # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
3809 files_to_functions = {}
3810 for f in input_api.AffectedFiles(file_filter=presubmit_file_filter):
3811 diff = f.GenerateScmDiff()
3812 for line in diff.split('\n'):
3813 # Not using just RightHandSideLines() because removing a
3814 # call to a security-critical function can be just as important
3815 # as adding or changing the arguments.
# '+'/'-' are diff body lines; '++'/'--' would be diff headers.
3816 if line.startswith('-') or (line.startswith('+')
3817 and not line.startswith('++')):
3818 for name, pattern in _PATTERNS_TO_CHECK.items():
3819 if pattern.search(line):
3820 path = f.LocalPath()
3821 if not path in files_to_functions:
3822 files_to_functions[path] = set()
3823 files_to_functions[path].add(name)
3824 return files_to_functions
3827 def CheckSecurityChanges(input_api, output_api):
3828 """Checks that changes involving security-critical functions are reviewed
3829 by the security team.
# NOTE(review): some interior lines (early returns, inner loop header)
# appear elided in this excerpt.
3831 files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
3832 if not len(files_to_functions):
3835 owners_file = 'ipc/SECURITY_OWNERS'
# A present security reviewer satisfies the check.
3836 if _ChangeHasSecurityReviewer(input_api, owners_file):
3839 msg = 'The following files change calls to security-sensitive functions\n' \
3840 'that need to be reviewed by {}.\n'.format(owners_file)
3841 for path, names in files_to_functions.items():
3842 msg += ' {}\n'.format(path)
3844 msg += ' {}\n'.format(name)
# Hard error only when committing; informational during upload.
3847 if input_api.is_committing:
3848 output = output_api.PresubmitError
3850 output = output_api.PresubmitNotifyResult
3851 return [output(msg)]
3854 def CheckSetNoParent(input_api, output_api):
3855 """Checks that set noparent is only used together with an OWNERS file in
3856 //build/OWNERS.setnoparent (see also
3857 //docs/code_reviews.md#owners-files-details)
# NOTE(review): some interior lines are elided in this excerpt.
3859 # Return early if no OWNERS files were modified.
3860 if not any(f.LocalPath().endswith('OWNERS')
3861 for f in input_api.AffectedFiles(include_deletes=False)):
# Load the allowlist of OWNERS files that may accompany 'set noparent'.
3866 allowed_owners_files_file = 'build/OWNERS.setnoparent'
3867 allowed_owners_files = set()
3868 with open(allowed_owners_files_file, 'r', encoding='utf-8') as f:
3871 if not line or line.startswith('#'):
3873 allowed_owners_files.add(line)
3875 per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')
3877 for f in input_api.AffectedFiles(include_deletes=False):
3878 if not f.LocalPath().endswith('OWNERS'):
# Key '' represents the file-level (non-per-file) scope.
3881 found_owners_files = set()
3882 found_set_noparent_lines = dict()
3884 # Parse the OWNERS file.
3885 for lineno, line in enumerate(f.NewContents(), 1):
3887 if line.startswith('set noparent'):
3888 found_set_noparent_lines[''] = lineno
3889 if line.startswith('file://'):
3890 if line in allowed_owners_files:
3891 found_owners_files.add('')
3892 if line.startswith('per-file'):
3893 match = per_file_pattern.match(line)
3895 glob = match.group(1).strip()
3896 directive = match.group(2).strip()
3897 if directive == 'set noparent':
3898 found_set_noparent_lines[glob] = lineno
3899 if directive.startswith('file://'):
3900 if directive in allowed_owners_files:
3901 found_owners_files.add(glob)
3903 # Check that every set noparent line has a corresponding file:// line
3904 # listed in build/OWNERS.setnoparent. An exception is made for top level
3905 # directories since src/OWNERS shouldn't review them.
3906 linux_path = f.LocalPath().replace(input_api.os_path.sep, '/')
3907 if (linux_path.count('/') != 1
3908 and (not linux_path in _EXCLUDED_SET_NO_PARENT_PATHS)):
3909 for set_noparent_line in found_set_noparent_lines:
3910 if set_noparent_line in found_owners_files:
3912 errors.append(' %s:%d' %
3914 found_set_noparent_lines[set_noparent_line]))
# Hard error only when committing; warning during upload.
3918 if input_api.is_committing:
3919 output = output_api.PresubmitError
3921 output = output_api.PresubmitPromptWarning
3924 'Found the following "set noparent" restrictions in OWNERS files that '
3925 'do not include owners from build/OWNERS.setnoparent:',
3926 long_text='\n\n'.join(errors)))
3930 def CheckUselessForwardDeclarations(input_api, output_api):
3931 """Checks that added or removed lines in non third party affected
3932 header files do not lead to new useless class or struct forward
# NOTE(review): some interior lines are elided in this excerpt.
# A forward declaration on its own line: 'class Foo;' / 'struct Foo;'.
3936 class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
3937 input_api.re.MULTILINE)
3938 struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
3939 input_api.re.MULTILINE)
3940 for f in input_api.AffectedFiles(include_deletes=False):
# Skip third_party except blink (checked with either path separator).
3941 if (f.LocalPath().startswith('third_party')
3942 and not f.LocalPath().startswith('third_party/blink')
3943 and not f.LocalPath().startswith('third_party\\blink')):
3946 if not f.LocalPath().endswith('.h'):
3949 contents = input_api.ReadFile(f)
3950 fwd_decls = input_api.re.findall(class_pattern, contents)
3951 fwd_decls.extend(input_api.re.findall(struct_pattern, contents))
# A name that appears only in its own declaration is unused in this
# header (the count includes the declaration itself).
3953 useless_fwd_decls = []
3954 for decl in fwd_decls:
3955 count = sum(1 for _ in input_api.re.finditer(
3956 r'\b%s\b' % input_api.re.escape(decl), contents))
3958 useless_fwd_decls.append(decl)
3960 if not useless_fwd_decls:
3963 for line in f.GenerateScmDiff().splitlines():
3964 if (line.startswith('-') and not line.startswith('--')
3965 or line.startswith('+') and not line.startswith('++')):
3966 for decl in useless_fwd_decls:
3967 if input_api.re.search(r'\b%s\b' % decl, line[1:]):
3969 output_api.PresubmitPromptWarning(
3970 '%s: %s forward declaration is no longer needed'
3971 % (f.LocalPath(), decl)))
# NOTE(review): removing from useless_fwd_decls while iterating it
# skips the element after the removed one — confirm this is intended
# (it only suppresses duplicate warnings, so impact looks benign).
3972 useless_fwd_decls.remove(decl)
3977 def _CheckAndroidDebuggableBuild(input_api, output_api):
3978 """Checks that code uses BuildInfo.isDebugAndroid() instead of
3979 Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
3980 this is a debuggable build of Android.
# NOTE(review): some interior lines are elided in this excerpt.
# Matches both Build.TYPE.equals(...) and ....equals(Build.TYPE).
3982 build_type_check_pattern = input_api.re.compile(
3983 r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
3987 sources = lambda affected_file: input_api.FilterSourceFile(
# Skip Java that cannot depend on //base (and thus BuildInfo).
3990 _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
3991 DEFAULT_FILES_TO_SKIP + (
3992 r"^android_webview/support_library/boundary_interfaces/",
3993 r"^chrome/android/webapk/.*",
3995 r"tools/android/customtabs_benchmark/.*",
3996 r"webview/chromium/License.*",
3998 files_to_check=[r'.*\.java$'])
4000 for f in input_api.AffectedSourceFiles(sources):
4001 for line_num, line in f.ChangedContents():
4002 if build_type_check_pattern.search(line):
4003 errors.append("%s:%d" % (f.LocalPath(), line_num))
4009 output_api.PresubmitPromptWarning(
4010 'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
4011 ' Please use BuildInfo.isDebugAndroid() instead.', errors))
4015 # TODO: add unit tests
4016 def _CheckAndroidToastUsage(input_api, output_api):
4017 """Checks that code uses org.chromium.ui.widget.Toast instead of
4018 android.widget.Toast (Chromium Toast doesn't force hardware
4019 acceleration on low-end devices, saving memory).
# NOTE(review): some interior lines are elided in this excerpt.
# Only the exact import line is flagged, not qualified usages.
4021 toast_import_pattern = input_api.re.compile(
4022 r'^import android\.widget\.Toast;$')
4026 sources = lambda affected_file: input_api.FilterSourceFile(
4028 files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
4029 DEFAULT_FILES_TO_SKIP + (r'^chromecast/.*',
4031 files_to_check=[r'.*\.java$'])
4033 for f in input_api.AffectedSourceFiles(sources):
4034 for line_num, line in f.ChangedContents():
4035 if toast_import_pattern.search(line):
4036 errors.append("%s:%d" % (f.LocalPath(), line_num))
4042 output_api.PresubmitError(
4043 'android.widget.Toast usage is detected. Android toasts use hardware'
4044 ' acceleration, and can be\ncostly on low-end devices. Please use'
4045 ' org.chromium.ui.widget.Toast instead.\n'
4046 'Contact dskiba@chromium.org if you have any questions.',
4052 def _CheckAndroidCrLogUsage(input_api, output_api):
4053 """Checks that new logs using org.chromium.base.Log:
4054 - Are using 'TAG' as variable name for the tags (warn)
4055 - Are using a tag that is shorter than 20 characters (error)
# NOTE(review): some interior lines of this function are elided in this
# excerpt (gaps in the original numbering).
4058 # Do not check format of logs in the given files
4059 cr_log_check_excluded_paths = [
4060 # //chrome/android/webapk cannot depend on //base
4061 r"^chrome/android/webapk/.*",
4062 # WebView license viewer code cannot depend on //base; used in stub APK.
4063 r"^android_webview/glue/java/src/com/android/"
4064 r"webview/chromium/License.*",
4065 # The customtabs_benchmark is a small app that does not depend on Chromium
4067 r"tools/android/customtabs_benchmark/.*",
4070 cr_log_import_pattern = input_api.re.compile(
4071 r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
4072 class_in_base_pattern = input_api.re.compile(
4073 r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
4074 has_some_log_import_pattern = input_api.re.compile(r'^import .*\.Log;$',
4075 input_api.re.MULTILINE)
4076 # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
4077 log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
4078 log_decl_pattern = input_api.re.compile(
4079 r'static final String TAG = "(?P<name>(.*))"')
4080 rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')
4082 REF_MSG = ('See docs/android_logging.md for more info.')
4083 sources = lambda x: input_api.FilterSourceFile(
4085 files_to_check=[r'.*\.java$'],
4086 files_to_skip=cr_log_check_excluded_paths)
4088 tag_decl_errors = []
4089 tag_length_errors = []
4091 tag_with_dot_errors = []
4092 util_log_errors = []
4094 for f in input_api.AffectedSourceFiles(sources):
4095 file_content = input_api.ReadFile(f)
4096 has_modified_logs = False
# A file "uses cr log" if it imports org.chromium.base.Log, or lives in
# org.chromium.base itself without importing any other Log class.
4098 if (cr_log_import_pattern.search(file_content)
4099 or (class_in_base_pattern.search(file_content)
4100 and not has_some_log_import_pattern.search(file_content))):
4101 # Checks to run for files using cr log
4102 for line_num, line in f.ChangedContents():
4103 if rough_log_decl_pattern.search(line):
4104 has_modified_logs = True
4106 # Check if the new line is doing some logging
4107 match = log_call_pattern.search(line)
4109 has_modified_logs = True
4111 # Make sure it uses "TAG"
4112 if not match.group('tag') == 'TAG':
4113 tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
4115 # Report non cr Log function calls in changed lines
4116 for line_num, line in f.ChangedContents():
4117 if log_call_pattern.search(line):
4118 util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))
# Tag declaration is validated once per file, only when the file's
# logging was actually touched.
4121 if has_modified_logs:
4122 # Make sure the tag is using the "cr" prefix and is not too long
4123 match = log_decl_pattern.search(file_content)
4124 tag_name = match.group('name') if match else None
4126 tag_decl_errors.append(f.LocalPath())
4127 elif len(tag_name) > 20:
4128 tag_length_errors.append(f.LocalPath())
4129 elif '.' in tag_name:
4130 tag_with_dot_errors.append(f.LocalPath())
4135 output_api.PresubmitPromptWarning(
4136 'Please define your tags using the suggested format: .\n'
4137 '"private static final String TAG = "<package tag>".\n'
4138 'They will be prepended with "cr_" automatically.\n' + REF_MSG,
4141 if tag_length_errors:
4143 output_api.PresubmitError(
4144 'The tag length is restricted by the system to be at most '
4145 '20 characters.\n' + REF_MSG, tag_length_errors))
4149 output_api.PresubmitPromptWarning(
4150 'Please use a variable named "TAG" for your log tags.\n' +
4151 REF_MSG, tag_errors))
4155 output_api.PresubmitPromptWarning(
4156 'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
4159 if tag_with_dot_errors:
4161 output_api.PresubmitPromptWarning(
4162 'Dot in log tags cause them to be elided in crash reports.\n' +
4163 REF_MSG, tag_with_dot_errors))
def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
    """Checks that junit.framework.* is no longer used.

    Returns a list with a single PresubmitError naming every changed Java
    line that still imports the deprecated JUnit3 framework, or an empty
    list when the change is clean.
    """
    deprecated_junit_framework_pattern = input_api.re.compile(
        r'^import junit\.framework\..*;', input_api.re.MULTILINE)
    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*\.java$'], files_to_skip=None)
    errors = []
    for f in input_api.AffectedFiles(file_filter=sources):
        for line_num, line in f.ChangedContents():
            if deprecated_junit_framework_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
                '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
                ' if you have any question.', errors))
    return results
def _CheckAndroidTestJUnitInheritance(input_api, output_api):
    """Checks that if new Java test classes have inheritance.
    Either the new test class is JUnit3 test or it is a JUnit4 test class
    with a base class, either case is undesirable.
    """
    class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')

    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*Test\.java$'], files_to_skip=None)
    errors = []
    for f in input_api.AffectedFiles(file_filter=sources):
        # Only scan brand-new files; pre-existing tests are grandfathered in.
        if not f.OldContents():
            class_declaration_start_flag = False
            for line_num, line in f.ChangedContents():
                if class_declaration_pattern.search(line):
                    class_declaration_start_flag = True
                if class_declaration_start_flag and ' extends ' in line:
                    errors.append('%s:%d' % (f.LocalPath(), line_num))
                # The declaration ends at the opening brace; stop flagging then.
                if line.endswith('{'):
                    class_declaration_start_flag = False

    results = []
    if errors:
        results.append(
            output_api.PresubmitPromptWarning(
                'The newly created files include Test classes that inherits from base'
                ' class. Please do not use inheritance in JUnit4 tests or add new'
                ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
                ' questions.', errors))
    return results
def _CheckAndroidTestAnnotationUsage(input_api, output_api):
    """Checks that android.test.suitebuilder.annotation.* is no longer used."""
    deprecated_annotation_import_pattern = input_api.re.compile(
        r'^import android\.test\.suitebuilder\.annotation\..*;',
        input_api.re.MULTILINE)
    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*\.java$'], files_to_skip=None)
    errors = []
    for f in input_api.AffectedFiles(file_filter=sources):
        for line_num, line in f.ChangedContents():
            if deprecated_annotation_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'Annotations in android.test.suitebuilder.annotation have been'
                ' deprecated since API level 24. Please use androidx.test.filters'
                ' from //third_party/androidx:androidx_test_runner_java instead.'
                ' Contact yolandyan@chromium.org if you have any questions.',
                errors))
    return results
def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
    """Checks if MDPI assets are placed in a correct directory."""
    # Flag .png files landing in density-unqualified drawable directories.
    file_filter = lambda f: (f.LocalPath().endswith(
        '.png') and ('/res/drawable/'.replace('/', input_api.os_path.sep) in f.
                     LocalPath() or '/res/drawable-ldrtl/'.replace(
                         '/', input_api.os_path.sep) in f.LocalPath()))
    errors = []
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append('    %s' % f.LocalPath())

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'MDPI assets should be placed in /res/drawable-mdpi/ or '
                '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
                '/res/drawable-ldrtl/.\n'
                'Contact newt@chromium.org if you have questions.', errors))
    return results
def _CheckAndroidWebkitImports(input_api, output_api):
    """Checks that code uses org.chromium.base.Callback instead of
    android.webview.ValueCallback except in the WebView glue layer.
    """
    valuecallback_import_pattern = input_api.re.compile(
        r'^import android\.webkit\.ValueCallback;$')

    errors = []

    sources = lambda affected_file: input_api.FilterSourceFile(
        affected_file,
        files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
                       DEFAULT_FILES_TO_SKIP + (
                           r'^android_webview/glue/.*',
                       )),
        files_to_check=[r'.*\.java$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if valuecallback_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

    results = []
    if errors:
        results.append(
            output_api.PresubmitError(
                'android.webkit.ValueCallback usage is detected outside of the glue'
                ' layer. To stay compatible with the support library, android.webkit.*'
                ' classes should only be used inside the glue layer and'
                ' org.chromium.base.Callback should be used instead.', errors))
    return results
def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
    """Checks Android XML styles """

    # Return early if no relevant files were modified.
    if not any(
            _IsXmlOrGrdFile(input_api, f.LocalPath())
            for f in input_api.AffectedFiles(include_deletes=False)):
        return []

    try:
        import sys
        original_sys_path = sys.path
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'checkxmlstyle')
        ]
        import checkxmlstyle
    finally:
        # Restore sys.path to what it was before.
        sys.path = original_sys_path

    if is_check_on_upload:
        return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
    else:
        return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
def _CheckAndroidInfoBarDeprecation(input_api, output_api):
    """Checks Android Infobar Deprecation """

    try:
        import sys
        original_sys_path = sys.path
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'infobar_deprecation')
        ]
        import infobar_deprecation
    finally:
        # Restore sys.path to what it was before.
        sys.path = original_sys_path

    return infobar_deprecation.CheckDeprecationOnUpload(input_api, output_api)
class _PydepsCheckerResult:
    """Holds an in-flight print_python_deps.py invocation for one .pydeps file."""

    def __init__(self, cmd, pydeps_path, process, old_contents):
        self._cmd = cmd
        self._pydeps_path = pydeps_path
        self._process = process
        self._old_contents = old_contents

    def GetError(self):
        """Returns an error message, or None."""
        import difflib
        if self._process.wait() != 0:
            # STDERR should already be printed.
            return 'Command failed: ' + self._cmd
        # The first two lines of a .pydeps file are headers; compare bodies.
        new_contents = self._process.stdout.read().splitlines()[2:]
        if self._old_contents != new_contents:
            diff = '\n'.join(
                difflib.context_diff(self._old_contents, new_contents))
            return ('File is stale: {}\n'
                    'Diff (apply to fix):\n'
                    '{}\n'
                    'To regenerate, run:\n\n'
                    '    {}').format(self._pydeps_path, diff, self._cmd)
        return None
class PydepsChecker:
    """Computes which .pydeps files a CL affects and whether they are stale."""

    def __init__(self, input_api, pydeps_files):
        self._file_cache = {}
        self._input_api = input_api
        self._pydeps_files = pydeps_files

    def _LoadFile(self, path):
        """Returns the list of paths within a .pydeps file relative to //."""
        if path not in self._file_cache:
            with open(path, encoding='utf-8') as f:
                self._file_cache[path] = f.read()
        return self._file_cache[path]

    def _ComputeNormalizedPydepsEntries(self, pydeps_path):
        """Returns an iterable of paths within the .pydep, relativized to //."""
        pydeps_data = self._LoadFile(pydeps_path)
        uses_gn_paths = '--gn-paths' in pydeps_data
        entries = (l for l in pydeps_data.splitlines()
                   if not l.startswith('#'))
        if uses_gn_paths:
            # Paths look like: //foo/bar/baz
            return (e[2:] for e in entries)
        else:
            # Paths look like: path/relative/to/file.pydeps
            os_path = self._input_api.os_path
            pydeps_dir = os_path.dirname(pydeps_path)
            return (os_path.normpath(os_path.join(pydeps_dir, e))
                    for e in entries)

    def _CreateFilesToPydepsMap(self):
        """Returns a map of local_path -> list_of_pydeps."""
        ret = {}
        for pydep_local_path in self._pydeps_files:
            for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
                ret.setdefault(path, []).append(pydep_local_path)
        return ret

    def ComputeAffectedPydeps(self):
        """Returns an iterable of .pydeps files that might need regenerating."""
        affected_pydeps = set()
        file_to_pydeps_map = None
        for f in self._input_api.AffectedFiles(include_deletes=True):
            local_path = f.LocalPath()
            # Changes to DEPS can lead to .pydeps changes if any .py files are in
            # subrepositories. We can't figure out which files change, so re-check
            # all files.
            # Changes to print_python_deps.py affect all .pydeps.
            if local_path in ('DEPS', 'PRESUBMIT.py'
                              ) or local_path.endswith('print_python_deps.py'):
                return self._pydeps_files
            elif local_path.endswith('.pydeps'):
                if local_path in self._pydeps_files:
                    affected_pydeps.add(local_path)
            elif local_path.endswith('.py'):
                if file_to_pydeps_map is None:
                    file_to_pydeps_map = self._CreateFilesToPydepsMap()
                affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
        return affected_pydeps

    def DetermineIfStaleAsync(self, pydeps_path):
        """Runs print_python_deps.py to see if the files is stale."""
        import os

        old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
        if old_pydeps_data:
            # Line 1 of the .pydeps file records the command that generated it.
            cmd = old_pydeps_data[1][1:].strip()
            if '--output' not in cmd:
                cmd += ' --output ' + pydeps_path
            old_contents = old_pydeps_data[2:]
        else:
            # A default cmd that should work in most cases (as long as pydeps filename
            # matches the script name) so that PRESUBMIT.py does not crash if pydeps
            # file is empty/new.
            cmd = 'build/print_python_deps.py {} --root={} --output={}'.format(
                pydeps_path[:-4], os.path.dirname(pydeps_path), pydeps_path)
            old_contents = []
        env = dict(os.environ)
        env['PYTHONDONTWRITEBYTECODE'] = '1'
        process = self._input_api.subprocess.Popen(
            cmd + ' --output ""',
            shell=True,
            env=env,
            stdout=self._input_api.subprocess.PIPE,
            encoding='utf-8')
        return _PydepsCheckerResult(cmd, pydeps_path, process, old_contents)
def _ParseGclientArgs():
    """Parses build/config/gclient_args.gni into a {name: raw_value} dict.

    Values are kept as unparsed strings (e.g. 'true'), since callers only
    compare them textually.
    """
    args = {}
    with open('build/config/gclient_args.gni', 'r') as f:
        for line in f:
            line = line.strip()
            # Skip blanks and comments.
            if not line or line.startswith('#'):
                continue
            attribute, value = line.split('=')
            args[attribute.strip()] = value.strip()
    return args
def CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
    """Checks if a .pydeps file needs to be regenerated."""
    # This check is for Python dependency lists (.pydeps files), and involves
    # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
    # doesn't work on Windows and Mac, so skip it on other platforms.
    if not input_api.platform.startswith('linux'):
        return []

    results = []
    # First, check for new / deleted .pydeps.
    for f in input_api.AffectedFiles(include_deletes=True):
        # Check whether we are running the presubmit check for a file in src.
        # f.LocalPath is relative to repo (src, or internal repo).
        # os_path.exists is relative to src repo.
        # Therefore if os_path.exists is true, it means f.LocalPath is relative
        # to src and we can conclude that the pydeps is in src.
        if f.LocalPath().endswith('.pydeps'):
            if input_api.os_path.exists(f.LocalPath()):
                if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
                    results.append(
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'remove %s' % f.LocalPath()))
                elif f.Action() != 'D' and f.LocalPath(
                ) not in _ALL_PYDEPS_FILES:
                    results.append(
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'include %s' % f.LocalPath()))

    if results:
        return results

    is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
    checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
    affected_pydeps = set(checker.ComputeAffectedPydeps())
    affected_android_pydeps = affected_pydeps.intersection(
        set(_ANDROID_SPECIFIC_PYDEPS_FILES))
    if affected_android_pydeps and not is_android:
        results.append(
            output_api.PresubmitPromptOrNotify(
                'You have changed python files that may affect pydeps for android\n'
                'specific scripts. However, the relevant presubmit check cannot be\n'
                'run because you are not using an Android checkout. To validate that\n'
                'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
                'use the android-internal-presubmit optional trybot.\n'
                'Possibly stale pydeps files:\n{}'.format(
                    '\n'.join(affected_android_pydeps))))

    all_pydeps = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
    pydeps_to_check = affected_pydeps.intersection(all_pydeps)
    # Process these concurrently, as each one takes 1-2 seconds.
    pydep_results = [checker.DetermineIfStaleAsync(p) for p in pydeps_to_check]
    for result in pydep_results:
        error_msg = result.GetError()
        if error_msg:
            results.append(output_api.PresubmitError(error_msg))

    return results
def CheckSingletonInHeaders(input_api, output_api):
    """Checks to make sure no header files have |Singleton<|."""

    def FileFilter(affected_file):
        # It's ok for base/memory/singleton.h to have |Singleton<|.
        files_to_skip = (_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP +
                         (r"^base/memory/singleton\.h$",
                          r"^net/quic/platform/impl/quic_singleton_impl\.h$"))
        return input_api.FilterSourceFile(affected_file,
                                          files_to_skip=files_to_skip)

    # Negative lookbehind keeps friend declarations of the template legal.
    pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
    files = []
    for f in input_api.AffectedSourceFiles(FileFilter):
        if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx')
                or f.LocalPath().endswith('.hpp')
                or f.LocalPath().endswith('.inl')):
            contents = input_api.ReadFile(f)
            for line in contents.splitlines(False):
                if (not line.lstrip().startswith('//')
                        and  # Strip C++ comment.
                        pattern.search(line)):
                    files.append(f)
                    break

    if files:
        return [
            output_api.PresubmitError(
                'Found base::Singleton<T> in the following header files.\n' +
                'Please move them to an appropriate source file so that the ' +
                'template gets instantiated in a single compilation unit.',
                files)
        ]
    return []
4574 ( "-webkit-box", "flex" ),
4575 ( "-webkit-inline-box", "inline-flex" ),
4576 ( "-webkit-flex", "flex" ),
4577 ( "-webkit-inline-flex", "inline-flex" ),
4578 ( "-webkit-min-content", "min-content" ),
4579 ( "-webkit-max-content", "max-content" ),
4582 ( "-webkit-background-clip", "background-clip" ),
4583 ( "-webkit-background-origin", "background-origin" ),
4584 ( "-webkit-background-size", "background-size" ),
4585 ( "-webkit-box-shadow", "box-shadow" ),
4586 ( "-webkit-user-select", "user-select" ),
4589 ( "-webkit-gradient", "gradient" ),
4590 ( "-webkit-repeating-gradient", "repeating-gradient" ),
4591 ( "-webkit-linear-gradient", "linear-gradient" ),
4592 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
4593 ( "-webkit-radial-gradient", "radial-gradient" ),
4594 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
# TODO: add unit tests
def CheckNoDeprecatedCss(input_api, output_api):
    """ Make sure that we don't use deprecated CSS
        properties, functions or values. Our external
        documentation and iOS CSS for dom distiller
        (reader mode) are ignored by the hooks as it
        needs to be consumed by WebKit. """
    results = []
    file_inclusion_pattern = [r".+\.css$"]
    files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP +
                     (r"^chrome/common/extensions/docs", r"^chrome/docs",
                      r"^native_client_sdk"))
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
    for fpath in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in fpath.ChangedContents():
            for (deprecated_value, value) in _DEPRECATED_CSS:
                if deprecated_value in line:
                    results.append(
                        output_api.PresubmitError(
                            "%s:%d: Use of deprecated CSS %s, use %s instead" %
                            (fpath.LocalPath(), line_num, deprecated_value,
                             value)))
    return results
def CheckForRelativeIncludes(input_api, output_api):
    """Flags C++ #include lines that use "../" relative paths."""
    bad_files = {}
    for f in input_api.AffectedFiles(include_deletes=False):
        # Third-party code (other than Blink) may do whatever it wants.
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
            continue

        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
            continue

        relative_includes = [
            line for _, line in f.ChangedContents()
            if "#include" in line and "../" in line
        ]
        if not relative_includes:
            continue
        bad_files[f.LocalPath()] = relative_includes

    if not bad_files:
        return []

    error_descriptions = []
    for file_path, bad_lines in bad_files.items():
        error_description = file_path
        for line in bad_lines:
            error_description += '\n    ' + line
        error_descriptions.append(error_description)

    results = []
    results.append(
        output_api.PresubmitError(
            'You added one or more relative #include paths (including "../").\n'
            'These shouldn\'t be used because they can be used to include headers\n'
            'from code that\'s not correctly specified as a dependency in the\n'
            'relevant BUILD.gn file(s).', error_descriptions))

    return results
def CheckForCcIncludes(input_api, output_api):
    """Check that nobody tries to include a cc file. It's a relatively
    common error which results in duplicate symbols in object
    files. This may not always break the build until someone later gets
    very confusing linking errors."""
    results = []
    for f in input_api.AffectedFiles(include_deletes=False):
        # We let third_party code do whatever it wants
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
            continue

        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
            continue

        for _, line in f.ChangedContents():
            if line.startswith('#include "'):
                included_file = line.split('"')[1]
                if _IsCPlusPlusFile(input_api, included_file):
                    # The most common naming for external files with C++ code,
                    # apart from standard headers, is to call them foo.inc, but
                    # Chromium sometimes uses foo-inc.cc so allow that as well.
                    if not included_file.endswith(('.h', '-inc.cc')):
                        results.append(
                            output_api.PresubmitError(
                                'Only header files or .inc files should be included in other\n'
                                'C++ files. Compiling the contents of a cc file more than once\n'
                                'will cause duplicate information in the build which may later\n'
                                'result in strange link_errors.\n' +
                                f.LocalPath() + ':\n    ' + line))

    return results
def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
    """Validates one WATCHLIST_DEFINITIONS entry; returns an error string or None."""
    if not isinstance(key, ast.Str):
        return 'Key at line %d must be a string literal' % key.lineno
    if not isinstance(value, ast.Dict):
        return 'Value at line %d must be a dict' % value.lineno
    if len(value.keys) != 1:
        return 'Dict at line %d must have single entry' % value.lineno
    if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
        return (
            'Entry at line %d must have a string literal \'filepath\' as key' %
            value.lineno)
    return None
def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
    """Validates one WATCHLISTS entry; returns an error string or None."""
    if not isinstance(key, ast.Str):
        return 'Key at line %d must be a string literal' % key.lineno
    if not isinstance(value, ast.List):
        return 'Value at line %d must be a list' % value.lineno
    for element in value.elts:
        if not isinstance(element, ast.Str):
            return 'Watchlist elements on line %d is not a string' % key.lineno
        if not email_regex.match(element.s):
            return ('Watchlist element on line %d doesn\'t look like a valid '
                    + 'email: %s') % (key.lineno, element.s)
    return None
def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
    """Cross-checks WATCHLIST_DEFINITIONS against WATCHLISTS entry by entry.

    Returns an error string describing the first mismatch, or None when the
    two dicts are consistent.
    """
    mismatch_template = (
        'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
        'entry (%s)')

    email_regex = input_api.re.compile(
        r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")

    ast = input_api.ast
    i = 0
    last_key = ''
    while True:
        if i >= len(wd_dict.keys):
            if i >= len(w_dict.keys):
                return None
            return mismatch_template % ('missing',
                                        'line %d' % w_dict.keys[i].lineno)
        elif i >= len(w_dict.keys):
            return (mismatch_template %
                    ('line %d' % wd_dict.keys[i].lineno, 'missing'))

        wd_key = wd_dict.keys[i]
        w_key = w_dict.keys[i]

        result = _CheckWatchlistDefinitionsEntrySyntax(wd_key,
                                                       wd_dict.values[i], ast)
        if result is not None:
            return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result

        result = _CheckWatchlistsEntrySyntax(w_key, w_dict.values[i], ast,
                                             email_regex)
        if result is not None:
            return 'Bad entry in WATCHLISTS dict: %s' % result

        if wd_key.s != w_key.s:
            return mismatch_template % ('%s at line %d' %
                                        (wd_key.s, wd_key.lineno),
                                        '%s at line %d' %
                                        (w_key.s, w_key.lineno))

        if wd_key.s < last_key:
            return (
                'WATCHLISTS dict is not sorted lexicographically at line %d and %d'
                % (wd_key.lineno, w_key.lineno))
        last_key = wd_key.s
        i = i + 1
def _CheckWATCHLISTSSyntax(expression, input_api):
    """Validates the parsed WATCHLISTS AST; returns an error string or None."""
    ast = input_api.ast
    if not isinstance(expression, ast.Expression):
        return 'WATCHLISTS file must contain a valid expression'
    dictionary = expression.body
    if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
        return 'WATCHLISTS file must have single dict with exactly two entries'

    first_key = dictionary.keys[0]
    first_value = dictionary.values[0]
    second_key = dictionary.keys[1]
    second_value = dictionary.values[1]

    if (not isinstance(first_key, ast.Str)
            or first_key.s != 'WATCHLIST_DEFINITIONS'
            or not isinstance(first_value, ast.Dict)):
        return ('The first entry of the dict in WATCHLISTS file must be '
                'WATCHLIST_DEFINITIONS dict')

    if (not isinstance(second_key, ast.Str) or second_key.s != 'WATCHLISTS'
            or not isinstance(second_value, ast.Dict)):
        return ('The second entry of the dict in WATCHLISTS file must be '
                'WATCHLISTS dict')

    return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
def CheckWATCHLISTS(input_api, output_api):
    """Ensures a modified WATCHLISTS file still parses and is well-styled."""
    for f in input_api.AffectedFiles(include_deletes=False):
        if f.LocalPath() == 'WATCHLISTS':
            contents = input_api.ReadFile(f, 'r')

            try:
                # First, make sure that it can be evaluated.
                input_api.ast.literal_eval(contents)
                # Get an AST tree for it and scan the tree for detailed style checking.
                expression = input_api.ast.parse(contents,
                                                 filename='WATCHLISTS',
                                                 mode='eval')
            except ValueError as e:
                return [
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
                                              long_text=repr(e))
                ]
            except SyntaxError as e:
                return [
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
                                              long_text=repr(e))
                ]
            except TypeError as e:
                return [
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
                                              long_text=repr(e))
                ]

            result = _CheckWATCHLISTSSyntax(expression, input_api)
            if result is not None:
                return [output_api.PresubmitError(result)]
            break

    return []
def CheckGnRebasePath(input_api, output_api):
    """Checks that target_gen_dir is not used with "//" in rebase_path().

    Developers should use root_build_dir instead of "//" when using target_gen_dir because
    Chromium is sometimes built outside of the source tree.
    """

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))

    rebase_path_regex = input_api.re.compile(
        r'rebase_path\(("\$target_gen_dir"|target_gen_dir), ("/"|"//")\)')
    problems = []
    for f in input_api.AffectedSourceFiles(gn_files):
        for line_num, line in f.ChangedContents():
            if rebase_path_regex.search(line):
                problems.append(
                    'Absolute path in rebase_path() in %s:%d' %
                    (f.LocalPath(), line_num))

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Using an absolute path in rebase_path()',
                items=sorted(problems),
                long_text=(
                    'rebase_path() should use root_build_dir instead of "/" ',
                    'since builds can be initiated from outside of the source ',
                    'root.'))
        ]
    return []
def CheckGnGlobForward(input_api, output_api):
    """Checks that forward_variables_from(invoker, "*") follows best practices.

    As documented at //build/docs/writing_gn_templates.md
    """

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gni', ))

    problems = []
    for f in input_api.AffectedSourceFiles(gn_files):
        for line_num, line in f.ChangedContents():
            if 'forward_variables_from(invoker, "*")' in line:
                problems.append(
                    'Bare forward_variables_from(invoker, "*") in %s:%d' %
                    (f.LocalPath(), line_num))

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'forward_variables_from("*") without exclusions',
                items=sorted(problems),
                long_text=(
                    'The variables "visibility" and "test_only" should be '
                    'explicitly listed in forward_variables_from(). For more '
                    'details, see:\n'
                    'https://chromium.googlesource.com/chromium/src/+/HEAD/'
                    'build/docs/writing_gn_templates.md'
                    '#Using-forward_variables_from'))
        ]
    return []
def CheckNewHeaderWithoutGnChangeOnUpload(input_api, output_api):
    """Checks that newly added header files have corresponding GN changes.
    Note that this is only a heuristic. To be precise, run script:
    build/check_gn_headers.py.
    """

    def headers(f):
        return input_api.FilterSourceFile(
            f, files_to_check=(r'.+%s' % _HEADER_EXTENSIONS, ))

    new_headers = []
    for f in input_api.AffectedSourceFiles(headers):
        if f.Action() != 'A':
            continue
        new_headers.append(f.LocalPath())

    def gn_files(f):
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))

    all_gn_changed_contents = ''
    for f in input_api.AffectedSourceFiles(gn_files):
        for _, line in f.ChangedContents():
            all_gn_changed_contents += line

    problems = []
    for header in new_headers:
        basename = input_api.os_path.basename(header)
        if basename not in all_gn_changed_contents:
            problems.append(header)

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Missing GN changes for new header files',
                items=sorted(problems),
                long_text=
                'Please double check whether newly added header files need '
                'corresponding changes in gn or gni files.\nThis checking is only a '
                'heuristic. Run build/check_gn_headers.py to be precise.\n'
                'Read https://crbug.com/661774 for more info.')
        ]
    return []
def CheckCorrectProductNameInMessages(input_api, output_api):
    """Check that Chromium-branded strings don't include "Chrome" or vice versa.

    This assumes we won't intentionally reference one product from the other
    in these string files.
    """
    all_problems = []
    test_cases = [{
        "filename_postfix": "google_chrome_strings.grd",
        "correct_name": "Chrome",
        "incorrect_name": "Chromium",
    }, {
        "filename_postfix": "chromium_strings.grd",
        "correct_name": "Chromium",
        "incorrect_name": "Chrome",
    }]

    for test_case in test_cases:
        problems = []
        filename_filter = lambda x: x.LocalPath().endswith(test_case[
            "filename_postfix"])

        # Check each new line. Can yield false positives in multiline comments, but
        # easier than trying to parse the XML because messages can have nested
        # children, and associating message elements with affected lines is hard.
        for f in input_api.AffectedSourceFiles(filename_filter):
            for line_num, line in f.ChangedContents():
                # Skip tag/comment lines; only message text is checked.
                if "<message" in line or "<!--" in line or "-->" in line:
                    continue
                if test_case["incorrect_name"] in line:
                    problems.append("Incorrect product name in %s:%d" %
                                    (f.LocalPath(), line_num))

        if problems:
            message = (
                "Strings in %s-branded string files should reference \"%s\", not \"%s\""
                % (test_case["correct_name"], test_case["correct_name"],
                   test_case["incorrect_name"]))
            all_problems.append(
                output_api.PresubmitPromptWarning(message, items=problems))

    return all_problems
def CheckForTooLargeFiles(input_api, output_api):
    """Avoid large files, especially binary files, in the repository since
    git doesn't scale well for those. They will be in everyone's repo
    clones forever, forever making Chromium slower to clone and work
    with."""
    # Uploading files to cloud storage is not trivial so we don't want
    # to set the limit too low, but the upper limit for "normal" large
    # files seems to be 1-2 MB, with a handful around 5-8 MB, so
    # anything over 20 MB is exceptional.
    TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024

    too_large_files = []
    for f in input_api.AffectedFiles():
        # Check both added and modified files (but not deleted files).
        if f.Action() in ('A', 'M'):
            size = input_api.os_path.getsize(f.AbsoluteLocalPath())
            if size > TOO_LARGE_FILE_SIZE_LIMIT:
                too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))

    if too_large_files:
        message = (
            'Do not commit large files to git since git scales badly for those.\n'
            +
            'Instead put the large files in cloud storage and use DEPS to\n' +
            'fetch them.\n' + '\n'.join(too_large_files))
        return [
            output_api.PresubmitError('Too large files found in commit',
                                      long_text=message + '\n')
        ]
    return []
def CheckFuzzTargetsOnUpload(input_api, output_api):
    """Checks specific for fuzz target sources."""
    EXPORTED_SYMBOLS = [
        'LLVMFuzzerInitialize',
        'LLVMFuzzerCustomMutator',
        'LLVMFuzzerCustomCrossOver',
        'LLVMFuzzerMutate',
    ]

    REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'

    def FilterFile(affected_file):
        """Ignore libFuzzer source code."""
        files_to_check = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
        files_to_skip = r"^third_party/libFuzzer"

        return input_api.FilterSourceFile(affected_file,
                                          files_to_check=[files_to_check],
                                          files_to_skip=[files_to_skip])

    files_with_missing_header = []
    for f in input_api.AffectedSourceFiles(FilterFile):
        contents = input_api.ReadFile(f, 'r')
        if REQUIRED_HEADER in contents:
            continue

        if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
            files_with_missing_header.append(f.LocalPath())

    if not files_with_missing_header:
        return []

    long_text = (
        'If you define any of the libFuzzer optional functions (%s), it is '
        'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
        'work incorrectly on Mac (crbug.com/687076).\nNote that '
        'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
        'to access command line arguments passed to the fuzzer. Instead, prefer '
        'static initialization and shared resources as documented in '
        'https://chromium.googlesource.com/chromium/src/+/main/testing/'
        'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n'
        % (', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER))

    return [
        output_api.PresubmitPromptWarning(message="Missing '%s' in:" %
                                          REQUIRED_HEADER,
                                          items=files_with_missing_header,
                                          long_text=long_text)
    ]
def _CheckNewImagesWarning(input_api, output_api):
    """
    Warns authors who add images into the repo to make sure their images are
    optimized before committing.
    """
    images_added = False
    image_paths = []
    errors = []
    filter_lambda = lambda x: input_api.FilterSourceFile(
        x,
        files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\/(drawable|mipmap)'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=filter_lambda):
        local_path = f.LocalPath().lower()
        if any(
                local_path.endswith(extension)
                for extension in _IMAGE_EXTENSIONS):
            images_added = True
            image_paths.append(f)
    if images_added:
        errors.append(
            output_api.PresubmitPromptWarning(
                'It looks like you are trying to commit some images. If these are '
                'non-test-only images, please make sure to read and apply the tips in '
                'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
                'binary_size/optimization_advice.md#optimizing-images\nThis check is '
                'FYI only and will not block your CL on the CQ.', image_paths))

    return errors
def ChecksAndroidSpecificOnUpload(input_api, output_api):
    """Groups upload checks that target android code."""
    results = []
    results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
    results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
    results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
    results.extend(_CheckAndroidToastUsage(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitFrameworkImport(
        input_api, output_api))
    results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
    results.extend(_CheckAndroidWebkitImports(input_api, output_api))
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
    results.extend(_CheckNewImagesWarning(input_api, output_api))
    results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
    results.extend(_CheckAndroidInfoBarDeprecation(input_api, output_api))
    return results
def ChecksAndroidSpecificOnCommit(input_api, output_api):
    """Groups commit checks that target android code."""
    results = []
    # is_check_on_upload=False: commit-time variant of the XML style check.
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
    return results
5132 # TODO(chrishall): could we additionally match on any path owned by
5133 # ui/accessibility/OWNERS ?
5134 _ACCESSIBILITY_PATHS = (
5135 r"^chrome/browser.*/accessibility/",
5136 r"^chrome/browser/extensions/api/automation.*/",
5137 r"^chrome/renderer/extensions/accessibility_.*",
5138 r"^chrome/tests/data/accessibility/",
5139 r"^components/services/screen_ai/",
5140 r"^content/browser/accessibility/",
5141 r"^content/renderer/accessibility/",
5142 r"^content/tests/data/accessibility/",
5143 r"^extensions/renderer/api/automation/",
5144 r"^services/accessibility/",
5145 r"^ui/accessibility/",
5146 r"^ui/views/accessibility/",
def CheckAccessibilityRelnotesField(input_api, output_api):
    """Checks that commits to accessibility code contain an AX-Relnotes field in
    their commit message."""

    def FileFilter(affected_file):
        paths = _ACCESSIBILITY_PATHS
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes affecting accessibility paths.
    if not any(input_api.AffectedFiles(file_filter=FileFilter)):
        return []

    # AX-Relnotes can appear in either the description or the footer.
    # When searching the description, require 'AX-Relnotes:' to appear at the
    # beginning of a line.
    ax_regex = input_api.re.compile('ax-relnotes[:=]')
    description_has_relnotes = any(
        ax_regex.match(line)
        for line in input_api.change.DescriptionText().lower().splitlines())

    footer_relnotes = input_api.change.GitFootersFromDescription().get(
        'AX-Relnotes', [])

    if description_has_relnotes or footer_relnotes:
        return []

    # TODO(chrishall): link to Relnotes documentation in message.
    message = (
        "Missing 'AX-Relnotes:' field required for accessibility changes"
        "\n  please add 'AX-Relnotes: [release notes].' to describe any "
        "user-facing changes"
        "\n  otherwise add 'AX-Relnotes: n/a.' if this change has no "
        "user-facing effects"
        "\n  if this is confusing or annoying then please contact members "
        "of ui/accessibility/OWNERS.")

    return [output_api.PresubmitNotifyResult(message)]
5187 _ACCESSIBILITY_EVENTS_TEST_PATH = (
5188 r"^content/test/data/accessibility/event/.*\.html",
5191 _ACCESSIBILITY_TREE_TEST_PATH = (
5192 r"^content/test/data/accessibility/accname/.*\.html",
5193 r"^content/test/data/accessibility/aria/.*\.html",
5194 r"^content/test/data/accessibility/css/.*\.html",
5195 r"^content/test/data/accessibility/html/.*\.html",
5198 _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH = (
5199 r"^.*/WebContentsAccessibilityEventsTest\.java",
5202 _ACCESSIBILITY_ANDROID_TREE_TEST_PATH = (
5203 r"^.*/WebContentsAccessibilityTreeTest\.java",
def CheckAccessibilityEventsTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityEventsTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_EVENTS_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the events test data path with html type.
    if not any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=FilePathFilter)):
        return []

    # If the commit contains any change to the Android test file, ignore.
    if any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=AndroidFilePathFilter)):
        return []

    # Only consider changes that are adding/renaming or deleting a file
    message = []
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        if f.Action() == 'A' or f.Action() == 'D':
            message = (
                "It appears that you are adding, renaming or deleting"
                "\na dump_accessibility_events* test, but have not included"
                "\na corresponding change for Android."
                "\nPlease include (or remove) the test from:"
                "\n    content/public/android/javatests/src/org/chromium/"
                "content/browser/accessibility/"
                "WebContentsAccessibilityEventsTest.java"
                "\nIf this message is confusing or annoying, please contact"
                "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):
        return []

    return [output_api.PresubmitPromptWarning(message)]
def CheckAccessibilityTreeTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityTreeTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_TREE_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_TREE_TEST_PATH
        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the various tree test data paths with html type.
    if not any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=FilePathFilter)):
        return []

    # If the commit contains any change to the Android test file, ignore.
    if any(
            input_api.AffectedFiles(include_deletes=True,
                                    file_filter=AndroidFilePathFilter)):
        return []

    # Only consider changes that are adding/renaming or deleting a file
    message = []
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        if f.Action() == 'A' or f.Action() == 'D':
            message = (
                "It appears that you are adding, renaming or deleting"
                "\na dump_accessibility_tree* test, but have not included"
                "\na corresponding change for Android."
                "\nPlease include (or remove) the test from:"
                "\n    content/public/android/javatests/src/org/chromium/"
                "content/browser/accessibility/"
                "WebContentsAccessibilityTreeTest.java"
                "\nIf this message is confusing or annoying, please contact"
                "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):
        return []

    return [output_api.PresubmitPromptWarning(message)]
def CheckEsLintConfigChanges(input_api, output_api):
    """Suggest using "git cl presubmit --files" when .eslintrc.js files are
    modified. This is important because enabling an error in .eslintrc.js can
    trigger errors in any .js or .ts files in its directory, leading to hidden
    presubmit errors."""
    results = []
    eslint_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=[r'.*\.eslintrc\.js$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=eslint_filter):
        local_dir = input_api.os_path.dirname(f.LocalPath())
        # Use / characters so that the commands printed work on any OS.
        local_dir = local_dir.replace(input_api.os_path.sep, '/')
        # A trailing '/' so that '<dir>*.js' globs inside the directory; a
        # config at the repo root yields an empty prefix.
        if local_dir:
            local_dir += '/'
        results.append(
            output_api.PresubmitNotifyResult(
                '%(file)s modified. Consider running \'git cl presubmit --files '
                '"%(dir)s*.js;%(dir)s*.ts"\' in order to check and fix the affected '
                'files before landing this change.' %
                { 'file' : f.LocalPath(), 'dir' : local_dir}))
    return results
5326 # string pattern, sequence of strings to show when pattern matches,
5327 # error flag. True if match is a presubmit error, otherwise it's a warning.
5328 _NON_INCLUSIVE_TERMS = (
5330 # Note that \b pattern in python re is pretty particular. In this
5331 # regexp, 'class WhiteList ...' will match, but 'class FooWhiteList
5332 # ...' will not. This may require some tweaking to catch these cases
5333 # without triggering a lot of false positives. Leaving it naive and
5334 # less matchy for now.
5335 r'/\b(?i)((black|white)list|master|slave)\b', # nocheck
5337 'Please don\'t use blacklist, whitelist, ' # nocheck
5338 'or slave in your', # nocheck
5339 'code and make every effort to use other terms. Using "// nocheck"',
5340 '"# nocheck" or "<!-- nocheck -->"',
5341 'at the end of the offending line will bypass this PRESUBMIT error',
5342 'but avoid using this whenever possible. Reach out to',
5343 'community@chromium.org if you have questions'),
def ChecksCommon(input_api, output_api):
    """Checks common to both upload and commit."""
    results = []
    results.extend(
        input_api.canned_checks.PanProjectChecks(
            input_api, output_api, excluded_paths=_EXCLUDED_PATHS))

    author = input_api.change.author_email
    # Robots (e.g. autoroller accounts) are exempt from the authorized-author
    # check.
    if author and author not in _KNOWN_ROBOTS:
        results.extend(
            input_api.canned_checks.CheckAuthorizedAuthor(
                input_api, output_api))

    results.extend(
        input_api.canned_checks.CheckChangeHasNoTabs(
            input_api,
            output_api,
            source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
    results.extend(
        input_api.RunTests(
            input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))

    dirmd = 'dirmd.bat' if input_api.is_windows else 'dirmd'
    dirmd_bin = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                       'third_party', 'depot_tools', dirmd)
    results.extend(
        input_api.RunTests(
            input_api.canned_checks.CheckDirMetadataFormat(
                input_api, output_api, dirmd_bin)))
    results.extend(
        input_api.canned_checks.CheckOwnersDirMetadataExclusive(
            input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckNoNewMetadataInOwners(
            input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckInclusiveLanguage(
            input_api,
            output_api,
            excluded_directories_relative_path=[
                'infra', 'inclusive_language_presubmit_exempt_dirs.txt'
            ],
            non_inclusive_terms=_NON_INCLUSIVE_TERMS))

    # Run PRESUBMIT_test.py for every affected PRESUBMIT.py that still has one.
    presubmit_py_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=[r'.*PRESUBMIT\.py$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=presubmit_py_filter):
        full_path = input_api.os_path.dirname(f.AbsoluteLocalPath())
        test_file = input_api.os_path.join(full_path, 'PRESUBMIT_test.py')
        # The PRESUBMIT.py file (and the directory containing it) might have
        # been affected by being moved or removed, so only try to run the tests
        # if they still exist.
        if not input_api.os_path.exists(test_file):
            continue

        use_python3 = False
        with open(f.LocalPath(), encoding='utf-8') as fp:
            use_python3 = any(
                line.startswith('USE_PYTHON3 = True')
                for line in fp.readlines())

        results.extend(
            input_api.canned_checks.RunUnitTestsInDirectory(
                input_api,
                output_api,
                full_path,
                files_to_check=[r'^PRESUBMIT_test\.py$'],
                run_on_python2=not use_python3,
                run_on_python3=use_python3,
                skip_shebang_check=True))

    return results
def CheckPatchFiles(input_api, output_api):
    """Checks that no leftover patch artifacts (.orig/.rej) are committed."""
    problems = [
        f.LocalPath() for f in input_api.AffectedFiles()
        if f.LocalPath().endswith(('.orig', '.rej'))
    ]
    # Cargo.toml.orig files are part of third-party crates downloaded from
    # crates.io and should be included.
    problems = [f for f in problems if not f.endswith('Cargo.toml.orig')]
    if problems:
        return [
            output_api.PresubmitError("Don't commit .rej and .orig files.",
                                      problems)
        ]
    else:
        return []
def CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
    """Warns when build-config macros are used in a file that never includes
    build/build_config.h (directly or via its primary header)."""
    # Excludes OS_CHROMEOS, which is not defined in build_config.h.
    macro_re = input_api.re.compile(
        r'^\s*#(el)?if.*\bdefined\(((COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
    include_re = input_api.re.compile(r'^#include\s+"build/build_config.h"',
                                      input_api.re.MULTILINE)
    extension_re = input_api.re.compile(r'\.[a-z]+$')
    errors = []
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedFiles(include_deletes=False):
        # The build-config macros are allowed to be used in build_config.h
        # without including itself.
        if f.LocalPath() == config_h_file:
            continue
        if not f.LocalPath().endswith(
            ('.h', '.c', '.cc', '.cpp', '.m', '.mm')):
            continue

        found_line_number = None
        found_macro = None
        all_lines = input_api.ReadFile(f, 'r').splitlines()
        for line_num, line in enumerate(all_lines):
            match = macro_re.search(line)
            if match:
                found_line_number = line_num
                found_macro = match.group(2)
                break
        if not found_line_number:
            continue

        # The include must precede the first macro use.
        found_include_line = -1
        for line_num, line in enumerate(all_lines):
            if include_re.search(line):
                found_include_line = line_num
                break
        if found_include_line >= 0 and found_include_line < found_line_number:
            continue

        # For implementation files, the primary header may provide the
        # include; best-effort read — the header may not exist.
        if not f.LocalPath().endswith('.h'):
            primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
            try:
                content = input_api.ReadFile(primary_header_path, 'r')
                if include_re.search(content):
                    continue
            except IOError:
                pass
        errors.append('%s:%d %s macro is used without first including build/'
                      'build_config.h.' %
                      (f.LocalPath(), found_line_number, found_macro))
    if errors:
        return [output_api.PresubmitPromptWarning('\n'.join(errors))]
    return []
def CheckForSuperfluousStlIncludesInHeaders(input_api, output_api):
    """Warns when a C++ header includes common STL headers but never
    references std:: (the include is likely superfluous)."""
    # NOTE(review): the alternation list below was reconstructed from the
    # surrounding code; confirm it matches the upstream header list.
    stl_include_re = input_api.re.compile(r'^#include\s+<('
                                          r'algorithm|'
                                          r'array|'
                                          r'limits|'
                                          r'list|'
                                          r'map|'
                                          r'memory|'
                                          r'queue|'
                                          r'set|'
                                          r'string|'
                                          r'unordered_map|'
                                          r'unordered_set|'
                                          r'utility|'
                                          r'vector)>')
    std_namespace_re = input_api.re.compile(r'std::')
    errors = []
    for f in input_api.AffectedFiles():
        if not _IsCPlusPlusHeaderFile(input_api, f.LocalPath()):
            continue

        uses_std_namespace = False
        has_stl_include = False
        for line in f.NewContents():
            if has_stl_include and uses_std_namespace:
                break

            if not has_stl_include and stl_include_re.search(line):
                has_stl_include = True
                continue

            # PCH files opt out via the marker string.
            if not uses_std_namespace and (std_namespace_re.search(line)
                    or 'no-std-usage-because-pch-file' in line):
                uses_std_namespace = True
                continue

        if has_stl_include and not uses_std_namespace:
            errors.append(
                '%s: Includes STL header(s) but does not reference std::' %
                f.LocalPath())
    if errors:
        return [output_api.PresubmitPromptWarning('\n'.join(errors))]
    return []
def _CheckForDeprecatedOSMacrosInFile(input_api, f):
    """Check for sensible looking, totally invalid OS macros."""
    preprocessor_statement = input_api.re.compile(r'^\s*#')
    os_macro = input_api.re.compile(r'defined\(OS_([^)]+)\)')
    results = []
    for lnum, line in f.ChangedContents():
        # Only flag uses inside preprocessor directives.
        if preprocessor_statement.search(line):
            for match in os_macro.finditer(line):
                results.append(
                    '  %s:%d: %s' %
                    (f.LocalPath(), lnum, 'defined(OS_' + match.group(1) +
                     ') -> BUILDFLAG(IS_' + match.group(1) + ')'))
    return results
def CheckForDeprecatedOSMacros(input_api, output_api):
    """Check all affected files for invalid OS macros."""
    bad_macros = []
    # The OS_ macros are allowed to be used in build/build_config.h.
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedSourceFiles(None):
        if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')) \
                and f.LocalPath() != config_h_file:
            bad_macros.extend(_CheckForDeprecatedOSMacrosInFile(input_api, f))

    if not bad_macros:
        return []

    return [
        output_api.PresubmitError(
            'OS macros have been deprecated. Please use BUILDFLAGs instead (still '
            'defined in build_config.h):', bad_macros)
    ]
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
    """Check all affected files for invalid "if defined" macros."""
    # These are defined unconditionally by Apple SDK headers, so '#ifdef X'
    # is always true; the author almost certainly meant '#if X'.
    # NOTE(review): list reconstructed from upstream; only the two entries
    # visible in the corrupted source are certain — confirm the rest.
    ALWAYS_DEFINED_MACROS = (
        "TARGET_CPU_PPC",
        "TARGET_CPU_PPC64",
        "TARGET_CPU_68K",
        "TARGET_CPU_X86",
        "TARGET_CPU_ARM",
        "TARGET_CPU_MIPS",
        "TARGET_CPU_SPARC",
        "TARGET_CPU_ALPHA",
        "TARGET_IPHONE_SIMULATOR",
        "TARGET_OS_EMBEDDED",
        "TARGET_OS_IPHONE",
        "TARGET_OS_MAC",
        "TARGET_OS_UNIX",
        "TARGET_OS_WIN32",
    )
    ifdef_macro = input_api.re.compile(
        r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
    results = []
    for lnum, line in f.ChangedContents():
        for match in ifdef_macro.finditer(line):
            if match.group(1) in ALWAYS_DEFINED_MACROS:
                always_defined = ' %s is always defined. ' % match.group(1)
                did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
                results.append('    %s:%d %s\n\t%s' %
                               (f.LocalPath(), lnum, always_defined,
                                did_you_mean))
    return results
def CheckForInvalidIfDefinedMacros(input_api, output_api):
    """Check all affected files for invalid "if defined" macros."""
    bad_macros = []
    # Imported third-party code keeps its upstream style.
    skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
    for f in input_api.AffectedFiles():
        if any([f.LocalPath().startswith(path) for path in skipped_paths]):
            continue
        if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
            bad_macros.extend(
                _CheckForInvalidIfDefinedMacrosInFile(input_api, f))

    if not bad_macros:
        return []

    return [
        output_api.PresubmitError(
            'Found ifdef check on always-defined macro[s]. Please fix your code\n'
            'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
            bad_macros)
    ]
def CheckForIPCRules(input_api, output_api):
    """Check for same IPC rules described in
    http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
    """
    base_pattern = r'IPC_ENUM_TRAITS\('
    inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
    # Occurrences inside // comments are allowed.
    comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

    problems = []
    for f in input_api.AffectedSourceFiles(None):
        local_path = f.LocalPath()
        if not local_path.endswith('.h'):
            continue
        for line_number, line in f.ChangedContents():
            if inclusion_pattern.search(
                    line) and not comment_pattern.search(line):
                problems.append('%s:%d\n    %s' %
                                (local_path, line_number, line.strip()))

    if problems:
        return [
            output_api.PresubmitPromptWarning(_IPC_ENUM_TRAITS_DEPRECATED,
                                              problems)
        ]
    else:
        return []
def CheckForLongPathnames(input_api, output_api):
    """Check to make sure no files being submitted have long paths.
    This causes issues on Windows.
    """
    problems = []
    for f in input_api.AffectedTestableFiles():
        local_path = f.LocalPath()
        # Windows has a path limit of 260 characters. Limit path length to 200 so
        # that we have some extra for the prefix on dev machines and the bots.
        if len(local_path) > 200:
            problems.append(local_path)

    if problems:
        return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
    else:
        return []
def CheckForIncludeGuards(input_api, output_api):
    """Check that header files have proper guards against multiple inclusion.
    If a file should not have such guards (and it probably should) then it
    should include the string "no-include-guard-because-multiply-included" or
    "no-include-guard-because-pch-file".
    """

    def is_chromium_header_file(f):
        # We only check header files under the control of the Chromium
        # project. That is, those outside third_party apart from
        # third_party/blink.
        # We also exclude *_message_generator.h headers as they use
        # include guards in a special, non-typical way.
        file_with_path = input_api.os_path.normpath(f.LocalPath())
        return (file_with_path.endswith('.h')
                and not file_with_path.endswith('_message_generator.h')
                and not file_with_path.endswith('com_imported_mstscax.h')
                and (not file_with_path.startswith('third_party')
                     or file_with_path.startswith(
                         input_api.os_path.join('third_party', 'blink'))))

    def replace_special_with_underscore(string):
        return input_api.re.sub(r'[+\\/.-]', '_', string)

    errors = []

    for f in input_api.AffectedSourceFiles(is_chromium_header_file):
        guard_name = None
        guard_line_number = None
        seen_guard_end = False

        file_with_path = input_api.os_path.normpath(f.LocalPath())
        base_file_name = input_api.os_path.splitext(
            input_api.os_path.basename(file_with_path))[0]
        upper_base_file_name = base_file_name.upper()

        expected_guard = replace_special_with_underscore(
            file_with_path.upper() + '_')

        # For "path/elem/file_name.h" we should really only accept
        # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
        # are too many (1000+) files with slight deviations from the
        # coding style. The most important part is that the include guard
        # is there, and that it's unique, not the name so this check is
        # forgiving for existing files.
        #
        # As code becomes more uniform, this could be made stricter.

        guard_name_pattern_list = [
            # Anything with the right suffix (maybe with an extra _).
            r'\w+_H__?',

            # To cover include guards with old Blink style.
            r'\w+_h',

            # Anything including the uppercase name of the file.
            r'\w*' + input_api.re.escape(
                replace_special_with_underscore(upper_base_file_name)) +
            r'\w*',
        ]
        guard_name_pattern = '|'.join(guard_name_pattern_list)
        guard_pattern = input_api.re.compile(r'#ifndef\s+(' +
                                             guard_name_pattern + ')')

        for line_number, line in enumerate(f.NewContents()):
            if ('no-include-guard-because-multiply-included' in line
                    or 'no-include-guard-because-pch-file' in line):
                guard_name = 'DUMMY'  # To not trigger check outside the loop.
                break

            if guard_name is None:
                match = guard_pattern.match(line)
                if match:
                    guard_name = match.group(1)
                    guard_line_number = line_number

                    # We allow existing files to use include guards whose names
                    # don't match the chromium style guide, but new files should
                    # get it right.
                    if guard_name != expected_guard:
                        if f.Action() == 'A':  # If file was just 'A'dded
                            errors.append(
                                output_api.PresubmitPromptWarning(
                                    'Header using the wrong include guard name %s'
                                    % guard_name, [
                                        '%s:%d' %
                                        (f.LocalPath(), line_number + 1)
                                    ], 'Expected: %r\nFound: %r' %
                                    (expected_guard, guard_name)))
            else:
                # The line after #ifndef should have a #define of the same name.
                if line_number == guard_line_number + 1:
                    expected_line = '#define %s' % guard_name
                    if line != expected_line:
                        errors.append(
                            output_api.PresubmitPromptWarning(
                                'Missing "%s" for include guard' %
                                expected_line,
                                ['%s:%d' % (f.LocalPath(), line_number + 1)],
                                'Expected: %r\nGot: %r' %
                                (expected_line, line)))

                if not seen_guard_end and line == '#endif  // %s' % guard_name:
                    seen_guard_end = True
                elif seen_guard_end:
                    if line.strip() != '':
                        errors.append(
                            output_api.PresubmitPromptWarning(
                                'Include guard %s not covering the whole file'
                                % (guard_name), [f.LocalPath()]))
                        break  # Nothing else to check and enough to warn once.

        if guard_name is None:
            errors.append(
                output_api.PresubmitPromptWarning(
                    'Missing include guard in %s\n'
                    'Recommended name: %s\n'
                    'This check can be disabled by having the string\n'
                    '"no-include-guard-because-multiply-included" or\n'
                    '"no-include-guard-because-pch-file" in the header.'
                    % (f.LocalPath(), expected_guard)))

    return errors
def CheckForWindowsLineEndings(input_api, output_api):
    """Check source code and known ascii text files for Windows style line
    endings.
    """
    known_text_files = r'.*\.(txt|html|htm|py|gyp|gypi|gn|isolate|icon)$'

    file_inclusion_pattern = (known_text_files,
                              r'.+%s' % _IMPLEMENTATION_EXTENSIONS,
                              r'.+%s' % _HEADER_EXTENSIONS)

    problems = []
    source_file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=None)
    for f in input_api.AffectedSourceFiles(source_file_filter):
        # Ignore test files that contain crlf intentionally.
        if f.LocalPath().endswith('crlf.txt'):
            continue
        include_file = False
        # keepends=True so the terminator is visible on each line.
        for line in input_api.ReadFile(f, 'r').splitlines(True):
            if line.endswith('\r\n'):
                include_file = True
        if include_file:
            problems.append(f.LocalPath())

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'Are you sure that you want '
                'these files to contain Windows style line endings?\n' +
                '\n'.join(problems))
        ]

    return []
def CheckIconFilesForLicenseHeaders(input_api, output_api):
    """Check that .icon files (which are fragments of C++) have license headers.
    """
    icon_files = (r'.*\.icon$', )

    icons = lambda x: input_api.FilterSourceFile(x, files_to_check=icon_files)
    return input_api.canned_checks.CheckLicense(input_api,
                                                output_api,
                                                source_file_filter=icons)
def CheckForUseOfChromeAppsDeprecations(input_api, output_api):
    """Check source code for use of Chrome App technologies being
    deprecated.
    """

    def _CheckForDeprecatedTech(input_api,
                                output_api,
                                detection_list,
                                files_to_check=None,
                                files_to_skip=None):
        """Returns the paths of affected files whose diff contains any of the
        strings in detection_list."""

        if (files_to_check or files_to_skip):
            source_file_filter = lambda f: input_api.FilterSourceFile(
                f, files_to_check=files_to_check, files_to_skip=files_to_skip)
        else:
            source_file_filter = None

        problems = []

        for f in input_api.AffectedSourceFiles(source_file_filter):
            if f.Action() == 'D':
                continue
            for _, line in f.ChangedContents():
                if any(detect in line for detect in detection_list):
                    problems.append(f.LocalPath())

        return problems

    # to avoid this presubmit script triggering warnings
    files_to_skip = ['PRESUBMIT.py', 'PRESUBMIT_test.py']

    problems = []

    # NMF: any files with extensions .nmf or NMF
    _NMF_FILES = r'\.(nmf|NMF)$'
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=[''],  # any change to the file will trigger warning
        files_to_check=[r'.+%s' % _NMF_FILES])

    # MANIFEST: any manifest.json that in its diff includes "app":
    _MANIFEST_FILES = r'(manifest\.json)$'
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['"app":'],
        files_to_check=[r'.*%s' % _MANIFEST_FILES])

    # NaCl / PNaCl: any file that in its diff contains the strings in the list
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['config=nacl', 'enable-nacl', 'cpu=pnacl', 'nacl_io'],
        files_to_skip=files_to_skip + [r"^native_client_sdk/"])

    # PPAPI: any C/C++ file that in its diff includes a ppapi library
    problems += _CheckForDeprecatedTech(
        input_api,
        output_api,
        detection_list=['#include "ppapi', '#include <ppapi'],
        files_to_check=(r'.+%s' % _HEADER_EXTENSIONS,
                        r'.+%s' % _IMPLEMENTATION_EXTENSIONS),
        files_to_skip=[r"^ppapi/"])

    if problems:
        return [
            output_api.PresubmitPromptWarning(
                'You are adding/modifying code'
                'related to technologies which will soon be deprecated (Chrome Apps, NaCl,'
                ' PNaCl, PPAPI). See this blog post for more details:\n'
                'https://blog.chromium.org/2020/08/changes-to-chrome-app-support-timeline.html\n'
                'and this documentation for options to replace these technologies:\n'
                'https://developer.chrome.com/docs/apps/migration/\n' +
                '\n'.join(problems))
        ]

    return []
def CheckSyslogUseWarningOnUpload(input_api, output_api, src_file_filter=None):
    """Checks that all source files use SYSLOG properly."""
    syslog_files = []
    for f in input_api.AffectedSourceFiles(src_file_filter):
        for line_number, line in f.ChangedContents():
            if 'SYSLOG' in line:
                syslog_files.append(f.LocalPath() + ':' + str(line_number))

    if syslog_files:
        return [
            output_api.PresubmitPromptWarning(
                'Please make sure there are no privacy sensitive bits of data in SYSLOG'
                ' calls.\nFiles to check:\n',
                items=syslog_files)
        ]
    return []
def CheckChangeOnUpload(input_api, output_api):
    """Legacy entry point retained for presubmit_support version gating."""
    if input_api.version < [2, 0, 0]:
        return [
            output_api.PresubmitError(
                "Your depot_tools is out of date. "
                "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
                "but your version is %d.%d.%d" % tuple(input_api.version))
        ]
    results = []
    results.extend(
        input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    return results
def CheckChangeOnCommit(input_api, output_api):
    """Legacy commit entry point: version gate plus commit-only checks."""
    if input_api.version < [2, 0, 0]:
        return [
            output_api.PresubmitError(
                "Your depot_tools is out of date. "
                "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
                "but your version is %d.%d.%d" % tuple(input_api.version))
        ]

    results = []
    # Make sure the tree is 'open'.
    results.extend(
        input_api.canned_checks.CheckTreeIsOpen(
            input_api,
            output_api,
            json_url='http://chromium-status.appspot.com/current?format=json'))

    results.extend(
        input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
    results.extend(
        input_api.canned_checks.CheckChangeHasNoUnwantedTags(
            input_api, output_api))
    return results
5981 def CheckStrings(input_api, output_api):
5982 """Check string ICU syntax validity and if translation screenshots exist."""
5983 # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
5984 # footer is set to true.
5985 git_footers = input_api.change.GitFootersFromDescription()
5986 skip_screenshot_check_footer = [
5987 footer.lower() for footer in git_footers.get(
5988 u'Skip-Translation-Screenshots-Check', [])
5990 run_screenshot_check = u'true' not in skip_screenshot_check_footer
5995 from io import StringIO
5997 new_or_added_paths = set(f.LocalPath() for f in input_api.AffectedFiles()
5998 if (f.Action() == 'A' or f.Action() == 'M'))
5999 removed_paths = set(f.LocalPath()
6000 for f in input_api.AffectedFiles(include_deletes=True)
6001 if f.Action() == 'D')
6004 f for f in input_api.AffectedFiles()
6005 if f.LocalPath().endswith(('.grd', '.grdp'))
6008 f for f in affected_grds if not 'testdata' in f.LocalPath()
6010 if not affected_grds:
6013 affected_png_paths = [
6014 f.AbsoluteLocalPath() for f in input_api.AffectedFiles()
6015 if (f.LocalPath().endswith('.png'))
6018 # Check for screenshots. Developers can upload screenshots using
6019 # tools/translation/upload_screenshots.py which finds and uploads
6020 # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
6021 # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
6022 # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
6024 # The logic here is as follows:
6026 # - If the CL has a .png file under the screenshots directory for a grd
6027 # file, warn the developer. Actual images should never be checked into the
6030 # - If the CL contains modified or new messages in grd files and doesn't
6031 # contain the corresponding .sha1 files, warn the developer to add images
6032 # and upload them via tools/translation/upload_screenshots.py.
6034 # - If the CL contains modified or new messages in grd files and the
6035 # corresponding .sha1 files, everything looks good.
6037 # - If the CL contains removed messages in grd files but the corresponding
6038 # .sha1 files aren't removed, warn the developer to remove them.
6039 unnecessary_screenshots = []
6041 missing_sha1_modified = []
6042 unnecessary_sha1_files = []
6044 # This checks verifies that the ICU syntax of messages this CL touched is
6045 # valid, and reports any found syntax errors.
6046 # Without this presubmit check, ICU syntax errors in Chromium strings can land
6047 # without developers being aware of them. Later on, such ICU syntax errors
6048 # break message extraction for translation, hence would block Chromium
6049 # translations until they are fixed.
6050 icu_syntax_errors = []
6052 def _CheckScreenshotAdded(screenshots_dir, message_id):
6053 sha1_path = input_api.os_path.join(screenshots_dir,
6054 message_id + '.png.sha1')
6055 if sha1_path not in new_or_added_paths:
6056 missing_sha1.append(sha1_path)
6058 def _CheckScreenshotModified(screenshots_dir, message_id):
6059 sha1_path = input_api.os_path.join(screenshots_dir,
6060 message_id + '.png.sha1')
6061 if sha1_path not in new_or_added_paths:
6062 missing_sha1_modified.append(sha1_path)
6064 def _CheckScreenshotRemoved(screenshots_dir, message_id):
6065 sha1_path = input_api.os_path.join(screenshots_dir,
6066 message_id + '.png.sha1')
6067 if input_api.os_path.exists(
6068 sha1_path) and sha1_path not in removed_paths:
6069 unnecessary_sha1_files.append(sha1_path)
    def _ValidateIcuSyntax(text, level, signatures):
        """Validates ICU syntax of a text string.

        Check if text looks similar to ICU and checks for ICU syntax correctness
        in this case. Reports various issues with ICU syntax and values of
        variants. Supports checking of nested messages. Accumulate information of
        each ICU messages found in the text for further checking.

        Args:
          text: a string to check.
          level: a number of current nesting level.
          signatures: an accumulator, a list of tuple of (level, variable,
            kind, variants).

        Returns:
          None if a string is not ICU or no issue detected.
          A tuple of (message, start index, end index) if an issue detected.
        """
        # Map of ICU message type -> (known variant names, required variants).
            'plural': (frozenset(
                ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
                 'other']), frozenset(['=1', 'other'])),
            'selectordinal': (frozenset(
                ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
                 'other']), frozenset(['one', 'other'])),
            'select': (frozenset(), frozenset(['other'])),

        # Check if the message looks like an attempt to use ICU
        # plural. If yes - check if its syntax strictly matches ICU format.
        like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b',
        # Record a sentinel signature (no ICU message at this level).
        signatures.append((level, None, None, None))

        # Check for valid prefix and suffix
            r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
            r'(plural|selectordinal|select),\s*'
            r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
        return (('This message looks like an ICU plural, '
                 'but does not follow ICU syntax.'), like.start(),
        starting, variable, kind, variant_pairs = m.groups()
        variants, depth, last_pos = _ParseIcuVariants(variant_pairs,
        # A non-zero depth indicates an unmatched '{' in the variants part.
        return ('Invalid ICU format. Unbalanced opening bracket', last_pos,

        ending = text[last_pos:]
        return ('Invalid ICU format. No initial opening bracket',
                last_pos - 1, last_pos)
        if not ending or '}' not in ending:
            return ('Invalid ICU format. No final closing bracket',
                    last_pos - 1, last_pos)
            'Invalid ICU format. Extra characters at the start of a complex '
            'message (go/icu-message-migration): "%s"') % starting, 0,
            'Invalid ICU format. Extra characters at the end of a complex '
            'message (go/icu-message-migration): "%s"') % ending,
            last_pos - 1, len(text) - 1)
        if kind not in valid_types:
            return (('Unknown ICU message type %s. '
                     'Valid types are: plural, select, selectordinal') % kind,
        known, required = valid_types[kind]
        defined_variants = set()
        for variant, variant_range, value, value_range in variants:
            start, end = variant_range
            if variant in defined_variants:
                return ('Variant "%s" is defined more than once' % variant,
            elif known and variant not in known:
                return ('Variant "%s" is not valid for %s message' %
                        (variant, kind), start, end)
            defined_variants.add(variant)
            # Check for nested structure
            # Recurse into the variant value (brackets stripped); re-base any
            # reported indices onto this message's coordinates.
            res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
            return (res[0], res[1] + value_range[0] + 1,
                    res[2] + value_range[0] + 1)
        missing = required - defined_variants
        return ('Required variants missing: %s' % ', '.join(missing), 0,
        signatures.append((level, variable, kind, defined_variants))
    def _ParseIcuVariants(text, offset=0):
        """Parse variants part of ICU complex message.

        Builds a tuple of variant names and values, as well as
        their offsets in the input string.

        Args:
          text: a string to parse
          offset: additional offset to add to positions in the text to get correct
            position in the complete ICU string.

        Returns:
          List of tuples, each tuple consist of four fields: variant name,
          variant name span (tuple of two integers), variant value, value
          span (tuple of two integers).
        """
        # depth counts currently-open '{'; start/end delimit the value text.
        depth, start, end = 0, -1, -1
        for idx, char in enumerate(text):
                # Text between the previous value and this value holds the key.
                chunk = text[end + 1:start]
                pos = offset + end + 1 + chunk.find(key)
                span = (pos, pos + len(key))
                # Bail out early, reporting depth/position to the caller.
                return variants, depth, offset + idx
                variants.append((key, span, text[start:end + 1],
                                 (offset + start, offset + end + 1)))
        return variants, depth, offset + end + 1
6205 old_sys_path = sys.path
6206 sys.path = sys.path + [
6207 input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
6210 from helper import grd_helper
6212 sys.path = old_sys_path
6214 for f in affected_grds:
6215 file_path = f.LocalPath()
6216 old_id_to_msg_map = {}
6217 new_id_to_msg_map = {}
6218 # Note that this code doesn't check if the file has been deleted. This is
6219 # OK because it only uses the old and new file contents and doesn't load
6220 # the file via its path.
6221 # It's also possible that a file's content refers to a renamed or deleted
6222 # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
6223 # is OK as well, because grd_helper ignores <part> tags when loading .grd or
6225 if file_path.endswith('.grdp'):
6227 old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
6228 '\n'.join(f.OldContents()))
6230 new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
6231 '\n'.join(f.NewContents()))
6233 file_dir = input_api.os_path.dirname(file_path) or '.'
6235 old_id_to_msg_map = grd_helper.GetGrdMessages(
6236 StringIO('\n'.join(f.OldContents())), file_dir)
6238 new_id_to_msg_map = grd_helper.GetGrdMessages(
6239 StringIO('\n'.join(f.NewContents())), file_dir)
6241 grd_name, ext = input_api.os_path.splitext(
6242 input_api.os_path.basename(file_path))
6243 screenshots_dir = input_api.os_path.join(
6244 input_api.os_path.dirname(file_path),
6245 grd_name + ext.replace('.', '_'))
6247 # Compute added, removed and modified message IDs.
6248 old_ids = set(old_id_to_msg_map)
6249 new_ids = set(new_id_to_msg_map)
6250 added_ids = new_ids - old_ids
6251 removed_ids = old_ids - new_ids
6252 modified_ids = set([])
6253 for key in old_ids.intersection(new_ids):
6254 if (old_id_to_msg_map[key].ContentsAsXml('', True) !=
6255 new_id_to_msg_map[key].ContentsAsXml('', True)):
6256 # The message content itself changed. Require an updated screenshot.
6257 modified_ids.add(key)
6258 elif old_id_to_msg_map[key].attrs['meaning'] != \
6259 new_id_to_msg_map[key].attrs['meaning']:
6260 # The message meaning changed. Ensure there is a screenshot for it.
6261 sha1_path = input_api.os_path.join(screenshots_dir,
6263 if sha1_path not in new_or_added_paths and not \
6264 input_api.os_path.exists(sha1_path):
6265 # There is neither a previous screenshot nor is a new one added now.
6266 # Require a screenshot.
6267 modified_ids.add(key)
6269 if run_screenshot_check:
6270 # Check the screenshot directory for .png files. Warn if there is any.
6271 for png_path in affected_png_paths:
6272 if png_path.startswith(screenshots_dir):
6273 unnecessary_screenshots.append(png_path)
6275 for added_id in added_ids:
6276 _CheckScreenshotAdded(screenshots_dir, added_id)
6278 for modified_id in modified_ids:
6279 _CheckScreenshotModified(screenshots_dir, modified_id)
6281 for removed_id in removed_ids:
6282 _CheckScreenshotRemoved(screenshots_dir, removed_id)
6284 # Check new and changed strings for ICU syntax errors.
6285 for key in added_ids.union(modified_ids):
6286 msg = new_id_to_msg_map[key].ContentsAsXml('', True)
6287 err = _ValidateIcuSyntax(msg, 0, [])
6289 icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
6292 if run_screenshot_check:
6293 if unnecessary_screenshots:
6295 output_api.PresubmitError(
6296 'Do not include actual screenshots in the changelist. Run '
6297 'tools/translate/upload_screenshots.py to upload them instead:',
6298 sorted(unnecessary_screenshots)))
6302 output_api.PresubmitError(
6303 'You are adding UI strings.\n'
6304 'To ensure the best translations, take screenshots of the relevant UI '
6305 '(https://g.co/chrome/translation) and add these files to your '
6306 'changelist:', sorted(missing_sha1)))
6308 if missing_sha1_modified:
6310 output_api.PresubmitError(
6311 'You are modifying UI strings or their meanings.\n'
6312 'To ensure the best translations, take screenshots of the relevant UI '
6313 '(https://g.co/chrome/translation) and add these files to your '
6314 'changelist:', sorted(missing_sha1_modified)))
6316 if unnecessary_sha1_files:
6318 output_api.PresubmitError(
6319 'You removed strings associated with these files. Remove:',
6320 sorted(unnecessary_sha1_files)))
6323 output_api.PresubmitPromptOrNotify('Skipping translation '
6324 'screenshots check.'))
6326 if icu_syntax_errors:
6328 output_api.PresubmitPromptWarning(
6329 'ICU syntax errors were found in the following strings (problems or '
6330 'feedback? Contact rainhard@chromium.org):',
6331 items=icu_syntax_errors))
def CheckTranslationExpectations(input_api, output_api,
                                 translation_expectations_path=None,
    # Only run when grit (.grd / .grdp) files are part of the change.
        f for f in input_api.AffectedFiles()
        if (f.LocalPath().endswith('.grd') or f.LocalPath().endswith('.grdp'))
    if not affected_grds:

    # Temporarily extend sys.path so the translation helpers under
    # tools/translation can be imported, then restore the original path.
    old_sys_path = sys.path
    sys.path = sys.path + [
        input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
    from helper import git_helper
    from helper import translation_helper
    sys.path = old_sys_path

    # Check that translation expectations can be parsed and we can get a list of
    # translatable grd files. |repo_root| and |translation_expectations_path| are
    # only passed by tests.
        repo_root = input_api.PresubmitLocalPath()
    if not translation_expectations_path:
        # Default to the checked-in expectations file.
        translation_expectations_path = input_api.os_path.join(
            repo_root, 'tools', 'gritsettings', 'translation_expectations.pyl')
    grd_files = git_helper.list_grds_in_repository(repo_root)

    # Ignore bogus grd files used only for testing
    # ui/webui/resources/tools/generate_grd.py.
    ignore_path = input_api.os_path.join('ui', 'webui', 'resources', 'tools',
    grd_files = [p for p in grd_files if ignore_path not in p]

        translation_helper.get_translatable_grds(
            repo_root, grd_files, translation_expectations_path)
    except Exception as e:
        # Surface parse failures as a notification, not a hard error.
        output_api.PresubmitNotifyResult(
            'Failed to get a list of translatable grd files. This happens when:\n'
            ' - One of the modified grd or grdp files cannot be parsed or\n'
            ' - %s is not updated.\n'
            'Stack:\n%s' % (translation_expectations_path, str(e)))
def CheckStableMojomChanges(input_api, output_api):
    """Changes to [Stable] mojom types must preserve backward-compatibility."""
    changed_mojoms = input_api.AffectedFiles(
        include_deletes=True,
        file_filter=lambda f: f.LocalPath().endswith(('.mojom')))

    # Nothing to do without mojom changes (or when diffs are unavailable).
    if not changed_mojoms or input_api.no_diffs:

    # Build a delta description (old/new text per mojom) for the checker tool.
    for mojom in changed_mojoms:
            'filename': mojom.LocalPath(),
            # Empty contents collapse to None (file added / file deleted).
            'old': '\n'.join(mojom.OldContents()) or None,
            'new': '\n'.join(mojom.NewContents()) or None,

    # Delegate the actual compatibility analysis to the mojom tool.
    process = input_api.subprocess.Popen([
        input_api.python3_executable,
        input_api.os_path.join(
            input_api.PresubmitLocalPath(), 'mojo', 'public', 'tools', 'mojom',
            'check_stable_mojom_compatibility.py'), '--src-root',
        input_api.PresubmitLocalPath()
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    # The delta is fed to the tool as JSON on stdin.
    (x, error) = process.communicate(input=input_api.json.dumps(delta))
    if process.returncode:
        output_api.PresubmitError(
            'One or more [Stable] mojom definitions appears to have been changed '
            'in a way that is not backward-compatible.',
def CheckDeprecationOfPreferences(input_api, output_api):
    """Removing a preference should come with a deprecation."""

    def FilterFile(affected_file):
        """Accept only .cc files and the like."""
        file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
        files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                         input_api.DEFAULT_FILES_TO_SKIP)
        return input_api.FilterSourceFile(
            files_to_check=file_inclusion_pattern,
            files_to_skip=files_to_skip)

    def ModifiedLines(affected_file):
        """Returns a list of tuples (line number, line text) of added and removed
        lines.

        Deleted lines share the same line number as the previous line.

        This relies on the scm diff output describing each changed code section
        with a line of the form

        ^@@ <old line num>,<old size> <new line num>,<new size> @@$
        """
        for line in affected_file.GenerateScmDiff().splitlines():
            # Extract <new line num> of the patch fragment (see format above).
            m = input_api.re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@',
                line_num = int(m.groups(1)[0])
            # '+'/'-' lines are content changes; '++'/'--' are diff headers.
            if ((line.startswith('+') and not line.startswith('++'))
                    or (line.startswith('-') and not line.startswith('--'))):
                modified_lines.append((line_num, line))
            # Deleted lines do not advance the new-file line counter.
            if not line.startswith('-'):
        return modified_lines

    def FindLineWith(lines, needle):
        """Returns the line number (i.e. index + 1) in `lines` containing `needle`.

        If 0 or >1 lines contain `needle`, -1 is returned.
        """
        matching_line_numbers = [
            # + 1 for 1-based counting of line numbers.
            i + 1 for i, line in enumerate(lines) if needle in line
        # Only an unambiguous single match is usable as a marker position.
        return matching_line_numbers[0] if len(
            matching_line_numbers) == 1 else -1

    def ModifiedPrefMigration(affected_file):
        """Returns whether the MigrateObsolete.*Pref functions were modified."""
        # Determine first and last lines of MigrateObsolete.*Pref functions.
        new_contents = affected_file.NewContents()
        range_1 = (FindLineWith(new_contents,
                                'BEGIN_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'),
                   FindLineWith(new_contents,
                                'END_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'))
        range_2 = (FindLineWith(new_contents,
                                'BEGIN_MIGRATE_OBSOLETE_PROFILE_PREFS'),
                   FindLineWith(new_contents,
                                'END_MIGRATE_OBSOLETE_PROFILE_PREFS'))
        # -1 means a marker was missing or duplicated; the check cannot run.
        if (-1 in range_1 + range_2):
            'Broken .*MIGRATE_OBSOLETE_.*_PREFS markers in browser_prefs.cc.'

        # Check whether any of the modified lines are part of the
        # MigrateObsolete.*Pref functions.
        for line_nr, line in ModifiedLines(affected_file):
            if (range_1[0] <= line_nr <= range_1[1]
                    or range_2[0] <= line_nr <= range_2[1]):

    register_pref_pattern = input_api.re.compile(r'Register.+Pref')
    browser_prefs_file_pattern = input_api.re.compile(
        r'chrome/browser/prefs/browser_prefs.cc')

    changes = input_api.AffectedFiles(include_deletes=True,
                                      file_filter=FilterFile)
    potential_problems = []
        for line in f.GenerateScmDiff().splitlines():
            # Check deleted lines for pref registrations.
            if (line.startswith('-') and not line.startswith('--')
                    and register_pref_pattern.search(line)):
                potential_problems.append('%s: %s' % (f.LocalPath(), line))

        if browser_prefs_file_pattern.search(f.LocalPath()):
            # If the developer modified the MigrateObsolete.*Prefs() functions, we
            # assume that they knew that they have to deprecate preferences and don't
            if ModifiedPrefMigration(f):
    except Exception as e:
        return [output_api.PresubmitError(str(e))]

    if potential_problems:
        output_api.PresubmitPromptWarning(
            'Discovered possible removal of preference registrations.\n\n'
            'Please make sure to properly deprecate preferences by clearing their\n'
            'value for a couple of milestones before finally removing the code.\n'
            'Otherwise data may stay in the preferences files forever. See\n'
            'Migrate*Prefs() in chrome/browser/prefs/browser_prefs.cc and\n'
            'chrome/browser/prefs/README.md for examples.\n'
            'This may be a false positive warning (e.g. if you move preference\n'
            'registrations to a different place).\n', potential_problems)
def CheckConsistentGrdChanges(input_api, output_api):
    """Changes to GRD files must be consistent for tools to read them."""
    changed_grds = input_api.AffectedFiles(
        include_deletes=False,
        file_filter=lambda f: f.LocalPath().endswith(('.grd')))
    # Pre-compile the (regex, message) pairs declared in _INVALID_GRD_FILE_LINE.
    invalid_file_regexes = [(input_api.re.compile(matcher), msg)
                            for matcher, msg in _INVALID_GRD_FILE_LINE]
    for grd in changed_grds:
        for i, line in enumerate(grd.NewContents()):
            for matcher, msg in invalid_file_regexes:
                if matcher.search(line):
                    output_api.PresubmitError(
                        # i is 0-based; report a 1-based line number.
                        'Problem on {grd}:{i} - {msg}'.format(
                            grd=grd.LocalPath(), i=i + 1, msg=msg)))
def CheckAssertAshOnlyCode(input_api, output_api):
    """Errors if a BUILD.gn file in an ash/ directory doesn't include
    assert(is_chromeos_ash).
    """

    def FileFilter(affected_file):
        """Includes directories known to be Ash only."""
        return input_api.FilterSourceFile(
                r'^ash/.*BUILD\.gn',  # Top-level src/ash/.
                r'.*/ash/.*BUILD\.gn'),  # Any path component.
            files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))

    # Only the opening of the assert call is matched; the full argument list
    # may span formatting variants.
    pattern = input_api.re.compile(r'assert\(is_chromeos_ash')
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=FileFilter):
        if (not pattern.search(input_api.ReadFile(f))):
            output_api.PresubmitError(
                'Please add assert(is_chromeos_ash) to %s. If that\'s not '
                'possible, please create and issue and add a comment such '
                'as:\n  # TODO(https://crbug.com/XXX): add '
                'assert(is_chromeos_ash) when ...' % f.LocalPath()))
def _IsRendererOnlyCppFile(input_api, affected_file):
    """Heuristically decides whether a C++ file is Renderer-process-only."""
    path = affected_file.LocalPath()
    # Non-C++ files are never considered Renderer-only.
    if not _IsCPlusPlusFile(input_api, path):

    # Any code under a "renderer" subdirectory is assumed to be Renderer-only.
    if "/renderer/" in path:

    # Blink's public/web API is only used/included by Renderer-only code. Note
    # that public/platform API may be used in non-Renderer processes (e.g. there
    # are some includes in code used by Utility, PDF, or Plugin processes).
    if "/blink/public/web/" in path:

    # We assume that everything else may be used outside of Renderer processes.
6610 # TODO(https://crbug.com/1273182): Remove these checks, once they are replaced
6611 # by the Chromium Clang Plugin (which will be preferable because it will
6612 # 1) report errors earlier - at compile-time and 2) cover more rules).
def CheckRawPtrUsage(input_api, output_api):
    """Rough checks that raw_ptr<T> usage guidelines are followed."""
    # The regex below matches "raw_ptr<" following a word boundary, but not in a
    raw_ptr_matcher = input_api.re.compile(r'^((?!//).)*\braw_ptr<')
    # Only look at Renderer-only C++ files (see _IsRendererOnlyCppFile).
    file_filter = lambda f: _IsRendererOnlyCppFile(input_api, f)
    for f, line_num, line in input_api.RightHandSideLines(file_filter):
        if raw_ptr_matcher.search(line):
            output_api.PresubmitError(
                'Problem on {path}:{line} - '\
                'raw_ptr<T> should not be used in Renderer-only code '\
                '(as documented in the "Pointers to unprotected memory" '\
                'section in //base/memory/raw_ptr.md)'.format(
                    path=f.LocalPath(), line=line_num)))
def CheckPythonShebang(input_api, output_api):
    """Checks that python scripts use #!/usr/bin/env instead of hardcoding a
    python interpreter path.
    """
    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=((_THIRD_PARTY_EXCEPT_BLINK,
                        r'third_party/blink/web_tests/external/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\.py$'])
    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            # A shebang is only meaningful on the very first line.
            if line_num == 1 and line.startswith('#!/usr/bin/python'):
                errors.append(f.LocalPath())

        output_api.PresubmitError(
            "Please use '#!/usr/bin/env python/2/3' as the shebang of %s" %
def CheckBatchAnnotation(input_api, output_api):
    """Checks that tests have either @Batch or @DoNotBatch annotation. If this
    is not an instrumentation test, disregard."""

    batch_annotation = input_api.re.compile(r'^\s*@Batch')
    do_not_batch_annotation = input_api.re.compile(r'^\s*@DoNotBatch')
    robolectric_test = input_api.re.compile(r'[rR]obolectric')
    test_class_declaration = input_api.re.compile(r'^\s*public\sclass.*Test')
    uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')

    missing_annotation_errors = []
    extra_annotation_errors = []

    def _FilterFile(affected_file):
        # Only Java test classes are subject to this check.
        return input_api.FilterSourceFile(
            files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
            files_to_check=[r'.*Test\.java$'])

    for f in input_api.AffectedSourceFiles(_FilterFile):
        batch_matched = None
        do_not_batch_matched = None
        is_instrumentation_test = True
        for line in f.NewContents():
            if robolectric_test.search(line) or uiautomator_test.search(line):
                # Skip Robolectric and UiAutomator tests.
                is_instrumentation_test = False
            if not batch_matched:
                batch_matched = batch_annotation.search(line)
            if not do_not_batch_matched:
                do_not_batch_matched = do_not_batch_annotation.search(line)
            # Annotations precede the class declaration, so scanning can stop
            # once the declaration is seen.
            test_class_declaration_matched = test_class_declaration.search(
        if test_class_declaration_matched:
        if (is_instrumentation_test and
                not batch_matched and
                not do_not_batch_matched):
            missing_annotation_errors.append(str(f.LocalPath()))
        if (not is_instrumentation_test and
                do_not_batch_matched)):
            extra_annotation_errors.append(str(f.LocalPath()))

    if missing_annotation_errors:
            output_api.PresubmitPromptWarning(
Instrumentation tests should use either @Batch or @DoNotBatch. Use
@Batch(Batch.PER_CLASS) in most cases. Use @Batch(Batch.UNIT_TESTS) when tests
have no side-effects. If the tests are not safe to run in batch, please use
@DoNotBatch with reasons.
See https://source.chromium.org/chromium/chromium/src/+/main:docs/testing/batching_instrumentation_tests.md
""", missing_annotation_errors))
    if extra_annotation_errors:
            output_api.PresubmitPromptWarning(
Robolectric tests do not need a @Batch or @DoNotBatch annotations.
""", extra_annotation_errors))
def CheckMockAnnotation(input_api, output_api):
    """Checks that we have annotated all Mockito.mock()-ed or Mockito.spy()-ed
    classes with @Mock or @Spy. If this is not an instrumentation test,
    disregard.
    """

    # This is just trying to be approximately correct. We are not writing a
    # Java parser, so special cases like statically importing mock() then
    # calling an unrelated non-mockito spy() function will cause a false
    package_name = input_api.re.compile(r'^package\s+(\w+(?:\.\w+)+);')
    mock_static_import = input_api.re.compile(
        r'^import\s+static\s+org.mockito.Mockito.(?:mock|spy);')
    import_class = input_api.re.compile(r'import\s+((?:\w+\.)+)(\w+);')
    mock_annotation = input_api.re.compile(r'^\s*@(?:Mock|Spy)')
    field_type = input_api.re.compile(r'(\w+)(?:<\w+>)?\s+\w+\s*(?:;|=)')
    mock_or_spy_function_call = r'(?:mock|spy)\(\s*(?:new\s*)?(\w+)(?:\.class|\()'
    fully_qualified_mock_function = input_api.re.compile(
        r'Mockito\.' + mock_or_spy_function_call)
    statically_imported_mock_function = input_api.re.compile(
        r'\W' + mock_or_spy_function_call)
    robolectric_test = input_api.re.compile(r'[rR]obolectric')
    uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')

    def _DoClassLookup(class_name, class_name_map, package):
        """Resolves a short class name to a fully qualified one."""
        found = class_name_map.get(class_name)
        if found is not None:
            # Not imported explicitly: assume it lives in the file's package.
            return package + '.' + class_name

    def _FilterFile(affected_file):
        # Only Java test classes are subject to this check.
        return input_api.FilterSourceFile(
            files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
            files_to_check=[r'.*Test\.java$'])

    mocked_by_function_classes = set()
    mocked_by_annotation_classes = set()
    class_to_filename = {}
    for f in input_api.AffectedSourceFiles(_FilterFile):
        # Mockito.mock(...) is the default form; a static import switches the
        # matcher to the bare mock(...)/spy(...) form.
        mock_function_regex = fully_qualified_mock_function
        next_line_is_annotated = False
        fully_qualified_class_map = {}

        for line in f.NewContents():
            if robolectric_test.search(line) or uiautomator_test.search(line):
                # Skip Robolectric and UiAutomator tests.

            m = package_name.search(line)
                package = m.group(1)

            if mock_static_import.search(line):
                mock_function_regex = statically_imported_mock_function

            m = import_class.search(line)
                fully_qualified_class_map[m.group(2)] = m.group(1) + m.group(2)

            if next_line_is_annotated:
                # The line after @Mock/@Spy declares the annotated field.
                next_line_is_annotated = False
                fully_qualified_class = _DoClassLookup(
                    field_type.search(line).group(1), fully_qualified_class_map,
                mocked_by_annotation_classes.add(fully_qualified_class)

            if mock_annotation.search(line):
                next_line_is_annotated = True

            m = mock_function_regex.search(line)
                fully_qualified_class = _DoClassLookup(m.group(1),
                    fully_qualified_class_map, package)
                # Skipping builtin classes, since they don't get optimized.
                if fully_qualified_class.startswith(
                        'android.') or fully_qualified_class.startswith(
                class_to_filename[fully_qualified_class] = str(f.LocalPath())
                mocked_by_function_classes.add(fully_qualified_class)

    # Classes mocked via function calls but never declared with @Mock/@Spy.
    missed_classes = mocked_by_function_classes - mocked_by_annotation_classes
        error_locations = []
        for c in missed_classes:
            error_locations.append(c + ' in ' + class_to_filename[c])
            output_api.PresubmitPromptWarning(
Mockito.mock()/spy() cause issues with our Java optimizer. You have 3 options:
1) If the mocked variable can be a class member, annotate the member with
2) If the mocked variable cannot be a class member, create a dummy member
   variable of that type, annotated with @Mock/@Spy. This dummy does not need
   to be used or initialized in any way.
3) If the mocked type is definitely not going to be optimized, whether it's a
   builtin type which we don't ship, or a class you know R8 will treat
   specially, you can ignore this warning.
""", error_locations))
def CheckNoJsInIos(input_api, output_api):
    """Checks to make sure that JavaScript files are not used on iOS."""

    def _FilterFile(affected_file):
        # .js files under ios/ paths, excluding third-party code.
        return input_api.FilterSourceFile(
            files_to_skip=input_api.DEFAULT_FILES_TO_SKIP +
            (r'^ios/third_party/*', r'^third_party/*'),
            files_to_check=[r'^ios/.*\.js$', r'.*/ios/.*\.js$'])

    # Collect filenames of all removed JS files.
    for f in input_api.AffectedSourceFiles(_FilterFile):
        local_path = f.LocalPath()

        if input_api.os_path.splitext(local_path)[1] == '.js' and f.Action() == 'D':
            deleted_files.append(input_api.os_path.basename(local_path))

    for f in input_api.AffectedSourceFiles(_FilterFile):
        local_path = f.LocalPath()

        if input_api.os_path.splitext(local_path)[1] == '.js':
            if f.Action() == 'A':
                # An added file whose basename matches a deleted one is
                # treated as a move, not a new script.
                if input_api.os_path.basename(local_path) in deleted_files:
                    # This script was probably moved rather than newly created.
                    # Present a warning instead of an error for these cases.
                    moved_paths.append(local_path)
                    error_paths.append(local_path)
            elif f.Action() != 'D':
                # Pre-existing, modified JS only gets a warning.
                warning_paths.append(local_path)

        results.append(output_api.PresubmitPromptWarning(
            'TypeScript is now fully supported for iOS feature scripts. '
            'Consider converting JavaScript files to TypeScript. See '
            '//ios/web/public/js_messaging/README.md for more details.',

        results.append(output_api.PresubmitPromptWarning(
            'Do not use JavaScript on iOS for new files as TypeScript is '
            'fully supported. (If this is a moved file, you may leave the '
            'script unconverted.) See //ios/web/public/js_messaging/README.md '
            'for help using scripts on iOS.', moved_paths))

        results.append(output_api.PresubmitError(
            'Do not use JavaScript on iOS as TypeScript is fully supported. '
            'See //ios/web/public/js_messaging/README.md for help using '
            'scripts on iOS.', error_paths))
def CheckLibcxxRevisionsMatch(input_api, output_api):
    """Check to make sure the libc++ version matches across deps files."""
    # Disable check for changes to sub-repositories.
    if input_api.PresubmitLocalPath() != input_api.change.RepositoryRoot():

    # The two files that must agree on libcxx_revision.
    DEPS_FILES = [ 'DEPS', 'buildtools/deps_revisions.gni' ]

    # Normalize path separators so the comparison works on Windows too.
    file_filter = lambda f: f.LocalPath().replace(
        input_api.os_path.sep, '/') in DEPS_FILES
    changed_deps_files = input_api.AffectedFiles(file_filter=file_filter)
    if not changed_deps_files:

    def LibcxxRevision(file):
        """Extracts the libcxx_revision value from a deps file."""
        file = input_api.os_path.join(input_api.PresubmitLocalPath(),
        return input_api.re.search(
            r'libcxx_revision.*[:=].*[\'"](\w+)[\'"]',
            input_api.ReadFile(file)).group(1)

    # All files agreeing yields a single-element set.
    if len(set([LibcxxRevision(f) for f in DEPS_FILES])) == 1:

    return [output_api.PresubmitError(
        'libcxx_revision not equal across %s' % ', '.join(DEPS_FILES),
        changed_deps_files)]