1 # Copyright 2012 The Chromium Authors
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts/
8 for more details about the presubmit API built into depot_tools.
11 from typing import Callable
12 from typing import Optional
13 from typing import Sequence
14 from dataclasses import dataclass
# Version of the presubmit API this script is written against (see the
# depot_tools presubmit documentation referenced in the module docstring).
PRESUBMIT_VERSION = '2.0.0'
18 # This line is 'magic' in that git-cl looks for it to decide whether to
19 # use Python3 instead of Python2 when running the code in this file.
24 (r"chrome/android/webapk/shell_apk/src/org/chromium"
25 r"/webapk/lib/runtime_library/IWebApkApi.java"),
26 # File needs to write to stdout to emulate a tool it's replacing.
27 r"chrome/updater/mac/keystone/ksadmin.mm",
29 (r"^components/variations/proto/devtools/"
30 r"client_variations.js"),
31 # These are video files, not typescript.
32 r"^media/test/data/.*.ts",
33 r"^native_client_sdksrc/build_tools/make_rules.py",
34 r"^native_client_sdk/src/build_tools/make_simple.py",
35 r"^native_client_sdk/src/tools/.*.mk",
36 r"^net/tools/spdyshark/.*",
38 r"^third_party/blink/.*",
39 r"^third_party/breakpad/.*",
40 # sqlite is an imported third party dependency.
41 r"^third_party/sqlite/.*",
45 r".+_pb2(_grpc)?\.py$",
47 r"^gpu/config/.*_list_json\.cc$",
48 r"tools/md_browser/.*\.css$",
49 # Test pages for Maps telemetry tests.
50 r"tools/perf/page_sets/maps_perf_test.*",
51 # Test pages for WebRTC telemetry tests.
52 r"tools/perf/page_sets/webrtc_cases.*",
55 _EXCLUDED_SET_NO_PARENT_PATHS = (
56 # It's for historical reasons that blink isn't a top level directory, where
57 # it would be allowed to have "set noparent" to avoid top level owners
58 # accidentally +1ing changes.
59 'third_party/blink/OWNERS',
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files. Anchored only at the end ('$'), so it matches the
# extension suffix of whatever path pattern it is appended to.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Fragment of a regular expression that matches C++ and Objective-C++
# header files. Anchored only at the end ('$'), like
# _IMPLEMENTATION_EXTENSIONS above.
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
73 # Paths with sources that don't use //base.
74 _NON_BASE_DEPENDENT_PATHS = (
75 r"^chrome/browser/browser_switcher/bho/",
80 # Regular expression that matches code only used for test binaries
82 _TEST_CODE_EXCLUDED_PATHS = (
83 r'.*/(fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
84 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
85 # Test suite files, like:
87 # bar_unittest_mac.cc (suffix)
88 # baz_unittests.cc (plural)
89 r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(s)?(_[a-z]+)?%s' %
90 _IMPLEMENTATION_EXTENSIONS,
91 r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
92 r'.+sync_service_impl_harness%s' % _IMPLEMENTATION_EXTENSIONS,
93 r'.*/(test|tool(s)?)/.*',
94 # content_shell is used for running content_browsertests.
97 r'content/web_test/.*',
98 # Non-production example code.
100 # Launcher for running iOS tests on the simulator.
101 r'testing/iossim/iossim\.mm$',
102 # EarlGrey app side code for tests.
103 r'ios/.*_app_interface\.mm$',
104 # Views Examples code
105 r'ui/views/examples/.*',
# Regular expression fragment matching paths under third_party/ except
# third_party/blink/ (the '(?!blink/)' negative lookahead excludes it).
_THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
112 _TEST_ONLY_WARNING = (
113 'You might be calling functions intended only for testing from\n'
114 'production code. If you are doing this from inside another method\n'
115 'named as *ForTesting(), then consider exposing things to have tests\n'
116 'make that same call directly.\n'
117 'If that is not possible, you may put a comment on the same line with\n'
119 'to tell the PRESUBMIT script that the code is inside a *ForTesting()\n'
120 'method and can be ignored. Do not do this inside production code.\n'
121 'The android-binary-size trybot will block if the method exists in the\n'
127 # String pattern. If the pattern begins with a slash, the pattern will be
128 # treated as a regular expression instead.
130 # Explanation as a sequence of strings. Each string in the sequence will be
131 # printed on its own line.
132 explanation: Sequence[str]
133 # Whether or not to treat this ban as a fatal error. If unspecified,
135 treat_as_error: Optional[bool] = None
136 # Paths that should be excluded from the ban check. Each string is a regular
137 # expression that will be matched against the path of the file being checked
138 # relative to the root of the source tree.
139 excluded_paths: Optional[Sequence[str]] = None
142 _BANNED_JAVA_IMPORTS : Sequence[BanRule] = (
144 'import java.net.URI;',
146 'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
149 (r'net/android/javatests/src/org/chromium/net/'
150 'AndroidProxySelectorTest\.java'),
151 r'components/cronet/',
152 r'third_party/robolectric/local/',
156 'import android.annotation.TargetApi;',
158 'Do not use TargetApi, use @androidx.annotation.RequiresApi instead. '
159 'RequiresApi ensures that any calls are guarded by the appropriate '
160 'SDK_INT check. See https://crbug.com/1116486.',
164 'import android.support.test.rule.UiThreadTestRule;',
166 'Do not use UiThreadTestRule, just use '
167 '@org.chromium.base.test.UiThreadTest on test methods that should run '
168 'on the UI thread. See https://crbug.com/1111893.',
172 'import android.support.test.annotation.UiThreadTest;',
173 ('Do not use android.support.test.annotation.UiThreadTest, use '
174 'org.chromium.base.test.UiThreadTest instead. See '
175 'https://crbug.com/1111893.',
179 'import android.support.test.rule.ActivityTestRule;',
181 'Do not use ActivityTestRule, use '
182 'org.chromium.base.test.BaseActivityTestRule instead.',
185 'components/cronet/',
190 _BANNED_JAVA_FUNCTIONS : Sequence[BanRule] = (
192 'StrictMode.allowThreadDiskReads()',
194 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
200 'StrictMode.allowThreadDiskWrites()',
202 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
208 '.waitForIdleSync()',
210 'Do not use waitForIdleSync as it masks underlying issues. There is '
211 'almost always something else you should wait on instead.',
217 _BANNED_OBJC_FUNCTIONS : Sequence[BanRule] = (
221 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
222 'prohibited. Please use CrTrackingArea instead.',
223 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
228 r'/NSTrackingArea\W',
230 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
232 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
237 'convertPointFromBase:',
239 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
240 'Please use |convertPoint:(point) fromView:nil| instead.',
241 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
246 'convertPointToBase:',
248 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
249 'Please use |convertPoint:(point) toView:nil| instead.',
250 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
255 'convertRectFromBase:',
257 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
258 'Please use |convertRect:(point) fromView:nil| instead.',
259 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
264 'convertRectToBase:',
266 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
267 'Please use |convertRect:(point) toView:nil| instead.',
268 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
273 'convertSizeFromBase:',
275 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
276 'Please use |convertSize:(point) fromView:nil| instead.',
277 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
282 'convertSizeToBase:',
284 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
285 'Please use |convertSize:(point) toView:nil| instead.',
286 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
291 r"/\s+UTF8String\s*]",
293 'The use of -[NSString UTF8String] is dangerous as it can return null',
294 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
295 'Please use |SysNSStringToUTF8| instead.',
300 r'__unsafe_unretained',
302 'The use of __unsafe_unretained is almost certainly wrong, unless',
303 'when interacting with NSFastEnumeration or NSInvocation.',
304 'Please use __weak in files build with ARC, nothing otherwise.',
311 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
312 'Foundation types is prohibited.',
318 _BANNED_IOS_OBJC_FUNCTIONS = (
322 'TEST() macro should not be used in Objective-C++ code as it does not ',
323 'drain the autorelease pool at the end of the test. Use TEST_F() ',
324 'macro instead with a fixture inheriting from PlatformTest (or a ',
330 r'/\btesting::Test\b',
332 'testing::Test should not be used in Objective-C++ code as it does ',
333 'not drain the autorelease pool at the end of the test. Use ',
334 'PlatformTest instead.'
339 ' systemImageNamed:',
341 '+[UIImage systemImageNamed:] should not be used to create symbols.',
342 'Instead use a wrapper defined in:',
343 'ios/chrome/browser/ui/icons/chrome_symbol.h'
347 'ios/chrome/browser/ui/icons/chrome_symbol.mm',
352 _BANNED_IOS_EGTEST_FUNCTIONS : Sequence[BanRule] = (
354 r'/\bEXPECT_OCMOCK_VERIFY\b',
356 'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
357 'it is meant for GTests. Use [mock verify] instead.'
363 _BANNED_CPP_FUNCTIONS : Sequence[BanRule] = (
365 r'/\busing namespace ',
367 'Using directives ("using namespace x") are banned by the Google Style',
368 'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
369 'Explicitly qualify symbols or use using declarations ("using x::foo").',
372 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
374 # Make sure that gtest's FRIEND_TEST() macro is not used; the
375 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
376 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
380 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
381 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
389 'Overriding setMatrixClip() is prohibited; ',
390 'the base function is deprecated. ',
398 'The use of SkRefPtr is prohibited. ',
399 'Please use sk_sp<> instead.'
407 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
408 'Please use sk_sp<> instead.'
416 'The use of SkAutoTUnref is dangerous because it implicitly ',
417 'converts to a raw pointer. Please use sk_sp<> instead.'
425 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
426 'because it implicitly converts to a raw pointer. ',
427 'Please use sk_sp<> instead.'
433 r'/HANDLE_EINTR\(.*close',
435 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
436 'descriptor will be closed, and it is incorrect to retry the close.',
437 'Either call close directly and ignore its return value, or wrap close',
438 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
444 r'/IGNORE_EINTR\((?!.*close)',
446 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
447 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
451 # Files that #define IGNORE_EINTR.
452 r'^base/posix/eintr_wrapper\.h$',
453 r'^ppapi/tests/test_broker\.cc$',
459 'Do not introduce new v8::Extensions into the code base, use',
460 'gin::Wrappable instead. See http://crbug.com/334679',
464 r'extensions/renderer/safe_builtins\.*',
468 '#pragma comment(lib,',
470 'Specify libraries to link with in build files and not in the source.',
474 r'^base/third_party/symbolize/.*',
475 r'^third_party/abseil-cpp/.*',
479 r'/base::SequenceChecker\b',
481 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
487 r'/base::ThreadChecker\b',
489 'Consider using THREAD_CHECKER macros instead of the class directly.',
495 r'/\b(?!(Sequenced|SingleThread))\w*TaskRunner::(GetCurrentDefault|CurrentDefaultHandle)',
497 'It is not allowed to call these methods from the subclasses ',
498 'of Sequenced or SingleThread task runners.',
504 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
506 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
507 'deprecated (http://crbug.com/634507). Please avoid converting away',
508 'from the Time types in Chromium code, especially if any math is',
509 'being done on time values. For interfacing with platform/library',
510 'APIs, use FromMicroseconds() or InMicroseconds(), or one of the other',
511 'type converter methods instead. For faking TimeXXX values (for unit',
512 'testing only), use TimeXXX() + Microseconds(N). For',
513 'other use cases, please contact base/time/OWNERS.',
519 'CallJavascriptFunctionUnsafe',
521 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
522 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
523 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
527 r'^content/browser/webui/web_ui_impl\.(cc|h)$',
528 r'^content/public/browser/web_ui\.h$',
529 r'^content/public/test/test_web_ui\.(cc|h)$',
535 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
536 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
537 "Chrome's tracing, making their memory usage visible.",
541 r'^third_party/leveldatabase/.*\.(cc|h)$',
545 'leveldb::NewMemEnv',
547 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
548 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
549 "to Chrome's tracing, making their memory usage visible.",
553 r'^third_party/leveldatabase/.*\.(cc|h)$',
557 'RunLoop::QuitCurrent',
559 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
560 'methods of a specific RunLoop instance instead.',
566 'base::ScopedMockTimeMessageLoopTaskRunner',
568 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
569 'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
570 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
571 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
572 'with gab@ first if you think you need it)',
580 'Using std::regex adds unnecessary binary size to Chrome. Please use',
581 're2::RE2 instead (crbug.com/755321)',
584 # Abseil's benchmarks never linked into chrome.
585 ['third_party/abseil-cpp/.*_benchmark.cc'],
590 'std::stoi uses exceptions to communicate results. ',
591 'Use base::StringToInt() instead.',
594 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
599 'std::stol uses exceptions to communicate results. ',
600 'Use base::StringToInt() instead.',
603 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
608 'std::stoul uses exceptions to communicate results. ',
609 'Use base::StringToUint() instead.',
612 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
617 'std::stoll uses exceptions to communicate results. ',
618 'Use base::StringToInt64() instead.',
621 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
626 'std::stoull uses exceptions to communicate results. ',
627 'Use base::StringToUint64() instead.',
630 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
635 'std::stof uses exceptions to communicate results. ',
636 'For locale-independent values, e.g. reading numbers from disk',
637 'profiles, use base::StringToDouble().',
638 'For user-visible values, parse using ICU.',
641 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
646 'std::stod uses exceptions to communicate results. ',
647 'For locale-independent values, e.g. reading numbers from disk',
648 'profiles, use base::StringToDouble().',
649 'For user-visible values, parse using ICU.',
652 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
657 'std::stold uses exceptions to communicate results. ',
658 'For locale-independent values, e.g. reading numbers from disk',
659 'profiles, use base::StringToDouble().',
660 'For user-visible values, parse using ICU.',
663 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
666 r'/\bstd::to_string\b',
668 'std::to_string is locale dependent and slower than alternatives.',
669 'For locale-independent strings, e.g. writing numbers to disk',
670 'profiles, use base::NumberToString().',
671 'For user-visible strings, use base::FormatNumber() and',
672 'the related functions in base/i18n/number_formatting.h.',
674 False, # Only a warning since it is already used.
675 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
678 r'/\bstd::shared_ptr\b',
680 'std::shared_ptr should not be used. Use scoped_refptr instead.',
684 # Needed for interop with third-party library.
685 '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
686 'array_buffer_contents\.(cc|h)',
687 '^third_party/blink/renderer/bindings/core/v8/' +
688 'v8_wasm_response_extensions.cc',
689 '^gin/array_buffer\.(cc|h)',
690 '^chrome/services/sharing/nearby/',
691 # Needed for interop with third-party library libunwindstack.
692 '^base/profiler/libunwindstack_unwinder_android\.(cc|h)',
693 # gRPC provides some C++ libraries that use std::shared_ptr<>.
694 '^chromeos/ash/services/libassistant/grpc/',
695 '^chromecast/cast_core/grpc',
696 '^chromecast/cast_core/runtime/browser',
697 '^ios/chrome/test/earl_grey/chrome_egtest_plugin_client\.(mm|h)',
698 # Fuchsia provides C++ libraries that use std::shared_ptr<>.
699 '^base/fuchsia/filtered_service_directory\.(cc|h)',
700 '^base/fuchsia/service_directory_test_base\.h',
701 '.*fuchsia.*test\.(cc|h)',
702 # Needed for clang plugin tests
703 '^tools/clang/plugins/tests/',
704 _THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
707 r'/\bstd::weak_ptr\b',
709 'std::weak_ptr should not be used. Use base::WeakPtr instead.',
712 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
717 'long long is banned. Use stdint.h if you need a 64 bit number.',
719 False, # Only a warning since it is already used.
720 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
723 r'\b(absl|std)::any\b',
725 'absl::any / std::any are not safe to use in a component build.',
728 # Not an error in third party folders, though it probably should be :)
729 [_THIRD_PARTY_EXCEPT_BLINK],
734 'std::bind is banned because of lifetime risks.',
735 'Use base::BindOnce or base::BindRepeating instead.',
738 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
741 r'/\bstd::optional\b',
743 'std::optional is banned. Use absl::optional instead.',
746 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
749 r'/\b#include <chrono>\b',
751 '<chrono> overlaps with Time APIs in base. Keep using',
755 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
758 r'/\b#include <exception>\b',
760 'Exceptions are banned and disabled in Chromium.',
763 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
766 r'/\bstd::function\b',
768 'std::function is banned. Instead use base::OnceCallback or ',
769 'base::RepeatingCallback, which directly support Chromium\'s weak ',
770 'pointers, ref counting and more.',
774 # Has tests that template trait helpers don't unintentionally match
776 r'base/functional/callback_helpers_unittest\.cc',
777 # Required to implement interfaces from the third-party perfetto
779 r'base/tracing/perfetto_task_runner\.cc',
780 r'base/tracing/perfetto_task_runner\.h',
781 # Needed for interop with the third-party nearby library type
782 # location::nearby::connections::ResultCallback.
783 'chrome/services/sharing/nearby/nearby_connections_conversions\.cc'
784 # Needed for interop with the internal libassistant library.
785 'chromeos/ash/services/libassistant/callback_utils\.h',
786 # Needed for interop with Fuchsia fidl APIs.
787 'fuchsia_web/webengine/browser/context_impl_browsertest\.cc',
788 'fuchsia_web/webengine/browser/cookie_manager_impl_unittest\.cc',
789 'fuchsia_web/webengine/browser/media_player_impl_unittest\.cc',
790 # Required to interop with interfaces from the third-party perfetto
792 'services/tracing/public/cpp/perfetto/custom_event_recorder\.cc',
793 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.cc',
794 'services/tracing/public/cpp/perfetto/perfetto_traced_process\.h',
795 'services/tracing/public/cpp/perfetto/perfetto_tracing_backend\.cc',
796 'services/tracing/public/cpp/perfetto/producer_client\.cc',
797 'services/tracing/public/cpp/perfetto/producer_client\.h',
798 'services/tracing/public/cpp/perfetto/producer_test_utils\.cc',
799 'services/tracing/public/cpp/perfetto/producer_test_utils\.h',
800 # Required for interop with the third-party webrtc library.
801 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.cc',
802 'third_party/blink/renderer/modules/peerconnection/mock_peer_connection_impl\.h',
804 # TODO(https://crbug.com/1364577): Various uses that should be
805 # migrated to something else.
806 # Should use base::OnceCallback or base::RepeatingCallback.
807 'base/allocator/dispatcher/initializer_unittest\.cc',
808 'chrome/browser/ash/accessibility/speech_monitor\.cc',
809 'chrome/browser/ash/accessibility/speech_monitor\.h',
810 'chrome/browser/ash/login/ash_hud_login_browsertest\.cc',
811 'chromecast/base/observer_unittest\.cc',
812 'chromecast/browser/cast_web_view\.h',
813 'chromecast/public/cast_media_shlib\.h',
814 'device/bluetooth/floss/exported_callback_manager\.h',
815 'device/bluetooth/floss/floss_dbus_client\.h',
816 'device/fido/cable/v2_handshake_unittest\.cc',
817 'device/fido/pin\.cc',
818 'services/tracing/perfetto/test_utils\.h',
819 # Should use base::FunctionRef.
820 'chrome/browser/media/webrtc/test_stats_dictionary\.cc',
821 'chrome/browser/media/webrtc/test_stats_dictionary\.h',
822 'chromeos/ash/services/libassistant/device_settings_controller\.cc',
823 'components/browser_ui/client_certificate/android/ssl_client_certificate_request\.cc',
824 'components/gwp_asan/client/sampling_malloc_shims_unittest\.cc',
825 'content/browser/font_unique_name_lookup/font_unique_name_lookup_unittest\.cc',
826 # Does not need std::function at all.
827 'components/omnibox/browser/autocomplete_result\.cc',
828 'device/fido/win/webauthn_api\.cc',
829 'media/audio/alsa/alsa_util\.cc',
830 'media/remoting/stream_provider\.h',
831 'sql/vfs_wrapper\.cc',
832 # TODO(https://crbug.com/1364585): Remove usage and exception list
834 'extensions/renderer/api/automation/automation_internal_custom_bindings\.cc',
835 'extensions/renderer/api/automation/automation_internal_custom_bindings\.h',
836 # TODO(https://crbug.com/1364579): Remove usage and exception list
838 'ui/views/controls/focus_ring\.h',
840 # Various pre-existing uses in //tools that is low-priority to fix.
841 'tools/binary_size/libsupersize/viewer/caspian/diff\.cc',
842 'tools/binary_size/libsupersize/viewer/caspian/model\.cc',
843 'tools/binary_size/libsupersize/viewer/caspian/model\.h',
844 'tools/binary_size/libsupersize/viewer/caspian/tree_builder\.h',
845 'tools/clang/base_bind_rewriters/BaseBindRewriters\.cpp',
847 # Not an error in third_party folders.
848 _THIRD_PARTY_EXCEPT_BLINK
852 r'/\b#include <random>\b',
854 'Do not use any random number engines from <random>. Instead',
855 'use base::RandomBitGenerator.',
858 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
861 r'/\b#include <X11/',
863 'Do not use Xlib. Use xproto (from //ui/gfx/x:xproto) instead.',
866 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
871 'std::ratio is banned by the Google Style Guide.',
874 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
877 ('base::ThreadRestrictions::ScopedAllowIO'),
879 'ScopedAllowIO is deprecated, use ScopedAllowBlocking instead.',
885 r'/\bRunMessageLoop\b',
887 'RunMessageLoop is deprecated, use RunLoop instead.',
895 'RunThisRunLoop is deprecated, use RunLoop directly instead.',
901 'RunAllPendingInMessageLoop()',
903 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
904 "if you're convinced you need this.",
910 'RunAllPendingInMessageLoop(BrowserThread',
912 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
913 'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
914 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
915 'async events instead of flushing threads.',
921 r'MessageLoopRunner',
923 'MessageLoopRunner is deprecated, use RunLoop instead.',
929 'GetDeferredQuitTaskForRunLoop',
931 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
932 "gab@ if you found a use case where this is the only solution.",
938 'sqlite3_initialize(',
940 'Instead of calling sqlite3_initialize(), depend on //sql, ',
941 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
945 r'^sql/initialization\.(cc|h)$',
946 r'^third_party/sqlite/.*\.(c|cc|h)$',
950 'std::random_shuffle',
952 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
953 'base::RandomShuffle instead.'
959 'ios/web/public/test/http_server',
961 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
969 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
970 'implicated in a few leaks. ReleaseAndGetAddressOf() is safe but ',
971 'operator& is generally recommended. So always use operator& instead. ',
972 'See http://crbug.com/914910 for more conversion guidance.'
980 'SHFileOperation was deprecated in Windows Vista, and there are less ',
981 'complex functions to achieve the same goals. Use IFileOperation for ',
982 'any esoteric actions instead.'
990 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
991 'Use base::win::WStringFromGUID instead.'
995 r'/base/win/win_util_unittest.cc',
1001 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
1002 'Use base::win::WStringFromGUID instead.'
1006 r'/base/win/win_util_unittest.cc',
1012 'The use of kCFAllocatorNull with the NoCopy creation of ',
1013 'CoreFoundation types is prohibited.',
1021 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
1022 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
1023 'StringTraits if you would like to convert between custom types and',
1024 'the wire format of mojom types.'
1028 r'^fuchsia_web/webengine/browser/url_request_rewrite_rules_manager\.cc$',
1029 r'^fuchsia_web/webengine/url_request_rewrite_type_converters\.cc$',
1030 r'^third_party/blink/.*\.(cc|h)$',
1031 r'^content/renderer/.*\.(cc|h)$',
1035 'GetInterfaceProvider',
1037 'InterfaceProvider is deprecated.',
1038 'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
1039 'or Platform::GetBrowserInterfaceBroker.'
1047 'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
1048 'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
1055 r'/\b(IFACE|STD)METHOD_?\(',
1057 'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
1058 'Instead, always use IFACEMETHODIMP in the declaration.'
1061 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1064 'set_owned_by_client',
1066 'set_owned_by_client is deprecated.',
1067 'views::View already owns the child views by default. This introduces ',
1068 'a competing ownership model which makes the code difficult to reason ',
1069 'about. See http://crbug.com/1044687 for more details.'
1075 'RemoveAllChildViewsWithoutDeleting',
1077 'RemoveAllChildViewsWithoutDeleting is deprecated.',
1078 'This method is deemed dangerous as, unless raw pointers are re-added,',
1079 'calls to this method introduce memory leaks.'
1085 r'/\bTRACE_EVENT_ASYNC_',
1087 'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
1088 'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
1092 r'^base/trace_event/.*',
1093 r'^base/tracing/.*',
1097 r'/\bbase::debug::DumpWithoutCrashingUnthrottled[(][)]',
1099 'base::debug::DumpWithoutCrashingUnthrottled() does not throttle',
1100 'dumps and may spam crash reports. Consider if the throttled',
1101 'variants suffice instead.',
1109 'Improper use of [base::win]::RoInitialize() has been implicated in a ',
1110 'few COM initialization leaks. Use base::win::ScopedWinrtInitializer ',
1111 'instead. See http://crbug.com/1197722 for more information.'
1115 r'^base/win/scoped_winrt_initializer\.cc$',
1121 'base::Watchdog is deprecated because it creates its own thread.',
1122 'Instead, manually start a timer on a SequencedTaskRunner.',
1130 'Do not use base::Passed. It is a legacy helper for capturing ',
1131 'move-only types with base::BindRepeating, but invoking the ',
1132 'resulting RepeatingCallback moves the captured value out of ',
1133 'the callback storage, and subsequent invocations may pass the ',
1134 'value in a valid but undefined state. Prefer base::BindOnce().',
1135 'See http://crbug.com/1326449 for context.'
1139 # False positive, but it is also fine to let bind internals reference
1141 r'^base[\\/]functional[\\/]bind\.h',
1142 r'^base[\\/]functional[\\/]bind_internal\.h',
1146 r'/\babsl::FunctionRef\b',
1148 'absl::FunctionRef is banned. Use base::FunctionRef instead.',
1152 # base::Bind{Once,Repeating} references absl::FunctionRef to disallow
1154 r'^base/functional/bind_internal\.h',
1155 # base::FunctionRef is implemented on top of absl::FunctionRef.
1156 r'^base/functional/function_ref.*\..+',
1157 # Not an error in third_party folders.
1158 _THIRD_PARTY_EXCEPT_BLINK,
1164 'Please use BASE_DECLARE_FEATURE() or BASE_FEATURE() instead of ',
1165 'directly declaring/defining features.'
1169 _THIRD_PARTY_EXCEPT_BLINK,
1174 _BANNED_MOJOM_PATTERNS : Sequence[BanRule] = (
1176 'handle<shared_buffer>',
1178 'Please use one of the more specific shared memory types instead:',
1179 ' mojo_base.mojom.ReadOnlySharedMemoryRegion',
1180 ' mojo_base.mojom.WritableSharedMemoryRegion',
1181 ' mojo_base.mojom.UnsafeSharedMemoryRegion',
1187 _IPC_ENUM_TRAITS_DEPRECATED = (
1188 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
1189 'See http://www.chromium.org/Home/chromium-security/education/'
1190 'security-tips-for-ipc')
1192 _LONG_PATH_ERROR = (
1193 'Some files included in this CL have file names that are too long (> 200'
1194 ' characters). If committed, these files will cause issues on Windows. See'
1195 ' https://crbug.com/612667 for more details.'
1198 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1199 r".*/AppHooksImpl\.java",
1200 r".*/BuildHooksAndroidImpl\.java",
1201 r".*/LicenseContentProvider\.java",
1202 r".*/PlatformServiceBridgeImpl.java",
1203 r".*chrome/android/feed/dummy/.*\.java",
1206 # List of image extensions that are used as resources in chromium.
1207 _IMAGE_EXTENSIONS = ['.svg', '.png', '.webp']
1209 # These paths contain test data and other known invalid JSON files.
1210 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
1212 r'testing/buildbot/',
1213 r'^components/policy/resources/policy_templates\.json$',
1214 r'^third_party/protobuf/',
1215 r'^third_party/blink/perf_tests/speedometer/resources/todomvc/learn.json',
1216 r'^third_party/blink/renderer/devtools/protocol\.json$',
1217 r'^third_party/blink/web_tests/external/wpt/',
1219 r'^tools/traceline/svgui/startup-release.json',
1220 # vscode configuration files allow comments
1224 # These are not checked on the public chromium-presubmit trybot.
1225 # Add files here that rely on .py files that exists only for target_os="android"
1227 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1228 'chrome/android/features/create_stripped_java_factory.pydeps',
1232 _GENERIC_PYDEPS_FILES = [
1233 'android_webview/test/components/run_webview_component_smoketest.pydeps',
1234 'android_webview/tools/run_cts.pydeps',
1235 'base/android/jni_generator/jni_generator.pydeps',
1236 'base/android/jni_generator/jni_registration_generator.pydeps',
1237 'build/android/apk_operations.pydeps',
1238 'build/android/devil_chromium.pydeps',
1239 'build/android/gyp/aar.pydeps',
1240 'build/android/gyp/aidl.pydeps',
1241 'build/android/gyp/allot_native_libraries.pydeps',
1242 'build/android/gyp/apkbuilder.pydeps',
1243 'build/android/gyp/assert_static_initializers.pydeps',
1244 'build/android/gyp/bytecode_processor.pydeps',
1245 'build/android/gyp/bytecode_rewriter.pydeps',
1246 'build/android/gyp/check_flag_expectations.pydeps',
1247 'build/android/gyp/compile_java.pydeps',
1248 'build/android/gyp/compile_resources.pydeps',
1249 'build/android/gyp/copy_ex.pydeps',
1250 'build/android/gyp/create_apk_operations_script.pydeps',
1251 'build/android/gyp/create_app_bundle.pydeps',
1252 'build/android/gyp/create_app_bundle_apks.pydeps',
1253 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1254 'build/android/gyp/create_java_binary_script.pydeps',
1255 'build/android/gyp/create_r_java.pydeps',
1256 'build/android/gyp/create_r_txt.pydeps',
1257 'build/android/gyp/create_size_info_files.pydeps',
1258 'build/android/gyp/create_test_apk_wrapper_script.pydeps',
1259 'build/android/gyp/create_ui_locale_resources.pydeps',
1260 'build/android/gyp/dex.pydeps',
1261 'build/android/gyp/dex_jdk_libs.pydeps',
1262 'build/android/gyp/dexsplitter.pydeps',
1263 'build/android/gyp/dist_aar.pydeps',
1264 'build/android/gyp/filter_zip.pydeps',
1265 'build/android/gyp/flatc_java.pydeps',
1266 'build/android/gyp/gcc_preprocess.pydeps',
1267 'build/android/gyp/generate_linker_version_script.pydeps',
1268 'build/android/gyp/ijar.pydeps',
1269 'build/android/gyp/jacoco_instr.pydeps',
1270 'build/android/gyp/java_cpp_enum.pydeps',
1271 'build/android/gyp/java_cpp_features.pydeps',
1272 'build/android/gyp/java_cpp_strings.pydeps',
1273 'build/android/gyp/java_google_api_keys.pydeps',
1274 'build/android/gyp/jinja_template.pydeps',
1275 'build/android/gyp/lint.pydeps',
1276 'build/android/gyp/merge_manifest.pydeps',
1277 'build/android/gyp/optimize_resources.pydeps',
1278 'build/android/gyp/prepare_resources.pydeps',
1279 'build/android/gyp/process_native_prebuilt.pydeps',
1280 'build/android/gyp/proguard.pydeps',
1281 'build/android/gyp/system_image_apks.pydeps',
1282 'build/android/gyp/trace_event_bytecode_rewriter.pydeps',
1283 'build/android/gyp/turbine.pydeps',
1284 'build/android/gyp/unused_resources.pydeps',
1285 'build/android/gyp/validate_static_library_dex_references.pydeps',
1286 'build/android/gyp/write_build_config.pydeps',
1287 'build/android/gyp/write_native_libraries_java.pydeps',
1288 'build/android/gyp/zip.pydeps',
1289 'build/android/incremental_install/generate_android_manifest.pydeps',
1290 'build/android/incremental_install/write_installer_json.pydeps',
1291 'build/android/pylib/results/presentation/test_results_presentation.pydeps',
1292 'build/android/resource_sizes.pydeps',
1293 'build/android/test_runner.pydeps',
1294 'build/android/test_wrapper/logdog_wrapper.pydeps',
1295 'build/lacros/lacros_resource_sizes.pydeps',
1296 'build/protoc_java.pydeps',
1297 'chrome/android/monochrome/scripts/monochrome_python_tests.pydeps',
1298 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1299 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1300 'chromecast/resource_sizes/chromecast_resource_sizes.pydeps',
1301 'components/cronet/tools/generate_javadoc.pydeps',
1302 'components/cronet/tools/jar_src.pydeps',
1303 'components/module_installer/android/module_desc_java.pydeps',
1304 'content/public/android/generate_child_service.pydeps',
1305 'net/tools/testserver/testserver.pydeps',
1306 'testing/scripts/run_isolated_script_test.pydeps',
1307 'testing/merge_scripts/standard_isolated_script_merge.pydeps',
1308 'testing/merge_scripts/standard_gtest_merge.pydeps',
1309 'testing/merge_scripts/code_coverage/merge_results.pydeps',
1310 'testing/merge_scripts/code_coverage/merge_steps.pydeps',
1311 'third_party/android_platform/development/scripts/stack.pydeps',
1312 'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
1313 'third_party/blink/renderer/bindings/scripts/check_generated_file_list.pydeps',
1314 'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
1315 'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
1316 'third_party/blink/renderer/bindings/scripts/validate_web_idl.pydeps',
1317 'third_party/blink/tools/blinkpy/web_tests/merge_results.pydeps',
1318 'third_party/blink/tools/merge_web_test_results.pydeps',
1319 'tools/binary_size/sizes.pydeps',
1320 'tools/binary_size/supersize.pydeps',
1321 'tools/perf/process_perf_results.pydeps',
# All .pydeps files known to this presubmit: Android-specific plus generic.
_ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
1328 # Bypass the AUTHORS check for these accounts.
1329 _KNOWN_ROBOTS = set(
1330 ) | set('%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)
1331 ) | set('%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)
1332 ) | set('%s@chops-service-accounts.iam.gserviceaccount.com' % s
1333 for s in ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
1334 'wpt-autoroller', 'chrome-weblayer-builder',
1335 'lacros-version-skew-roller', 'skylab-test-cros-roller',
1336 'infra-try-recipes-tester', 'lacros-tracking-roller',
1337 'lacros-sdk-version-roller')
1338 ) | set('%s@skia-public.iam.gserviceaccount.com' % s
1339 for s in ('chromium-autoroll', 'chromium-release-autoroll')
1340 ) | set('%s@skia-corp.google.com.iam.gserviceaccount.com' % s
1341 for s in ('chromium-internal-autoroll',)
1342 ) | set('%s@owners-cleanup-prod.google.com.iam.gserviceaccount.com' % s
1343 for s in ('swarming-tasks',))
1345 _INVALID_GRD_FILE_LINE = [
1346 (r'<file lang=.* path=.*', 'Path should come before lang in GRD files.')
1349 def _IsCPlusPlusFile(input_api, file_path):
1350 """Returns True if this file contains C++-like code (and not Python,
1351 Go, Java, MarkDown, ...)"""
1353 ext = input_api.os_path.splitext(file_path)[1]
1354 # This list is compatible with CppChecker.IsCppFile but we should
1355 # consider adding ".c" to it. If we do that we can use this function
1356 # at more places in the code.
1366 def _IsCPlusPlusHeaderFile(input_api, file_path):
1367 return input_api.os_path.splitext(file_path)[1] == ".h"
1370 def _IsJavaFile(input_api, file_path):
1371 return input_api.os_path.splitext(file_path)[1] == ".java"
1374 def _IsProtoFile(input_api, file_path):
1375 return input_api.os_path.splitext(file_path)[1] == ".proto"
1378 def _IsXmlOrGrdFile(input_api, file_path):
1379 ext = input_api.os_path.splitext(file_path)[1]
1380 return ext in ('.grd', '.xml')
# NOTE(review): gaps in the line-number gutter (1387, 1389-1391, 1396, etc.)
# indicate elided source lines — presumably the early return for clank repos,
# the build-file pattern entries and the `problems = []` initializer.
1383 def CheckNoUpstreamDepsOnClank(input_api, output_api):
1384 """Prevent additions of dependencies from the upstream repo on //clank."""
1385 # clank can depend on clank
1386 if input_api.change.RepositoryRoot().endswith('clank'):
1388 build_file_patterns = [
1392 excluded_files = [r'build[/\\]config[/\\]android[/\\]config\.gni']
# `bad_pattern` matches '//clank' only when it appears before any '#' comment.
1393 bad_pattern = input_api.re.compile(r'^[^#]*//clank')
1395 error_message = 'Disallowed import on //clank in an upstream build file:'
1397 def FilterFile(affected_file):
1398 return input_api.FilterSourceFile(affected_file,
1399 files_to_check=build_file_patterns,
1400 files_to_skip=excluded_files)
1403 for f in input_api.AffectedSourceFiles(FilterFile):
1404 local_path = f.LocalPath()
1405 for line_number, line in f.ChangedContents():
1406 if (bad_pattern.search(line)):
1407 problems.append('%s:%d\n %s' %
1408 (local_path, line_number, line.strip()))
1410 return [output_api.PresubmitPromptOrNotify(error_message, problems)]
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, `problems = []`, the `f` argument of FilterSourceFile,
# and the final return). Restore from upstream before executing.
1415 def CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
1416 """Attempts to prevent use of functions intended only for testing in
1417 non-testing code. For now this is just a best-effort implementation
1418 that ignores header files and may have some false positives. A
1419 better implementation would probably need a proper C++ parser.
1421 # We only scan .cc files and the like, as the declaration of
1422 # for-testing functions in header files are hard to distinguish from
1423 # calls to such functions without a proper C++ parser.
1424 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
1426 base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
1427 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' %
1428 base_function_pattern)
1429 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
1430 allowlist_pattern = input_api.re.compile(r'// IN-TEST$')
1431 exclusion_pattern = input_api.re.compile(
1432 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' %
1433 (base_function_pattern, base_function_pattern))
1434 # Avoid a false positive in this case, where the method name, the ::, and
1435 # the closing { are all on different lines due to line wrapping.
1436 # HelperClassForTesting::
1437 # HelperClassForTesting(
1440 method_defn_pattern = input_api.re.compile(r'[A-Za-z0-9_]+::$')
1442 def FilterFile(affected_file):
1443 files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
1444 input_api.DEFAULT_FILES_TO_SKIP)
1445 return input_api.FilterSourceFile(
1447 files_to_check=file_inclusion_pattern,
1448 files_to_skip=files_to_skip)
1451 for f in input_api.AffectedSourceFiles(FilterFile):
1452 local_path = f.LocalPath()
1453 in_method_defn = False
1454 for line_number, line in f.ChangedContents():
1455 if (inclusion_pattern.search(line)
1456 and not comment_pattern.search(line)
1457 and not exclusion_pattern.search(line)
1458 and not allowlist_pattern.search(line)
1459 and not in_method_defn):
1460 problems.append('%s:%d\n %s' %
1461 (local_path, line_number, line.strip()))
# Track whether this line ended with 'Name::' so the next line (a wrapped
# method definition) is not flagged — see the comment block above.
1462 in_method_defn = method_defn_pattern.search(line)
1466 output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, `problems = []`, the `x` argument line, a `continue`
# after the javadoc test, and the final return). Restore before executing.
1472 def CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
1473 """This is a simplified version of
1474 CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
1476 javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
1477 javadoc_end_re = input_api.re.compile(r'^\s*\*/')
1478 name_pattern = r'ForTest(s|ing)?'
1479 # Describes an occurrence of "ForTest*" inside a // comment.
1480 comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
1481 # Describes @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
1482 annotation_re = input_api.re.compile(r'@VisibleForTesting\(')
1484 inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
1485 # Ignore definitions. (Comments are ignored separately.)
1486 exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)
1489 sources = lambda x: input_api.FilterSourceFile(
1491 files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
1492 DEFAULT_FILES_TO_SKIP),
1493 files_to_check=[r'.*\.java$'])
1494 for f in input_api.AffectedFiles(include_deletes=False,
1495 file_filter=sources):
1496 local_path = f.LocalPath()
# Javadoc blocks are tracked line-by-line so matches inside them are skipped.
1497 is_inside_javadoc = False
1498 for line_number, line in f.ChangedContents():
1499 if is_inside_javadoc and javadoc_end_re.search(line):
1500 is_inside_javadoc = False
1501 if not is_inside_javadoc and javadoc_start_re.search(line):
1502 is_inside_javadoc = True
1503 if is_inside_javadoc:
1505 if (inclusion_re.search(line) and not comment_re.search(line)
1506 and not annotation_re.search(line)
1507 and not exclusion_re.search(line)):
1508 problems.append('%s:%d\n %s' %
1509 (local_path, line_number, line.strip()))
1513 output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `files = []`, a `continue`, the `files.append(...)` call and
# the surrounding if/return). Restore from upstream before executing.
1519 def CheckNoIOStreamInHeaders(input_api, output_api):
1520 """Checks to make sure no .h files include <iostream>."""
1522 pattern = input_api.re.compile(r'^#include\s*<iostream>',
1523 input_api.re.MULTILINE)
1524 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1525 if not f.LocalPath().endswith('.h'):
1527 contents = input_api.ReadFile(f)
1528 if pattern.search(contents):
1533 output_api.PresubmitError(
1534 'Do not #include <iostream> in header files, since it inserts static '
1535 'initialization into every file including the header. Instead, '
1536 '#include <ostream>. See http://crbug.com/94794', files)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `files = []` and the result-reporting tail). Restore from
# upstream before executing.
1541 def CheckNoStrCatRedefines(input_api, output_api):
1542 """Checks no windows headers with StrCat redefined are included directly."""
1544 files_to_check = (r'.+%s' % _HEADER_EXTENSIONS,
1545 r'.+%s' % _IMPLEMENTATION_EXTENSIONS)
1546 files_to_skip = (input_api.DEFAULT_FILES_TO_SKIP +
1547 _NON_BASE_DEPENDENT_PATHS)
1548 sources_filter = lambda f: input_api.FilterSourceFile(
1549 f, files_to_check=files_to_check, files_to_skip=files_to_skip)
1551 pattern_deny = input_api.re.compile(
1552 r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
1553 input_api.re.MULTILINE)
# Including windows_defines.inc is the sanctioned escape hatch.
1554 pattern_allow = input_api.re.compile(
1555 r'^#include\s"base/win/windows_defines.inc"', input_api.re.MULTILINE)
1556 for f in input_api.AffectedSourceFiles(sources_filter):
1557 contents = input_api.ReadFile(f)
1558 if pattern_deny.search(
1559 contents) and not pattern_allow.search(contents):
1560 files.append(f.LocalPath())
1564 output_api.PresubmitError(
1565 'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
1566 'directly since they pollute code with StrCat macro. Instead, '
1567 'include matching header from base/win. See http://crbug.com/856536',
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `problems = []`, a `continue`, and the if/return tail).
1573 def CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
1574 """Checks to make sure no source files use UNIT_TEST."""
1576 for f in input_api.AffectedFiles():
1577 if (not f.LocalPath().endswith(('.cc', '.mm'))):
1580 for line_num, line in f.ChangedContents():
# Matches 'UNIT_TEST' used as a token mid-line or at end of line.
1581 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
1582 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1587 output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
1588 '\n'.join(problems))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, `problems = []`, a `continue`, `maybe_lines = {}`, the
# `if disable_match:`/`if maybe_match:` guards and the return tail).
1592 def CheckNoDISABLETypoInTests(input_api, output_api):
1593 """Checks to prevent attempts to disable tests with DISABLE_ prefix.
1595 This test warns if somebody tries to disable a test with the DISABLE_ prefix
1596 instead of DISABLED_. To filter false positives, reports are only generated
1597 if a corresponding MAYBE_ line exists.
1601 # The following two patterns are looked for in tandem - is a test labeled
1602 # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED)
1603 maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
1604 disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')
1606 # This is for the case that a test is disabled on all platforms.
1607 full_disable_pattern = input_api.re.compile(
1608 r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
1609 input_api.re.MULTILINE)
1611 for f in input_api.AffectedFiles(False):
1612 if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
1615 # Search for MABYE_, DISABLE_ pairs.
1616 disable_lines = {} # Maps of test name to line number.
1618 for line_num, line in f.ChangedContents():
1619 disable_match = disable_pattern.search(line)
1621 disable_lines[disable_match.group(1)] = line_num
1622 maybe_match = maybe_pattern.search(line)
1624 maybe_lines[maybe_match.group(1)] = line_num
1626 # Search for DISABLE_ occurrences within a TEST() macro.
# A test is only reported when the same name appears with both MAYBE_ and
# DISABLE_, which filters out unrelated DISABLE_ identifiers.
1627 disable_tests = set(disable_lines.keys())
1628 maybe_tests = set(maybe_lines.keys())
1629 for test in disable_tests.intersection(maybe_tests):
1630 problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))
1632 contents = input_api.ReadFile(f)
1633 full_disable_match = full_disable_pattern.search(contents)
1634 if full_disable_match:
1635 problems.append(' %s' % f.LocalPath())
1640 output_api.PresubmitPromptWarning(
1641 'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
1642 '\n'.join(problems))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, `warnings = []`, a `continue`, `if maybe_match:` guards
# and the warnings-collection/return tail). Restore before executing.
1646 def CheckForgettingMAYBEInTests(input_api, output_api):
1647 """Checks to make sure tests disabled conditionally are not missing a
1648 corresponding MAYBE_ prefix.
1650 # Expect at least a lowercase character in the test name. This helps rule out
1651 # false positives with macros wrapping the actual tests name.
1652 define_maybe_pattern = input_api.re.compile(
1653 r'^\#define MAYBE_(?P<test_name>\w*[a-z]\w*)')
1654 # The test_maybe_pattern needs to handle all of these forms. The standard:
1655 # IN_PROC_TEST_F(SyncTest, MAYBE_Start) {
1656 # With a wrapper macro around the test name:
1657 # IN_PROC_TEST_F(SyncTest, E2E_ENABLED(MAYBE_Start)) {
1658 # And the odd-ball NACL_BROWSER_TEST_f format:
1659 # NACL_BROWSER_TEST_F(NaClBrowserTest, SimpleLoad, {
1660 # The optional E2E_ENABLED-style is handled with (\w*\()?
1661 # The NACL_BROWSER_TEST_F pattern is handled by allowing a trailing comma or
1663 test_maybe_pattern = (
1664 r'^\s*\w*TEST[^(]*\(\s*\w+,\s*(\w*\()?MAYBE_{test_name}[\),]')
1665 suite_maybe_pattern = r'^\s*\w*TEST[^(]*\(\s*MAYBE_{test_name}[\),]'
1668 # Read the entire files. We can't just read the affected lines, forgetting to
1669 # add MAYBE_ on a change would not show up otherwise.
1670 for f in input_api.AffectedFiles(False):
1671 if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
1673 contents = input_api.ReadFile(f)
1674 lines = contents.splitlines(True)
# current_position tracks the byte offset of the line just processed, so
# the regex search below only scans forward from the #define.
1675 current_position = 0
1676 warning_test_names = set()
1677 for line_num, line in enumerate(lines, start=1):
1678 current_position += len(line)
1679 maybe_match = define_maybe_pattern.search(line)
1681 test_name = maybe_match.group('test_name')
1682 # Do not warn twice for the same test.
1683 if (test_name in warning_test_names):
1685 warning_test_names.add(test_name)
1687 # Attempt to find the corresponding MAYBE_ test or suite, starting from
1688 # the current position.
1689 test_match = input_api.re.compile(
1690 test_maybe_pattern.format(test_name=test_name),
1691 input_api.re.MULTILINE).search(contents, current_position)
1692 suite_match = input_api.re.compile(
1693 suite_maybe_pattern.format(test_name=test_name),
1694 input_api.re.MULTILINE).search(contents, current_position)
1695 if not test_match and not suite_match:
1697 output_api.PresubmitPromptWarning(
1698 '%s:%d found MAYBE_ defined without corresponding test %s'
1699 % (f.LocalPath(), line_num, test_name)))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `errors = []`, a `continue`, `errors.append(` and the return).
1703 def CheckDCHECK_IS_ONHasBraces(input_api, output_api):
1704 """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
# Negative lookahead: matches DCHECK_IS_ON NOT followed by '()'.
1706 pattern = input_api.re.compile(r'\bDCHECK_IS_ON\b(?!\(\))',
1707 input_api.re.MULTILINE)
1708 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1709 if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
1711 for lnum, line in f.ChangedContents():
1712 if input_api.re.search(pattern, line):
1714 output_api.PresubmitError((
1715 '%s:%d: Use of DCHECK_IS_ON() must be written as "#if '
1716 + 'DCHECK_IS_ON()", not forgetting the parentheses.') %
1717 (f.LocalPath(), lnum)))
1721 # TODO(crbug/1138055): Reimplement CheckUmaHistogramChangesOnUpload check in a
1722 # more reliable way. See
1723 # https://chromium-review.googlesource.com/c/chromium/src/+/2500269
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `files = []`, `files.append(...)` and the if/return tail).
1726 def CheckFlakyTestUsage(input_api, output_api):
1727 """Check that FlakyTest annotation is our own instead of the android one"""
1728 pattern = input_api.re.compile(r'import android.test.FlakyTest;')
1730 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1731 if f.LocalPath().endswith('Test.java'):
1732 if pattern.search(input_api.ReadFile(f)):
1736 output_api.PresubmitError(
1737 'Use org.chromium.base.test.util.FlakyTest instead of '
1738 'android.test.FlakyTest', files)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably the `return [` wrapper, one message line, and the closing
# `]`/`return []` tail).
1743 def CheckNoDEPSGIT(input_api, output_api):
1744 """Make sure .DEPS.git is never modified manually."""
1745 if any(f.LocalPath().endswith('.DEPS.git')
1746 for f in input_api.AffectedFiles()):
1748 output_api.PresubmitError(
1749 'Never commit changes to .DEPS.git. This file is maintained by an\n'
1750 'automated system based on what\'s in DEPS and your changes will be\n'
1752 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
1753 'get-the-code#Rolling_DEPS\n'
1754 'for more information')
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `return []`, the `try:`, the 'gclient.py' argument line and
# the success/except return tails).
1759 def CheckValidHostsInDEPSOnUpload(input_api, output_api):
1760 """Checks that DEPS file deps are from allowed_hosts."""
1761 # Run only if DEPS file has been modified to annoy fewer bystanders.
1762 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
1764 # Outsource work to gclient verify
1766 gclient_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
1767 'third_party', 'depot_tools',
1769 input_api.subprocess.check_output(
1770 [input_api.python3_executable, gclient_path, 'verify'],
1771 stderr=input_api.subprocess.STDOUT)
1773 except input_api.subprocess.CalledProcessError as error:
1775 output_api.PresubmitError(
1776 'DEPS file must have only git dependencies.',
1777 long_text=error.output)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably the `ban_rule` parameter, `result = []`, several `return`s
# and the final `return result`). Restore before executing.
1781 def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
1783 """Helper method for checking for banned constructs.
1785 Returns an string composed of the name of the file, the line number where the
1786 match has been found and the additional text passed as |message| in case the
1787 target type name matches the text inside the line passed as parameter.
1791 # Ignore comments about banned types.
1792 if input_api.re.search(r"^ *//", line):
1794 # A // nocheck comment will bypass this error.
1795 if line.endswith(" nocheck"):
# Patterns beginning with '/' are treated as regexes (leading '/' stripped);
# all other patterns are plain substring matches.
1799 if ban_rule.pattern[0:1] == '/':
1800 regex = ban_rule.pattern[1:]
1801 if input_api.re.search(regex, line):
1803 elif ban_rule.pattern in line:
1807 result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
1808 for line in ban_rule.explanation:
1809 result.append(' %s' % line)
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `warnings = []`/`errors = []`, several `return True/False`
# lines in the nested helpers, and the result-assembly/return tail).
1814 def CheckNoBannedFunctions(input_api, output_api):
1815 """Make sure that banned functions are not used."""
1819 def IsExcludedFile(affected_file, excluded_paths):
1820 if not excluded_paths:
1823 local_path = affected_file.LocalPath()
1824 # Consistently use / as path separator to simplify the writing of regex
1826 local_path = local_path.replace(input_api.os_path.sep, '/')
1827 for item in excluded_paths:
1828 if input_api.re.match(item, local_path):
# A file is iOS Objective-C if it has an ObjC extension and 'ios' appears
# either as a basename component or as a path component.
1832 def IsIosObjcFile(affected_file):
1833 local_path = affected_file.LocalPath()
1834 if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m',
1837 basename = input_api.os_path.basename(local_path)
1838 if 'ios' in basename.split('_'):
1840 for sep in (input_api.os_path.sep, input_api.os_path.altsep):
1841 if sep and 'ios' in local_path.split(sep):
1845 def CheckForMatch(affected_file, line_num: int, line: str,
1847 if IsExcludedFile(affected_file, ban_rule.excluded_paths):
1850 problems = _GetMessageForMatchingType(input_api, f, line_num, line,
1853 if ban_rule.treat_as_error is not None and ban_rule.treat_as_error:
1854 errors.extend(problems)
1856 warnings.extend(problems)
# One scan per (file filter, ban-rule list) pair: Java, ObjC, iOS ObjC,
# egtests, C++, and mojom.
1858 file_filter = lambda f: f.LocalPath().endswith(('.java'))
1859 for f in input_api.AffectedFiles(file_filter=file_filter):
1860 for line_num, line in f.ChangedContents():
1861 for ban_rule in _BANNED_JAVA_FUNCTIONS:
1862 CheckForMatch(f, line_num, line, ban_rule)
1864 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
1865 for f in input_api.AffectedFiles(file_filter=file_filter):
1866 for line_num, line in f.ChangedContents():
1867 for ban_rule in _BANNED_OBJC_FUNCTIONS:
1868 CheckForMatch(f, line_num, line, ban_rule)
1870 for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
1871 for line_num, line in f.ChangedContents():
1872 for ban_rule in _BANNED_IOS_OBJC_FUNCTIONS:
1873 CheckForMatch(f, line_num, line, ban_rule)
1875 egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
1876 for f in input_api.AffectedFiles(file_filter=egtest_filter):
1877 for line_num, line in f.ChangedContents():
1878 for ban_rule in _BANNED_IOS_EGTEST_FUNCTIONS:
1879 CheckForMatch(f, line_num, line, ban_rule)
1881 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
1882 for f in input_api.AffectedFiles(file_filter=file_filter):
1883 for line_num, line in f.ChangedContents():
1884 for ban_rule in _BANNED_CPP_FUNCTIONS:
1885 CheckForMatch(f, line_num, line, ban_rule)
1887 file_filter = lambda f: f.LocalPath().endswith(('.mojom'))
1888 for f in input_api.AffectedFiles(file_filter=file_filter):
1889 for line_num, line in f.ChangedContents():
1890 for ban_rule in _BANNED_MOJOM_PATTERNS:
1891 CheckForMatch(f, line_num, line, ban_rule)
1897 output_api.PresubmitPromptWarning('Banned functions were used.\n' +
1898 '\n'.join(warnings)))
1901 output_api.PresubmitError('Banned functions were used.\n' +
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably the BanRule(...) constructor wrapping the pattern/message
# below, `warnings = []`, extra _GetMessageForMatchingType arguments, and
# the result/return tail). Restore before executing.
1905 def CheckNoLayoutCallsInTests(input_api, output_api):
1906 """Make sure there are no explicit calls to View::Layout() in tests"""
1909 r'/(\.|->)Layout\(\);',
1911 'Direct calls to View::Layout() are not allowed in tests. '
1912 'If the view must be laid out here, use RunScheduledLayout(view). It '
1913 'is found in //ui/views/test/views_test_utils.h. '
1914 'See http://crbug.com/1350521 for more details.',
# Only unit/browser/ui test sources are scanned.
1918 file_filter = lambda f: input_api.re.search(
1919 r'_(unittest|browsertest|ui_test).*\.(cc|mm)$', f.LocalPath())
1920 for f in input_api.AffectedFiles(file_filter = file_filter):
1921 for line_num, line in f.ChangedContents():
1922 problems = _GetMessageForMatchingType(input_api, f,
1926 warnings.extend(problems)
1930 output_api.PresubmitPromptWarning(
1931 'Banned call to View::Layout() in tests.\n\n'.join(warnings)))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `errors = []`, the remaining _GetMessageForMatchingType
# arguments, and the result/return tail).
1934 def _CheckAndroidNoBannedImports(input_api, output_api):
1935 """Make sure that banned java imports are not used."""
1938 file_filter = lambda f: f.LocalPath().endswith(('.java'))
1939 for f in input_api.AffectedFiles(file_filter=file_filter):
1940 for line_num, line in f.ChangedContents():
1941 for ban_rule in _BANNED_JAVA_IMPORTS:
1942 # Consider merging this into the above function. There is no
1943 # real difference anymore other than helping with a little
1944 # bit of boilerplate text. Doing so means things like
1945 # `treat_as_error` will also be uniformly handled.
1946 problems = _GetMessageForMatchingType(input_api, f, line_num,
1949 errors.extend(problems)
1953 output_api.PresubmitError('Banned imports were used.\n' +
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `files = []`, two `continue`s, `files.append(...)` and the
# if/return tail). Docstring also appears copy-pasted from the banned-
# functions check — consider correcting it upstream.
1958 def CheckNoPragmaOnce(input_api, output_api):
1959 """Make sure that banned functions are not used."""
1961 pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
1962 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1963 if not f.LocalPath().endswith('.h'):
# Known generated header exempt from the rule.
1965 if f.LocalPath().endswith('com_imported_mstscax.h'):
1967 contents = input_api.ReadFile(f)
1968 if pattern.search(contents):
1973 output_api.PresubmitError(
1974 'Do not use #pragma once in header files.\n'
1975 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably `problems = []`, a `continue`, and the if/return tail).
1981 def CheckNoTrinaryTrueFalse(input_api, output_api):
1982 """Checks to make sure we don't introduce use of foo ? true : false."""
1984 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
1985 for f in input_api.AffectedFiles():
1986 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1989 for line_num, line in f.ChangedContents():
1990 if pattern.match(line):
1991 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1996 output_api.PresubmitPromptWarning(
1997 'Please consider avoiding the "? true : false" pattern if possible.\n'
1998 + '\n'.join(problems))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, the break/else of the early-exit scan, the try/finally
# around the sys.path hack, `import checkdeps`, list initializers, the
# CheckAdded* argument lines, `results = []` and the return tail).
2002 def CheckUnwantedDependencies(input_api, output_api):
2003 """Runs checkdeps on #include and import statements added in this
2004 change. Breaking - rules is an error, breaking ! rules is a
2007 # Return early if no relevant file types were modified.
2008 for f in input_api.AffectedFiles():
2009 path = f.LocalPath()
2010 if (_IsCPlusPlusFile(input_api, path) or _IsProtoFile(input_api, path)
2011 or _IsJavaFile(input_api, path)):
2017 # We need to wait until we have an input_api object and use this
2018 # roundabout construct to import checkdeps because this file is
2019 # eval-ed and thus doesn't have __file__.
2020 original_sys_path = sys.path
2022 sys.path = sys.path + [
2023 input_api.os_path.join(input_api.PresubmitLocalPath(),
2024 'buildtools', 'checkdeps')
2027 from rules import Rule
2029 # Restore sys.path to what it was before.
2030 sys.path = original_sys_path
# Changed lines are bucketed by language so each checkdeps pass sees only
# the relevant files.
2034 added_java_imports = []
2035 for f in input_api.AffectedFiles():
2036 if _IsCPlusPlusFile(input_api, f.LocalPath()):
2037 changed_lines = [line for _, line in f.ChangedContents()]
2038 added_includes.append([f.AbsoluteLocalPath(), changed_lines])
2039 elif _IsProtoFile(input_api, f.LocalPath()):
2040 changed_lines = [line for _, line in f.ChangedContents()]
2041 added_imports.append([f.AbsoluteLocalPath(), changed_lines])
2042 elif _IsJavaFile(input_api, f.LocalPath()):
2043 changed_lines = [line for _, line in f.ChangedContents()]
2044 added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
2046 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
2048 error_descriptions = []
2049 warning_descriptions = []
2050 error_subjects = set()
2051 warning_subjects = set()
2053 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
2055 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2056 description_with_path = '%s\n %s' % (path, rule_description)
2057 if rule_type == Rule.DISALLOW:
2058 error_descriptions.append(description_with_path)
2059 error_subjects.add("#includes")
2061 warning_descriptions.append(description_with_path)
2062 warning_subjects.add("#includes")
2064 for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
2066 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2067 description_with_path = '%s\n %s' % (path, rule_description)
2068 if rule_type == Rule.DISALLOW:
2069 error_descriptions.append(description_with_path)
2070 error_subjects.add("imports")
2072 warning_descriptions.append(description_with_path)
2073 warning_subjects.add("imports")
2075 for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
2076 added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
2077 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
2078 description_with_path = '%s\n %s' % (path, rule_description)
2079 if rule_type == Rule.DISALLOW:
2080 error_descriptions.append(description_with_path)
2081 error_subjects.add("imports")
2083 warning_descriptions.append(description_with_path)
2084 warning_subjects.add("imports")
2087 if error_descriptions:
2089 output_api.PresubmitError(
2090 'You added one or more %s that violate checkdeps rules.' %
2091 " and ".join(error_subjects), error_descriptions))
2092 if warning_descriptions:
2094 output_api.PresubmitPromptOrNotify(
2095 'You added one or more %s of files that are temporarily\n'
2096 'allowed but being removed. Can you avoid introducing the\n'
2097 '%s? See relevant DEPS file(s) for details and contacts.' %
2098 (" and ".join(warning_subjects), "/".join(warning_subjects)),
2099 warning_descriptions))
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (presumably the win32 early return, the 'checkperms.py' argument, `args =
# [`, a comment tail, `file_list.close()`, the `try:` and return tails).
2103 def CheckFilePermissions(input_api, output_api):
2104 """Check that all files have their permissions properly set."""
2105 if input_api.platform == 'win32':
2107 checkperms_tool = input_api.os_path.join(input_api.PresubmitLocalPath(),
2108 'tools', 'checkperms',
2111 input_api.python3_executable, checkperms_tool, '--root',
2112 input_api.change.RepositoryRoot()
# Affected paths are handed to checkperms.py via a temp file rather than
# argv, so arbitrarily many files can be checked.
2114 with input_api.CreateTemporaryFile() as file_list:
2115 for f in input_api.AffectedFiles():
2116 # checkperms.py file/directory arguments must be relative to the
2118 file_list.write((f.LocalPath() + '\n').encode('utf8'))
2120 args += ['--file-list', file_list.name]
2122 input_api.subprocess.check_output(args)
2124 except input_api.subprocess.CalledProcessError as error:
2126 output_api.PresubmitError('checkperms.py failed:',
2127 long_text=error.output.decode(
# NOTE(review): gaps in the line-number gutter indicate elided source lines
# (docstring closer, `errors = []`, a `continue`, and the if/return tail).
2132 def CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
2133 """Makes sure we don't include ui/aura/window_property.h
2136 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
2138 for f in input_api.AffectedFiles():
2139 if not f.LocalPath().endswith('.h'):
2141 for line_num, line in f.ChangedContents():
2142 if pattern.match(line):
2143 errors.append(' %s:%d' % (f.LocalPath(), line_num))
2148 output_api.PresubmitError(
2149 'Header files should not include ui/aura/window_property.h',
def CheckNoInternalHeapIncludes(input_api, output_api):
    """Makes sure we don't include any headers from
    third_party/blink/renderer/platform/heap/impl or
    third_party/blink/renderer/platform/heap/v8_wrapper from files outside of
    third_party/blink/renderer/platform/heap
    """
    # Matches #include directives reaching into the private impl/ directory.
    impl_pattern = input_api.re.compile(
        r'^\s*#include\s*"third_party/blink/renderer/platform/heap/impl/.*"')
    v8_wrapper_pattern = input_api.re.compile(
        r'^\s*#include\s*"third_party/blink/renderer/platform/heap/v8_wrapper/.*"'
    # Consistently use / as path separator to simplify the writing of regex
    # expressions; files inside platform/heap itself are exempt.
    file_filter = lambda f: not input_api.re.match(
        r"^third_party/blink/renderer/platform/heap/.*",
        f.LocalPath().replace(input_api.os_path.sep, '/'))
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            if impl_pattern.match(line) or v8_wrapper_pattern.match(line):
                errors.append(' %s:%d' % (f.LocalPath(), line_num))
    output_api.PresubmitError(
        'Do not include files from third_party/blink/renderer/platform/heap/impl'
        ' or third_party/blink/renderer/platform/heap/v8_wrapper. Use the '
        'relevant counterparts from third_party/blink/renderer/platform/heap',
2188 def _CheckForVersionControlConflictsInFile(input_api, f):
2189 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
2191 for line_num, line in f.ChangedContents():
2192 if f.LocalPath().endswith(('.md', '.rst', '.txt')):
2193 # First-level headers in markdown look a lot like version control
2194 # conflict markers. http://daringfireball.net/projects/markdown/basics
2196 if pattern.match(line):
2197 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
def CheckForVersionControlConflicts(input_api, output_api):
    """Usually this is not intentional and will cause a compile failure."""
    # Aggregate per-file findings from the helper over all affected files.
    for f in input_api.AffectedFiles():
        errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
    output_api.PresubmitError(
        'Version control conflict markers found, please resolve.',
def CheckGoogleSupportAnswerUrlOnUpload(input_api, output_api):
    """Warns when a support.google.com '/answer' URL is added; the stable
    'p=' identifier form should be used instead (crbug.com/679462)."""
    pattern = input_api.re.compile('support\.google\.com\/chrome.*/answer')
    for f in input_api.AffectedFiles():
        for line_num, line in f.ChangedContents():
            if pattern.search(line):
                errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
    output_api.PresubmitPromptWarning(
        'Found Google support URL addressed by answer number. Please replace '
        'with a p= identifier instead. See crbug.com/679462\n',
def CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
    """Warns when non-test code in low-level layers hardcodes Google hosts."""

    def FilterFile(affected_file):
        """Filter function for use with input_api.AffectedSourceFiles,
        below. This filters out everything except non-test files from
        top-level directories that generally speaking should not hard-code
        service URLs (e.g. src/android_webview/, src/content/ and others).
        """
        return input_api.FilterSourceFile(
            files_to_check=[r'^(android_webview|base|content|net)/.*'],
            files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                           input_api.DEFAULT_FILES_TO_SKIP))

    # Matches quoted strings embedding a Google-owned hostname.
    base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
                    '\.(com|net)[^"]*"')
    # URLs appearing only in // comments are tolerated; skip those lines.
    comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
    pattern = input_api.re.compile(base_pattern)
    problems = []  # items are (filename, line_number, line)
    for f in input_api.AffectedSourceFiles(FilterFile):
        for line_num, line in f.ChangedContents():
            if not comment_pattern.search(line) and pattern.search(line):
                problems.append((f.LocalPath(), line_num, line))
    output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?', [
            ' %s:%d: %s' % (problem[0], problem[1], problem[2])
            for problem in problems
def CheckChromeOsSyncedPrefRegistration(input_api, output_api):
    """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""

    def FileFilter(affected_file):
        """Includes directories known to be Chrome OS only."""
        return input_api.FilterSourceFile(
            affected_file,
            files_to_check=(
                # TODO(review): directory list partially restored from
                # upstream; confirm it matches the intended set.
                '^ash/',
                '^chromeos/',  # Top-level src/chromeos.
                '.*/chromeos/',  # Any path component.
            ),
            files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))

    prefs = []
    priority_prefs = []
    for f in input_api.AffectedFiles(file_filter=FileFilter):
        for line_num, line in f.ChangedContents():
            if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF',
                                   line):
                prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
                prefs.append(' %s' % line)
            if input_api.re.search(
                    'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
                priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
                priority_prefs.append(' %s' % line)

    results = []
    if prefs:
        results.append(
            output_api.PresubmitPromptWarning(
                'Preferences were registered as SYNCABLE_PREF and will be controlled '
                'by browser sync settings. If these prefs should be controlled by OS '
                'sync settings use SYNCABLE_OS_PREF instead.\n' +
                '\n'.join(prefs)))
    if priority_prefs:
        results.append(
            output_api.PresubmitPromptWarning(
                'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
                'controlled by browser sync settings. If these prefs should be '
                'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
                # Bug fix: this warning previously joined |prefs|, so it
                # listed the wrong (or no) preferences for the priority case.
                'instead.\n' + '\n'.join(priority_prefs)))
    return results
# TODO: add unit tests.
def CheckNoAbbreviationInPngFileName(input_api, output_api):
    """Makes sure there are no abbreviations in the name of PNG files.
    The native_client_sdk directory is excluded because it has auto-generated PNG
    files for documentation.
    """
    # Flags names containing a single-letter word segment, e.g. foo_h.png.
    files_to_check = [r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$']
    files_to_skip = [r'^native_client_sdk/',
                     r'^third_party/blink/web_tests/',
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check, files_to_skip=files_to_skip)
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append(' %s' % f.LocalPath())
    output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
def CheckNoProductIconsAddedToPublicRepo(input_api, output_api):
    """Heuristically identifies product icons based on their file name and reminds
    contributors not to add them to the Chromium repository.
    """
    # Any image whose name mentions 'google' is treated as a product icon.
    files_to_check = [r'.*google.*\.png$|.*google.*\.svg$|.*google.*\.icon$']
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=files_to_check)
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append(' %s' % f.LocalPath())
    # Give warnings instead of errors on presubmit --all and presubmit
    # --files runs (detected via input_api.no_diffs).
    message_type = (output_api.PresubmitNotifyResult if input_api.no_diffs
                    else output_api.PresubmitError)
        'Trademarked images should not be added to the public repo. '
        'See crbug.com/944754', errors))
2368 def _ExtractAddRulesFromParsedDeps(parsed_deps):
2369 """Extract the rules that add dependencies from a parsed DEPS file.
2372 parsed_deps: the locals dictionary from evaluating the DEPS file."""
2375 rule[1:] for rule in parsed_deps.get('include_rules', [])
2376 if rule.startswith('+') or rule.startswith('!')
2378 for _, rules in parsed_deps.get('specific_include_rules', {}).items():
2380 rule[1:] for rule in rules
2381 if rule.startswith('+') or rule.startswith('!')
2386 def _ParseDeps(contents):
2387 """Simple helper for parsing DEPS files."""
2389 # Stubs for handling special syntax in the root DEPS file.
2391 def __init__(self, local_scope):
2392 self._local_scope = local_scope
2394 def Lookup(self, var_name):
2395 """Implements the Var syntax."""
2397 return self._local_scope['vars'][var_name]
2399 raise Exception('Var is not defined: %s' % var_name)
2403 'Var': _VarImpl(local_scope).Lookup,
2407 exec(contents, global_scope, local_scope)
def _CalculateAddedDeps(os_path, old_contents, new_contents):
    """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
    a set of DEPS entries that we should look up.

    For a directory (rather than a specific filename) we fake a path to
    a specific filename by adding /DEPS. This is chosen as a file that
    will seldom or never be subject to per-file include_rules.
    """
    # We ignore deps entries on auto-generated directories.
    AUTO_GENERATED_DIRS = ('grit', 'jni')

    before = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
    after = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

    results = set()
    for added_dep in after - before:
        if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
            continue
        # Assume that a rule that ends in .h is a rule for a specific file;
        # anything else names a directory, mapped to its DEPS file.
        if added_dep.endswith('.h'):
            results.add(added_dep)
        else:
            results.add(os_path.join(added_dep, 'DEPS'))
    return results
def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
    """When a dependency prefixed with + is added to a DEPS file, we
    want to make sure that the change is reviewed by an OWNER of the
    target file or directory, to avoid layering violations from being
    introduced. This check verifies that this happens.
    """
    # We rely on Gerrit's code-owners to check approvals.
    # input_api.gerrit is always set for Chromium, but other projects
    # might not use Gerrit.
    if not input_api.gerrit or input_api.no_diffs:
    # Allows skipping the Gerrit round-trip, e.g. in offline runs.
    if 'PRESUBMIT_SKIP_NETWORK' in input_api.environ:
    if (input_api.change.issue and
            input_api.gerrit.IsOwnersOverrideApproved(
                input_api.change.issue)):
        # Skip OWNERS check when Owners-Override label is approved. This is
        # intended for global owners, trusted bots, and on-call sheriffs.
        # Review is still required for these changes.
    except Exception as e:
        return [output_api.PresubmitPromptWarning(
            'Failed to retrieve owner override status - %s' % str(e))]
    virtual_depended_on_files = set()
    # Consistently use / as path separator to simplify the writing of regex
    # expressions. blink is handled by its own presubmit.
    file_filter = lambda f: not input_api.re.match(
        r"^third_party/blink/.*",
        f.LocalPath().replace(input_api.os_path.sep, '/'))
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        filename = input_api.os_path.basename(f.LocalPath())
        if filename == 'DEPS':
            # Diff old vs. new parsed DEPS to find newly-added '+' rules.
            virtual_depended_on_files.update(
                _CalculateAddedDeps(input_api.os_path,
                                    '\n'.join(f.OldContents()),
                                    '\n'.join(f.NewContents())))
    if not virtual_depended_on_files:
    if input_api.is_committing:
        output_api.PresubmitNotifyResult(
            '--tbr was specified, skipping OWNERS check for DEPS additions'
    # TODO(dcheng): Make this generate an error on dry runs if the reviewer
    # is not added, to prevent review serialization.
    if input_api.dry_run:
        output_api.PresubmitNotifyResult(
            'This is a dry run, skipping OWNERS check for DEPS additions'
    if not input_api.change.issue:
        output_api.PresubmitError(
            "DEPS approval by OWNERS check failed: this change has "
            "no change number, so we can't check it for approvals.")
    output = output_api.PresubmitError
    output = output_api.PresubmitNotifyResult
    owner_email, reviewers = (
        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
            input_api, None, approval_needed=input_api.is_committing))
    # Fall back to the uploader when the change has no formal owner yet.
    owner_email = owner_email or input_api.change.author_email
    approval_status = input_api.owners_client.GetFilesApprovalStatus(
        virtual_depended_on_files, reviewers.union([owner_email]), [])
        f for f in virtual_depended_on_files
        if approval_status[f] != input_api.owners_client.APPROVED
    # We strip the /DEPS part that was added by
    # _CalculateAddedDeps to fake a path to a file in a
    # directory.
    def StripDeps(path):
        start_deps = path.rfind('/DEPS')
        if start_deps != -1:
            return path[:start_deps]
    unapproved_dependencies = [
        "'+%s'," % StripDeps(path) for path in missing_files
    if unapproved_dependencies:
        'You need LGTM from owners of depends-on paths in DEPS that were '
        'modified in this CL:\n %s' %
        '\n '.join(sorted(unapproved_dependencies)))
    suggested_owners = input_api.owners_client.SuggestOwners(
        missing_files, exclude=[owner_email])
    output('Suggested missing target path OWNERS:\n %s' %
           '\n '.join(suggested_owners or [])))
# TODO: add unit tests.
def CheckSpamLogging(input_api, output_api):
    """Flags changed implementation files that write to the console log
    (LOG(INFO)/LOG_IF(INFO) and printf/fprintf to stdout/stderr), outside
    of the allowlisted tools and binaries below."""
    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
        _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
        input_api.DEFAULT_FILES_TO_SKIP + (
            r"^base/logging\.h$",
            r"^base/logging\.cc$",
            r"^base/task/thread_pool/task_tracker\.cc$",
            r"^chrome/app/chrome_main_delegate\.cc$",
            r"^chrome/browser/chrome_browser_main\.cc$",
            r"^chrome/browser/ui/startup/startup_browser_creator\.cc$",
            r"^chrome/browser/browser_switcher/bho/.*",
            r"^chrome/browser/diagnostics/diagnostics_writer\.cc$",
            r"^chrome/chrome_cleaner/.*",
            r"^chrome/chrome_elf/dll_hash/dll_hash_main\.cc$",
            r"^chrome/installer/setup/.*",
            r"^components/browser_watcher/dump_stability_report_main_win\.cc$",
            r"^components/media_control/renderer/media_playback_options\.cc$",
            r"^components/viz/service/display/"
            r"overlay_strategy_underlay_cast\.cc$",
            r"^components/zucchini/.*",
            # TODO(peter): Remove exception. https://crbug.com/534537
            r"^content/browser/notifications/"
            r"notification_event_dispatcher_impl\.cc$",
            r"^content/common/gpu/client/gl_helper_benchmark\.cc$",
            r"^courgette/courgette_minimal_tool\.cc$",
            r"^courgette/courgette_tool\.cc$",
            r"^extensions/renderer/logging_native_handler\.cc$",
            r"^fuchsia_web/common/init_logging\.cc$",
            r"^fuchsia_web/runners/common/web_component\.cc$",
            r"^fuchsia_web/shell/.*_shell\.cc$",
            r"^headless/app/headless_shell\.cc$",
            r"^ipc/ipc_logging\.cc$",
            r"^native_client_sdk/",
            r"^remoting/base/logging\.h$",
            r"^remoting/host/.*",
            r"^sandbox/linux/.*",
            r"^storage/browser/file_system/dump_file_system\.cc$",
            r"^ui/base/resource/data_pack\.cc$",
            r"^ui/aura/bench/bench_main\.cc$",
            r"^ui/ozone/platform/cast/",
            r"^ui/base/x/xwmstartupcheck/"
            r"xwmstartupcheck\.cc$"))
    source_file_filter = lambda x: input_api.FilterSourceFile(
        x, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
    for f in input_api.AffectedSourceFiles(source_file_filter):
        for _, line in f.ChangedContents():
            # Collect LOG(INFO)/DLOG(INFO) and LOG_IF(INFO)/DLOG_IF(INFO).
            if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
                log_info.add(f.LocalPath())
            elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
                log_info.add(f.LocalPath())
            # Collect direct printf/fprintf-to-console writes.
            if input_api.re.search(r"\bprintf\(", line):
                printf.add(f.LocalPath())
            elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
                printf.add(f.LocalPath())
    output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
    output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
def CheckForAnonymousVariables(input_api, output_api):
    """These types are all expected to hold locks while in scope and
    so should never be anonymous (which causes them to be immediately
    destroyed).
    """
    # Types that must always be bound to a named variable.
    they_who_must_be_named = [
        'SkAutoAlphaRestore',
        'SkAutoBitmapShaderInstall',
        'SkAutoBlitterChoose',
        'SkAutoBounderCommit',
        'SkAutoCanvasRestore',
        'SkAutoCommentBlock',
        'SkAutoDisableDirectionCheck',
        'SkAutoDisableOvalCheck',
        'SkAutoMaskFreeImage',
        'SkAutoMutexAcquire',
        'SkAutoPathBoundsUpdate',
        'SkAutoRasterClipValidate',
    # Matches e.g. 'SkAutoMutexAcquire(' or '{' with no variable name.
    anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
    # bad: base::AutoLock(lock.get());
    # not bad: base::AutoLock lock(lock.get());
    bad_pattern = input_api.re.compile(anonymous)
    # good: new base::AutoLock(lock.get())
    good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
    for f in input_api.AffectedFiles():
        if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
        for linenum, line in f.ChangedContents():
            if bad_pattern.search(line) and not good_pattern.search(line):
                errors.append('%s:%d' % (f.LocalPath(), linenum))
    output_api.PresubmitError(
        'These lines create anonymous variables that need to be named:',
def CheckUniquePtrOnUpload(input_api, output_api):
    """Flags std::unique_ptr<T>(...) that should be std::make_unique<T>() and
    std::unique_ptr<T>() that should be nullptr."""
    # Returns whether |template_str| is of the form <T, U...> for some types T
    # and U. Assumes that |template_str| is already in the form <...>.
    def HasMoreThanOneArg(template_str):
        # Level of <...> nesting.
        for c in template_str:
            elif c == ',' and nesting == 1:

    file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=file_inclusion_pattern)
    # Pattern to capture a single "<...>" block of template arguments. It can
    # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
    # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
    # latter would likely require counting that < and > match, which is not
    # expressible in regular languages. Should the need arise, one can introduce
    # limited counting (matching up to a total number of nesting depth), which
    # should cover all practical cases for already a low nesting limit.
    template_arg_pattern = (
        r'<[^>]*'  # Opening block of <.
        r'>([^<]*>)?')  # Closing block of >.
    # Prefix expressing that whatever follows is not already inside a <...>
    # template argument block.
    not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
    null_construct_pattern = input_api.re.compile(
        not_inside_template_arg_pattern + r'\bstd::unique_ptr' +
        template_arg_pattern + r'\(\)')
    # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
    template_arg_no_array_pattern = (
        r'<[^>]*[^]]'  # Opening block of <.
        r'>([^(<]*[^]]>)?')  # Closing block of >.
    # Prefix saying that what follows is the start of an expression.
    start_of_expr_pattern = r'(=|\breturn|^)\s*'
    # Suffix saying that what follows are call parentheses with a non-empty list
    # of arguments.
    nonempty_arg_list_pattern = r'\(([^)]|$)'
    # Put the template argument into a capture group for deeper examination later.
    return_construct_pattern = input_api.re.compile(
        start_of_expr_pattern + r'std::unique_ptr' + '(?P<template_arg>' +
        template_arg_no_array_pattern + ')' + nonempty_arg_list_pattern)
    problems_constructor = []
    problems_nullptr = []
    for f in input_api.AffectedSourceFiles(sources):
        for line_number, line in f.ChangedContents():
            # Disallow:
            # return std::unique_ptr<T>(foo);
            # bar = std::unique_ptr<T>(foo);
            # But allow:
            # return std::unique_ptr<T[]>(foo);
            # bar = std::unique_ptr<T[]>(foo);
            # And also allow cases when the second template argument is present. Those
            # cases cannot be handled by std::make_unique:
            # return std::unique_ptr<T, U>(foo);
            # bar = std::unique_ptr<T, U>(foo);
            local_path = f.LocalPath()
            return_construct_result = return_construct_pattern.search(line)
            if return_construct_result and not HasMoreThanOneArg(
                    return_construct_result.group('template_arg')):
                problems_constructor.append(
                    '%s:%d\n %s' % (local_path, line_number, line.strip()))
            # Disallow:
            # std::unique_ptr<T>()
            if null_construct_pattern.search(line):
                problems_nullptr.append(
                    '%s:%d\n %s' % (local_path, line_number, line.strip()))
    if problems_nullptr:
        output_api.PresubmitPromptWarning(
            'The following files use std::unique_ptr<T>(). Use nullptr instead.',
    if problems_constructor:
        output_api.PresubmitError(
            'The following files use explicit std::unique_ptr constructor. '
            'Use std::make_unique<T>() instead, or use base::WrapUnique if '
            'std::make_unique is not an option.', problems_constructor))
def CheckUserActionUpdate(input_api, output_api):
    """Checks if any new user action has been added."""
    if any('actions.xml' == input_api.os_path.basename(f)
           for f in input_api.LocalPaths()):
        # If actions.xml is already included in the changelist, the PRESUBMIT
        # for actions.xml will do a more complete presubmit check.
    file_inclusion_pattern = [r'.*\.(cc|mm)$']
    files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP)
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
    # Matches UserMetricsAction("...") and captures the action name.
    action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
    current_actions = None
    for f in input_api.AffectedFiles(file_filter=file_filter):
        for line_num, line in f.ChangedContents():
            match = input_api.re.search(action_re, line)
                # Loads contents in tools/metrics/actions/actions.xml to memory. It's
                # loaded only once.
                if not current_actions:
                        'tools/metrics/actions/actions.xml') as actions_f:
                    current_actions = actions_f.read()
                # Search for the matched user action name in |current_actions|.
                for action_name in match.groups():
                    action = 'name="{0}"'.format(action_name)
                    if action not in current_actions:
                        output_api.PresubmitPromptWarning(
                            'File %s line %d: %s is missing in '
                            'tools/metrics/actions/actions.xml. Please run '
                            'tools/metrics/actions/extract_actions.py to update.'
                            % (f.LocalPath(), line_num, action_name))
def _ImportJSONCommentEater(input_api):
    """Imports and returns the json_comment_eater module from tools/."""
    # Extend sys.path so the in-tree helper module is importable.
    sys.path = sys.path + [
        input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                               'json_comment_eater')
    import json_comment_eater
    return json_comment_eater
def _GetJSONParseError(input_api, filename, eat_comments=True):
    """Parses |filename| as JSON; returns the ValueError on failure."""
    contents = input_api.ReadFile(filename)
        # Strip // comments first; most Chromium JSON permits them.
        json_comment_eater = _ImportJSONCommentEater(input_api)
        contents = json_comment_eater.Nom(contents)
    # Parse purely for validation; the decoded value is discarded.
    input_api.json.loads(contents)
    except ValueError as e:
def _GetIDLParseError(input_api, filename):
    """Runs the IDL schema compiler on |filename|; returns an error string
    or None."""
    contents = input_api.ReadFile(filename)
    # The IDL toolchain only accepts ASCII; report the first offender.
    for i, char in enumerate(contents):
        if not char.isascii():
            'Non-ascii character "%s" (ord %d) found at offset %d.' %
            (char, ord(char), i))
    idl_schema = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                        'tools', 'json_schema_compiler',
    # Feed the file via stdin and treat any stderr output as the error.
    process = input_api.subprocess.Popen(
        [input_api.python3_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
    except ValueError as e:
def CheckParseErrors(input_api, output_api):
    """Check that IDL and JSON files do not contain syntax errors."""
    # Maps file extension to its validator function.
        '.idl': _GetIDLParseError,
        '.json': _GetJSONParseError,
    # Most JSON files are preprocessed and support comments, but these do not.
    json_no_comments_patterns = [
    # Only run IDL checker on files in these directories.
    idl_included_patterns = [
        r'^chrome/common/extensions/api/',
        r'^extensions/common/api/',

    def get_action(affected_file):
        # Picks the validator based on the file's extension, if any.
        filename = affected_file.LocalPath()
        return actions.get(input_api.os_path.splitext(filename)[1])

    def FilterFile(affected_file):
        action = get_action(affected_file)
        path = affected_file.LocalPath()
        # Known test-data / intentionally-invalid JSON files are exempt.
        if _MatchesFile(input_api,
                        _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS, path):
        if (action == _GetIDLParseError
                and not _MatchesFile(input_api, idl_included_patterns, path)):

    for affected_file in input_api.AffectedFiles(file_filter=FilterFile,
                                                 include_deletes=False):
        action = get_action(affected_file)
        if (action == _GetJSONParseError
                and _MatchesFile(input_api, json_no_comments_patterns,
                                 affected_file.LocalPath())):
            kwargs['eat_comments'] = False
        parse_error = action(input_api, affected_file.AbsoluteLocalPath(),
        output_api.PresubmitError(
            '%s could not be parsed: %s' %
            (affected_file.LocalPath(), parse_error)))
def CheckJavaStyle(input_api, output_api):
    """Runs checkstyle on changed java files and returns errors if any exist."""
    # Return early if no java files were modified.
    _IsJavaFile(input_api, f.LocalPath())
    for f in input_api.AffectedFiles()):
    original_sys_path = sys.path
    # Make the in-tree checkstyle wrapper importable.
    sys.path = sys.path + [
        input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                               'android', 'checkstyle')
    # Restore sys.path to what it was before.
    sys.path = original_sys_path
    return checkstyle.RunCheckstyle(
        'tools/android/checkstyle/chromium-style-5.0.xml',
        files_to_skip=_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP)
def CheckPythonDevilInit(input_api, output_api):
    """Checks to make sure devil is initialized correctly in python scripts."""
    script_common_initialize_pattern = input_api.re.compile(
        r'script_common\.InitializeEnvironment\(')
    devil_env_config_initialize = input_api.re.compile(
        r'devil_env\.config\.Initialize\(')
    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP + (
            r'^build/android/devil_chromium\.py',
        files_to_check=[r'.*\.py$'])
    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            # Either initializer bypasses devil_chromium's curated defaults.
            if (script_common_initialize_pattern.search(line)
                    or devil_env_config_initialize.search(line)):
                errors.append("%s:%d" % (f.LocalPath(), line_num))
    output_api.PresubmitError(
        'Devil initialization should always be done using '
        'devil_chromium.Initialize() in the chromium project, to use better '
        'defaults for dependencies (ex. up-to-date version of adb).',
2987 def _MatchesFile(input_api, patterns, path):
2988 # Consistently use / as path separator to simplify the writing of regex
2990 path = path.replace(input_api.os_path.sep, '/')
2991 for pattern in patterns:
2992 if input_api.re.search(pattern, path):
def _ChangeHasSecurityReviewer(input_api, owners_file):
    """Returns True iff the CL has a reviewer from SECURITY_OWNERS.

    Args:
      input_api: The presubmit input API.
      owners_file: OWNERS file with required reviewers. Typically, this is
        something like ipc/SECURITY_OWNERS.

    Note: if the presubmit is running for commit rather than for upload, this
    only returns True if a security reviewer has also approved the CL.
    """
    # Owners-Override should bypass all additional OWNERS enforcement checks.
    # A CR+1 vote will still be required to land this change.
    if (input_api.change.issue and input_api.gerrit.IsOwnersOverrideApproved(
            input_api.change.issue)):
    owner_email, reviewers = (
        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
            # On commit (and not dry-run), only approved reviewers count.
            approval_needed=input_api.is_committing and not input_api.dry_run))
    security_owners = input_api.owners_client.ListOwners(owners_file)
    return any(owner in reviewers for owner in security_owners)
3025 class _SecurityProblemWithItems:
3027 items: Sequence[str]
3031 class _MissingSecurityOwnersResult:
3032 owners_file_problems: Sequence[_SecurityProblemWithItems]
3033 has_security_sensitive_files: bool
3034 missing_reviewer_problem: Optional[_SecurityProblemWithItems]
def _FindMissingSecurityOwners(input_api,
                               file_patterns: Sequence[str],
                               excluded_patterns: Sequence[str],
                               required_owners_file: str,
                               custom_rule_function: Optional[Callable] = None
                               ) -> _MissingSecurityOwnersResult:
    """Find OWNERS files missing per-file rules for security-sensitive files.

    Args:
      input_api: the PRESUBMIT input API object.
      output_api: the PRESUBMIT output API object.
      file_patterns: basename patterns that require a corresponding per-file
        security restriction.
      excluded_patterns: path patterns that should be exempted from
        requiring a security restriction.
      required_owners_file: path to the required OWNERS file, e.g.
        ipc/SECURITY_OWNERS.
      cc_alias: If not None, email that will be CCed automatically if the
        change contains security-sensitive files, as determined by
        `file_patterns` and `excluded_patterns`.
      custom_rule_function: If not None, will be called with `input_api` and
        the current file under consideration. Returning True will add an
        exact match per-file rule check for the current file.
    """
    # `to_check` is a mapping of an OWNERS file path to Patterns.
    #
    # Patterns is a dictionary mapping glob patterns (suitable for use in
    # per-file rules) to a PatternEntry.
    #
    # PatternEntry is a dictionary with two keys:
    # - 'files': the files that are matched by this pattern
    # - 'rules': the per-file rules needed for this pattern
    #
    # For example, if we expect OWNERS file to contain rules for *.mojom and
    # *_struct_traits*.*, Patterns might look like this:
    # {
    #   '*.mojom': {
    #     'rules': [
    #       'per-file *.mojom=set noparent',
    #       'per-file *.mojom=file://ipc/SECURITY_OWNERS',
    #     ],
    #   },
    #   '*_struct_traits*.*': {
    #     'rules': [
    #       'per-file *_struct_traits*.*=set noparent',
    #       'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
    #     ],
    #   },
    # }
    files_to_review = []

    def AddPatternToCheck(file, pattern):
        # Registers |pattern| against the OWNERS file next to |file|.
        owners_file = input_api.os_path.join(
            input_api.os_path.dirname(file.LocalPath()), 'OWNERS')
        if owners_file not in to_check:
            to_check[owners_file] = {}
        if pattern not in to_check[owners_file]:
            to_check[owners_file][pattern] = {
                    f'per-file {pattern}=set noparent',
                    f'per-file {pattern}=file://{required_owners_file}',
        to_check[owners_file][pattern]['files'].append(file.LocalPath())
        files_to_review.append(file.LocalPath())

    # Only enforce security OWNERS rules for a directory if that directory has a
    # file that matches `file_patterns`. For example, if a directory only
    # contains *.mojom files and no *_messages*.h files, the check should only
    # ensure that rules for *.mojom files are present.
    for file in input_api.AffectedFiles(include_deletes=False):
        file_basename = input_api.os_path.basename(file.LocalPath())
        if custom_rule_function is not None and custom_rule_function(
            AddPatternToCheck(file, file_basename)
            input_api.fnmatch.fnmatch(file.LocalPath(), pattern)
            for pattern in excluded_patterns):
        for pattern in file_patterns:
            # Unlike `excluded_patterns`, `file_patterns` is checked only against the
            # file basename.
            if input_api.fnmatch.fnmatch(file_basename, pattern):
                AddPatternToCheck(file, pattern)

    has_security_sensitive_files = bool(to_check)

    # Check if any newly added lines in OWNERS files intersect with required
    # per-file OWNERS lines. If so, ensure that a security reviewer is included.
    # This is a hack, but is needed because the OWNERS check (by design) ignores
    # new OWNERS entries; otherwise, a non-owner could add someone as a new
    # OWNER and have that newly-added OWNER self-approve their own addition.
    newly_covered_files = []
    for file in input_api.AffectedFiles(include_deletes=False):
        if not file.LocalPath() in to_check:
        for _, line in file.ChangedContents():
            for _, entry in to_check[file.LocalPath()].items():
                if line in entry['rules']:
                    newly_covered_files.extend(entry['files'])

    missing_reviewer_problems = None
    if newly_covered_files and not _ChangeHasSecurityReviewer(
            input_api, required_owners_file):
        missing_reviewer_problems = _SecurityProblemWithItems(
            f'Review from an owner in {required_owners_file} is required for '
            'the following newly-added files:',
            [f'{file}' for file in sorted(set(newly_covered_files))])

    # Go through the OWNERS files to check, filtering out rules that are already
    # present in that OWNERS file.
    for owners_file, patterns in to_check.items():
            input_api.os_path.join(input_api.change.RepositoryRoot(),
                                   owners_file)).splitlines())
        for entry in patterns.values():
                rule for rule in entry['rules'] if rule not in lines
            # No OWNERS file, so all the rules are definitely missing.

    # All the remaining lines weren't found in OWNERS files, so emit an error.
    owners_file_problems = []
    for owners_file, patterns in to_check.items():
        for _, entry in patterns.items():
            files.extend(entry['files'])
            missing_lines.extend(entry['rules'])
        joined_missing_lines = '\n'.join(line for line in missing_lines)
        owners_file_problems.append(
            _SecurityProblemWithItems(
                'Found missing OWNERS lines for security-sensitive files. '
                f'Please add the following lines to {owners_file}:\n'
                f'{joined_missing_lines}\n\nTo ensure security review for:',
    return _MissingSecurityOwnersResult(owners_file_problems,
                                        has_security_sensitive_files,
                                        missing_reviewer_problems)
def _CheckChangeForIpcSecurityOwners(input_api, output_api):
    """Requires ipc/SECURITY_OWNERS review rules for IPC-related files.

    Delegates to _FindMissingSecurityOwners with IPC filename patterns and a
    content-based rule for Mojo service manifest files.
    """
    # Whether or not a file affects IPC is (mostly) determined by a simple list
    # of filename patterns.
        '*_param_traits*.*',
        '*_mojom_traits*.*',
        '*_type_converter*.*',
        # Android native IPC:
    excluded_patterns = [
        # These third_party directories do not contain IPCs, but contain files
        # matching the above patterns, which trigger false positives.
        'third_party/crashpad/*',
        'third_party/blink/renderer/platform/bindings/*',
        'third_party/protobuf/benchmarks/python/*',
        'third_party/win_build_output/*',
        # Enum-only mojoms used for web metrics, so no security review needed.
        'third_party/blink/public/mojom/use_counter/metrics/*',
        # These files are just used to communicate between class loaders running
        # in the same process.
        'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
        'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',

    # Manifest files are plain .cc/.h files, so they are matched by file
    # content rather than by filename pattern.
    def IsMojoServiceManifestFile(input_api, file):
        manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
        test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
        if not manifest_pattern.search(file.LocalPath()):
        if test_manifest_pattern.search(file.LocalPath()):
        # All actual service manifest files should contain at least one
        # qualified reference to service_manager::Manifest.
        return any('service_manager::Manifest' in line
                   for line in file.NewContents())

    return _FindMissingSecurityOwners(
        'ipc/SECURITY_OWNERS',
        custom_rule_function=IsMojoServiceManifestFile)
def _CheckChangeForFuchsiaSecurityOwners(input_api, output_api):
    """Requires build/fuchsia/SECURITY_OWNERS review rules for
    security-sensitive Fuchsia files (delegates to _FindMissingSecurityOwners).
    """
        # Component specifications.
        '*.cml',  # Component Framework v2.
        '*.cmx',  # Component Framework v1.
        # Fuchsia IDL protocol specifications.
    # Don't check for owners files for changes in these directories.
    excluded_patterns = [
        'third_party/crashpad/*',
    return _FindMissingSecurityOwners(input_api, output_api, file_patterns,
                                      'build/fuchsia/SECURITY_OWNERS')
def CheckSecurityOwners(input_api, output_api):
    """Checks that various security-sensitive files have an IPC OWNERS rule.

    Combines the results of the IPC and Fuchsia security-owner checks into a
    single list of presubmit messages.
    """
    ipc_results = _CheckChangeForIpcSecurityOwners(input_api, output_api)
    fuchsia_results = _CheckChangeForFuchsiaSecurityOwners(
        input_api, output_api)

    # CC the IPC security review list whenever the change touches
    # security-sensitive IPC files.
    if ipc_results.has_security_sensitive_files:
        output_api.AppendCC('ipc-security-reviews@chromium.org')

    # Aggregate "no security reviewer on this change" problems from both
    # checks.
    missing_reviewer_problems = []
    if ipc_results.missing_reviewer_problem:
        missing_reviewer_problems.append(ipc_results.missing_reviewer_problem)
    if fuchsia_results.missing_reviewer_problem:
        missing_reviewer_problems.append(
            fuchsia_results.missing_reviewer_problem)

    # Missing reviewers are an error unless there's no issue number
    # associated with this branch; in that case, the presubmit is being run
    # with --all or --files.
    # Note that upload should never be an error; otherwise, it would be
    # impossible to upload changes at all.
    if input_api.is_committing and input_api.change.issue:
        make_presubmit_message = output_api.PresubmitError
        make_presubmit_message = output_api.PresubmitNotifyResult
    for problem in missing_reviewer_problems:
            make_presubmit_message(problem.problem, items=problem.items))

    owners_file_problems = []
    owners_file_problems.extend(ipc_results.owners_file_problems)
    owners_file_problems.extend(fuchsia_results.owners_file_problems)

    for problem in owners_file_problems:
        # Missing per-file rules are always an error. While swarming and caching
        # means that uploading a patchset with updated OWNERS files and sending
        # it to the CQ again should not have a large incremental cost, it is
        # still frustrating to discover the error only after the change has
        # already been uploaded.
            output_api.PresubmitError(problem.problem, items=problem.items))
def _GetFilesUsingSecurityCriticalFunctions(input_api):
    """Checks affected files for changes to security-critical calls. This
    function checks the full change diff, to catch both additions/changes
    and removals of such calls.

    Returns a dict keyed by file name, and the value is a set of detected
    function pretty names (the keys of _PATTERNS_TO_CHECK).
    """
    # Map of function pretty name (displayed in an error) to the pattern to
    # search for in the diff.
    _PATTERNS_TO_CHECK = {
        'content::GetServiceSandboxType<>()': 'GetServiceSandboxType\\<'
    # Pre-compile each pattern once, outside the per-line loop below.
    _PATTERNS_TO_CHECK = {
        k: input_api.re.compile(v)
        for k, v in _PATTERNS_TO_CHECK.items()

    # We don't want to trigger on strings within this file.
    def presubmit_file_filter(f):
        return 'PRESUBMIT.py' != input_api.os_path.split(f.LocalPath())[1]

    # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
    files_to_functions = {}
    for f in input_api.AffectedFiles(file_filter=presubmit_file_filter):
        diff = f.GenerateScmDiff()
        for line in diff.split('\n'):
            # Not using just RightHandSideLines() because removing a
            # call to a security-critical function can be just as important
            # as adding or changing the arguments.
            # '+'-but-not-'++' excludes the '+++' diff file header.
            if line.startswith('-') or (line.startswith('+')
                                        and not line.startswith('++')):
                for name, pattern in _PATTERNS_TO_CHECK.items():
                    if pattern.search(line):
                        path = f.LocalPath()
                        if not path in files_to_functions:
                            files_to_functions[path] = set()
                        files_to_functions[path].add(name)
    return files_to_functions
def CheckSecurityChanges(input_api, output_api):
    """Checks that changes involving security-critical functions are reviewed
    by the security team.
    """
    files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
    # No security-critical calls touched: nothing to report.
    if not len(files_to_functions):

    owners_file = 'ipc/SECURITY_OWNERS'
    # A qualified reviewer is already on the change: nothing to report.
    if _ChangeHasSecurityReviewer(input_api, owners_file):

    msg = 'The following files change calls to security-sensitive functions\n' \
          'that need to be reviewed by {}.\n'.format(owners_file)
    for path, names in files_to_functions.items():
        msg += '  {}\n'.format(path)
            msg += '    {}\n'.format(name)

    # Hard error at commit time, informational at upload time.
    if input_api.is_committing:
        output = output_api.PresubmitError
        output = output_api.PresubmitNotifyResult
    return [output(msg)]
def CheckSetNoParent(input_api, output_api):
    """Checks that set noparent is only used together with an OWNERS file in
    //build/OWNERS.setnoparent (see also
    //docs/code_reviews.md#owners-files-details)
    """
    # Return early if no OWNERS files were modified.
    if not any(f.LocalPath().endswith('OWNERS')
               for f in input_api.AffectedFiles(include_deletes=False)):

    # Load the allowlist of OWNERS files that may accompany 'set noparent'.
    allowed_owners_files_file = 'build/OWNERS.setnoparent'
    allowed_owners_files = set()
    with open(allowed_owners_files_file, 'r') as f:
        if not line or line.startswith('#'):
        allowed_owners_files.add(line)

    per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')

    for f in input_api.AffectedFiles(include_deletes=False):
        if not f.LocalPath().endswith('OWNERS'):

        found_owners_files = set()
        found_set_noparent_lines = dict()

        # Parse the OWNERS file. The empty-string key '' tracks the
        # whole-file directives (as opposed to per-file globs).
        for lineno, line in enumerate(f.NewContents(), 1):
            if line.startswith('set noparent'):
                found_set_noparent_lines[''] = lineno
            if line.startswith('file://'):
                if line in allowed_owners_files:
                    found_owners_files.add('')
            if line.startswith('per-file'):
                match = per_file_pattern.match(line)
                    glob = match.group(1).strip()
                    directive = match.group(2).strip()
                    if directive == 'set noparent':
                        found_set_noparent_lines[glob] = lineno
                    if directive.startswith('file://'):
                        if directive in allowed_owners_files:
                            found_owners_files.add(glob)

        # Check that every set noparent line has a corresponding file:// line
        # listed in build/OWNERS.setnoparent. An exception is made for top level
        # directories since src/OWNERS shouldn't review them.
        linux_path = f.LocalPath().replace(input_api.os_path.sep, '/')
        if (linux_path.count('/') != 1
                and (not linux_path in _EXCLUDED_SET_NO_PARENT_PATHS)):
            for set_noparent_line in found_set_noparent_lines:
                if set_noparent_line in found_owners_files:
                errors.append('  %s:%d' %
                              found_set_noparent_lines[set_noparent_line]))

    # Hard error at commit time, warning at upload time.
    if input_api.is_committing:
        output = output_api.PresubmitError
        output = output_api.PresubmitPromptWarning
            'Found the following "set noparent" restrictions in OWNERS files that '
            'do not include owners from build/OWNERS.setnoparent:',
            long_text='\n\n'.join(errors)))
def CheckUselessForwardDeclarations(input_api, output_api):
    """Checks that added or removed lines in non third party affected
    header files do not lead to new useless class or struct forward
    declarations.
    """
    class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
                                         input_api.re.MULTILINE)
    struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
                                          input_api.re.MULTILINE)
    for f in input_api.AffectedFiles(include_deletes=False):
        # Skip third_party, except for blink (checked under either path
        # separator style).
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):

        if not f.LocalPath().endswith('.h'):

        contents = input_api.ReadFile(f)
        fwd_decls = input_api.re.findall(class_pattern, contents)
        fwd_decls.extend(input_api.re.findall(struct_pattern, contents))

        useless_fwd_decls = []
        for decl in fwd_decls:
            # Count occurrences of the declared name in the whole file; the
            # forward declaration itself accounts for one of the matches.
            count = sum(1 for _ in input_api.re.finditer(
                r'\b%s\b' % input_api.re.escape(decl), contents))
                useless_fwd_decls.append(decl)

        if not useless_fwd_decls:

        # Walk the diff: both removed ('-') and added ('+') lines can make a
        # forward declaration useless; '--'/'++' headers are excluded.
        for line in f.GenerateScmDiff().splitlines():
            if (line.startswith('-') and not line.startswith('--')
                    or line.startswith('+') and not line.startswith('++')):
                for decl in useless_fwd_decls:
                    if input_api.re.search(r'\b%s\b' % decl, line[1:]):
                        output_api.PresubmitPromptWarning(
                            '%s: %s forward declaration is no longer needed'
                            % (f.LocalPath(), decl)))
                        # Avoid reporting the same declaration twice.
                        useless_fwd_decls.remove(decl)
def _CheckAndroidDebuggableBuild(input_api, output_api):
    """Checks that code uses BuildInfo.isDebugAndroid() instead of
    Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
    this is a debuggable build of Android.
    """
    # Matches either 'Build.TYPE.equals(' or '.equals( Build.TYPE)'.
    build_type_check_pattern = input_api.re.compile(
        r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')

    # Restrict the check to Java sources, minus paths that cannot depend on
    # the Chromium helper classes.
    sources = lambda affected_file: input_api.FilterSourceFile(
        _EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
        DEFAULT_FILES_TO_SKIP + (
            r"^android_webview/support_library/boundary_interfaces/",
            r"^chrome/android/webapk/.*",
            r"tools/android/customtabs_benchmark/.*",
            r"webview/chromium/License.*",
        files_to_check=[r'.*\.java$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if build_type_check_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitPromptWarning(
            'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
            ' Please use BuildInfo.isDebugAndroid() instead.', errors))
# TODO: add unit tests
def _CheckAndroidToastUsage(input_api, output_api):
    """Checks that code uses org.chromium.ui.widget.Toast instead of
    android.widget.Toast (Chromium Toast doesn't force hardware
    acceleration on low-end devices, saving memory).
    """
    toast_import_pattern = input_api.re.compile(
        r'^import android\.widget\.Toast;$')

    # Only Java sources are checked; chromecast is exempt.
    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
                       DEFAULT_FILES_TO_SKIP + (r'^chromecast/.*',
        files_to_check=[r'.*\.java$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if toast_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitError(
            'android.widget.Toast usage is detected. Android toasts use hardware'
            ' acceleration, and can be\ncostly on low-end devices. Please use'
            ' org.chromium.ui.widget.Toast instead.\n'
            'Contact dskiba@chromium.org if you have any questions.',
def _CheckAndroidCrLogUsage(input_api, output_api):
    """Checks that new logs using org.chromium.base.Log:
      - Are using 'TAG' as variable name for the tags (warn)
      - Are using a tag that is shorter than 20 characters (error)
    """
    # Do not check format of logs in the given files
    cr_log_check_excluded_paths = [
        # //chrome/android/webapk cannot depend on //base
        r"^chrome/android/webapk/.*",
        # WebView license viewer code cannot depend on //base; used in stub APK.
        r"^android_webview/glue/java/src/com/android/"
        r"webview/chromium/License.*",
        # The customtabs_benchmark is a small app that does not depend on Chromium
        r"tools/android/customtabs_benchmark/.*",

    cr_log_import_pattern = input_api.re.compile(
        r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
    class_in_base_pattern = input_api.re.compile(
        r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
    has_some_log_import_pattern = input_api.re.compile(r'^import .*\.Log;$',
                                                       input_api.re.MULTILINE)
    # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
    log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
    log_decl_pattern = input_api.re.compile(
        r'static final String TAG = "(?P<name>(.*))"')
    # Loose match used only to notice that a TAG declaration was touched.
    rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')

    REF_MSG = ('See docs/android_logging.md for more info.')
    sources = lambda x: input_api.FilterSourceFile(
        files_to_check=[r'.*\.java$'],
        files_to_skip=cr_log_check_excluded_paths)

    # Accumulators for each category of problem, keyed by file or file:line.
    tag_decl_errors = []
    tag_length_errors = []
    tag_with_dot_errors = []
    util_log_errors = []

    for f in input_api.AffectedSourceFiles(sources):
        file_content = input_api.ReadFile(f)
        has_modified_logs = False
        # A file "uses cr log" if it imports org.chromium.base.Log, or lives
        # in org.chromium.base itself without importing some other Log class.
        if (cr_log_import_pattern.search(file_content)
                or (class_in_base_pattern.search(file_content)
                    and not has_some_log_import_pattern.search(file_content))):
            # Checks to run for files using cr log
            for line_num, line in f.ChangedContents():
                if rough_log_decl_pattern.search(line):
                    has_modified_logs = True

                # Check if the new line is doing some logging
                match = log_call_pattern.search(line)
                    has_modified_logs = True

                    # Make sure it uses "TAG"
                    if not match.group('tag') == 'TAG':
                        tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
            # Report non cr Log function calls in changed lines
            for line_num, line in f.ChangedContents():
                if log_call_pattern.search(line):
                    util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))

        if has_modified_logs:
            # Make sure the tag is using the "cr" prefix and is not too long
            match = log_decl_pattern.search(file_content)
            tag_name = match.group('name') if match else None
                tag_decl_errors.append(f.LocalPath())
            elif len(tag_name) > 20:
                tag_length_errors.append(f.LocalPath())
            elif '.' in tag_name:
                tag_with_dot_errors.append(f.LocalPath())

        output_api.PresubmitPromptWarning(
            'Please define your tags using the suggested format: .\n'
            '"private static final String TAG = "<package tag>".\n'
            'They will be prepended with "cr_" automatically.\n' + REF_MSG,

    if tag_length_errors:
            output_api.PresubmitError(
                'The tag length is restricted by the system to be at most '
                '20 characters.\n' + REF_MSG, tag_length_errors))

            output_api.PresubmitPromptWarning(
                'Please use a variable named "TAG" for your log tags.\n' +
                REF_MSG, tag_errors))

            output_api.PresubmitPromptWarning(
                'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,

    if tag_with_dot_errors:
            output_api.PresubmitPromptWarning(
                'Dot in log tags cause them to be elided in crash reports.\n' +
                REF_MSG, tag_with_dot_errors))
def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
    """Checks that junit.framework.* is no longer used."""
    deprecated_junit_framework_pattern = input_api.re.compile(
        r'^import junit\.framework\..*;', input_api.re.MULTILINE)
    # All Java sources, with no skip list.
    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*\.java$'], files_to_skip=None)
    for f in input_api.AffectedFiles(file_filter=sources):
        for line_num, line in f.ChangedContents():
            if deprecated_junit_framework_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitError(
            'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
            '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
            ' if you have any question.', errors))
def _CheckAndroidTestJUnitInheritance(input_api, output_api):
    """Checks that if new Java test classes have inheritance.
    Either the new test class is JUnit3 test or it is a JUnit4 test class
    with a base class, either case is undesirable.
    """
    class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')

    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*Test\.java$'], files_to_skip=None)
    for f in input_api.AffectedFiles(file_filter=sources):
        # Only brand-new files (no old contents) are examined.
        if not f.OldContents():
            class_declaration_start_flag = False
            for line_num, line in f.ChangedContents():
                if class_declaration_pattern.search(line):
                    class_declaration_start_flag = True
                # ' extends ' while still inside the class declaration means
                # the new test class inherits from a base class.
                if class_declaration_start_flag and ' extends ' in line:
                    errors.append('%s:%d' % (f.LocalPath(), line_num))
                    class_declaration_start_flag = False

        output_api.PresubmitPromptWarning(
            'The newly created files include Test classes that inherits from base'
            ' class. Please do not use inheritance in JUnit4 tests or add new'
            ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
            ' questions.', errors))
def _CheckAndroidTestAnnotationUsage(input_api, output_api):
    """Checks that android.test.suitebuilder.annotation.* is no longer used."""
    deprecated_annotation_import_pattern = input_api.re.compile(
        r'^import android\.test\.suitebuilder\.annotation\..*;',
        input_api.re.MULTILINE)
    # All Java sources, with no skip list.
    sources = lambda x: input_api.FilterSourceFile(
        x, files_to_check=[r'.*\.java$'], files_to_skip=None)
    for f in input_api.AffectedFiles(file_filter=sources):
        for line_num, line in f.ChangedContents():
            if deprecated_annotation_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitError(
            'Annotations in android.test.suitebuilder.annotation have been'
            ' deprecated since API level 24. Please use android.support.test.filters'
            ' from //third_party/android_support_test_runner:runner_java instead.'
            ' Contact yolandyan@chromium.org if you have any questions.',
def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
    """Checks if MDPI assets are placed in a correct directory."""
    # Match .png files under res/drawable/ or res/drawable-ldrtl/, using the
    # platform-specific path separator.
    file_filter = lambda f: (f.LocalPath().endswith(
        '.png') and ('/res/drawable/'.replace('/', input_api.os_path.sep) in f.
                     LocalPath() or '/res/drawable-ldrtl/'.replace(
                         '/', input_api.os_path.sep) in f.LocalPath()))
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=file_filter):
        errors.append('  %s' % f.LocalPath())

        output_api.PresubmitError(
            'MDPI assets should be placed in /res/drawable-mdpi/ or '
            '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
            '/res/drawable-ldrtl/.\n'
            'Contact newt@chromium.org if you have questions.', errors))
def _CheckAndroidWebkitImports(input_api, output_api):
    """Checks that code uses org.chromium.base.Callback instead of
    android.webview.ValueCallback except in the WebView glue layer
    (android_webview/glue is in the skip list below).
    """
    valuecallback_import_pattern = input_api.re.compile(
        r'^import android\.webkit\.ValueCallback;$')

    sources = lambda affected_file: input_api.FilterSourceFile(
        files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS + input_api.
                       DEFAULT_FILES_TO_SKIP + (
                           r'^android_webview/glue/.*',
        files_to_check=[r'.*\.java$'])

    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            if valuecallback_import_pattern.search(line):
                errors.append("%s:%d" % (f.LocalPath(), line_num))

        output_api.PresubmitError(
            'android.webkit.ValueCallback usage is detected outside of the glue'
            ' layer. To stay compatible with the support library, android.webkit.*'
            ' classes should only be used inside the glue layer and'
            ' org.chromium.base.Callback should be used instead.', errors))
def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
    """Checks Android XML styles by delegating to
    //tools/android/checkxmlstyle; is_check_on_upload selects the upload vs.
    commit entry point."""
    # Return early if no relevant files were modified.
        _IsXmlOrGrdFile(input_api, f.LocalPath())
        for f in input_api.AffectedFiles(include_deletes=False)):

    # Temporarily extend sys.path so the checker tool can be imported.
    original_sys_path = sys.path
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'checkxmlstyle')
        import checkxmlstyle
        # Restore sys.path to what it was before.
        sys.path = original_sys_path

    if is_check_on_upload:
        return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
        return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
def _CheckAndroidInfoBarDeprecation(input_api, output_api):
    """Checks Android Infobar Deprecation by delegating to
    //tools/android/infobar_deprecation."""
    # Temporarily extend sys.path so the checker tool can be imported.
    original_sys_path = sys.path
        sys.path = sys.path + [
            input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
                                   'android', 'infobar_deprecation')
        import infobar_deprecation
        # Restore sys.path to what it was before.
        sys.path = original_sys_path

    return infobar_deprecation.CheckDeprecationOnUpload(input_api, output_api)
class _PydepsCheckerResult:
    """Holds the in-flight result of one async .pydeps staleness check.

    Wraps the subprocess started by PydepsChecker.DetermineIfStaleAsync so
    the caller can collect errors after all checks were launched.
    """
    def __init__(self, cmd, pydeps_path, process, old_contents):
        self._pydeps_path = pydeps_path
        self._process = process
        self._old_contents = old_contents

        """Returns an error message, or None."""
        if self._process.wait() != 0:
            # STDERR should already be printed.
            return 'Command failed: ' + self._cmd
        # Skip the first two lines of the regenerated output before comparing
        # (they are not part of the dependency list being diffed).
        new_contents = self._process.stdout.read().splitlines()[2:]
        if self._old_contents != new_contents:
            difflib.context_diff(self._old_contents, new_contents))
            return ('File is stale: {}\n'
                    'Diff (apply to fix):\n'
                    'To regenerate, run:\n\n'
                    '    {}').format(self._pydeps_path, diff, self._cmd)
class PydepsChecker:
    """Determines which .pydeps files are affected by a change and launches
    staleness checks for them."""
    def __init__(self, input_api, pydeps_files):
        # Cache of path -> raw file contents, filled lazily by _LoadFile.
        self._file_cache = {}
        self._input_api = input_api
        self._pydeps_files = pydeps_files

    def _LoadFile(self, path):
        """Returns the list of paths within a .pydeps file relative to //."""
        if path not in self._file_cache:
            with open(path, encoding='utf-8') as f:
                self._file_cache[path] = f.read()
        return self._file_cache[path]

    def _ComputeNormalizedPydepsEntries(self, pydeps_path):
        """Returns an iterable of paths within the .pydep, relativized to //."""
        pydeps_data = self._LoadFile(pydeps_path)
        # '--gn-paths' in the recorded command means entries use //-style
        # paths; otherwise they are relative to the .pydeps file itself.
        uses_gn_paths = '--gn-paths' in pydeps_data
        entries = (l for l in pydeps_data.splitlines()
                   if not l.startswith('#'))
            # Paths look like: //foo/bar/baz
            return (e[2:] for e in entries)

            # Paths look like: path/relative/to/file.pydeps
            os_path = self._input_api.os_path
            pydeps_dir = os_path.dirname(pydeps_path)
            return (os_path.normpath(os_path.join(pydeps_dir, e))

    def _CreateFilesToPydepsMap(self):
        """Returns a map of local_path -> list_of_pydeps."""
        for pydep_local_path in self._pydeps_files:
            for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
                ret.setdefault(path, []).append(pydep_local_path)

    def ComputeAffectedPydeps(self):
        """Returns an iterable of .pydeps files that might need regenerating."""
        affected_pydeps = set()
        # Built lazily below, only if some .py file was touched.
        file_to_pydeps_map = None
        for f in self._input_api.AffectedFiles(include_deletes=True):
            local_path = f.LocalPath()
            # Changes to DEPS can lead to .pydeps changes if any .py files are in
            # subrepositories. We can't figure out which files change, so re-check
            # everything.
            # Changes to print_python_deps.py affect all .pydeps.
            if local_path in ('DEPS', 'PRESUBMIT.py'
                              ) or local_path.endswith('print_python_deps.py'):
                return self._pydeps_files
            elif local_path.endswith('.pydeps'):
                if local_path in self._pydeps_files:
                    affected_pydeps.add(local_path)
            elif local_path.endswith('.py'):
                if file_to_pydeps_map is None:
                    file_to_pydeps_map = self._CreateFilesToPydepsMap()
                affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
        return affected_pydeps

    def DetermineIfStaleAsync(self, pydeps_path):
        """Runs print_python_deps.py to see if the files is stale."""
        old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
            # The regeneration command is recorded in the file itself (second
            # line); drop its leading character (presumably a '#').
            cmd = old_pydeps_data[1][1:].strip()
            if '--output' not in cmd:
                cmd += ' --output ' + pydeps_path
            old_contents = old_pydeps_data[2:]
            # A default cmd that should work in most cases (as long as pydeps filename
            # matches the script name) so that PRESUBMIT.py does not crash if pydeps
            # file is empty/new.
            cmd = 'build/print_python_deps.py {} --root={} --output={}'.format(
                pydeps_path[:-4], os.path.dirname(pydeps_path), pydeps_path)
        # PYTHONDONTWRITEBYTECODE=1 keeps the regeneration run from writing
        # .pyc files into the tree.
        env = dict(os.environ)
        env['PYTHONDONTWRITEBYTECODE'] = '1'
        process = self._input_api.subprocess.Popen(
            cmd + ' --output ""',
            stdout=self._input_api.subprocess.PIPE,
        return _PydepsCheckerResult(cmd, pydeps_path, process, old_contents)
def _ParseGclientArgs():
    """Parses build/config/gclient_args.gni into a dict of arg name -> value.

    Each non-comment, non-blank line has the GN form `attribute = value`.
    Values are returned as raw strings (e.g. 'true', not True), stripped of
    surrounding whitespace.

    Returns:
        dict mapping each GN arg name to its raw string value.
    """
    args = {}
    # Path is relative to the checkout root, which is where presubmit runs.
    with open('build/config/gclient_args.gni', 'r') as f:
        for line in f:
            line = line.strip()
            # Skip blank lines and GN comments.
            if not line or line.startswith('#'):
                continue
            # Split only on the first '=' so values containing '=' (e.g.
            # quoted strings) do not raise ValueError.
            attribute, value = line.split('=', 1)
            args[attribute.strip()] = value.strip()
    return args
def CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
    """Checks if a .pydeps file needs to be regenerated.

    checker_for_tests lets unit tests inject a fake PydepsChecker.
    """
    # This check is for Python dependency lists (.pydeps files), and involves
    # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
    # doesn't work on Windows and Mac, so skip it on other platforms.
    if not input_api.platform.startswith('linux'):

    # First, check for new / deleted .pydeps.
    for f in input_api.AffectedFiles(include_deletes=True):
        # Check whether we are running the presubmit check for a file in src.
        # f.LocalPath is relative to repo (src, or internal repo).
        # os_path.exists is relative to src repo.
        # Therefore if os_path.exists is true, it means f.LocalPath is relative
        # to src and we can conclude that the pydeps is in src.
        if f.LocalPath().endswith('.pydeps'):
            if input_api.os_path.exists(f.LocalPath()):
                if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'remove %s' % f.LocalPath()))
                elif f.Action() != 'D' and f.LocalPath(
                ) not in _ALL_PYDEPS_FILES:
                        output_api.PresubmitError(
                            'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
                            'include %s' % f.LocalPath()))

    # Android-specific pydeps can only be validated in an Android checkout;
    # elsewhere, only warn about them.
    is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
    checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
    affected_pydeps = set(checker.ComputeAffectedPydeps())
    affected_android_pydeps = affected_pydeps.intersection(
        set(_ANDROID_SPECIFIC_PYDEPS_FILES))
    if affected_android_pydeps and not is_android:
            output_api.PresubmitPromptOrNotify(
                'You have changed python files that may affect pydeps for android\n'
                'specific scripts. However, the relevant presubmit check cannot be\n'
                'run because you are not using an Android checkout. To validate that\n'
                'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
                'use the android-internal-presubmit optional trybot.\n'
                'Possibly stale pydeps files:\n{}'.format(
                    '\n'.join(affected_android_pydeps))))

    all_pydeps = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
    pydeps_to_check = affected_pydeps.intersection(all_pydeps)
    # Process these concurrently, as each one takes 1-2 seconds.
    pydep_results = [checker.DetermineIfStaleAsync(p) for p in pydeps_to_check]
    for result in pydep_results:
        error_msg = result.GetError()
            results.append(output_api.PresubmitError(error_msg))
def CheckSingletonInHeaders(input_api, output_api):
    """Checks to make sure no header files have |Singleton<|."""

    def FileFilter(affected_file):
        # It's ok for base/memory/singleton.h to have |Singleton<|.
        files_to_skip = (_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP +
                         (r"^base/memory/singleton\.h$",
                          r"^net/quic/platform/impl/quic_singleton_impl\.h$"))
        return input_api.FilterSourceFile(affected_file,
                                          files_to_skip=files_to_skip)

    # Negative lookbehind: don't flag the declaration 'class base::Singleton<'
    # itself, only uses of Singleton<.
    pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
    for f in input_api.AffectedSourceFiles(FileFilter):
        # Only header-like extensions are checked.
        if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx')
                or f.LocalPath().endswith('.hpp')
                or f.LocalPath().endswith('.inl')):
            contents = input_api.ReadFile(f)
            for line in contents.splitlines(False):
                if (not line.lstrip().startswith('//')
                        and  # Strip C++ comment.
                        pattern.search(line)):

        output_api.PresubmitError(
            'Found base::Singleton<T> in the following header files.\n' +
            'Please move them to an appropriate source file so that the ' +
            'template gets instantiated in a single compilation unit.',
4105 ( "-webkit-box", "flex" ),
4106 ( "-webkit-inline-box", "inline-flex" ),
4107 ( "-webkit-flex", "flex" ),
4108 ( "-webkit-inline-flex", "inline-flex" ),
4109 ( "-webkit-min-content", "min-content" ),
4110 ( "-webkit-max-content", "max-content" ),
4113 ( "-webkit-background-clip", "background-clip" ),
4114 ( "-webkit-background-origin", "background-origin" ),
4115 ( "-webkit-background-size", "background-size" ),
4116 ( "-webkit-box-shadow", "box-shadow" ),
4117 ( "-webkit-user-select", "user-select" ),
4120 ( "-webkit-gradient", "gradient" ),
4121 ( "-webkit-repeating-gradient", "repeating-gradient" ),
4122 ( "-webkit-linear-gradient", "linear-gradient" ),
4123 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
4124 ( "-webkit-radial-gradient", "radial-gradient" ),
4125 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
# TODO: add unit tests
def CheckNoDeprecatedCss(input_api, output_api):
    """ Make sure that we don't use deprecated CSS
        properties, functions or values. Our external
        documentation and iOS CSS for dom distiller
        (reader mode) are ignored by the hooks as it
        needs to be consumed by WebKit. """
    # Only .css files are inspected.
    file_inclusion_pattern = [r".+\.css$"]
    files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP +
                     (r"^chrome/common/extensions/docs", r"^chrome/docs",
                      r"^native_client_sdk"))
    file_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
    for fpath in input_api.AffectedFiles(file_filter=file_filter):
        # Only lines touched by this CL are checked.
        for line_num, line in fpath.ChangedContents():
            # _DEPRECATED_CSS pairs a deprecated token with its replacement.
            for (deprecated_value, value) in _DEPRECATED_CSS:
                if deprecated_value in line:
                        output_api.PresubmitError(
                            "%s:%d: Use of deprecated CSS %s, use %s instead" %
                            (fpath.LocalPath(), line_num, deprecated_value,
def CheckForRelativeIncludes(input_api, output_api):
    """Checks changed C++ files for added #include lines that use a
    relative path containing "../"."""
    for f in input_api.AffectedFiles(include_deletes=False):
        # Third-party code (other than blink) is exempt; both path
        # separator styles are checked for Windows checkouts.
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
        # Only lines changed by this CL are inspected.
        relative_includes = [
            line for _, line in f.ChangedContents()
            if "#include" in line and "../" in line
        if not relative_includes:
        bad_files[f.LocalPath()] = relative_includes

    # Build one multi-line description per offending file.
    error_descriptions = []
    for file_path, bad_lines in bad_files.items():
        error_description = file_path
        for line in bad_lines:
            error_description += '\n ' + line
        error_descriptions.append(error_description)

        output_api.PresubmitError(
            'You added one or more relative #include paths (including "../").\n'
            'These shouldn\'t be used because they can be used to include headers\n'
            'from code that\'s not correctly specified as a dependency in the\n'
            'relevant BUILD.gn file(s).', error_descriptions))
def CheckForCcIncludes(input_api, output_api):
    """Check that nobody tries to include a cc file. It's a relatively
    common error which results in duplicate symbols in object
    files. This may not always break the build until someone later gets
    very confusing linking errors."""
    for f in input_api.AffectedFiles(include_deletes=False):
        # We let third_party code do whatever it wants
        if (f.LocalPath().startswith('third_party')
                and not f.LocalPath().startswith('third_party/blink')
                and not f.LocalPath().startswith('third_party\\blink')):
        if not _IsCPlusPlusFile(input_api, f.LocalPath()):
        for _, line in f.ChangedContents():
            if line.startswith('#include "'):
                # The quoted path is the token between the first pair of
                # double quotes.
                included_file = line.split('"')[1]
                if _IsCPlusPlusFile(input_api, included_file):
                    # The most common naming for external files with C++ code,
                    # apart from standard headers, is to call them foo.inc, but
                    # Chromium sometimes uses foo-inc.cc so allow that as well.
                    if not included_file.endswith(('.h', '-inc.cc')):
                        output_api.PresubmitError(
                            'Only header files or .inc files should be included in other\n'
                            'C++ files. Compiling the contents of a cc file more than once\n'
                            'will cause duplicate information in the build which may later\n'
                            'result in strange link_errors.\n' +
                            f.LocalPath() + ':\n ' + line))
def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
    """Validates one WATCHLIST_DEFINITIONS AST entry (key/value pair).

    Returns an error message string describing the first problem found,
    or None (by falling through) when the entry is valid.
    """
    if not isinstance(key, ast.Str):
        return 'Key at line %d must be a string literal' % key.lineno
    if not isinstance(value, ast.Dict):
        return 'Value at line %d must be a dict' % value.lineno
    if len(value.keys) != 1:
        return 'Dict at line %d must have single entry' % value.lineno
    # The single entry must use the string literal 'filepath' as its key.
    if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
            'Entry at line %d must have a string literal \'filepath\' as key' %
4245 def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
4246 if not isinstance(key, ast.Str):
4247 return 'Key at line %d must be a string literal' % key.lineno
4248 if not isinstance(value, ast.List):
4249 return 'Value at line %d must be a list' % value.lineno
4250 for element in value.elts:
4251 if not isinstance(element, ast.Str):
4252 return 'Watchlist elements on line %d is not a string' % key.lineno
4253 if not email_regex.match(element.s):
4254 return ('Watchlist element on line %d doesn\'t look like a valid '
4255 + 'email: %s') % (key.lineno, element.s)
def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
    """Walks the WATCHLIST_DEFINITIONS and WATCHLISTS dicts in parallel,
    checking each pair of entries for syntax, matching keys, and sort
    order. Returns an error message string, or None when everything is OK."""
    mismatch_template = (
        'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
    email_regex = input_api.re.compile(
        r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")
        # One dict ran out of keys before the other: report which side
        # is missing an entry.
        if i >= len(wd_dict.keys):
            if i >= len(w_dict.keys):
            return mismatch_template % ('missing',
                                        'line %d' % w_dict.keys[i].lineno)
        elif i >= len(w_dict.keys):
            return (mismatch_template %
                    ('line %d' % wd_dict.keys[i].lineno, 'missing'))
        wd_key = wd_dict.keys[i]
        w_key = w_dict.keys[i]
        # Per-entry syntax checks delegated to the two helpers above.
        result = _CheckWatchlistDefinitionsEntrySyntax(wd_key,
                                                       wd_dict.values[i], ast)
        if result is not None:
            return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result
        result = _CheckWatchlistsEntrySyntax(w_key, w_dict.values[i], ast,
        if result is not None:
            return 'Bad entry in WATCHLISTS dict: %s' % result
        # The two dicts must list the same watchlist names in the same order.
        if wd_key.s != w_key.s:
            return mismatch_template % ('%s at line %d' %
                                        (wd_key.s, wd_key.lineno),
                                        (w_key.s, w_key.lineno))
        # Keys must be lexicographically sorted.
        if wd_key.s < last_key:
                'WATCHLISTS dict is not sorted lexicographically at line %d and %d'
                % (wd_key.lineno, w_key.lineno))
def _CheckWATCHLISTSSyntax(expression, input_api):
    """Checks the parsed WATCHLISTS file has the expected top-level shape:
    a single dict with exactly a WATCHLIST_DEFINITIONS entry followed by a
    WATCHLISTS entry. Returns an error message string or None."""
    if not isinstance(expression, ast.Expression):
        return 'WATCHLISTS file must contain a valid expression'
    dictionary = expression.body
    if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
        return 'WATCHLISTS file must have single dict with exactly two entries'

    first_key = dictionary.keys[0]
    first_value = dictionary.values[0]
    second_key = dictionary.keys[1]
    second_value = dictionary.values[1]

    # Entry order is significant: definitions first, watchlists second.
    if (not isinstance(first_key, ast.Str)
            or first_key.s != 'WATCHLIST_DEFINITIONS'
            or not isinstance(first_value, ast.Dict)):
        return ('The first entry of the dict in WATCHLISTS file must be '
                'WATCHLIST_DEFINITIONS dict')

    if (not isinstance(second_key, ast.Str) or second_key.s != 'WATCHLISTS'
            or not isinstance(second_value, ast.Dict)):
        return ('The second entry of the dict in WATCHLISTS file must be '

    # Detailed per-entry validation of the two dicts.
    return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
def CheckWATCHLISTS(input_api, output_api):
    """If the top-level WATCHLISTS file is in this CL, validates that it
    evaluates cleanly and follows the required structure/style."""
    for f in input_api.AffectedFiles(include_deletes=False):
        if f.LocalPath() == 'WATCHLISTS':
            contents = input_api.ReadFile(f, 'r')
                # First, make sure that it can be evaluated.
                input_api.ast.literal_eval(contents)
                # Get an AST tree for it and scan the tree for detailed style checking.
                expression = input_api.ast.parse(contents,
                                                 filename='WATCHLISTS',
            # Any of these means the file is not even parseable; surface a
            # presubmit error rather than crashing the presubmit.
            except ValueError as e:
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
            except SyntaxError as e:
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
            except TypeError as e:
                    output_api.PresubmitError('Cannot parse WATCHLISTS file',
            result = _CheckWATCHLISTSSyntax(expression, input_api)
            if result is not None:
                return [output_api.PresubmitError(result)]
def CheckGnGlobForward(input_api, output_api):
    """Checks that forward_variables_from(invoker, "*") follows best practices.

    As documented at //build/docs/writing_gn_templates.md
    """
        # Only .gni template files are checked.
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gni', ))
    for f in input_api.AffectedSourceFiles(gn_files):
        for line_num, line in f.ChangedContents():
            # A bare "*" forward with no exclusion list is the smell.
            if 'forward_variables_from(invoker, "*")' in line:
                    'Bare forward_variables_from(invoker, "*") in %s:%d' %
                    (f.LocalPath(), line_num))
            output_api.PresubmitPromptWarning(
                'forward_variables_from("*") without exclusions',
                items=sorted(problems),
                'The variables "visibility" and "test_only" should be '
                'explicitly listed in forward_variables_from(). For more '
                'https://chromium.googlesource.com/chromium/src/+/HEAD/'
                'build/docs/writing_gn_templates.md'
                '#Using-forward_variables_from'))
def CheckNewHeaderWithoutGnChangeOnUpload(input_api, output_api):
    """Checks that newly added header files have corresponding GN changes.
    Note that this is only a heuristic. To be precise, run script:
    build/check_gn_headers.py.
    """
        return input_api.FilterSourceFile(
            f, files_to_check=(r'.+%s' % _HEADER_EXTENSIONS, ))
    # Collect headers newly added (action 'A') in this CL.
    for f in input_api.AffectedSourceFiles(headers):
        if f.Action() != 'A':
        new_headers.append(f.LocalPath())
        return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))
    # Concatenate every changed line from affected GN files; the check
    # below is a simple substring search against this blob.
    all_gn_changed_contents = ''
    for f in input_api.AffectedSourceFiles(gn_files):
        for _, line in f.ChangedContents():
            all_gn_changed_contents += line
    # A new header whose basename never appears in any GN diff is suspect.
    for header in new_headers:
        basename = input_api.os_path.basename(header)
        if basename not in all_gn_changed_contents:
            problems.append(header)
        output_api.PresubmitPromptWarning(
            'Missing GN changes for new header files',
            items=sorted(problems),
            'Please double check whether newly added header files need '
            'corresponding changes in gn or gni files.\nThis checking is only a '
            'heuristic. Run build/check_gn_headers.py to be precise.\n'
            'Read https://crbug.com/661774 for more info.')
def CheckCorrectProductNameInMessages(input_api, output_api):
    """Check that Chromium-branded strings don't include "Chrome" or vice versa.

    This assumes we won't intentionally reference one product from the other
    """
            # Each test case pairs a .grd filename suffix with the product
            # name that file should (and should not) mention.
            "filename_postfix": "google_chrome_strings.grd",
            "correct_name": "Chrome",
            "incorrect_name": "Chromium",
            "filename_postfix": "chromium_strings.grd",
            "correct_name": "Chromium",
            "incorrect_name": "Chrome",
    for test_case in test_cases:
        filename_filter = lambda x: x.LocalPath().endswith(test_case[
            "filename_postfix"])

        # Check each new line. Can yield false positives in multiline comments, but
        # easier than trying to parse the XML because messages can have nested
        # children, and associating message elements with affected lines is hard.
        for f in input_api.AffectedSourceFiles(filename_filter):
            for line_num, line in f.ChangedContents():
                if "<message" in line or "<!--" in line or "-->" in line:
                if test_case["incorrect_name"] in line:
                    problems.append("Incorrect product name in %s:%d" %
                                    (f.LocalPath(), line_num))

            "Strings in %s-branded string files should reference \"%s\", not \"%s\""
            % (test_case["correct_name"], test_case["correct_name"],
               test_case["incorrect_name"]))
        all_problems.append(
            output_api.PresubmitPromptWarning(message, items=problems))
def CheckForTooLargeFiles(input_api, output_api):
    """Avoid large files, especially binary files, in the repository since
    git doesn't scale well for those. They will be in everyone's repo
    clones forever, forever making Chromium slower to clone and work
    with."""
    # Uploading files to cloud storage is not trivial so we don't want
    # to set the limit too low, but the upper limit for "normal" large
    # files seems to be 1-2 MB, with a handful around 5-8 MB, so
    # anything over 20 MB is exceptional.
    TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024  # 20 MB
    too_large_files = []
    for f in input_api.AffectedFiles():
        # Check both added and modified files (but not deleted files).
        if f.Action() in ('A', 'M'):
            size = input_api.os_path.getsize(f.AbsoluteLocalPath())
            if size > TOO_LARGE_FILE_SIZE_LIMIT:
                too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))
        'Do not commit large files to git since git scales badly for those.\n'
        'Instead put the large files in cloud storage and use DEPS to\n' +
        'fetch them.\n' + '\n'.join(too_large_files))
        output_api.PresubmitError('Too large files found in commit',
                                  long_text=message + '\n')
def CheckFuzzTargetsOnUpload(input_api, output_api):
    """Checks specific for fuzz target sources."""
    # Optional libFuzzer entry points that must be exported via the
    # required header to work correctly (see the message below).
    EXPORTED_SYMBOLS = [
        'LLVMFuzzerInitialize',
        'LLVMFuzzerCustomMutator',
        'LLVMFuzzerCustomCrossOver',
    REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'

    def FilterFile(affected_file):
        """Ignore libFuzzer source code."""
        files_to_check = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
        files_to_skip = r"^third_party/libFuzzer"

        return input_api.FilterSourceFile(affected_file,
                                          files_to_check=[files_to_check],
                                          files_to_skip=[files_to_skip])

    files_with_missing_header = []
    for f in input_api.AffectedSourceFiles(FilterFile):
        contents = input_api.ReadFile(f, 'r')
        if REQUIRED_HEADER in contents:
        # Only flag files that actually define one of the optional symbols.
        if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
            files_with_missing_header.append(f.LocalPath())

    if not files_with_missing_header:
        'If you define any of the libFuzzer optional functions (%s), it is '
        'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
        'work incorrectly on Mac (crbug.com/687076).\nNote that '
        'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
        'to access command line arguments passed to the fuzzer. Instead, prefer '
        'static initialization and shared resources as documented in '
        'https://chromium.googlesource.com/chromium/src/+/main/testing/'
        'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n'
        % (', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER))
        output_api.PresubmitPromptWarning(message="Missing '%s' in:" %
                                          items=files_with_missing_header,
                                          long_text=long_text)
def _CheckNewImagesWarning(input_api, output_api):
    """
    Warns authors who add images into the repo to make sure their images are
    optimized before committing.
    """
    images_added = False
    # Android resource directories (drawable/mipmap); tests and junit
    # directories are excluded.
    filter_lambda = lambda x: input_api.FilterSourceFile(
        files_to_skip=(('(?i).*test', r'.*\/junit\/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\/(drawable|mipmap)'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=filter_lambda):
        local_path = f.LocalPath().lower()
            local_path.endswith(extension)
            for extension in _IMAGE_EXTENSIONS):
            image_paths.append(f)
        # FYI-only warning; it does not block the CQ.
        output_api.PresubmitPromptWarning(
            'It looks like you are trying to commit some images. If these are '
            'non-test-only images, please make sure to read and apply the tips in '
            'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
            'binary_size/optimization_advice.md#optimizing-images\nThis check is '
            'FYI only and will not block your CL on the CQ.', image_paths))
def ChecksAndroidSpecificOnUpload(input_api, output_api):
    """Groups upload checks that target android code."""
    # Each _Check* helper returns a list of presubmit results which are
    # accumulated and returned together.
    results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
    results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
    results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
    results.extend(_CheckAndroidToastUsage(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
    results.extend(_CheckAndroidTestJUnitFrameworkImport(
        input_api, output_api))
    results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
    results.extend(_CheckAndroidWebkitImports(input_api, output_api))
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
    results.extend(_CheckNewImagesWarning(input_api, output_api))
    results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
    results.extend(_CheckAndroidInfoBarDeprecation(input_api, output_api))
def ChecksAndroidSpecificOnCommit(input_api, output_api):
    """Groups commit checks that target android code."""
    # is_check_on_upload=False for the commit-time XML style check.
    results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
4634 # TODO(chrishall): could we additionally match on any path owned by
4635 # ui/accessibility/OWNERS ?
4636 _ACCESSIBILITY_PATHS = (
4637 r"^chrome/browser.*/accessibility/",
4638 r"^chrome/browser/extensions/api/automation.*/",
4639 r"^chrome/renderer/extensions/accessibility_.*",
4640 r"^chrome/tests/data/accessibility/",
4641 r"^components/services/screen_ai/",
4642 r"^content/browser/accessibility/",
4643 r"^content/renderer/accessibility/",
4644 r"^content/tests/data/accessibility/",
4645 r"^extensions/renderer/api/automation/",
4646 r"^services/accessibility/",
4647 r"^ui/accessibility/",
4648 r"^ui/views/accessibility/",
def CheckAccessibilityRelnotesField(input_api, output_api):
    """Checks that commits to accessibility code contain an AX-Relnotes field in
    their commit message."""

    def FileFilter(affected_file):
        paths = _ACCESSIBILITY_PATHS

        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes affecting accessibility paths.
    if not any(input_api.AffectedFiles(file_filter=FileFilter)):

    # AX-Relnotes can appear in either the description or the footer.
    # When searching the description, require 'AX-Relnotes:' to appear at the
    # beginning of a line.
    ax_regex = input_api.re.compile('ax-relnotes[:=]')
    # The description is lowercased first, so the regex matches
    # case-insensitively.
    description_has_relnotes = any(
        ax_regex.match(line)
        for line in input_api.change.DescriptionText().lower().splitlines())

    footer_relnotes = input_api.change.GitFootersFromDescription().get(
    if description_has_relnotes or footer_relnotes:

    # TODO(chrishall): link to Relnotes documentation in message.
        "Missing 'AX-Relnotes:' field required for accessibility changes"
        "\n please add 'AX-Relnotes: [release notes].' to describe any "
        "user-facing changes"
        "\n otherwise add 'AX-Relnotes: n/a.' if this change has no "
        "user-facing effects"
        "\n if this is confusing or annoying then please contact members "
        "of ui/accessibility/OWNERS.")

    # Notify-level result only; it does not block the CL.
    return [output_api.PresubmitNotifyResult(message)]
4689 _ACCESSIBILITY_EVENTS_TEST_PATH = (
4690 r"^content/test/data/accessibility/event/.*\.html",
4693 _ACCESSIBILITY_TREE_TEST_PATH = (
4694 r"^content/test/data/accessibility/accname/.*\.html",
4695 r"^content/test/data/accessibility/aria/.*\.html",
4696 r"^content/test/data/accessibility/css/.*\.html",
4697 r"^content/test/data/accessibility/html/.*\.html",
4700 _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH = (
4701 r"^.*/WebContentsAccessibilityEventsTest\.java",
4704 _ACCESSIBILITY_ANDROID_TREE_TEST_PATH = (
4705 r"^.*/WebContentsAccessibilityTreeTest\.java",
def CheckAccessibilityEventsTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityEventsTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_EVENTS_TEST_PATH

        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_EVENTS_TEST_PATH

        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the events test data path with html type.
        input_api.AffectedFiles(include_deletes=True,
                                file_filter=FilePathFilter)):

    # If the commit contains any change to the Android test file, ignore.
        input_api.AffectedFiles(include_deletes=True,
                                file_filter=AndroidFilePathFilter)):

    # Only consider changes that are adding/renaming or deleting a file
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        # 'A' = added (or renamed-in), 'D' = deleted (or renamed-out).
        if f.Action() == 'A' or f.Action() == 'D':
            "It appears that you are adding, renaming or deleting"
            "\na dump_accessibility_events* test, but have not included"
            "\na corresponding change for Android."
            "\nPlease include (or remove) the test from:"
            "\n content/public/android/javatests/src/org/chromium/"
            "content/browser/accessibility/"
            "WebContentsAccessibilityEventsTest.java"
            "\nIf this message is confusing or annoying, please contact"
            "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):

    return [output_api.PresubmitPromptWarning(message)]
def CheckAccessibilityTreeTestsAreIncludedForAndroid(input_api, output_api):
    """Checks that commits that include a newly added, renamed/moved, or deleted
    test in the DumpAccessibilityTreeTest suite also includes a corresponding
    change to the Android test."""

    def FilePathFilter(affected_file):
        paths = _ACCESSIBILITY_TREE_TEST_PATH

        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    def AndroidFilePathFilter(affected_file):
        paths = _ACCESSIBILITY_ANDROID_TREE_TEST_PATH

        return input_api.FilterSourceFile(affected_file, files_to_check=paths)

    # Only consider changes in the various tree test data paths with html type.
        input_api.AffectedFiles(include_deletes=True,
                                file_filter=FilePathFilter)):

    # If the commit contains any change to the Android test file, ignore.
        input_api.AffectedFiles(include_deletes=True,
                                file_filter=AndroidFilePathFilter)):

    # Only consider changes that are adding/renaming or deleting a file
    for f in input_api.AffectedFiles(include_deletes=True,
                                     file_filter=FilePathFilter):
        # 'A' = added (or renamed-in), 'D' = deleted (or renamed-out).
        if f.Action() == 'A' or f.Action() == 'D':
            "It appears that you are adding, renaming or deleting"
            "\na dump_accessibility_tree* test, but have not included"
            "\na corresponding change for Android."
            "\nPlease include (or remove) the test from:"
            "\n content/public/android/javatests/src/org/chromium/"
            "content/browser/accessibility/"
            "WebContentsAccessibilityTreeTest.java"
            "\nIf this message is confusing or annoying, please contact"
            "\nmembers of ui/accessibility/OWNERS.")

    # If no message was set, return empty.
    if not len(message):

    return [output_api.PresubmitPromptWarning(message)]
4804 # string pattern, sequence of strings to show when pattern matches,
4805 # error flag. True if match is a presubmit error, otherwise it's a warning.
4806 _NON_INCLUSIVE_TERMS = (
4808 # Note that \b pattern in python re is pretty particular. In this
4809 # regexp, 'class WhiteList ...' will match, but 'class FooWhiteList
4810 # ...' will not. This may require some tweaking to catch these cases
4811 # without triggering a lot of false positives. Leaving it naive and
4812 # less matchy for now.
4813 r'/\b(?i)((black|white)list|master|slave)\b', # nocheck
4815 'Please don\'t use blacklist, whitelist, ' # nocheck
4816 'or slave in your', # nocheck
4817 'code and make every effort to use other terms. Using "// nocheck"',
4818 '"# nocheck" or "<!-- nocheck -->"',
4819 'at the end of the offending line will bypass this PRESUBMIT error',
4820 'but avoid using this whenever possible. Reach out to',
4821 'community@chromium.org if you have questions'),
def ChecksCommon(input_api, output_api):
    """Checks common to both upload and commit."""
        input_api.canned_checks.PanProjectChecks(
            input_api, output_api, excluded_paths=_EXCLUDED_PATHS))

    # Robot authors (e.g. autorollers) are exempt from the author check.
    author = input_api.change.author_email
    if author and author not in _KNOWN_ROBOTS:
            input_api.canned_checks.CheckAuthorizedAuthor(
                input_api, output_api))

        # Tabs are only rejected in .grd files here.
        input_api.canned_checks.CheckChangeHasNoTabs(
            source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
        input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))

    # dirmd ships with depot_tools; pick the platform-appropriate binary.
    dirmd = 'dirmd.bat' if input_api.is_windows else 'dirmd'
    dirmd_bin = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                       'third_party', 'depot_tools', dirmd)
        input_api.canned_checks.CheckDirMetadataFormat(
            input_api, output_api, dirmd_bin)))
        input_api.canned_checks.CheckOwnersDirMetadataExclusive(
            input_api, output_api))
        input_api.canned_checks.CheckNoNewMetadataInOwners(
            input_api, output_api))
        input_api.canned_checks.CheckInclusiveLanguage(
            excluded_directories_relative_path=[
                'infra', 'inclusive_language_presubmit_exempt_dirs.txt'
            non_inclusive_terms=_NON_INCLUSIVE_TERMS))

    # Run each affected PRESUBMIT.py's own PRESUBMIT_test.py, when present.
    presubmit_py_filter = lambda f: input_api.FilterSourceFile(
        f, files_to_check=[r'.*PRESUBMIT\.py$'])
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=presubmit_py_filter):
        full_path = input_api.os_path.dirname(f.AbsoluteLocalPath())
        test_file = input_api.os_path.join(full_path, 'PRESUBMIT_test.py')
        # The PRESUBMIT.py file (and the directory containing it) might have
        # been affected by being moved or removed, so only try to run the tests
        # if they still exist.
        if not input_api.os_path.exists(test_file):

        # The "magic" USE_PYTHON3 marker decides which interpreter runs the
        # nested presubmit tests.
        with open(f.LocalPath()) as fp:
                line.startswith('USE_PYTHON3 = True')
                for line in fp.readlines())
            input_api.canned_checks.RunUnitTestsInDirectory(
                files_to_check=[r'^PRESUBMIT_test\.py$'],
                run_on_python2=not use_python3,
                run_on_python3=use_python3,
                skip_shebang_check=True))
def CheckPatchFiles(input_api, output_api):
    """Errors when leftover patch artifacts (.orig / .rej files) are
    being committed."""
        f.LocalPath() for f in input_api.AffectedFiles()
        if f.LocalPath().endswith(('.orig', '.rej'))
    # Cargo.toml.orig files are part of third-party crates downloaded from
    # crates.io and should be included.
    problems = [f for f in problems if not f.endswith('Cargo.toml.orig')]
        output_api.PresubmitError("Don't commit .rej and .orig files.",
def CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
    """Warns when a build-config macro (COMPILER_*, ARCH_CPU_*,
    WCHAR_T_IS_*) is tested before "build/build_config.h" is included."""
    # Excludes OS_CHROMEOS, which is not defined in build_config.h.
    macro_re = input_api.re.compile(
        r'^\s*#(el)?if.*\bdefined\(((COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
    include_re = input_api.re.compile(r'^#include\s+"build/build_config.h"',
                                      input_api.re.MULTILINE)
    extension_re = input_api.re.compile(r'\.[a-z]+$')
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedFiles(include_deletes=False):
        # The build-config macros are allowed to be used in build_config.h
        # without including itself.
        if f.LocalPath() == config_h_file:

        # Only C/C++/Objective-C translation units and headers.
        if not f.LocalPath().endswith(
                ('.h', '.c', '.cc', '.cpp', '.m', '.mm')):

        # Find the first line using one of the macros.
        found_line_number = None
        all_lines = input_api.ReadFile(f, 'r').splitlines()
        for line_num, line in enumerate(all_lines):
            match = macro_re.search(line)
                found_line_number = line_num
                found_macro = match.group(2)
        if not found_line_number:

        # The include must appear before the first macro use.
        found_include_line = -1
        for line_num, line in enumerate(all_lines):
            if include_re.search(line):
                found_include_line = line_num
        if found_include_line >= 0 and found_include_line < found_line_number:

        # For a .cc/.m/.mm file, the include may instead live in the
        # file's primary header (same path with a .h extension).
        if not f.LocalPath().endswith('.h'):
            primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
                content = input_api.ReadFile(primary_header_path, 'r')
                if include_re.search(content):

        errors.append('%s:%d %s macro is used without first including build/'
                      (f.LocalPath(), found_line_number, found_macro))

    return [output_api.PresubmitPromptWarning('\n'.join(errors))]
def CheckForSuperfluousStlIncludesInHeaders(input_api, output_api):
    """Warns about C++ headers that include STL headers but never
    reference the std:: namespace."""
    stl_include_re = input_api.re.compile(r'^#include\s+<('
    std_namespace_re = input_api.re.compile(r'std::')
    for f in input_api.AffectedFiles():
        if not _IsCPlusPlusHeaderFile(input_api, f.LocalPath()):

        uses_std_namespace = False
        has_stl_include = False
        # Scan the full new file contents, not just the changed lines.
        for line in f.NewContents():
            # Both facts established: nothing more to learn from this file.
            if has_stl_include and uses_std_namespace:

            if not has_stl_include and stl_include_re.search(line):
                has_stl_include = True

            # The pch-file marker opts the file out of this warning.
            if not uses_std_namespace and (std_namespace_re.search(line)
                    or 'no-std-usage-because-pch-file' in line):
                uses_std_namespace = True

        if has_stl_include and not uses_std_namespace:
                '%s: Includes STL header(s) but does not reference std::' %

    return [output_api.PresubmitPromptWarning('\n'.join(errors))]
def _CheckForDeprecatedOSMacrosInFile(input_api, f):
    """Check for sensible looking, totally invalid OS macros."""
    preprocessor_statement = input_api.re.compile(r'^\s*#')
    os_macro = input_api.re.compile(r'defined\(OS_([^)]+)\)')
    for lnum, line in f.ChangedContents():
        # Only lines that are preprocessor statements are considered.
        if preprocessor_statement.search(line):
            for match in os_macro.finditer(line):
                # Suggest the BUILDFLAG(IS_*) replacement for each match.
                (f.LocalPath(), lnum, 'defined(OS_' + match.group(1) +
                 ') -> BUILDFLAG(IS_' + match.group(1) + ')'))
def CheckForDeprecatedOSMacros(input_api, output_api):
    """Check all affected files for invalid OS macros."""
    # The OS_ macros are allowed to be used in build/build_config.h.
    config_h_file = input_api.os_path.join('build', 'build_config.h')
    for f in input_api.AffectedSourceFiles(None):
        # Non-C/C++ files (scripts, docs, web resources) are skipped.
        if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')) \
                and f.LocalPath() != config_h_file:
            bad_macros.extend(_CheckForDeprecatedOSMacrosInFile(input_api, f))

        output_api.PresubmitError(
            'OS macros have been deprecated. Please use BUILDFLAGs instead (still '
            'defined in build_config.h):', bad_macros)
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
    """Check all affected files for invalid "if defined" macros."""
    # Macros that are always defined (to 0 or 1), so testing them with
    # #ifdef/defined() is always true and almost certainly a bug.
    ALWAYS_DEFINED_MACROS = (
        "TARGET_IPHONE_SIMULATOR",
        "TARGET_OS_EMBEDDED",
    ifdef_macro = input_api.re.compile(
        r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
    for lnum, line in f.ChangedContents():
        for match in ifdef_macro.finditer(line):
            if match.group(1) in ALWAYS_DEFINED_MACROS:
                always_defined = ' %s is always defined. ' % match.group(1)
                # Suggest the value test ('#if MACRO') instead.
                did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
                    (f.LocalPath(), lnum, always_defined, did_you_mean))
def CheckForInvalidIfDefinedMacros(input_api, output_api):
    """Check all affected files for invalid "if defined" macros."""
    # Imported third-party code keeps its own conventions.
    skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
    for f in input_api.AffectedFiles():
        if any([f.LocalPath().startswith(path) for path in skipped_paths]):
        # Only C/C++/Objective-C sources and headers are scanned.
        if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
                _CheckForInvalidIfDefinedMacrosInFile(input_api, f))

        output_api.PresubmitError(
            'Found ifdef check on always-defined macro[s]. Please fix your code\n'
            'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
def CheckForIPCRules(input_api, output_api):
    """Check for same IPC rules described in
    http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
    """
    base_pattern = r'IPC_ENUM_TRAITS\('
    inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
    # Matches uses that are inside a // comment, which are allowed.
    comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
    for f in input_api.AffectedSourceFiles(None):
        local_path = f.LocalPath()
        # Only headers can define IPC traits macros worth flagging here.
        if not local_path.endswith('.h'):
        for line_number, line in f.ChangedContents():
            if inclusion_pattern.search(
                    line) and not comment_pattern.search(line):
                problems.append('%s:%d\n %s' %
                                (local_path, line_number, line.strip()))

        output_api.PresubmitPromptWarning(_IPC_ENUM_TRAITS_DEPRECATED,
# Rejects files whose repository-relative path exceeds 200 characters, to
# stay safely under the Windows 260-character MAX_PATH limit once checkout
# prefixes are added.
5130 def CheckForLongPathnames(input_api, output_api):
5131 """Check to make sure no files being submitted have long paths.
5132 This causes issues on Windows.
5135 for f in input_api.AffectedTestableFiles():
5136 local_path = f.LocalPath()
5137 # Windows has a path limit of 260 characters. Limit path length to 200 so
5138 # that we have some extra for the prefix on dev machines and the bots.
5139 if len(local_path) > 200:
5140 problems.append(local_path)
# Hard error (not a prompt): long paths break Windows bots outright.
5143 return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
# Verifies that each Chromium-controlled header has a well-formed include
# guard: an `#ifndef NAME` whose NAME loosely matches the file path, an
# immediately following `#define NAME`, and a closing `#endif` that covers
# the whole file. Headers can opt out via the magic strings named in the
# docstring.
5148 def CheckForIncludeGuards(input_api, output_api):
5149 """Check that header files have proper guards against multiple inclusion.
5150 If a file should not have such guards (and it probably should) then it
5151 should include the string "no-include-guard-because-multiply-included" or
5152 "no-include-guard-because-pch-file".
5155 def is_chromium_header_file(f):
5156 # We only check header files under the control of the Chromium
5157 # project. That is, those outside third_party apart from
5158 # third_party/blink.
5159 # We also exclude *_message_generator.h headers as they use
5160 # include guards in a special, non-typical way.
5161 file_with_path = input_api.os_path.normpath(f.LocalPath())
5162 return (file_with_path.endswith('.h')
5163 and not file_with_path.endswith('_message_generator.h')
5164 and not file_with_path.endswith('com_imported_mstscax.h')
5165 and (not file_with_path.startswith('third_party')
5166 or file_with_path.startswith(
5167 input_api.os_path.join('third_party', 'blink'))))
# Path separators and other punctuation become '_' to build a guard token.
5169 def replace_special_with_underscore(string):
5170 return input_api.re.sub(r'[+\\/.-]', '_', string)
5174 for f in input_api.AffectedSourceFiles(is_chromium_header_file):
5176 guard_line_number = None
5177 seen_guard_end = False
5179 file_with_path = input_api.os_path.normpath(f.LocalPath())
5180 base_file_name = input_api.os_path.splitext(
5181 input_api.os_path.basename(file_with_path))[0]
5182 upper_base_file_name = base_file_name.upper()
# Canonical style-guide guard: PATH_ELEM_FILE_NAME_H_.
5184 expected_guard = replace_special_with_underscore(
5185 file_with_path.upper() + '_')
5187 # For "path/elem/file_name.h" we should really only accept
5188 # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
5189 # are too many (1000+) files with slight deviations from the
5190 # coding style. The most important part is that the include guard
5191 # is there, and that it's unique, not the name so this check is
5192 # forgiving for existing files.
5194 # As code becomes more uniform, this could be made stricter.
5196 guard_name_pattern_list = [
5197 # Anything with the right suffix (maybe with an extra _).
5200 # To cover include guards with old Blink style.
5203 # Anything including the uppercase name of the file.
5204 r'\w*' + input_api.re.escape(
5205 replace_special_with_underscore(upper_base_file_name)) +
5208 guard_name_pattern = '|'.join(guard_name_pattern_list)
5209 guard_pattern = input_api.re.compile(r'#ifndef\s+(' +
5210 guard_name_pattern + ')')
5212 for line_number, line in enumerate(f.NewContents()):
5213 if ('no-include-guard-because-multiply-included' in line
5214 or 'no-include-guard-because-pch-file' in line):
5215 guard_name = 'DUMMY' # To not trigger check outside the loop.
# Keep scanning until the first plausible `#ifndef` guard is found.
5218 if guard_name is None:
5219 match = guard_pattern.match(line)
5221 guard_name = match.group(1)
5222 guard_line_number = line_number
5224 # We allow existing files to use include guards whose names
5225 # don't match the chromium style guide, but new files should
5227 if guard_name != expected_guard:
5228 if f.Action() == 'A': # If file was just 'A'dded
5230 output_api.PresubmitPromptWarning(
5231 'Header using the wrong include guard name %s'
5234 (f.LocalPath(), line_number + 1)
5235 ], 'Expected: %r\nFound: %r' %
5236 (expected_guard, guard_name)))
5238 # The line after #ifndef should have a #define of the same name.
5239 if line_number == guard_line_number + 1:
5240 expected_line = '#define %s' % guard_name
5241 if line != expected_line:
5243 output_api.PresubmitPromptWarning(
5244 'Missing "%s" for include guard' %
5246 ['%s:%d' % (f.LocalPath(), line_number + 1)],
5247 'Expected: %r\nGot: %r' %
5248 (expected_line, line)))
5250 if not seen_guard_end and line == '#endif // %s' % guard_name:
5251 seen_guard_end = True
# Once the closing #endif was seen, any non-blank trailing line means
# the guard does not span the entire file.
5252 elif seen_guard_end:
5253 if line.strip() != '':
5255 output_api.PresubmitPromptWarning(
5256 'Include guard %s not covering the whole file'
5257 % (guard_name), [f.LocalPath()]))
5258 break # Nothing else to check and enough to warn once.
# No guard (and no opt-out string) found anywhere in the file.
5260 if guard_name is None:
5262 output_api.PresubmitPromptWarning(
5263 'Missing include guard in %s\n'
5264 'Recommended name: %s\n'
5265 'This check can be disabled by having the string\n'
5266 '"no-include-guard-because-multiply-included" or\n'
5267 '"no-include-guard-because-pch-file" in the header.'
5268 % (f.LocalPath(), expected_guard)))
# Warns when source files or known text files contain CRLF ("\r\n") line
# endings. Files named *crlf.txt are assumed to contain CRLF on purpose
# (test fixtures) and are skipped.
5273 def CheckForWindowsLineEndings(input_api, output_api):
5274 """Check source code and known ascii text files for Windows style line
5277 known_text_files = r'.*\.(txt|html|htm|py|gyp|gypi|gn|isolate|icon)$'
5279 file_inclusion_pattern = (known_text_files,
5280 r'.+%s' % _IMPLEMENTATION_EXTENSIONS,
5281 r'.+%s' % _HEADER_EXTENSIONS)
5284 source_file_filter = lambda f: input_api.FilterSourceFile(
5285 f, files_to_check=file_inclusion_pattern, files_to_skip=None)
5286 for f in input_api.AffectedSourceFiles(source_file_filter):
5287 # Ignore test files that contain crlf intentionally.
5288 if f.LocalPath().endswith('crlf.txt'):
5290 include_file = False
# splitlines(True) keeps the terminators so CRLF can be detected per line.
5291 for line in input_api.ReadFile(f, 'r').splitlines(True):
5292 if line.endswith('\r\n'):
5295 problems.append(f.LocalPath())
5299 output_api.PresubmitPromptWarning(
5300 'Are you sure that you want '
5301 'these files to contain Windows style line endings?\n' +
5302 '\n'.join(problems))
# Runs the canned license-header check, restricted to .icon files (which are
# C++ fragments and therefore need the standard license header).
5308 def CheckIconFilesForLicenseHeaders(input_api, output_api):
5309 """Check that .icon files (which are fragments of C++) have license headers.
5312 icon_files = (r'.*\.icon$', )
5314 icons = lambda x: input_api.FilterSourceFile(x, files_to_check=icon_files)
5315 return input_api.canned_checks.CheckLicense(input_api,
5317 source_file_filter=icons)
# Warns when a CL touches deprecated Chrome Apps / NaCl / PNaCl / PPAPI
# technologies. Four scans run over the affected files, each with its own
# file filter and list of trigger strings; any hit adds the file to the
# shared warning.
5320 def CheckForUseOfChromeAppsDeprecations(input_api, output_api):
5321 """Check source code for use of Chrome App technologies being
# Generic scanner: returns paths of non-deleted affected files whose diff
# contains any string from detection_list. An empty-string detector matches
# every changed line, i.e. "any change to this file".
5325 def _CheckForDeprecatedTech(input_api,
5328 files_to_check=None,
5329 files_to_skip=None):
5331 if (files_to_check or files_to_skip):
5332 source_file_filter = lambda f: input_api.FilterSourceFile(
5333 f, files_to_check=files_to_check, files_to_skip=files_to_skip)
5335 source_file_filter = None
5339 for f in input_api.AffectedSourceFiles(source_file_filter):
# Deleting deprecated code is fine — skip deleted files.
5340 if f.Action() == 'D':
5342 for _, line in f.ChangedContents():
5343 if any(detect in line for detect in detection_list):
5344 problems.append(f.LocalPath())
5348 # to avoid this presubmit script triggering warnings
5349 files_to_skip = ['PRESUBMIT.py', 'PRESUBMIT_test.py']
5353 # NMF: any files with extensions .nmf or NMF
5354 _NMF_FILES = r'\.(nmf|NMF)$'
5355 problems += _CheckForDeprecatedTech(
5358 detection_list=[''], # any change to the file will trigger warning
5359 files_to_check=[r'.+%s' % _NMF_FILES])
5361 # MANIFEST: any manifest.json that in its diff includes "app":
5362 _MANIFEST_FILES = r'(manifest\.json)$'
5363 problems += _CheckForDeprecatedTech(
5366 detection_list=['"app":'],
5367 files_to_check=[r'.*%s' % _MANIFEST_FILES])
5369 # NaCl / PNaCl: any file that in its diff contains the strings in the list
5370 problems += _CheckForDeprecatedTech(
5373 detection_list=['config=nacl', 'enable-nacl', 'cpu=pnacl', 'nacl_io'],
5374 files_to_skip=files_to_skip + [r"^native_client_sdk/"])
5376 # PPAPI: any C/C++ file that in its diff includes a ppapi library
5377 problems += _CheckForDeprecatedTech(
5380 detection_list=['#include "ppapi', '#include <ppapi'],
5381 files_to_check=(r'.+%s' % _HEADER_EXTENSIONS,
5382 r'.+%s' % _IMPLEMENTATION_EXTENSIONS),
5383 files_to_skip=[r"^ppapi/"])
5387 output_api.PresubmitPromptWarning(
5388 'You are adding/modifying code'
5389 'related to technologies which will soon be deprecated (Chrome Apps, NaCl,'
5390 ' PNaCl, PPAPI). See this blog post for more details:\n'
5391 'https://blog.chromium.org/2020/08/changes-to-chrome-app-support-timeline.html\n'
5392 'and this documentation for options to replace these technologies:\n'
5393 'https://developer.chrome.com/docs/apps/migration/\n' +
5394 '\n'.join(problems))
# Prompts the author to double-check privacy whenever a changed line mentions
# SYSLOG. This is a reminder, not a ban: it fires on any occurrence of the
# substring in changed content.
5400 def CheckSyslogUseWarningOnUpload(input_api, output_api, src_file_filter=None):
5401 """Checks that all source files use SYSLOG properly."""
5403 for f in input_api.AffectedSourceFiles(src_file_filter):
5404 for line_number, line in f.ChangedContents():
5405 if 'SYSLOG' in line:
5406 syslog_files.append(f.LocalPath() + ':' + str(line_number))
5410 output_api.PresubmitPromptWarning(
5411 'Please make sure there are no privacy sensitive bits of data in SYSLOG'
5412 ' calls.\nFiles to check:\n',
# Upload-time hook invoked by depot_tools. Verifies presubmit_support is at
# least 2.0.0 (matching PRESUBMIT_VERSION at the top of this file) and runs
# the canned patch-format check.
5418 def CheckChangeOnUpload(input_api, output_api):
5419 if input_api.version < [2, 0, 0]:
5421 output_api.PresubmitError(
5422 "Your depot_tools is out of date. "
5423 "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
5424 "but your version is %d.%d.%d" % tuple(input_api.version))
5428 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
# Commit-time hook invoked by depot_tools. In addition to the version gate
# shared with CheckChangeOnUpload, it verifies the tree is open and runs the
# canned formatting / bug-field / unwanted-tags checks.
5432 def CheckChangeOnCommit(input_api, output_api):
5433 if input_api.version < [2, 0, 0]:
5435 output_api.PresubmitError(
5436 "Your depot_tools is out of date. "
5437 "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
5438 "but your version is %d.%d.%d" % tuple(input_api.version))
5442 # Make sure the tree is 'open'.
5444 input_api.canned_checks.CheckTreeIsOpen(
5447 json_url='http://chromium-status.appspot.com/current?format=json'))
5450 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
5452 input_api.canned_checks.CheckChangeHasBugField(input_api, output_api))
5454 input_api.canned_checks.CheckChangeHasNoUnwantedTags(
5455 input_api, output_api))
# Validates UI strings in changed .grd/.grdp files on two axes:
#   1. Translation screenshots: added/modified messages must come with
#      uploaded screenshot .sha1 files; removed messages must have their
#      .sha1 files removed; real .png screenshots must never be committed.
#      Skippable via the Skip-Translation-Screenshots-Check git footer.
#   2. ICU MessageFormat syntax: added/modified message text that looks like
#      an ICU plural/select/selectordinal must parse correctly.
5459 def CheckStrings(input_api, output_api):
5460 """Check string ICU syntax validity and if translation screenshots exist."""
5461 # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
5462 # footer is set to true.
5463 git_footers = input_api.change.GitFootersFromDescription()
5464 skip_screenshot_check_footer = [
5465 footer.lower() for footer in git_footers.get(
5466 u'Skip-Translation-Screenshots-Check', [])
5468 run_screenshot_check = u'true' not in skip_screenshot_check_footer
5473 from io import StringIO
5475 new_or_added_paths = set(f.LocalPath() for f in input_api.AffectedFiles()
5476 if (f.Action() == 'A' or f.Action() == 'M'))
5477 removed_paths = set(f.LocalPath()
5478 for f in input_api.AffectedFiles(include_deletes=True)
5479 if f.Action() == 'D')
5482 f for f in input_api.AffectedFiles()
5483 if f.LocalPath().endswith(('.grd', '.grdp'))
# Test fixture grds are not real translation sources; ignore them.
5486 f for f in affected_grds if not 'testdata' in f.LocalPath()
5488 if not affected_grds:
5491 affected_png_paths = [
5492 f.AbsoluteLocalPath() for f in input_api.AffectedFiles()
5493 if (f.LocalPath().endswith('.png'))
5496 # Check for screenshots. Developers can upload screenshots using
5497 # tools/translation/upload_screenshots.py which finds and uploads
5498 # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
5499 # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
5500 # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
5502 # The logic here is as follows:
5504 # - If the CL has a .png file under the screenshots directory for a grd
5505 # file, warn the developer. Actual images should never be checked into the
5508 # - If the CL contains modified or new messages in grd files and doesn't
5509 # contain the corresponding .sha1 files, warn the developer to add images
5510 # and upload them via tools/translation/upload_screenshots.py.
5512 # - If the CL contains modified or new messages in grd files and the
5513 # corresponding .sha1 files, everything looks good.
5515 # - If the CL contains removed messages in grd files but the corresponding
5516 # .sha1 files aren't removed, warn the developer to remove them.
5517 unnecessary_screenshots = []
5519 unnecessary_sha1_files = []
5521 # This checks verifies that the ICU syntax of messages this CL touched is
5522 # valid, and reports any found syntax errors.
5523 # Without this presubmit check, ICU syntax errors in Chromium strings can land
5524 # without developers being aware of them. Later on, such ICU syntax errors
5525 # break message extraction for translation, hence would block Chromium
5526 # translations until they are fixed.
5527 icu_syntax_errors = []
# Records a missing <message_id>.png.sha1 when it isn't part of this CL.
5529 def _CheckScreenshotAdded(screenshots_dir, message_id):
5530 sha1_path = input_api.os_path.join(screenshots_dir,
5531 message_id + '.png.sha1')
5532 if sha1_path not in new_or_added_paths:
5533 missing_sha1.append(sha1_path)
# Records a stale .sha1 that still exists on disk after its message was
# removed, unless the CL deletes it.
5535 def _CheckScreenshotRemoved(screenshots_dir, message_id):
5536 sha1_path = input_api.os_path.join(screenshots_dir,
5537 message_id + '.png.sha1')
5538 if input_api.os_path.exists(
5539 sha1_path) and sha1_path not in removed_paths:
5540 unnecessary_sha1_files.append(sha1_path)
5542 def _ValidateIcuSyntax(text, level, signatures):
5543 """Validates ICU syntax of a text string.
5545 Check if text looks similar to ICU and checks for ICU syntax correctness
5546 in this case. Reports various issues with ICU syntax and values of
5547 variants. Supports checking of nested messages. Accumulate information of
5548 each ICU messages found in the text for further checking.
5551 text: a string to check.
5552 level: a number of current nesting level.
5553 signatures: an accumulator, a list of tuple of (level, variable,
5557 None if a string is not ICU or no issue detected.
5558 A tuple of (message, start index, end index) if an issue detected.
# Per ICU type: (known variant keywords or empty-set meaning "any",
# required variant keywords).
5561 'plural': (frozenset(
5562 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
5563 'other']), frozenset(['=1', 'other'])),
5564 'selectordinal': (frozenset(
5565 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many',
5566 'other']), frozenset(['one', 'other'])),
5567 'select': (frozenset(), frozenset(['other'])),
5570 # Check if the message looks like an attempt to use ICU
5571 # plural. If yes - check if its syntax strictly matches ICU format.
5572 like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b',
5575 signatures.append((level, None, None, None))
5578 # Check for valid prefix and suffix
5580 r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
5581 r'(plural|selectordinal|select),\s*'
5582 r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
5584 return (('This message looks like an ICU plural, '
5585 'but does not follow ICU syntax.'), like.start(),
5587 starting, variable, kind, variant_pairs = m.groups()
5588 variants, depth, last_pos = _ParseIcuVariants(variant_pairs,
# depth != 0 after parsing means an unmatched '{' somewhere in the body.
5591 return ('Invalid ICU format. Unbalanced opening bracket', last_pos,
5594 ending = text[last_pos:]
5596 return ('Invalid ICU format. No initial opening bracket',
5597 last_pos - 1, last_pos)
5598 if not ending or '}' not in ending:
5599 return ('Invalid ICU format. No final closing bracket',
5600 last_pos - 1, last_pos)
5603 'Invalid ICU format. Extra characters at the start of a complex '
5604 'message (go/icu-message-migration): "%s"') % starting, 0,
5608 'Invalid ICU format. Extra characters at the end of a complex '
5609 'message (go/icu-message-migration): "%s"') % ending,
5610 last_pos - 1, len(text) - 1)
5611 if kind not in valid_types:
5612 return (('Unknown ICU message type %s. '
5613 'Valid types are: plural, select, selectordinal') % kind,
5615 known, required = valid_types[kind]
5616 defined_variants = set()
5617 for variant, variant_range, value, value_range in variants:
5618 start, end = variant_range
5619 if variant in defined_variants:
5620 return ('Variant "%s" is defined more than once' % variant,
5622 elif known and variant not in known:
5623 return ('Variant "%s" is not valid for %s message' %
5624 (variant, kind), start, end)
5625 defined_variants.add(variant)
5626 # Check for nested structure
# Recurse into the variant value (with outer braces stripped); offsets
# in a nested error are shifted back into this string's coordinates.
5627 res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
5629 return (res[0], res[1] + value_range[0] + 1,
5630 res[2] + value_range[0] + 1)
5631 missing = required - defined_variants
5633 return ('Required variants missing: %s' % ', '.join(missing), 0,
5635 signatures.append((level, variable, kind, defined_variants))
5637 def _ParseIcuVariants(text, offset=0):
5638 """Parse variants part of ICU complex message.
5640 Builds a tuple of variant names and values, as well as
5641 their offsets in the input string.
5644 text: a string to parse
5645 offset: additional offset to add to positions in the text to get correct
5646 position in the complete ICU string.
5649 List of tuples, each tuple consist of four fields: variant name,
5650 variant name span (tuple of two integers), variant value, value
5651 span (tuple of two integers).
# depth tracks brace nesting; start/end delimit the current variant value.
5653 depth, start, end = 0, -1, -1
5656 for idx, char in enumerate(text):
5660 chunk = text[end + 1:start]
5662 pos = offset + end + 1 + chunk.find(key)
5663 span = (pos, pos + len(key))
# Negative depth: a '}' closed more than was opened — stop here and
# report the position so the caller can flag the outer message.
5667 return variants, depth, offset + idx
5671 variants.append((key, span, text[start:end + 1],
5672 (offset + start, offset + end + 1)))
5673 return variants, depth, offset + end + 1
# grd_helper lives in tools/translation; extend sys.path temporarily so the
# import works, then restore it.
5676 old_sys_path = sys.path
5677 sys.path = sys.path + [
5678 input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
5681 from helper import grd_helper
5683 sys.path = old_sys_path
5685 for f in affected_grds:
5686 file_path = f.LocalPath()
5687 old_id_to_msg_map = {}
5688 new_id_to_msg_map = {}
5689 # Note that this code doesn't check if the file has been deleted. This is
5690 # OK because it only uses the old and new file contents and doesn't load
5691 # the file via its path.
5692 # It's also possible that a file's content refers to a renamed or deleted
5693 # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
5694 # is OK as well, because grd_helper ignores <part> tags when loading .grd or
5696 if file_path.endswith('.grdp'):
5698 old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
5699 '\n'.join(f.OldContents()))
5701 new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
5702 '\n'.join(f.NewContents()))
5704 file_dir = input_api.os_path.dirname(file_path) or '.'
5706 old_id_to_msg_map = grd_helper.GetGrdMessages(
5707 StringIO('\n'.join(f.OldContents())), file_dir)
5709 new_id_to_msg_map = grd_helper.GetGrdMessages(
5710 StringIO('\n'.join(f.NewContents())), file_dir)
# Screenshot dir convention: foo.grd -> foo_grd/ next to the grd file.
5712 grd_name, ext = input_api.os_path.splitext(
5713 input_api.os_path.basename(file_path))
5714 screenshots_dir = input_api.os_path.join(
5715 input_api.os_path.dirname(file_path),
5716 grd_name + ext.replace('.', '_'))
5718 # Compute added, removed and modified message IDs.
5719 old_ids = set(old_id_to_msg_map)
5720 new_ids = set(new_id_to_msg_map)
5721 added_ids = new_ids - old_ids
5722 removed_ids = old_ids - new_ids
5723 modified_ids = set([])
5724 for key in old_ids.intersection(new_ids):
5725 if (old_id_to_msg_map[key].ContentsAsXml('', True) !=
5726 new_id_to_msg_map[key].ContentsAsXml('', True)):
5727 # The message content itself changed. Require an updated screenshot.
5728 modified_ids.add(key)
5729 elif old_id_to_msg_map[key].attrs['meaning'] != \
5730 new_id_to_msg_map[key].attrs['meaning']:
5731 # The message meaning changed. Ensure there is a screenshot for it.
5732 sha1_path = input_api.os_path.join(screenshots_dir,
5734 if sha1_path not in new_or_added_paths and not \
5735 input_api.os_path.exists(sha1_path):
5736 # There is neither a previous screenshot nor is a new one added now.
5737 # Require a screenshot.
5738 modified_ids.add(key)
5740 if run_screenshot_check:
5741 # Check the screenshot directory for .png files. Warn if there is any.
5742 for png_path in affected_png_paths:
5743 if png_path.startswith(screenshots_dir):
5744 unnecessary_screenshots.append(png_path)
5746 for added_id in added_ids:
5747 _CheckScreenshotAdded(screenshots_dir, added_id)
5749 for modified_id in modified_ids:
5750 _CheckScreenshotAdded(screenshots_dir, modified_id)
5752 for removed_id in removed_ids:
5753 _CheckScreenshotRemoved(screenshots_dir, removed_id)
5755 # Check new and changed strings for ICU syntax errors.
5756 for key in added_ids.union(modified_ids):
5757 msg = new_id_to_msg_map[key].ContentsAsXml('', True)
5758 err = _ValidateIcuSyntax(msg, 0, [])
5760 icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
# Assemble the results: errors for checked-in screenshots / missing or stale
# .sha1 files, a notify when the check was skipped via footer, and a warning
# for ICU syntax problems.
5763 if run_screenshot_check:
5764 if unnecessary_screenshots:
5766 output_api.PresubmitError(
5767 'Do not include actual screenshots in the changelist. Run '
5768 'tools/translate/upload_screenshots.py to upload them instead:',
5769 sorted(unnecessary_screenshots)))
5773 output_api.PresubmitError(
5774 'You are adding or modifying UI strings.\n'
5775 'To ensure the best translations, take screenshots of the relevant UI '
5776 '(https://g.co/chrome/translation) and add these files to your '
5777 'changelist:', sorted(missing_sha1)))
5779 if unnecessary_sha1_files:
5781 output_api.PresubmitError(
5782 'You removed strings associated with these files. Remove:',
5783 sorted(unnecessary_sha1_files)))
5786 output_api.PresubmitPromptOrNotify('Skipping translation '
5787 'screenshots check.'))
5789 if icu_syntax_errors:
5791 output_api.PresubmitPromptWarning(
5792 'ICU syntax errors were found in the following strings (problems or '
5793 'feedback? Contact rainhard@chromium.org):',
5794 items=icu_syntax_errors))
# Sanity-checks that tools/gritsettings/translation_expectations.pyl still
# parses and covers the repository's grd files whenever a CL touches any
# .grd/.grdp. |repo_root| and |translation_expectations_path| defaults are
# overridden only by tests. Failures surface as a notify result, not an
# error.
5799 def CheckTranslationExpectations(input_api, output_api,
5801 translation_expectations_path=None,
5805 f for f in input_api.AffectedFiles()
5806 if (f.LocalPath().endswith('.grd') or f.LocalPath().endswith('.grdp'))
# No grd changes in this CL -> nothing to validate.
5808 if not affected_grds:
# Temporarily extend sys.path so the tools/translation helpers import.
5812 old_sys_path = sys.path
5813 sys.path = sys.path + [
5814 input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools',
5817 from helper import git_helper
5818 from helper import translation_helper
5820 sys.path = old_sys_path
5822 # Check that translation expectations can be parsed and we can get a list of
5823 # translatable grd files. |repo_root| and |translation_expectations_path| are
5824 # only passed by tests.
5826 repo_root = input_api.PresubmitLocalPath()
5827 if not translation_expectations_path:
5828 translation_expectations_path = input_api.os_path.join(
5829 repo_root, 'tools', 'gritsettings', 'translation_expectations.pyl')
5831 grd_files = git_helper.list_grds_in_repository(repo_root)
5833 # Ignore bogus grd files used only for testing
5834 # ui/webui/resources/tools/generate_grd.py.
5835 ignore_path = input_api.os_path.join('ui', 'webui', 'resources', 'tools',
5837 grd_files = [p for p in grd_files if ignore_path not in p]
5840 translation_helper.get_translatable_grds(
5841 repo_root, grd_files, translation_expectations_path)
# Broad catch is deliberate: any parse failure becomes a notify message.
5842 except Exception as e:
5844 output_api.PresubmitNotifyResult(
5845 'Failed to get a list of translatable grd files. This happens when:\n'
5846 ' - One of the modified grd or grdp files cannot be parsed or\n'
5847 ' - %s is not updated.\n'
5848 'Stack:\n%s' % (translation_expectations_path, str(e)))
# Feeds every changed .mojom's old/new contents (as JSON on stdin) to
# mojo/public/tools/mojom/check_stable_mojom_compatibility.py; a non-zero
# exit means a [Stable] interface changed in a backward-incompatible way.
5853 def CheckStableMojomChanges(input_api, output_api):
5854 """Changes to [Stable] mojom types must preserve backward-compatibility."""
5855 changed_mojoms = input_api.AffectedFiles(
5856 include_deletes=True,
5857 file_filter=lambda f: f.LocalPath().endswith(('.mojom')))
# no_diffs: presubmit dry-run mode with no real diffs to analyze.
5859 if not changed_mojoms or input_api.no_diffs:
5863 for mojom in changed_mojoms:
# 'old'/'new' are None for added/deleted files respectively.
5865 'filename': mojom.LocalPath(),
5866 'old': '\n'.join(mojom.OldContents()) or None,
5867 'new': '\n'.join(mojom.NewContents()) or None,
5870 process = input_api.subprocess.Popen([
5871 input_api.python3_executable,
5872 input_api.os_path.join(
5873 input_api.PresubmitLocalPath(), 'mojo', 'public', 'tools', 'mojom',
5874 'check_stable_mojom_compatibility.py'), '--src-root',
5875 input_api.PresubmitLocalPath()
5877 stdin=input_api.subprocess.PIPE,
5878 stdout=input_api.subprocess.PIPE,
5879 stderr=input_api.subprocess.PIPE,
5880 universal_newlines=True)
5881 (x, error) = process.communicate(input=input_api.json.dumps(delta))
5882 if process.returncode:
5884 output_api.PresubmitError(
5885 'One or more [Stable] mojom definitions appears to have been changed '
5886 'in a way that is not backward-compatible.',
# Warns when a CL deletes a `Register...Pref` call without also touching the
# MigrateObsolete*Prefs() sections of chrome/browser/prefs/browser_prefs.cc —
# removed prefs should be deprecated (value cleared for a few milestones)
# rather than silently dropped.
5891 def CheckDeprecationOfPreferences(input_api, output_api):
5892 """Removing a preference should come with a deprecation."""
5894 def FilterFile(affected_file):
5895 """Accept only .cc files and the like."""
5896 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
5897 files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
5898 input_api.DEFAULT_FILES_TO_SKIP)
5899 return input_api.FilterSourceFile(
5901 files_to_check=file_inclusion_pattern,
5902 files_to_skip=files_to_skip)
5904 def ModifiedLines(affected_file):
5905 """Returns a list of tuples (line number, line text) of added and removed
5908 Deleted lines share the same line number as the previous line.
5910 This relies on the scm diff output describing each changed code section
5911 with a line of the form
5913 ^@@ <old line num>,<old size> <new line num>,<new size> @@$
5917 for line in affected_file.GenerateScmDiff().splitlines():
5918 # Extract <new line num> of the patch fragment (see format above).
5919 m = input_api.re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@',
# Hunk header: reset the running line counter to the new-file position.
5922 line_num = int(m.groups(1)[0])
# '+'/'-' prefixed lines are changes; '++'/'--' are diff metadata.
5924 if ((line.startswith('+') and not line.startswith('++'))
5925 or (line.startswith('-') and not line.startswith('--'))):
5926 modified_lines.append((line_num, line))
# Deleted lines don't advance the new-file line counter.
5928 if not line.startswith('-'):
5930 return modified_lines
5932 def FindLineWith(lines, needle):
5933 """Returns the line number (i.e. index + 1) in `lines` containing `needle`.
5935 If 0 or >1 lines contain `needle`, -1 is returned.
5937 matching_line_numbers = [
5938 # + 1 for 1-based counting of line numbers.
5939 i + 1 for i, line in enumerate(lines) if needle in line
5941 return matching_line_numbers[0] if len(
5942 matching_line_numbers) == 1 else -1
5944 def ModifiedPrefMigration(affected_file):
5945 """Returns whether the MigrateObsolete.*Pref functions were modified."""
5946 # Determine first and last lines of MigrateObsolete.*Pref functions.
5947 new_contents = affected_file.NewContents()
5948 range_1 = (FindLineWith(new_contents,
5949 'BEGIN_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'),
5950 FindLineWith(new_contents,
5951 'END_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'))
5952 range_2 = (FindLineWith(new_contents,
5953 'BEGIN_MIGRATE_OBSOLETE_PROFILE_PREFS'),
5954 FindLineWith(new_contents,
5955 'END_MIGRATE_OBSOLETE_PROFILE_PREFS'))
# -1 means a marker is missing or duplicated; raise to abort the check.
5956 if (-1 in range_1 + range_2):
5958 'Broken .*MIGRATE_OBSOLETE_.*_PREFS markers in browser_prefs.cc.'
5961 # Check whether any of the modified lines are part of the
5962 # MigrateObsolete.*Pref functions.
5963 for line_nr, line in ModifiedLines(affected_file):
5964 if (range_1[0] <= line_nr <= range_1[1]
5965 or range_2[0] <= line_nr <= range_2[1]):
5969 register_pref_pattern = input_api.re.compile(r'Register.+Pref')
5970 browser_prefs_file_pattern = input_api.re.compile(
5971 r'chrome/browser/prefs/browser_prefs.cc')
5973 changes = input_api.AffectedFiles(include_deletes=True,
5974 file_filter=FilterFile)
5975 potential_problems = []
5977 for line in f.GenerateScmDiff().splitlines():
5978 # Check deleted lines for pref registrations.
5979 if (line.startswith('-') and not line.startswith('--')
5980 and register_pref_pattern.search(line)):
5981 potential_problems.append('%s: %s' % (f.LocalPath(), line))
5983 if browser_prefs_file_pattern.search(f.LocalPath()):
5984 # If the developer modified the MigrateObsolete.*Prefs() functions, we
5985 # assume that they knew that they have to deprecate preferences and don't
5988 if ModifiedPrefMigration(f):
5990 except Exception as e:
5991 return [output_api.PresubmitError(str(e))]
5993 if potential_problems:
5995 output_api.PresubmitPromptWarning(
5996 'Discovered possible removal of preference registrations.\n\n'
5997 'Please make sure to properly deprecate preferences by clearing their\n'
5998 'value for a couple of milestones before finally removing the code.\n'
5999 'Otherwise data may stay in the preferences files forever. See\n'
6000 'Migrate*Prefs() in chrome/browser/prefs/browser_prefs.cc and\n'
6001 'chrome/browser/prefs/README.md for examples.\n'
6002 'This may be a false positive warning (e.g. if you move preference\n'
6003 'registrations to a different place).\n', potential_problems)
# Scans the full new contents of changed .grd files against the
# (pattern, message) pairs in _INVALID_GRD_FILE_LINE, erroring on any match
# so downstream grd tooling can keep parsing the files.
6008 def CheckConsistentGrdChanges(input_api, output_api):
6009 """Changes to GRD files must be consistent for tools to read them."""
6010 changed_grds = input_api.AffectedFiles(
6011 include_deletes=False,
6012 file_filter=lambda f: f.LocalPath().endswith(('.grd')))
# Pre-compile every forbidden-line matcher once, outside the file loop.
6014 invalid_file_regexes = [(input_api.re.compile(matcher), msg)
6015 for matcher, msg in _INVALID_GRD_FILE_LINE]
6016 for grd in changed_grds:
6017 for i, line in enumerate(grd.NewContents()):
6018 for matcher, msg in invalid_file_regexes:
6019 if matcher.search(line):
6021 output_api.PresubmitError(
6022 'Problem on {grd}:{i} - {msg}'.format(
6023 grd=grd.LocalPath(), i=i + 1, msg=msg)))
# Watches changed lines for content-API / Blink-API calls whose semantics are
# ambiguous under MPArch (bfcache, prerendering, fenced frames). "Concerning"
# matches produce a notify result listing the APIs used; "fyi" matches only
# CC the MPArch watchlist. Matching is regex-based on method/class names, so
# false positives are possible by design.
6027 def CheckMPArchApiUsage(input_api, output_api):
6028 """CC the MPArch watchlist if the CL uses an API that is ambiguous in the
6029 presence of MPArch features such as bfcache, prerendering, and fenced frames.
6032 # Only consider top-level directories that (1) can use content APIs or
6033 # problematic blink APIs, (2) apply to desktop or android chrome, and (3)
6034 # are known to have a significant number of uses of the APIs of concern.
6036 r'^(chrome|components|content|extensions|third_party/blink/renderer)/.+%s' %
6037 _IMPLEMENTATION_EXTENSIONS,
6038 r'^(chrome|components|content|extensions|third_party/blink/renderer)/.+%s' %
6041 files_to_skip = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
6042 input_api.DEFAULT_FILES_TO_SKIP)
6043 source_file_filter = lambda f: input_api.FilterSourceFile(
6044 f, files_to_check=files_to_check, files_to_skip=files_to_skip)
6046 # Here we list the classes/methods we're monitoring. For the "fyi" cases,
6047 # we add the CL to the watchlist, but we don't omit a warning or have it be
6048 # included in the triage rotation.
6049 # Note that since these are are just regular expressions and we don't have
6050 # the compiler's AST, we could have spurious matches (e.g. an unrelated class
6051 # could have a method named IsInMainFrame).
6052 fyi_concerning_class_pattern = input_api.re.compile(
6053 r'WebContentsObserver|WebContentsUserData')
6054 # A subset of WebContentsObserver overrides where there's particular risk for
6055 # confusing tab and page level operations and data (e.g. incorrectly
6056 # resetting page state in DidFinishNavigation).
6057 fyi_concerning_wco_methods = [
6058 'DidStartNavigation',
6059 'ReadyToCommitNavigation',
6060 'DidFinishNavigation',
6062 'RenderViewDeleted',
6063 'RenderViewHostChanged',
6067 concerning_nav_handle_methods = [
6070 concerning_web_contents_methods = [
6071 'FromRenderFrameHost',
6072 'FromRenderViewHost',
6074 fyi_concerning_web_contents_methods = [
6075 'GetRenderViewHost',
6077 concerning_rfh_methods = [
6081 fyi_concerning_rfh_methods = [
6082 'GetFrameTreeNodeId',
6084 concerning_rfhi_methods = [
6087 concerning_ftn_methods = [
6090 concerning_blink_frame_methods = [
6091 'IsCrossOriginToNearestMainFrame',
# Flatten every "concerning" list into one alternation matching `Name(`.
6093 concerning_method_pattern = input_api.re.compile(r'(' + r'|'.join(
6094 item for sublist in [
6095 concerning_nav_handle_methods,
6096 concerning_web_contents_methods, concerning_rfh_methods,
6097 concerning_rfhi_methods, concerning_ftn_methods,
6098 concerning_blink_frame_methods,
6099 ] for item in sublist) + r')\(')
6100 fyi_concerning_method_pattern = input_api.re.compile(r'(' + r'|'.join(
6101 item for sublist in [
6102 fyi_concerning_wco_methods, fyi_concerning_web_contents_methods,
6103 fyi_concerning_rfh_methods,
6104 ] for item in sublist) + r')\(')
6107 used_fyi_methods = False
6108 for f in input_api.AffectedFiles(include_deletes=False,
6109 file_filter=source_file_filter):
6110 for line_num, line in f.ChangedContents():
6111 fyi_class_match = fyi_concerning_class_pattern.search(line)
6113 used_fyi_methods = True
6114 fyi_method_match = fyi_concerning_method_pattern.search(line)
6115 if fyi_method_match:
6116 used_fyi_methods = True
6117 method_match = concerning_method_pattern.search(line)
6119 used_apis.add(method_match[1])
# FYI-only usage: CC the watch list, no user-visible output.
6122 if used_fyi_methods:
6123 output_api.AppendCC('mparch-reviews+watchfyi@chromium.org')
6127 output_api.AppendCC('mparch-reviews+watch@chromium.org')
6128 message = ('This change uses API(s) that are ambiguous in the presence of '
6129 'MPArch features such as bfcache, prerendering, and fenced '
6132 'Please double check whether new code assumes that a WebContents only '
6133 'contains a single page at a time. Notably, checking whether a frame '
6134 'is the \"main frame\" is not specific enough to determine whether it '
6135 'corresponds to the document reflected in the omnibox. A WebContents '
6136 'may have additional main frames for prerendered pages, bfcached '
6137 'pages, fenced frames, etc. '
6138 'See this doc [1] and the comments on the individual APIs '
6139 'for guidance and this doc [2] for context. The MPArch review '
6140 'watchlist has been CC\'d on this change to help identify any issues.\n'
6141 '[1] https://docs.google.com/document/d/13l16rWTal3o5wce4i0RwdpMP5ESELLKr439Faj2BBRo/edit?usp=sharing\n'
6142 '[2] https://docs.google.com/document/d/1NginQ8k0w3znuwTiJ5qjYmBKgZDekvEPC22q0I4swxQ/edit?usp=sharing'
6145 output_api.PresubmitNotifyResult(message,
6146 items=list(used_apis),
6147 long_text=explanation)
def CheckAssertAshOnlyCode(input_api, output_api):
    """Errors if a BUILD.gn file in an ash/ directory doesn't include
    assert(is_chromeos_ash).

    Args:
      input_api: presubmit InputApi; supplies AffectedFiles(), ReadFile() and
          the `re` module.
      output_api: presubmit OutputApi used to construct results.

    Returns:
      A list of PresubmitError results, one per offending BUILD.gn file.
    """

    def FileFilter(affected_file):
        """Includes directories known to be Ash only."""
        return input_api.FilterSourceFile(
            affected_file,
            files_to_check=(
                r'^ash/.*BUILD\.gn',  # Top-level src/ash/.
                r'.*/ash/.*BUILD\.gn'),  # Any path component.
            files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))

    errors = []
    pattern = input_api.re.compile(r'assert\(is_chromeos_ash')
    for f in input_api.AffectedFiles(include_deletes=False,
                                     file_filter=FileFilter):
        # A plain textual search is sufficient: any occurrence of
        # "assert(is_chromeos_ash" anywhere in the file satisfies the check.
        if not pattern.search(input_api.ReadFile(f)):
            errors.append(
                output_api.PresubmitError(
                    'Please add assert(is_chromeos_ash) to %s. If that\'s not '
                    'possible, please create an issue and add a comment such '
                    'as:\n  # TODO(https://crbug.com/XXX): add '
                    'assert(is_chromeos_ash) when ...' % f.LocalPath()))
    return errors
def _IsRendererOnlyCppFile(input_api, affected_file):
    """Heuristically decides whether |affected_file| is Renderer-only C++.

    Returns True only for C++ files under a "renderer" subdirectory or under
    Blink's public/web API; everything else is conservatively assumed to also
    be usable outside of Renderer processes.
    """
    path = affected_file.LocalPath()
    if not _IsCPlusPlusFile(input_api, path):
        return False

    # Any code under a "renderer" subdirectory is assumed to be Renderer-only.
    if "/renderer/" in path:
        return True

    # Blink's public/web API is only used/included by Renderer-only code. Note
    # that public/platform API may be used in non-Renderer processes (e.g. there
    # are some includes in code used by Utility, PDF, or Plugin processes).
    if "/blink/public/web/" in path:
        return True

    # We assume that everything else may be used outside of Renderer processes.
    return False
# TODO(https://crbug.com/1273182): Remove these checks, once they are replaced
# by the Chromium Clang Plugin (which will be preferable because it will
# 1) report errors earlier - at compile-time and 2) cover more rules).
def CheckRawPtrUsage(input_api, output_api):
    """Rough checks that raw_ptr<T> usage guidelines are followed.

    Emits a PresubmitError for every added/changed line in Renderer-only C++
    files that uses raw_ptr<T>.
    """
    errors = []
    # The regex matches "raw_ptr<" at a word boundary, but only when it is not
    # preceded on the same line by "//" (the tempered lookahead `(?!//).`
    # refuses to consume past a comment marker).
    raw_ptr_matcher = input_api.re.compile(r'^((?!//).)*\braw_ptr<')
    file_filter = lambda f: _IsRendererOnlyCppFile(input_api, f)
    for f, line_num, line in input_api.RightHandSideLines(file_filter):
        if raw_ptr_matcher.search(line):
            errors.append(
                output_api.PresubmitError(
                    'Problem on {path}:{line} - '
                    'raw_ptr<T> should not be used in Renderer-only code '
                    '(as documented in the "Pointers to unprotected memory" '
                    'section in //base/memory/raw_ptr.md)'.format(
                        path=f.LocalPath(), line=line_num)))
    return errors
def CheckPythonShebang(input_api, output_api):
    """Checks that python scripts use #!/usr/bin/env instead of hardcoding a
    python interpreter path (e.g. #!/usr/bin/python).

    Returns:
      A list of PresubmitError results, one per offending .py file.
    """
    errors = []
    sources = lambda affected_file: input_api.FilterSourceFile(
        affected_file,
        files_to_skip=((_THIRD_PARTY_EXCEPT_BLINK,
                        r'third_party/blink/web_tests/external/') + input_api.
                       DEFAULT_FILES_TO_SKIP),
        files_to_check=[r'.*\.py$'])
    for f in input_api.AffectedSourceFiles(sources):
        for line_num, line in f.ChangedContents():
            # Only the first line of a file can be a shebang.
            if line_num == 1 and line.startswith('#!/usr/bin/python'):
                errors.append(f.LocalPath())
                break

    return [
        output_api.PresubmitError(
            "Please use '#!/usr/bin/env python/2/3' as the shebang of %s" %
            path) for path in errors
    ]
def CheckBatchAnnotation(input_api, output_api):
    """Checks that tests have either @Batch or @DoNotBatch annotation. If this
    is not an instrumentation test, disregard.

    Returns:
      A list of PresubmitPromptWarning results (possibly empty): one listing
      instrumentation tests missing an annotation, and one listing
      non-instrumentation (Robolectric/UiAutomator) tests that carry one.
    """
    batch_annotation = input_api.re.compile(r'^\s*@Batch')
    do_not_batch_annotation = input_api.re.compile(r'^\s*@DoNotBatch')
    robolectric_test = input_api.re.compile(r'[rR]obolectric')
    test_class_declaration = input_api.re.compile(r'^\s*public\sclass.*Test')
    uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')

    missing_annotation_errors = []
    extra_annotation_errors = []

    def _FilterFile(affected_file):
        """Restricts the check to Java test files."""
        return input_api.FilterSourceFile(
            affected_file,
            files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
            files_to_check=[r'.*Test\.java$'])

    for f in input_api.AffectedSourceFiles(_FilterFile):
        batch_matched = None
        do_not_batch_matched = None
        is_instrumentation_test = True
        for line in f.NewContents():
            if robolectric_test.search(line) or uiautomator_test.search(line):
                # Skip Robolectric and UiAutomator tests.
                is_instrumentation_test = False
                break
            if not batch_matched:
                batch_matched = batch_annotation.search(line)
            if not do_not_batch_matched:
                do_not_batch_matched = do_not_batch_annotation.search(line)
            test_class_declaration_matched = test_class_declaration.search(
                line)
            if test_class_declaration_matched:
                # Stop scanning at the test class declaration; the annotations
                # we look for appear before it.
                break
        if (is_instrumentation_test and not batch_matched
                and not do_not_batch_matched):
            missing_annotation_errors.append(str(f.LocalPath()))
        if (not is_instrumentation_test
                and (batch_matched or do_not_batch_matched)):
            extra_annotation_errors.append(str(f.LocalPath()))

    results = []
    if missing_annotation_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                """
Instrumentation tests should use either @Batch or @DoNotBatch. If tests are not
safe to run in batch, please use @DoNotBatch with reasons.
""", missing_annotation_errors))
    if extra_annotation_errors:
        results.append(
            output_api.PresubmitPromptWarning(
                """
Robolectric tests do not need @Batch or @DoNotBatch annotations.
""", extra_annotation_errors))

    return results
6309 def CheckMockAnnotation(input_api, output_api):
6310 """Checks that we have annotated all Mockito.mock()-ed or Mockito.spy()-ed
6311 classes with @Mock or @Spy. If this is not an instrumentation test,
6314 # This is just trying to be approximately correct. We are not writing a
6315 # Java parser, so special cases like statically importing mock() then
6316 # calling an unrelated non-mockito spy() function will cause a false
6318 package_name = input_api.re.compile(r'^package\s+(\w+(?:\.\w+)+);')
6319 mock_static_import = input_api.re.compile(
6320 r'^import\s+static\s+org.mockito.Mockito.(?:mock|spy);')
6321 import_class = input_api.re.compile(r'import\s+((?:\w+\.)+)(\w+);')
6322 mock_annotation = input_api.re.compile(r'^\s*@(?:Mock|Spy)')
6323 field_type = input_api.re.compile(r'(\w+)(?:<\w+>)?\s+\w+\s*(?:;|=)')
6324 mock_or_spy_function_call = r'(?:mock|spy)\(\s*(?:new\s*)?(\w+)(?:\.class|\()'
6325 fully_qualified_mock_function = input_api.re.compile(
6326 r'Mockito\.' + mock_or_spy_function_call)
6327 statically_imported_mock_function = input_api.re.compile(
6328 r'\W' + mock_or_spy_function_call)
6329 robolectric_test = input_api.re.compile(r'[rR]obolectric')
6330 uiautomator_test = input_api.re.compile(r'[uU]i[aA]utomator')
6332 def _DoClassLookup(class_name, class_name_map, package):
6333 found = class_name_map.get(class_name)
6334 if found is not None:
6337 return package + '.' + class_name
6339 def _FilterFile(affected_file):
6340 return input_api.FilterSourceFile(
6342 files_to_skip=input_api.DEFAULT_FILES_TO_SKIP,
6343 files_to_check=[r'.*Test\.java$'])
6345 mocked_by_function_classes = set()
6346 mocked_by_annotation_classes = set()
6347 class_to_filename = {}
6348 for f in input_api.AffectedSourceFiles(_FilterFile):
6349 mock_function_regex = fully_qualified_mock_function
6350 next_line_is_annotated = False
6351 fully_qualified_class_map = {}
6354 for line in f.NewContents():
6355 if robolectric_test.search(line) or uiautomator_test.search(line):
6356 # Skip Robolectric and UiAutomator tests.
6359 m = package_name.search(line)
6361 package = m.group(1)
6364 if mock_static_import.search(line):
6365 mock_function_regex = statically_imported_mock_function
6368 m = import_class.search(line)
6370 fully_qualified_class_map[m.group(2)] = m.group(1) + m.group(2)
6373 if next_line_is_annotated:
6374 next_line_is_annotated = False
6375 fully_qualified_class = _DoClassLookup(
6376 field_type.search(line).group(1), fully_qualified_class_map,
6378 mocked_by_annotation_classes.add(fully_qualified_class)
6381 if mock_annotation.search(line):
6382 next_line_is_annotated = True
6385 m = mock_function_regex.search(line)
6387 fully_qualified_class = _DoClassLookup(m.group(1),
6388 fully_qualified_class_map, package)
6389 # Skipping builtin classes, since they don't get optimized.
6390 if fully_qualified_class.startswith(
6391 'android.') or fully_qualified_class.startswith(
6394 class_to_filename[fully_qualified_class] = str(f.LocalPath())
6395 mocked_by_function_classes.add(fully_qualified_class)
6398 missed_classes = mocked_by_function_classes - mocked_by_annotation_classes
6400 error_locations = []
6401 for c in missed_classes:
6402 error_locations.append(c + ' in ' + class_to_filename[c])
6404 output_api.PresubmitPromptWarning(
6406 Mockito.mock()/spy() cause issues with our Java optimizer. You have 3 options:
6407 1) If the mocked variable can be a class member, annotate the member with
6409 2) If the mocked variable cannot be a class member, create a dummy member
6410 variable of that type, annotated with @Mock/@Spy. This dummy does not need
6411 to be used or initialized in any way.
6412 3) If the mocked type is definitely not going to be optimized, whether it's a
6413 builtin type which we don't ship, or a class you know R8 will treat
6414 specially, you can ignore this warning.
6415 """, error_locations))