1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
10 PRESUBMIT_VERSION = '2.0.0'
12 # This line is 'magic' in that git-cl looks for it to decide whether to
13 # use Python3 instead of Python2 when running the code in this file.
18 (r"^components[\\/]variations[\\/]proto[\\/]devtools[\\/]"
19 r"client_variations.js"),
20 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_rules.py",
21 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_simple.py",
22 r"^native_client_sdk[\\/]src[\\/]tools[\\/].*.mk",
23 r"^net[\\/]tools[\\/]spdyshark[\\/].*",
25 r"^third_party[\\/]blink[\\/].*",
26 r"^third_party[\\/]breakpad[\\/].*",
27 # sqlite is an imported third party dependency.
28 r"^third_party[\\/]sqlite[\\/].*",
33 r".+[\\/]pnacl_shim\.c$",
34 r"^gpu[\\/]config[\\/].*_list_json\.cc$",
35 r"tools[\\/]md_browser[\\/].*\.css$",
36 # Test pages for Maps telemetry tests.
37 r"tools[\\/]perf[\\/]page_sets[\\/]maps_perf_test.*",
38 # Test pages for WebRTC telemetry tests.
39 r"tools[\\/]perf[\\/]page_sets[\\/]webrtc_cases.*",
42 _EXCLUDED_SET_NO_PARENT_PATHS = (
43 # It's for historical reasons that blink isn't a top level directory, where
44 # it would be allowed to have "set noparent" to avoid top level owners
45 # accidentally +1ing changes.
46 'third_party/blink/OWNERS',
50 # Fragment of a regular expression that matches C++ and Objective-C++
51 # implementation files.
# Matches only the extension, e.g. "foo.cc", "bar.cpp", "baz.cxx", "qux.mm".
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
55 # Fragment of a regular expression that matches C++ and Objective-C++
# Matches only the extension, e.g. "foo.h", "bar.hpp", "baz.hxx".
_HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
60 # Regular expression that matches code only used for test binaries
62 _TEST_CODE_EXCLUDED_PATHS = (
63 r'.*[\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
64 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
65 # Test suite files, like:
67 # bar_unittest_mac.cc (suffix)
68 # baz_unittests.cc (plural)
69 r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(s)?(_[a-z]+)?%s' %
70 _IMPLEMENTATION_EXTENSIONS,
71 r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
72 r'.+sync_service_impl_harness%s' % _IMPLEMENTATION_EXTENSIONS,
73 r'.*[\\/](test|tool(s)?)[\\/].*',
74 # content_shell is used for running content_browsertests.
75 r'content[\\/]shell[\\/].*',
77 r'content[\\/]web_test[\\/].*',
78 # Non-production example code.
79 r'mojo[\\/]examples[\\/].*',
80 # Launcher for running iOS tests on the simulator.
81 r'testing[\\/]iossim[\\/]iossim\.mm$',
82 # EarlGrey app side code for tests.
83 r'ios[\\/].*_app_interface\.mm$',
85 r'ui[\\/]views[\\/]examples[\\/].*',
# Regex fragment matching paths under third_party/ except third_party/blink/
# (negative lookahead). Used in the exception lists below so that banned-
# pattern checks are skipped in third_party folders but still apply to Blink.
_THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
92 _TEST_ONLY_WARNING = (
93 'You might be calling functions intended only for testing from\n'
94 'production code. If you are doing this from inside another method\n'
95 'named as *ForTesting(), then consider exposing things to have tests\n'
96 'make that same call directly.\n'
97 'If that is not possible, you may put a comment on the same line with\n'
99 'to tell the PRESUBMIT script that the code is inside a *ForTesting()\n'
100 'method and can be ignored. Do not do this inside production code.\n'
101 'The android-binary-size trybot will block if the method exists in the\n'
# Warning text shown when a file's #include ordering appears to violate the
# style guide's include-ordering rules.
_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Remember to use the right '
    'collation (LC_COLLATE=C) and check\nhttps://google.github.io/styleguide/'
    'cppguide.html#Names_and_Order_of_Includes')
110 # Format: Sequence of tuples containing:
111 # * Full import path.
112 # * Sequence of strings to show when the pattern matches.
113 # * Sequence of path or filename exceptions to this rule
114 _BANNED_JAVA_IMPORTS = (
118 'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
121 'net/android/javatests/src/org/chromium/net/'
122 'AndroidProxySelectorTest.java',
123 'components/cronet/',
124 'third_party/robolectric/local/',
128 'android.support.test.rule.UiThreadTestRule;',
130 'Do not use UiThreadTestRule, just use '
131 '@org.chromium.base.test.UiThreadTest on test methods that should run '
132 'on the UI thread. See https://crbug.com/1111893.',
137 'android.support.test.annotation.UiThreadTest;',
139 'Do not use android.support.test.annotation.UiThreadTest, use '
140 'org.chromium.base.test.UiThreadTest instead. See '
141 'https://crbug.com/1111893.',
146 'android.support.test.rule.ActivityTestRule;',
148 'Do not use ActivityTestRule, use '
149 'org.chromium.base.test.BaseActivityTestRule instead.',
152 'components/cronet/',
157 # Format: Sequence of tuples containing:
158 # * String pattern or, if starting with a slash, a regular expression.
159 # * Sequence of strings to show when the pattern matches.
160 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
161 _BANNED_JAVA_FUNCTIONS = (
163 'StrictMode.allowThreadDiskReads()',
165 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
171 'StrictMode.allowThreadDiskWrites()',
173 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
179 '.waitForIdleSync()',
181 'Do not use waitForIdleSync as it masks underlying issues. There is '
182 'almost always something else you should wait on instead.',
188 # Format: Sequence of tuples containing:
189 # * String pattern or, if starting with a slash, a regular expression.
190 # * Sequence of strings to show when the pattern matches.
191 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
192 _BANNED_OBJC_FUNCTIONS = (
196 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
197 'prohibited. Please use CrTrackingArea instead.',
198 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
203 r'/NSTrackingArea\W',
205 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
207 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
212 'convertPointFromBase:',
214 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
215 'Please use |convertPoint:(point) fromView:nil| instead.',
216 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
221 'convertPointToBase:',
223 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
224 'Please use |convertPoint:(point) toView:nil| instead.',
225 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
230 'convertRectFromBase:',
232 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
233 'Please use |convertRect:(point) fromView:nil| instead.',
234 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
239 'convertRectToBase:',
241 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
242 'Please use |convertRect:(point) toView:nil| instead.',
243 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
248 'convertSizeFromBase:',
250 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
251 'Please use |convertSize:(point) fromView:nil| instead.',
252 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
257 'convertSizeToBase:',
259 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
260 'Please use |convertSize:(point) toView:nil| instead.',
261 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
266 r"/\s+UTF8String\s*]",
268 'The use of -[NSString UTF8String] is dangerous as it can return null',
269 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
270 'Please use |SysNSStringToUTF8| instead.',
275 r'__unsafe_unretained',
277 'The use of __unsafe_unretained is almost certainly wrong, unless',
278 'when interacting with NSFastEnumeration or NSInvocation.',
279 'Please use __weak in files build with ARC, nothing otherwise.',
286 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
287 'Foundation types is prohibited.',
293 # Format: Sequence of tuples containing:
294 # * String pattern or, if starting with a slash, a regular expression.
295 # * Sequence of strings to show when the pattern matches.
296 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
297 _BANNED_IOS_OBJC_FUNCTIONS = (
301 'TEST() macro should not be used in Objective-C++ code as it does not ',
302 'drain the autorelease pool at the end of the test. Use TEST_F() ',
303 'macro instead with a fixture inheriting from PlatformTest (or a ',
309 r'/\btesting::Test\b',
311 'testing::Test should not be used in Objective-C++ code as it does ',
312 'not drain the autorelease pool at the end of the test. Use ',
313 'PlatformTest instead.'
319 # Format: Sequence of tuples containing:
320 # * String pattern or, if starting with a slash, a regular expression.
321 # * Sequence of strings to show when the pattern matches.
322 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
323 _BANNED_IOS_EGTEST_FUNCTIONS = (
325 r'/\bEXPECT_OCMOCK_VERIFY\b',
327 'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
328 'it is meant for GTests. Use [mock verify] instead.'
334 # Format: Sequence of tuples containing:
335 # * String pattern or, if starting with a slash, a regular expression.
336 # * Sequence of strings to show when the pattern matches.
337 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
338 # * Sequence of paths to *not* check (regexps).
339 _BANNED_CPP_FUNCTIONS = (
341 r'/\busing namespace ',
343 'Using directives ("using namespace x") are banned by the Google Style',
344 'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
345 'Explicitly qualify symbols or use using declarations ("using x::foo").',
348 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
350 # Make sure that gtest's FRIEND_TEST() macro is not used; the
351 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
352 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
356 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
357 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
365 'Overriding setMatrixClip() is prohibited; ',
366 'the base function is deprecated. ',
374 'The use of SkRefPtr is prohibited. ',
375 'Please use sk_sp<> instead.'
383 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
384 'Please use sk_sp<> instead.'
392 'The use of SkAutoTUnref is dangerous because it implicitly ',
393 'converts to a raw pointer. Please use sk_sp<> instead.'
401 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
402 'because it implicitly converts to a raw pointer. ',
403 'Please use sk_sp<> instead.'
409 r'/HANDLE_EINTR\(.*close',
411 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
412 'descriptor will be closed, and it is incorrect to retry the close.',
413 'Either call close directly and ignore its return value, or wrap close',
414 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
420 r'/IGNORE_EINTR\((?!.*close)',
422 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
423 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
427 # Files that #define IGNORE_EINTR.
428 r'^base[\\/]posix[\\/]eintr_wrapper\.h$',
429 r'^ppapi[\\/]tests[\\/]test_broker\.cc$',
435 'Do not introduce new v8::Extensions into the code base, use',
436 'gin::Wrappable instead. See http://crbug.com/334679',
440 r'extensions[\\/]renderer[\\/]safe_builtins\.*',
444 '#pragma comment(lib,',
446 'Specify libraries to link with in build files and not in the source.',
450 r'^base[\\/]third_party[\\/]symbolize[\\/].*',
451 r'^third_party[\\/]abseil-cpp[\\/].*',
455 r'/base::SequenceChecker\b',
457 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
463 r'/base::ThreadChecker\b',
465 'Consider using THREAD_CHECKER macros instead of the class directly.',
471 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
473 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
474 'deprecated (http://crbug.com/634507). Please avoid converting away',
475 'from the Time types in Chromium code, especially if any math is',
476 'being done on time values. For interfacing with platform/library',
477 'APIs, use FromMicroseconds() or InMicroseconds(), or one of the other',
478 'type converter methods instead. For faking TimeXXX values (for unit',
479 'testing only), use TimeXXX() + TimeDelta::FromMicroseconds(N). For',
480 'other use cases, please contact base/time/OWNERS.',
486 'CallJavascriptFunctionUnsafe',
488 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
489 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
490 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
494 r'^content[\\/]browser[\\/]webui[\\/]web_ui_impl\.(cc|h)$',
495 r'^content[\\/]public[\\/]browser[\\/]web_ui\.h$',
496 r'^content[\\/]public[\\/]test[\\/]test_web_ui\.(cc|h)$',
502 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
503 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
504 "Chrome's tracing, making their memory usage visible.",
508 r'^third_party/leveldatabase/.*\.(cc|h)$',
512 'leveldb::NewMemEnv',
514 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
515 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
516 "to Chrome's tracing, making their memory usage visible.",
520 r'^third_party/leveldatabase/.*\.(cc|h)$',
524 'RunLoop::QuitCurrent',
526 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
527 'methods of a specific RunLoop instance instead.',
533 'base::ScopedMockTimeMessageLoopTaskRunner',
535 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
536 'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
537 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
538 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
539 'with gab@ first if you think you need it)',
547 'Using std::regex adds unnecessary binary size to Chrome. Please use',
548 're2::RE2 instead (crbug.com/755321)',
551 # Abseil's benchmarks never linked into chrome.
552 ['third_party/abseil-cpp/.*_benchmark.cc'],
557 'std::stoi uses exceptions to communicate results. ',
558 'Use base::StringToInt() instead.',
561 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
566 'std::stol uses exceptions to communicate results. ',
567 'Use base::StringToInt() instead.',
570 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
575 'std::stoul uses exceptions to communicate results. ',
576 'Use base::StringToUint() instead.',
579 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
584 'std::stoll uses exceptions to communicate results. ',
585 'Use base::StringToInt64() instead.',
588 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
593 'std::stoull uses exceptions to communicate results. ',
594 'Use base::StringToUint64() instead.',
597 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
602 'std::stof uses exceptions to communicate results. ',
603 'For locale-independent values, e.g. reading numbers from disk',
604 'profiles, use base::StringToDouble().',
605 'For user-visible values, parse using ICU.',
608 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
613 'std::stod uses exceptions to communicate results. ',
614 'For locale-independent values, e.g. reading numbers from disk',
615 'profiles, use base::StringToDouble().',
616 'For user-visible values, parse using ICU.',
619 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
624 'std::stold uses exceptions to communicate results. ',
625 'For locale-independent values, e.g. reading numbers from disk',
626 'profiles, use base::StringToDouble().',
627 'For user-visible values, parse using ICU.',
630 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
633 r'/\bstd::to_string\b',
635 'std::to_string is locale dependent and slower than alternatives.',
636 'For locale-independent strings, e.g. writing numbers to disk',
637 'profiles, use base::NumberToString().',
638 'For user-visible strings, use base::FormatNumber() and',
639 'the related functions in base/i18n/number_formatting.h.',
641 False, # Only a warning since it is already used.
642 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
645 r'/\bstd::shared_ptr\b',
647 'std::shared_ptr should not be used. Use scoped_refptr instead.',
651 # Needed for interop with third-party library.
652 '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
653 'array_buffer_contents\.(cc|h)',
654 '^gin/array_buffer\.(cc|h)',
655 '^chrome/services/sharing/nearby/',
656 # gRPC provides some C++ libraries that use std::shared_ptr<>.
657 '^chromeos/services/libassistant/grpc/',
658 # Fuchsia provides C++ libraries that use std::shared_ptr<>.
659 '.*fuchsia.*test\.(cc|h)',
660 _THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
663 r'/\bstd::weak_ptr\b',
665 'std::weak_ptr should not be used. Use base::WeakPtr instead.',
668 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
673 'long long is banned. Use stdint.h if you need a 64 bit number.',
675 False, # Only a warning since it is already used.
676 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
681 'std::bind is banned because of lifetime risks.',
682 'Use base::BindOnce or base::BindRepeating instead.',
685 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
688 r'/\b#include <chrono>\b',
690 '<chrono> overlaps with Time APIs in base. Keep using',
694 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
697 r'/\b#include <exception>\b',
699 'Exceptions are banned and disabled in Chromium.',
702 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
705 r'/\bstd::function\b',
707 'std::function is banned. Instead use base::OnceCallback or ',
708 'base::RepeatingCallback, which directly support Chromium\'s weak ',
709 'pointers, ref counting and more.',
711 False, # Only a warning since it is already used.
712 [_THIRD_PARTY_EXCEPT_BLINK], # Do not warn in third_party folders.
715 r'/\b#include <random>\b',
717 'Do not use any random number engines from <random>. Instead',
718 'use base::RandomBitGenerator.',
721 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
724 r'/\b#include <X11/',
726 'Do not use Xlib. Use xproto (from //ui/gfx/x:xproto) instead.',
729 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
734 'std::ratio is banned by the Google Style Guide.',
737 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
740 (r'/base::ThreadRestrictions::(ScopedAllowIO|AssertIOAllowed|'
741 r'DisallowWaiting|AssertWaitAllowed|SetWaitAllowed|ScopedAllowWait)'),
743 'Use the new API in base/threading/thread_restrictions.h.',
749 r'/\bRunMessageLoop\b',
751 'RunMessageLoop is deprecated, use RunLoop instead.',
759 'RunThisRunLoop is deprecated, use RunLoop directly instead.',
765 'RunAllPendingInMessageLoop()',
767 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
768 "if you're convinced you need this.",
774 'RunAllPendingInMessageLoop(BrowserThread',
776 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
777 'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
778 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
779 'async events instead of flushing threads.',
785 r'MessageLoopRunner',
787 'MessageLoopRunner is deprecated, use RunLoop instead.',
793 'GetDeferredQuitTaskForRunLoop',
795 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
796 "gab@ if you found a use case where this is the only solution.",
802 'sqlite3_initialize(',
804 'Instead of calling sqlite3_initialize(), depend on //sql, ',
805 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
809 r'^sql/initialization\.(cc|h)$',
810 r'^third_party/sqlite/.*\.(c|cc|h)$',
814 'std::random_shuffle',
816 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
817 'base::RandomShuffle instead.'
823 'ios/web/public/test/http_server',
825 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
833 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
834 'implicated in a few leaks. ReleaseAndGetAddressOf() is safe but ',
835 'operator& is generally recommended. So always use operator& instead. ',
836 'See http://crbug.com/914910 for more conversion guidance.'
844 'SHFileOperation was deprecated in Windows Vista, and there are less ',
845 'complex functions to achieve the same goals. Use IFileOperation for ',
846 'any esoteric actions instead.'
854 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
855 'Use base::win::WStringFromGUID instead.'
859 r'/base/win/win_util_unittest.cc'
865 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
866 'Use base::win::WStringFromGUID instead.'
870 r'/base/win/win_util_unittest.cc'
876 'The use of kCFAllocatorNull with the NoCopy creation of ',
877 'CoreFoundation types is prohibited.',
885 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
886 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
887 'StringTraits if you would like to convert between custom types and',
888 'the wire format of mojom types.'
892 r'^fuchsia/engine/browser/url_request_rewrite_rules_manager\.cc$',
893 r'^fuchsia/engine/url_request_rewrite_type_converters\.cc$',
894 r'^third_party/blink/.*\.(cc|h)$',
895 r'^content/renderer/.*\.(cc|h)$',
899 'GetInterfaceProvider',
901 'InterfaceProvider is deprecated.',
902 'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
903 'or Platform::GetBrowserInterfaceBroker.'
911 'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
912 'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
919 r'/\b(IFACE|STD)METHOD_?\(',
921 'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
922 'Instead, always use IFACEMETHODIMP in the declaration.'
925 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
928 'set_owned_by_client',
930 'set_owned_by_client is deprecated.',
931 'views::View already owns the child views by default. This introduces ',
932 'a competing ownership model which makes the code difficult to reason ',
933 'about. See http://crbug.com/1044687 for more details.'
939 'RemoveAllChildViewsWithoutDeleting',
941 'RemoveAllChildViewsWithoutDeleting is deprecated.',
942 'This method is deemed dangerous as, unless raw pointers are re-added,',
943 'calls to this method introduce memory leaks.'
949 r'/\bTRACE_EVENT_ASYNC_',
951 'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
952 'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
956 r'^base/trace_event/.*',
963 'Improper use of [base::win]::RoInitialize() has been implicated in a ',
964 'few COM initialization leaks. Use base::win::ScopedWinrtInitializer ',
965 'instead. See http://crbug.com/1197722 for more information.'
969 r'^base[\\/]win[\\/]scoped_winrt_initializer\.cc$'
973 r'/DISALLOW_(COPY|ASSIGN|COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS)\(',
975 'DISALLOW_xxx macros are deprecated. See base/macros.h for details.',
982 # Format: Sequence of tuples containing:
983 # * String pattern or, if starting with a slash, a regular expression.
984 # * Sequence of strings to show when the pattern matches.
985 _DEPRECATED_MOJO_TYPES = (
987 r'/\bmojo::AssociatedInterfacePtrInfo\b',
989 'mojo::AssociatedInterfacePtrInfo<Interface> is deprecated.',
990 'Use mojo::PendingAssociatedRemote<Interface> instead.',
994 r'/\bmojo::AssociatedInterfaceRequest\b',
996 'mojo::AssociatedInterfaceRequest<Interface> is deprecated.',
997 'Use mojo::PendingAssociatedReceiver<Interface> instead.',
1001 r'/\bmojo::InterfacePtr\b',
1003 'mojo::InterfacePtr<Interface> is deprecated.',
1004 'Use mojo::Remote<Interface> instead.',
1008 r'/\bmojo::InterfacePtrInfo\b',
1010 'mojo::InterfacePtrInfo<Interface> is deprecated.',
1011 'Use mojo::PendingRemote<Interface> instead.',
1015 r'/\bmojo::InterfaceRequest\b',
1017 'mojo::InterfaceRequest<Interface> is deprecated.',
1018 'Use mojo::PendingReceiver<Interface> instead.',
1022 r'/\bmojo::MakeRequest\b',
1024 'mojo::MakeRequest is deprecated.',
1025 'Use mojo::Remote::BindNewPipeAndPassReceiver() instead.',
# Message emitted when the deprecated IPC_ENUM_TRAITS() macro appears in code.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/'
    'security-tips-for-ipc')
1035 _LONG_PATH_ERROR = (
1036 'Some files included in this CL have file names that are too long (> 200'
1037 ' characters). If committed, these files will cause issues on Windows. See'
1038 ' https://crbug.com/612667 for more details.'
1041 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1042 r".*[\\/]AppHooksImpl\.java",
1043 r".*[\\/]BuildHooksAndroidImpl\.java",
1044 r".*[\\/]LicenseContentProvider\.java",
1045 r".*[\\/]PlatformServiceBridgeImpl.java",
1046 r".*chrome[\\\/]android[\\\/]feed[\\\/]dummy[\\\/].*\.java",
1049 # List of image extensions that are used as resources in chromium.
_IMAGE_EXTENSIONS = [
    '.svg',
    '.png',
    '.webp',
]
1052 # These paths contain test data and other known invalid JSON files.
1053 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
1054 r'test[\\/]data[\\/]',
1055 r'testing[\\/]buildbot[\\/]',
1056 r'^components[\\/]policy[\\/]resources[\\/]policy_templates\.json$',
1057 r'^third_party[\\/]protobuf[\\/]',
1058 r'^third_party[\\/]blink[\\/]renderer[\\/]devtools[\\/]protocol\.json$',
1059 r'^third_party[\\/]blink[\\/]web_tests[\\/]external[\\/]wpt[\\/]',
1063 _VALID_OS_MACROS = (
1064 # Please keep sorted.
1070 'OS_CAT', # For testing.
1072 'OS_CYGWIN', # third_party code.
1090 # These are not checked on the public chromium-presubmit trybot.
1091 # Add files here that rely on .py files that exists only for target_os="android"
1093 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1094 'chrome/android/features/create_stripped_java_factory.pydeps',
1098 _GENERIC_PYDEPS_FILES = [
1099 'android_webview/tools/run_cts.pydeps',
1100 'base/android/jni_generator/jni_generator.pydeps',
1101 'base/android/jni_generator/jni_registration_generator.pydeps',
1102 'build/android/apk_operations.pydeps',
1103 'build/android/devil_chromium.pydeps',
1104 'build/android/gyp/aar.pydeps',
1105 'build/android/gyp/aidl.pydeps',
1106 'build/android/gyp/allot_native_libraries.pydeps',
1107 'build/android/gyp/apkbuilder.pydeps',
1108 'build/android/gyp/assert_static_initializers.pydeps',
1109 'build/android/gyp/bytecode_processor.pydeps',
1110 'build/android/gyp/bytecode_rewriter.pydeps',
1111 'build/android/gyp/check_flag_expectations.pydeps',
1112 'build/android/gyp/compile_java.pydeps',
1113 'build/android/gyp/compile_resources.pydeps',
1114 'build/android/gyp/copy_ex.pydeps',
1115 'build/android/gyp/create_apk_operations_script.pydeps',
1116 'build/android/gyp/create_app_bundle.pydeps',
1117 'build/android/gyp/create_app_bundle_apks.pydeps',
1118 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1119 'build/android/gyp/create_java_binary_script.pydeps',
1120 'build/android/gyp/create_r_java.pydeps',
1121 'build/android/gyp/create_r_txt.pydeps',
1122 'build/android/gyp/create_size_info_files.pydeps',
1123 'build/android/gyp/create_ui_locale_resources.pydeps',
1124 'build/android/gyp/desugar.pydeps',
1125 'build/android/gyp/dex.pydeps',
1126 'build/android/gyp/dex_jdk_libs.pydeps',
1127 'build/android/gyp/dexsplitter.pydeps',
1128 'build/android/gyp/dist_aar.pydeps',
1129 'build/android/gyp/filter_zip.pydeps',
1130 'build/android/gyp/gcc_preprocess.pydeps',
1131 'build/android/gyp/generate_linker_version_script.pydeps',
1132 'build/android/gyp/ijar.pydeps',
1133 'build/android/gyp/jacoco_instr.pydeps',
1134 'build/android/gyp/java_cpp_enum.pydeps',
1135 'build/android/gyp/java_cpp_features.pydeps',
1136 'build/android/gyp/java_cpp_strings.pydeps',
1137 'build/android/gyp/java_google_api_keys.pydeps',
1138 'build/android/gyp/jetify_jar.pydeps',
1139 'build/android/gyp/jinja_template.pydeps',
1140 'build/android/gyp/lint.pydeps',
1141 'build/android/gyp/merge_manifest.pydeps',
1142 'build/android/gyp/prepare_resources.pydeps',
1143 'build/android/gyp/process_native_prebuilt.pydeps',
1144 'build/android/gyp/proguard.pydeps',
1145 'build/android/gyp/turbine.pydeps',
1146 'build/android/gyp/unused_resources.pydeps',
1147 'build/android/gyp/validate_static_library_dex_references.pydeps',
1148 'build/android/gyp/write_build_config.pydeps',
1149 'build/android/gyp/write_native_libraries_java.pydeps',
1150 'build/android/gyp/zip.pydeps',
1151 'build/android/incremental_install/generate_android_manifest.pydeps',
1152 'build/android/incremental_install/write_installer_json.pydeps',
1153 'build/android/resource_sizes.pydeps',
1154 'build/android/test_runner.pydeps',
1155 'build/android/test_wrapper/logdog_wrapper.pydeps',
1156 'build/lacros/lacros_resource_sizes.pydeps',
1157 'build/protoc_java.pydeps',
1158 'chrome/android/monochrome/scripts/monochrome_python_tests.pydeps',
1159 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1160 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1161 'chromecast/resource_sizes/chromecast_resource_sizes.pydeps',
1162 'components/cronet/tools/generate_javadoc.pydeps',
1163 'components/cronet/tools/jar_src.pydeps',
1164 'components/module_installer/android/module_desc_java.pydeps',
1165 'content/public/android/generate_child_service.pydeps',
1166 'net/tools/testserver/testserver.pydeps',
1167 'testing/scripts/run_android_wpt.pydeps',
1168 'testing/scripts/run_isolated_script_test.pydeps',
1169 'third_party/android_platform/development/scripts/stack.pydeps',
1170 'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
1171 'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
1172 'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
1173 'third_party/blink/renderer/bindings/scripts/validate_web_idl.pydeps',
1174 'tools/binary_size/sizes.pydeps',
1175 'tools/binary_size/supersize.pydeps',
# Every known .pydeps file, Android-specific entries first.
_ALL_PYDEPS_FILES = [*_ANDROID_SPECIFIC_PYDEPS_FILES, *_GENERIC_PYDEPS_FILES]
1182 # Bypass the AUTHORS check for these accounts.
_KNOWN_ROBOTS = (
    # One-off service accounts.
    {'%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)}
    | {'%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)}
    # Infrastructure / autoroller service accounts.
    | {'%s@chops-service-accounts.iam.gserviceaccount.com' % s
       for s in ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
                 'wpt-autoroller', 'chrome-weblayer-builder',
                 'lacros-version-skew-roller', 'skylab-test-cros-roller')}
    | {'%s@skia-public.iam.gserviceaccount.com' % s
       for s in ('chromium-autoroll', 'chromium-release-autoroll')}
    | {'%s@skia-corp.google.com.iam.gserviceaccount.com' % s
       for s in ('chromium-internal-autoroll',)}
    | {'%s@owners-cleanup-prod.google.com.iam.gserviceaccount.com' % s
       for s in ('swarming-tasks',)})
1197 _INVALID_GRD_FILE_LINE = [
1198 (r'<file lang=.* path=.*', 'Path should come before lang in GRD files.')
def _IsCPlusPlusFile(input_api, file_path):
  """Returns True if this file contains C++-like code (and not Python,
  Go, Java, MarkDown, ...).

  Args:
    input_api: presubmit InputApi; used only for its os_path helpers.
    file_path: path of the file being classified.
  """
  ext = input_api.os_path.splitext(file_path)[1]
  # This list is compatible with CppChecker.IsCppFile but we should
  # consider adding ".c" to it. If we do that we can use this function
  # at more places in the code.
def _IsCPlusPlusHeaderFile(input_api, file_path):
  """Returns True only for files with a bare '.h' extension."""
  _, extension = input_api.os_path.splitext(file_path)
  return extension == ".h"
def _IsJavaFile(input_api, file_path):
  """Returns True when |file_path| names a Java source file."""
  _, extension = input_api.os_path.splitext(file_path)
  return extension == ".java"
def _IsProtoFile(input_api, file_path):
  """Returns True when |file_path| names a protobuf definition file."""
  _, extension = input_api.os_path.splitext(file_path)
  return extension == ".proto"
def CheckNoUpstreamDepsOnClank(input_api, output_api):
  """Prevent additions of dependencies from the upstream repo on //clank."""
  # clank can depend on clank
  if input_api.change.RepositoryRoot().endswith('clank'):
  build_file_patterns = [
      r'build[/\\]config[/\\]android[/\\]config\.gni'
  # '//clank' anywhere on the line before a '#' comment marker.
  bad_pattern = input_api.re.compile(r'^[^#]*//clank')
  error_message = 'Disallowed import on //clank in an upstream build file:'

  def FilterFile(affected_file):
    # Restrict the scan to the build files selected above.
    return input_api.FilterSourceFile(
        files_to_check=build_file_patterns,
        files_to_skip=excluded_files)

  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (bad_pattern.search(line)):
          '%s:%d\n %s' % (local_path, line_number, line.strip()))
  # Prompt (not a hard error) so intentional cases can still proceed.
  return [output_api.PresubmitPromptOrNotify(error_message, problems)]
def CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]

  # Matches 'test::Foo', 'FooForTest(s/ing)' and 'foo_for_test(s/ing)' names.
  base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # A trailing '// IN-TEST' comment explicitly allowlists the line.
  allowlist_pattern = input_api.re.compile(r'// IN-TEST$')
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))
  # Avoid a false positive in this case, where the method name, the ::, and
  # the closing { are all on different lines due to line wrapping.
  # HelperClassForTesting::
  #   HelperClassForTesting(
  method_defn_pattern = input_api.re.compile(r'[A-Za-z0-9_]+::$')

  def FilterFile(affected_file):
    files_to_skip = (_EXCLUDED_PATHS +
                     _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP)
    return input_api.FilterSourceFile(
        files_to_check=file_inclusion_pattern,
        files_to_skip=files_to_skip)

  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    in_method_defn = False
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line) and
          not allowlist_pattern.search(line) and
          not in_method_defn):
          '%s:%d\n %s' % (local_path, line_number, line.strip()))
      # Remember whether this line ended with 'Name::' so the next line is
      # treated as part of a wrapped method definition.
      in_method_defn = method_defn_pattern.search(line)

  return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
def CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
  """This is a simplified version of
  CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
  """
  javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
  javadoc_end_re = input_api.re.compile(r'^\s*\*/')
  name_pattern = r'ForTest(s|ing)?'
  # Describes an occurrence of "ForTest*" inside a // comment.
  comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
  # Describes @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
  annotation_re = input_api.re.compile(r'@VisibleForTesting\(')
  inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
  # Ignore definitions. (Comments are ignored separately.)
  exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)

  # Skip anything test-related (case-insensitive) and junit directories.
  sources = lambda x: input_api.FilterSourceFile(
      files_to_skip=(('(?i).*test', r'.*\/junit\/')
                     + input_api.DEFAULT_FILES_TO_SKIP),
      files_to_check=[r'.*\.java$']
  for f in input_api.AffectedFiles(include_deletes=False, file_filter=sources):
    local_path = f.LocalPath()
    is_inside_javadoc = False
    for line_number, line in f.ChangedContents():
      # Track javadoc blocks; matches inside javadoc are ignored.
      if is_inside_javadoc and javadoc_end_re.search(line):
        is_inside_javadoc = False
      if not is_inside_javadoc and javadoc_start_re.search(line):
        is_inside_javadoc = True
      if is_inside_javadoc:
      if (inclusion_re.search(line) and
          not comment_re.search(line) and
          not annotation_re.search(line) and
          not exclusion_re.search(line)):
          '%s:%d\n %s' % (local_path, line_number, line.strip()))

  return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
def CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    # Only header files are checked; <iostream> in a .cc is fine.
    if not f.LocalPath().endswith('.h'):
    contents = input_api.ReadFile(f)
    if pattern.search(contents):

  return [output_api.PresubmitError(
      'Do not #include <iostream> in header files, since it inserts static '
      'initialization into every file including the header. Instead, '
      '#include <ostream>. See http://crbug.com/94794',
def _CheckNoStrCatRedefines(input_api, output_api):
  """Checks no windows headers with StrCat redefined are included directly."""
  pattern_deny = input_api.re.compile(
      r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
      input_api.re.MULTILINE)
  # A file that also includes base/win/windows_defines.inc is allowed to
  # include the denied headers (the macro damage is undone there).
  pattern_allow = input_api.re.compile(
      r'^#include\s"base/win/windows_defines.inc"',
      input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    contents = input_api.ReadFile(f)
    if pattern_deny.search(contents) and not pattern_allow.search(contents):
      files.append(f.LocalPath())

  return [output_api.PresubmitError(
      'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
      'directly since they pollute code with StrCat macro. Instead, '
      'include matching header from base/win. See http://crbug.com/856536',
def CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST."""
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.mm'))):
    for line_num, line in f.ChangedContents():
      # Flag 'UNIT_TEST ' mid-line as well as 'UNIT_TEST' at end of line.
      if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
                                            '\n'.join(problems))]
def CheckNoDISABLETypoInTests(input_api, output_api):
  """Checks to prevent attempts to disable tests with DISABLE_ prefix.

  This test warns if somebody tries to disable a test with the DISABLE_ prefix
  instead of DISABLED_. To filter false positives, reports are only generated
  if a corresponding MAYBE_ line exists.
  """
  # The following two patterns are looked for in tandem - is a test labeled
  # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED_)
  maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
  disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')

  # This is for the case that a test is disabled on all platforms.
  full_disable_pattern = input_api.re.compile(
      r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
      input_api.re.MULTILINE)

  for f in input_api.AffectedFiles(False):
    if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):

    # Search for MAYBE_, DISABLE_ pairs.
    disable_lines = {}  # Maps of test name to line number.
    for line_num, line in f.ChangedContents():
      disable_match = disable_pattern.search(line)
        disable_lines[disable_match.group(1)] = line_num
      maybe_match = maybe_pattern.search(line)
        maybe_lines[maybe_match.group(1)] = line_num

    # Search for DISABLE_ occurrences within a TEST() macro.
    disable_tests = set(disable_lines.keys())
    maybe_tests = set(maybe_lines.keys())
    # Only names that appear both as MAYBE_ and DISABLE_ are reported,
    # which filters out legitimate non-test uses of DISABLE_.
    for test in disable_tests.intersection(maybe_tests):
      problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))

    contents = input_api.ReadFile(f)
    full_disable_match = full_disable_pattern.search(contents)
    if full_disable_match:
      problems.append(' %s' % f.LocalPath())

      output_api.PresubmitPromptWarning(
          'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
          '\n'.join(problems))
def CheckDCHECK_IS_ONHasBraces(input_api, output_api):
  """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
  # Negative lookahead: DCHECK_IS_ON *not* immediately followed by '()'.
  pattern = input_api.re.compile(r'DCHECK_IS_ON\b(?!\(\))',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
    for lnum, line in f.ChangedContents():
      if input_api.re.search(pattern, line):
        errors.append(output_api.PresubmitError(
            ('%s:%d: Use of DCHECK_IS_ON() must be written as "#if ' +
             'DCHECK_IS_ON()", not forgetting the parentheses.')
            % (f.LocalPath(), lnum)))
1495 # TODO(crbug/1138055): Reimplement CheckUmaHistogramChangesOnUpload check in a
1496 # more reliable way. See
1497 # https://chromium-review.googlesource.com/c/chromium/src/+/2500269
def CheckFlakyTestUsage(input_api, output_api):
  """Check that FlakyTest annotation is our own instead of the android one."""
  pattern = input_api.re.compile(r'import android.test.FlakyTest;')
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    # Only Java test files (by the *Test.java naming convention) are checked.
    if f.LocalPath().endswith('Test.java'):
      if pattern.search(input_api.ReadFile(f)):

  return [output_api.PresubmitError(
      'Use org.chromium.base.test.util.FlakyTest instead of '
      'android.test.FlakyTest',
def CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  # .DEPS.git is generated from DEPS; any manual edit is an error.
  if any(f.LocalPath().endswith('.DEPS.git') for f in
         input_api.AffectedFiles()):
    return [output_api.PresubmitError(
        'Never commit changes to .DEPS.git. This file is maintained by an\n'
        'automated system based on what\'s in DEPS and your changes will be\n'
        'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
        'get-the-code#Rolling_DEPS\n'
        'for more information')]
def CheckValidHostsInDEPSOnUpload(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
  # Outsource work to gclient verify
  gclient_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(),
      'third_party', 'depot_tools', 'gclient.py')
  # 'gclient verify' exits non-zero on a violation, raising
  # CalledProcessError below.
  input_api.subprocess.check_output(
      [input_api.python_executable, gclient_path, 'verify'],
      stderr=input_api.subprocess.STDOUT)
  except input_api.subprocess.CalledProcessError as error:
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]
def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
                               type_name, message):
  """Helper method for CheckNoBannedFunctions and CheckNoDeprecatedMojoTypes.

  Returns a string composed of the name of the file, the line number where the
  match has been found and the additional text passed as |message| in case the
  target type name matches the text inside the line passed as parameter.
  """
  if input_api.re.search(r"^ *//", line):  # Ignore comments about banned types.
  if line.endswith(" nocheck"):  # A // nocheck comment will bypass this error.

  # A leading '/' marks |type_name| as a regular expression; otherwise a
  # plain substring match is used.
  if type_name[0:1] == '/':
    regex = type_name[1:]
    if input_api.re.search(regex, line):
  elif type_name in line:

  result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
  for message_line in message:
    result.append(' %s' % message_line)
def CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""

  def IsExcludedFile(affected_file, excluded_paths):
    # Regex match of the local path against any per-entry exclusion list.
    local_path = affected_file.LocalPath()
    for item in excluded_paths:
      if input_api.re.match(item, local_path):

  def IsIosObjcFile(affected_file):
    # A file counts as iOS Objective-C when it has an ObjC/header extension
    # and an 'ios' component in its basename or directory path.
    local_path = affected_file.LocalPath()
    if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m', '.h'):
    basename = input_api.os_path.basename(local_path)
    if 'ios' in basename.split('_'):
    for sep in (input_api.os_path.sep, input_api.os_path.altsep):
      if sep and 'ios' in local_path.split(sep):

  def CheckForMatch(affected_file, line_num, line, func_name, message, error):
    # |error| selects whether matches become hard errors or prompt warnings.
    problems = _GetMessageForMatchingType(input_api, f, line_num, line,
      errors.extend(problems)
      warnings.extend(problems)

  # Each banned-symbol table below is applied to its own file set.
  file_filter = lambda f: f.LocalPath().endswith(('.java'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_JAVA_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_IOS_OBJC_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
  for f in input_api.AffectedFiles(file_filter=egtest_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_IOS_EGTEST_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      # C++ entries additionally carry per-entry excluded paths.
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        if IsExcludedFile(f, excluded_paths):
        CheckForMatch(f, line_num, line, func_name, message, error)

  result.append(output_api.PresubmitPromptWarning(
      'Banned functions were used.\n' + '\n'.join(warnings)))
  result.append(output_api.PresubmitError(
      'Banned functions were used.\n' + '\n'.join(errors)))
def _CheckAndroidNoBannedImports(input_api, output_api):
  """Make sure that banned java imports are not used."""

  def IsException(path, exceptions):
    # Prefix match against the per-import allowlisted paths.
    for exception in exceptions:
      if (path.startswith(exception)):

  file_filter = lambda f: f.LocalPath().endswith(('.java'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for import_name, message, exceptions in _BANNED_JAVA_IMPORTS:
        if IsException(f.LocalPath(), exceptions):
        # Match the full 'import <name>' statement, not just the name.
        problems = _GetMessageForMatchingType(input_api, f, line_num, line,
                                              'import ' + import_name, message)
          errors.extend(problems)

  result.append(output_api.PresubmitError(
      'Banned imports were used.\n' + '\n'.join(errors)))
def CheckNoDeprecatedMojoTypes(input_api, output_api):
  """Make sure that old Mojo types are not used."""

  # For any path that is not an "ok" or an "error" path, a warning will be
  # raised if deprecated mojo types are found.
  ok_paths = ['components/arc']
  error_paths = ['third_party/blink', 'content']

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    # Don't check //components/arc, not yet migrated (see crrev.com/c/1868870).
    if any(map(lambda path: f.LocalPath().startswith(path), ok_paths)):

    for line_num, line in f.ChangedContents():
      for func_name, message in _DEPRECATED_MOJO_TYPES:
        problems = _GetMessageForMatchingType(input_api, f, line_num, line,
          # Raise errors inside |error_paths| and warnings everywhere else.
          if any(map(lambda path: f.LocalPath().startswith(path), error_paths)):
            errors.extend(problems)
            warnings.extend(problems)

  result.append(output_api.PresubmitPromptWarning(
      'Banned Mojo types were used.\n' + '\n'.join(warnings)))
  result.append(output_api.PresubmitError(
      'Banned Mojo types were used.\n' + '\n'.join(errors)))
def CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
    contents = input_api.ReadFile(f)
    if pattern.search(contents):

  return [output_api.PresubmitError(
      'Do not use #pragma once in header files.\n'
      'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
def CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):

    for line_num, line in f.ChangedContents():
      # NOTE(review): match() only fires when the line *starts* with '?';
      # search() looks like the intent — confirm before changing.
      if pattern.match(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]
def CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include and import statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  sys.path = sys.path + [input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
  from rules import Rule
  # Restore sys.path to what it was before.
  sys.path = original_sys_path

  # Bucket the changed lines of each affected file by language, since
  # checkdeps has a separate entry point per language.
  added_java_imports = []
  for f in input_api.AffectedFiles():
    if _IsCPlusPlusFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_includes.append([f.AbsoluteLocalPath(), changed_lines])
    elif _IsProtoFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_imports.append([f.AbsoluteLocalPath(), changed_lines])
    elif _IsJavaFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  error_subjects = set()
  warning_subjects = set()

  # DISALLOW rule violations become errors; other rule types are warnings.
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("#includes")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("#includes")

  for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("imports")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("imports")

  for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
      added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("imports")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("imports")

  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more %s that violate checkdeps rules.'
        % " and ".join(error_subjects),
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more %s of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '%s? See relevant DEPS file(s) for details and contacts.' %
        (" and ".join(warning_subjects), "/".join(warning_subjects)),
        warning_descriptions))
def CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  # POSIX permission bits are not meaningful on Windows; skip there.
  if input_api.platform == 'win32':
  checkperms_tool = input_api.os_path.join(
      input_api.PresubmitLocalPath(),
      'tools', 'checkperms', 'checkperms.py')
  args = [input_api.python_executable, checkperms_tool,
          '--root', input_api.change.RepositoryRoot()]
  # Pass the affected paths to checkperms.py via a temporary file list.
  with input_api.CreateTemporaryFile() as file_list:
    for f in input_api.AffectedFiles():
      # checkperms.py file/directory arguments must be relative to the
      file_list.write((f.LocalPath() + '\n').encode('utf8'))
    args += ['--file-list', file_list.name]
    input_api.subprocess.check_output(args)
  except input_api.subprocess.CalledProcessError as error:
    return [output_api.PresubmitError(
        'checkperms.py failed:',
        long_text=error.output.decode('utf-8', 'ignore'))]
def CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  for f in input_api.AffectedFiles():
    # Only header files are checked.
    if not f.LocalPath().endswith('.h'):
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append(' %s:%d' % (f.LocalPath(), line_num))

  results.append(output_api.PresubmitError(
      'Header files should not include ui/aura/window_property.h', errors))
def CheckNoInternalHeapIncludes(input_api, output_api):
  """Makes sure we don't include any headers from
  third_party/blink/renderer/platform/heap/impl or
  third_party/blink/renderer/platform/heap/v8_wrapper from files outside of
  third_party/blink/renderer/platform/heap
  """
  impl_pattern = input_api.re.compile(
      r'^\s*#include\s*"third_party/blink/renderer/platform/heap/impl/.*"')
  v8_wrapper_pattern = input_api.re.compile(
      r'^\s*#include\s*"third_party/blink/renderer/platform/heap/v8_wrapper/.*"')
  # Files under platform/heap itself are exempt from this check.
  file_filter = lambda f: not input_api.re.match(
      r"^third_party[\\/]blink[\\/]renderer[\\/]platform[\\/]heap[\\/].*",

  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      if impl_pattern.match(line) or v8_wrapper_pattern.match(line):
        errors.append(' %s:%d' % (f.LocalPath(), line_num))

  results.append(output_api.PresubmitError(
      'Do not include files from third_party/blink/renderer/platform/heap/impl'
      ' or third_party/blink/renderer/platform/heap/v8_wrapper. Use the '
      'relevant counterparts from third_party/blink/renderer/platform/heap',
def _CheckForVersionControlConflictsInFile(input_api, f):
  """Returns a list of error strings for VCS conflict markers found in |f|."""
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  for line_num, line in f.ChangedContents():
    if f.LocalPath().endswith(('.md', '.rst', '.txt')):
      # First-level headers in markdown look a lot like version control
      # conflict markers. http://daringfireball.net/projects/markdown/basics
    if pattern.match(line):
      errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
def CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  # Aggregate the per-file reports from the helper above.
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results.append(output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', errors))
def CheckGoogleSupportAnswerUrlOnUpload(input_api, output_api):
  """Warns about support.google.com/chrome/.../answer URLs in the change."""
  # NOTE(review): non-raw string relies on '\.'/'\/' escapes passing
  # through; should be a raw string (r'...') to avoid invalid-escape
  # deprecation warnings — confirm before changing.
  pattern = input_api.re.compile('support\.google\.com\/chrome.*/answer')
  for f in input_api.AffectedFiles():
    for line_num, line in f.ChangedContents():
      if pattern.search(line):
        errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))

  results.append(output_api.PresubmitPromptWarning(
      'Found Google support URL addressed by answer number. Please replace '
      'with a p= identifier instead. See crbug.com/679462\n', errors))
def CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  """Warns when low-layer directories hard-code Google service hostnames."""
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        files_to_check=[r'^(android_webview|base|content|net)[\\/].*'],
        files_to_skip=(_EXCLUDED_PATHS +
                       _TEST_CODE_EXCLUDED_PATHS +
                       input_api.DEFAULT_FILES_TO_SKIP))

  # Quoted string literal containing a google-owned hostname.
  base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
                  '\.(com|net)[^"]*"')
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      # Hostnames that only appear inside a // comment are allowed.
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  return [output_api.PresubmitPromptOrNotify(
      'Most layers below src/chrome/ should not hardcode service URLs.\n'
      'Are you sure this is correct?',
      problem[0], problem[1], problem[2]) for problem in problems])]
def CheckChromeOsSyncedPrefRegistration(input_api, output_api):
  """Warns if Chrome OS C++ files register syncable prefs as browser prefs.

  Scans changed lines in Chrome OS-only directories for registrations using
  PrefRegistrySyncable::SYNCABLE_PREF / SYNCABLE_PRIORITY_PREF and suggests
  the SYNCABLE_OS_* equivalents instead. Returns a list of
  PresubmitPromptWarning results (empty when nothing was found).
  """
  def FileFilter(affected_file):
    """Includes directories known to be Chrome OS only."""
    return input_api.FilterSourceFile(
        affected_file,
        files_to_check=('^ash/',
                        '^chromeos/',  # Top-level src/chromeos.
                        '/chromeos/'),  # Any path component.
        files_to_skip=(input_api.DEFAULT_FILES_TO_SKIP))

  # Collected "path:line" + offending-line report entries, one list per kind
  # of mis-registered pref.
  prefs = []
  priority_prefs = []
  for f in input_api.AffectedFiles(file_filter=FileFilter):
    for line_num, line in f.ChangedContents():
      if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF', line):
        prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
        prefs.append(' %s' % line)
      if input_api.re.search(
          'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
        priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
        priority_prefs.append(' %s' % line)

  results = []
  if prefs:
    results.append(output_api.PresubmitPromptWarning(
        'Preferences were registered as SYNCABLE_PREF and will be controlled '
        'by browser sync settings. If these prefs should be controlled by OS '
        'sync settings use SYNCABLE_OS_PREF instead.\n' + '\n'.join(prefs)))
  if priority_prefs:
    # BUG FIX: this warning previously joined |prefs|, so the
    # SYNCABLE_PRIORITY_PREF message listed the wrong occurrences; it must
    # report |priority_prefs|.
    results.append(output_api.PresubmitPromptWarning(
        'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
        'controlled by browser sync settings. If these prefs should be '
        'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
        'instead.\n' + '\n'.join(priority_prefs)))
  return results
# TODO: add unit tests.
def CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  # Flags names with a single-letter component, e.g. 'foo_h.png' or 'a_b_.png'.
  files_to_check = [r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$']
  files_to_skip = [r'^native_client_sdk[\\/]']
  file_filter = lambda f: input_api.FilterSourceFile(
      f, files_to_check=files_to_check, files_to_skip=files_to_skip)
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    errors.append(' %s' % f.LocalPath())

  results.append(output_api.PresubmitError(
      'The name of PNG files should not have abbreviations. \n'
      'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
      'Contact oshima@chromium.org if you have questions.', errors))
def _ExtractAddRulesFromParsedDeps(parsed_deps):
  """Extract the rules that add dependencies from a parsed DEPS file.

  Args:
    parsed_deps: the locals dictionary from evaluating the DEPS file."""
  # '+' marks an allowed dependency and '!' a temporarily-allowed one; the
  # leading marker character is stripped from each collected rule.
  rule[1:] for rule in parsed_deps.get('include_rules', [])
  if rule.startswith('+') or rule.startswith('!')
  # Per-file rules from 'specific_include_rules' are collected the same way.
  for _, rules in parsed_deps.get('specific_include_rules',
  rule[1:] for rule in rules
  if rule.startswith('+') or rule.startswith('!')
def _ParseDeps(contents):
  """Simple helper for parsing DEPS files."""
  # Stubs for handling special syntax in the root DEPS file.
  def __init__(self, local_scope):
    self._local_scope = local_scope

  def Lookup(self, var_name):
    """Implements the Var syntax."""
    # Resolves Var('name') against the 'vars' dict of the DEPS file.
    return self._local_scope['vars'][var_name]
    raise Exception('Var is not defined: %s' % var_name)

  'Var': _VarImpl(local_scope).Lookup,
  # exec of DEPS content is deliberate: DEPS is trusted repository input.
  exec(contents, global_scope, local_scope)
def _CalculateAddedDeps(os_path, old_contents, new_contents):
  """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
  new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

  # Only rules newly introduced by this change are considered.
  added_deps = new_deps.difference(old_deps)

  for added_dep in added_deps:
    if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
    # Assume that a rule that ends in .h is a rule for a specific file.
    if added_dep.endswith('.h'):
      results.add(added_dep)
      # Directory rule: presumably the else-branch — look up a fake
      # <dir>/DEPS path instead (see docstring); confirm against upstream.
      results.add(os_path.join(added_dep, 'DEPS'))
2135 def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
2136 """When a dependency prefixed with + is added to a DEPS file, we
2137 want to make sure that the change is reviewed by an OWNER of the
2138 target file or directory, to avoid layering violations from being
2139 introduced. This check verifies that this happens.
2141 # We rely on Gerrit's code-owners to check approvals.
2142 # input_api.gerrit is always set for Chromium, but other projects
2143 # might not use Gerrit.
2144 if not input_api.gerrit:
2146 if (input_api.change.issue and
2147 input_api.gerrit.IsOwnersOverrideApproved(input_api.change.issue)):
2148 # Skip OWNERS check when Owners-Override label is approved. This is intended
2149 # for global owners, trusted bots, and on-call sheriffs. Review is still
2150 # required for these changes.
2153 virtual_depended_on_files = set()
# Blink DEPS files are excluded from this check (they have their own review
# conventions).
2155 file_filter = lambda f: not input_api.re.match(
2156 r"^third_party[\\/]blink[\\/].*", f.LocalPath())
2157 for f in input_api.AffectedFiles(include_deletes=False,
2158 file_filter=file_filter):
2159 filename = input_api.os_path.basename(f.LocalPath())
2160 if filename == 'DEPS':
# Collect the target paths for every newly added '+' rule in this DEPS file.
2161 virtual_depended_on_files.update(_CalculateAddedDeps(
2163 '\n'.join(f.OldContents()),
2164 '\n'.join(f.NewContents())))
2166 if not virtual_depended_on_files:
2169 if input_api.is_committing:
2171 return [output_api.PresubmitNotifyResult(
2172 '--tbr was specified, skipping OWNERS check for DEPS additions')]
2173 if input_api.dry_run:
2174 return [output_api.PresubmitNotifyResult(
2175 'This is a dry run, skipping OWNERS check for DEPS additions')]
2176 if not input_api.change.issue:
2177 return [output_api.PresubmitError(
2178 "DEPS approval by OWNERS check failed: this change has "
2179 "no change number, so we can't check it for approvals.")]
# Hard error at commit time; the notify-only branch below presumably applies
# on upload (the intervening condition is elided in this excerpt).
2180 output = output_api.PresubmitError
2182 output = output_api.PresubmitNotifyResult
2184 owner_email, reviewers = (
2185 input_api.canned_checks.GetCodereviewOwnerAndReviewers(
2188 approval_needed=input_api.is_committing))
# Fall back to the change's author when Gerrit reports no owner.
2190 owner_email = owner_email or input_api.change.author_email
2192 approval_status = input_api.owners_client.GetFilesApprovalStatus(
2193 virtual_depended_on_files, reviewers.union([owner_email]), [])
2195 f for f in virtual_depended_on_files
2196 if approval_status[f] != input_api.owners_client.APPROVED]
2198 # We strip the /DEPS part that was added by
2199 # _FilesToCheckForIncomingDeps to fake a path to a file in a
2201 def StripDeps(path):
2202 start_deps = path.rfind('/DEPS')
2203 if start_deps != -1:
2204 return path[:start_deps]
# Render each unapproved target back in DEPS '+path' rule syntax for the
# error message.
2207 unapproved_dependencies = ["'+%s'," % StripDeps(path)
2208 for path in missing_files]
2210 if unapproved_dependencies:
2212 output('You need LGTM from owners of depends-on paths in DEPS that were '
2213 'modified in this CL:\n %s' %
2214 '\n '.join(sorted(unapproved_dependencies)))]
2215 suggested_owners = input_api.owners_client.SuggestOwners(
2216 missing_files, exclude=[owner_email])
2217 output_list.append(output(
2218 'Suggested missing target path OWNERS:\n %s' %
2219 '\n '.join(suggested_owners or [])))
2225 # TODO: add unit tests.
# Flags changed C++/ObjC++ lines that write to the console log via
# LOG(INFO)/DLOG(INFO)/LOG_IF(INFO, ...) or printf/fprintf(stdout|stderr),
# outside of an explicit allowlist of files that legitimately log.
2226 def CheckSpamLogging(input_api, output_api):
2227 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
# Files that are allowed to log: logging infrastructure itself, command-line
# tools, installers, and other binaries where console output is the point.
2228 files_to_skip = (_EXCLUDED_PATHS +
2229 _TEST_CODE_EXCLUDED_PATHS +
2230 input_api.DEFAULT_FILES_TO_SKIP +
2231 (r"^base[\\/]logging\.h$",
2232 r"^base[\\/]logging\.cc$",
2233 r"^base[\\/]task[\\/]thread_pool[\\/]task_tracker\.cc$",
2234 r"^chrome[\\/]app[\\/]chrome_main_delegate\.cc$",
2235 r"^chrome[\\/]browser[\\/]chrome_browser_main\.cc$",
2236 r"^chrome[\\/]browser[\\/]ui[\\/]startup[\\/]"
2237 r"startup_browser_creator\.cc$",
2238 r"^chrome[\\/]browser[\\/]browser_switcher[\\/]bho[\\/].*",
2239 r"^chrome[\\/]browser[\\/]diagnostics[\\/]" +
2240 r"diagnostics_writer\.cc$",
2241 r"^chrome[\\/]chrome_cleaner[\\/].*",
2242 r"^chrome[\\/]chrome_elf[\\/]dll_hash[\\/]" +
2243 r"dll_hash_main\.cc$",
2244 r"^chrome[\\/]installer[\\/]setup[\\/].*",
2245 r"^chromecast[\\/]",
2246 r"^cloud_print[\\/]",
2247 r"^components[\\/]browser_watcher[\\/]"
2248 r"dump_stability_report_main_win.cc$",
2249 r"^components[\\/]media_control[\\/]renderer[\\/]"
2250 r"media_playback_options\.cc$",
2251 r"^components[\\/]viz[\\/]service[\\/]display[\\/]"
2252 r"overlay_strategy_underlay_cast\.cc$",
2253 r"^components[\\/]zucchini[\\/].*",
2254 # TODO(peter): Remove exception. https://crbug.com/534537
2255 r"^content[\\/]browser[\\/]notifications[\\/]"
2256 r"notification_event_dispatcher_impl\.cc$",
2257 r"^content[\\/]common[\\/]gpu[\\/]client[\\/]"
2258 r"gl_helper_benchmark\.cc$",
2259 r"^courgette[\\/]courgette_minimal_tool\.cc$",
2260 r"^courgette[\\/]courgette_tool\.cc$",
2261 r"^extensions[\\/]renderer[\\/]logging_native_handler\.cc$",
2262 r"^fuchsia[\\/]engine[\\/]browser[\\/]frame_impl.cc$",
2263 r"^fuchsia[\\/]engine[\\/]context_provider_main.cc$",
2264 r"^fuchsia[\\/]runners[\\/]common[\\/]web_component.cc$",
2265 r"^headless[\\/]app[\\/]headless_shell\.cc$",
2266 r"^ipc[\\/]ipc_logging\.cc$",
2267 r"^native_client_sdk[\\/]",
2268 r"^remoting[\\/]base[\\/]logging\.h$",
2269 r"^remoting[\\/]host[\\/].*",
2270 r"^sandbox[\\/]linux[\\/].*",
2271 r"^storage[\\/]browser[\\/]file_system[\\/]" +
2272 r"dump_file_system.cc$",
2274 r"^ui[\\/]base[\\/]resource[\\/]data_pack.cc$",
2275 r"^ui[\\/]aura[\\/]bench[\\/]bench_main\.cc$",
2276 r"^ui[\\/]ozone[\\/]platform[\\/]cast[\\/]",
2277 r"^ui[\\/]base[\\/]x[\\/]xwmstartupcheck[\\/]"
2278 r"xwmstartupcheck\.cc$"))
2279 source_file_filter = lambda x: input_api.FilterSourceFile(
2280 x, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
# Scan only changed lines; collect offending file paths into two buckets,
# one for LOG(INFO)-style spam and one for printf-style spam.
2285 for f in input_api.AffectedSourceFiles(source_file_filter):
2286 for _, line in f.ChangedContents():
2287 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
2288 log_info.add(f.LocalPath())
2289 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
2290 log_info.add(f.LocalPath())
2292 if input_api.re.search(r"\bprintf\(", line):
2293 printf.add(f.LocalPath())
2294 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
2295 printf.add(f.LocalPath())
2298 return [output_api.PresubmitError(
2299 'These files spam the console log with LOG(INFO):',
2302 return [output_api.PresubmitError(
2303 'These files spam the console log with printf/fprintf:',
2308 def CheckForAnonymousVariables(input_api, output_api):
2309 """These types are all expected to hold locks while in scope and
2310 so should never be anonymous (which causes them to be immediately
# List of scoped (RAII-style) Skia/base types that must be bound to a named
# variable. NOTE(review): additional entries exist in the elided lines.
2312 they_who_must_be_named = [
2316 'SkAutoAlphaRestore',
2317 'SkAutoBitmapShaderInstall',
2318 'SkAutoBlitterChoose',
2319 'SkAutoBounderCommit',
2321 'SkAutoCanvasRestore',
2322 'SkAutoCommentBlock',
2324 'SkAutoDisableDirectionCheck',
2325 'SkAutoDisableOvalCheck',
2332 'SkAutoMaskFreeImage',
2333 'SkAutoMutexAcquire',
2334 'SkAutoPathBoundsUpdate',
2336 'SkAutoRasterClipValidate',
# A type name immediately followed by '(' or '{' is an anonymous temporary.
2342 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
2343 # bad: base::AutoLock(lock.get());
2344 # not bad: base::AutoLock lock(lock.get());
2345 bad_pattern = input_api.re.compile(anonymous)
2346 # good: new base::AutoLock(lock.get())
2347 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
2350 for f in input_api.AffectedFiles():
2351 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
2353 for linenum, line in f.ChangedContents():
# Report unless the match is a heap allocation ('new T(...)'), which is a
# named object by construction.
2354 if bad_pattern.search(line) and not good_pattern.search(line):
2355 errors.append('%s:%d' % (f.LocalPath(), linenum))
2358 return [output_api.PresubmitError(
2359 'These lines create anonymous variables that need to be named:',
# Flags std::unique_ptr usages that should be std::make_unique or nullptr:
# explicit single-argument constructors and default-constructed unique_ptrs.
2364 def CheckUniquePtrOnUpload(input_api, output_api):
2365 # Returns whether |template_str| is of the form <T, U...> for some types T
2366 # and U. Assumes that |template_str| is already in the form <...>.
2367 def HasMoreThanOneArg(template_str):
2368 # Level of <...> nesting.
2370 for c in template_str:
# A top-level comma (nesting == 1 means inside the outermost <...> only)
# separates two template arguments.
2375 elif c == ',' and nesting == 1:
2379 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
2380 sources = lambda affected_file: input_api.FilterSourceFile(
2382 files_to_skip=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
2383 input_api.DEFAULT_FILES_TO_SKIP),
2384 files_to_check=file_inclusion_pattern)
2386 # Pattern to capture a single "<...>" block of template arguments. It can
2387 # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
2388 # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
2389 # latter would likely require counting that < and > match, which is not
2390 # expressible in regular languages. Should the need arise, one can introduce
2391 # limited counting (matching up to a total number of nesting depth), which
2392 # should cover all practical cases for already a low nesting limit.
2393 template_arg_pattern = (
2394 r'<[^>]*' # Opening block of <.
2395 r'>([^<]*>)?') # Closing block of >.
2396 # Prefix expressing that whatever follows is not already inside a <...>
2398 not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
# Matches 'std::unique_ptr<T>()' — default construction that should just be
# nullptr.
2399 null_construct_pattern = input_api.re.compile(
2400 not_inside_template_arg_pattern
2401 + r'\bstd::unique_ptr'
2402 + template_arg_pattern
2405 # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
2406 template_arg_no_array_pattern = (
2407 r'<[^>]*[^]]' # Opening block of <.
2408 r'>([^(<]*[^]]>)?') # Closing block of >.
2409 # Prefix saying that what follows is the start of an expression.
2410 start_of_expr_pattern = r'(=|\breturn|^)\s*'
2411 # Suffix saying that what follows are call parentheses with a non-empty list
2413 nonempty_arg_list_pattern = r'\(([^)]|$)'
2414 # Put the template argument into a capture group for deeper examination later.
2415 return_construct_pattern = input_api.re.compile(
2416 start_of_expr_pattern
2417 + r'std::unique_ptr'
2418 + '(?P<template_arg>'
2419 + template_arg_no_array_pattern
2421 + nonempty_arg_list_pattern)
2423 problems_constructor = []
2424 problems_nullptr = []
2425 for f in input_api.AffectedSourceFiles(sources):
2426 for line_number, line in f.ChangedContents():
2428 # return std::unique_ptr<T>(foo);
2429 # bar = std::unique_ptr<T>(foo);
2431 # return std::unique_ptr<T[]>(foo);
2432 # bar = std::unique_ptr<T[]>(foo);
2433 # And also allow cases when the second template argument is present. Those
2434 # cases cannot be handled by std::make_unique:
2435 # return std::unique_ptr<T, U>(foo);
2436 # bar = std::unique_ptr<T, U>(foo);
2437 local_path = f.LocalPath()
2438 return_construct_result = return_construct_pattern.search(line)
# Only single-template-argument constructions are replaceable by
# std::make_unique; <T, Deleter> forms are allowed.
2439 if return_construct_result and not HasMoreThanOneArg(
2440 return_construct_result.group('template_arg')):
2441 problems_constructor.append(
2442 '%s:%d\n %s' % (local_path, line_number, line.strip()))
2444 # std::unique_ptr<T>()
2445 if null_construct_pattern.search(line):
2446 problems_nullptr.append(
2447 '%s:%d\n %s' % (local_path, line_number, line.strip()))
2450 if problems_nullptr:
2451 errors.append(output_api.PresubmitError(
2452 'The following files use std::unique_ptr<T>(). Use nullptr instead.',
2454 if problems_constructor:
2455 errors.append(output_api.PresubmitError(
2456 'The following files use explicit std::unique_ptr constructor.'
2457 'Use std::make_unique<T>() instead.',
2458 problems_constructor))
2462 def CheckUserActionUpdate(input_api, output_api):
2463 """Checks if any new user action has been added."""
# If actions.xml itself is in the CL, its own PRESUBMIT performs the full
# check, so skip the cheap lookup here.
2464 if any('actions.xml' == input_api.os_path.basename(f) for f in
2465 input_api.LocalPaths()):
2466 # If actions.xml is already included in the changelist, the PRESUBMIT
2467 # for actions.xml will do a more complete presubmit check.
2470 file_inclusion_pattern = [r'.*\.(cc|mm)$']
2471 files_to_skip = (_EXCLUDED_PATHS +
2472 _TEST_CODE_EXCLUDED_PATHS +
2473 input_api.DEFAULT_FILES_TO_SKIP )
2474 file_filter = lambda f: input_api.FilterSourceFile(
2475 f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
# Captures the action name from UserMetricsAction("Name") call sites.
2477 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
2478 current_actions = None
2479 for f in input_api.AffectedFiles(file_filter=file_filter):
2480 for line_num, line in f.ChangedContents():
2481 match = input_api.re.search(action_re, line)
2483 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
# Lazily read the actions registry once; reuse it for all later matches.
# NOTE(review): relies on the presubmit running with the repo root as the
# working directory — the path is relative.
2485 if not current_actions:
2486 with open('tools/metrics/actions/actions.xml') as actions_f:
2487 current_actions = actions_f.read()
2488 # Search for the matched user action name in |current_actions|.
2489 for action_name in match.groups():
2490 action = 'name="{0}"'.format(action_name)
2491 if action not in current_actions:
2492 return [output_api.PresubmitPromptWarning(
2493 'File %s line %d: %s is missing in '
2494 'tools/metrics/actions/actions.xml. Please run '
2495 'tools/metrics/actions/extract_actions.py to update.'
2496 % (f.LocalPath(), line_num, action_name))]
# Imports tools/json_comment_eater by extending sys.path at runtime; used to
# strip comments from JSON before parsing.
2500 def _ImportJSONCommentEater(input_api):
2502 sys.path = sys.path + [input_api.os_path.join(
2503 input_api.PresubmitLocalPath(),
2504 'tools', 'json_comment_eater')]
2505 import json_comment_eater
2506 return json_comment_eater
# Parses |filename| as JSON (optionally stripping // comments first) and,
# per the except clause, reports ValueError-based parse failures; the return
# statements are elided in this excerpt.
2509 def _GetJSONParseError(input_api, filename, eat_comments=True):
2511 contents = input_api.ReadFile(filename)
2513 json_comment_eater = _ImportJSONCommentEater(input_api)
# Nom() strips comment syntax so the standard JSON parser can accept it.
2514 contents = json_comment_eater.Nom(contents)
2516 input_api.json.loads(contents)
2517 except ValueError as e:
# Validates an extension-API IDL file by piping its contents through
# tools/json_schema_compiler/idl_schema.py in a subprocess; returns the
# parser's stderr output, or None on success.
2522 def _GetIDLParseError(input_api, filename):
2524 contents = input_api.ReadFile(filename)
2525 idl_schema = input_api.os_path.join(
2526 input_api.PresubmitLocalPath(),
2527 'tools', 'json_schema_compiler', 'idl_schema.py')
2528 process = input_api.subprocess.Popen(
2529 [input_api.python_executable, idl_schema],
2530 stdin=input_api.subprocess.PIPE,
2531 stdout=input_api.subprocess.PIPE,
2532 stderr=input_api.subprocess.PIPE,
2533 universal_newlines=True)
# stdout is discarded; only the error stream matters for validation.
2534 (_, error) = process.communicate(input=contents)
2535 return error or None
2536 except ValueError as e:
2540 def CheckParseErrors(input_api, output_api):
2541 """Check that IDL and JSON files do not contain syntax errors."""
# Dispatch table: file extension -> parse-checking function.
2543 '.idl': _GetIDLParseError,
2544 '.json': _GetJSONParseError,
2546 # Most JSON files are preprocessed and support comments, but these do not.
2547 json_no_comments_patterns = [
2550 # Only run IDL checker on files in these directories.
2551 idl_included_patterns = [
2552 r'^chrome[\\/]common[\\/]extensions[\\/]api[\\/]',
2553 r'^extensions[\\/]common[\\/]api[\\/]',
# Returns the parse function for this file's extension, or None.
2556 def get_action(affected_file):
2557 filename = affected_file.LocalPath()
2558 return actions.get(input_api.os_path.splitext(filename)[1])
2560 def FilterFile(affected_file):
2561 action = get_action(affected_file)
2564 path = affected_file.LocalPath()
# Known test-data / deliberately invalid JSON is exempt.
2566 if _MatchesFile(input_api,
2567 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS,
2571 if (action == _GetIDLParseError and
2572 not _MatchesFile(input_api, idl_included_patterns, path)):
2577 for affected_file in input_api.AffectedFiles(
2578 file_filter=FilterFile, include_deletes=False):
2579 action = get_action(affected_file)
# JSON files on the no-comments list must parse without comment stripping.
2581 if (action == _GetJSONParseError and
2582 _MatchesFile(input_api, json_no_comments_patterns,
2583 affected_file.LocalPath())):
2584 kwargs['eat_comments'] = False
2585 parse_error = action(input_api,
2586 affected_file.AbsoluteLocalPath(),
2589 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
2590 (affected_file.LocalPath(), parse_error)))
2594 def CheckJavaStyle(input_api, output_api):
2595 """Runs checkstyle on changed java files and returns errors if any exist."""
2597 original_sys_path = sys.path
# Temporarily extend sys.path so the vendored checkstyle wrapper can be
# imported; restored below so the import has no lasting side effect.
2599 sys.path = sys.path + [input_api.os_path.join(
2600 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
2603 # Restore sys.path to what it was before.
2604 sys.path = original_sys_path
2606 return checkstyle.RunCheckstyle(
2607 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
2608 files_to_skip=_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP)
2611 def CheckPythonDevilInit(input_api, output_api):
2612 """Checks to make sure devil is initialized correctly in python scripts."""
# Both of these initialization entry points bypass Chromium's defaults and
# are therefore flagged below.
2613 script_common_initialize_pattern = input_api.re.compile(
2614 r'script_common\.InitializeEnvironment\(')
2615 devil_env_config_initialize = input_api.re.compile(
2616 r'devil_env\.config\.Initialize\(')
# Scan all changed .py files, except devil_chromium.py itself (which is the
# sanctioned initializer) and third_party code.
2620 sources = lambda affected_file: input_api.FilterSourceFile(
2622 files_to_skip=(_EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP +
2623 (r'^build[\\/]android[\\/]devil_chromium\.py',
2624 r'^third_party[\\/].*',)),
2625 files_to_check=[r'.*\.py$'])
2627 for f in input_api.AffectedSourceFiles(sources):
2628 for line_num, line in f.ChangedContents():
2629 if (script_common_initialize_pattern.search(line) or
2630 devil_env_config_initialize.search(line)):
2631 errors.append("%s:%d" % (f.LocalPath(), line_num))
2636 results.append(output_api.PresubmitError(
2637 'Devil initialization should always be done using '
2638 'devil_chromium.Initialize() in the chromium project, to use better '
2639 'defaults for dependencies (ex. up-to-date version of adb).',
# Returns whether |path| matches any regex in |patterns| (re.search
# semantics); the True/False returns are elided in this excerpt.
2645 def _MatchesFile(input_api, patterns, path):
2646 for pattern in patterns:
2647 if input_api.re.search(pattern, path):
2652 def _GetOwnersFilesToCheckForIpcOwners(input_api):
2653 """Gets a list of OWNERS files to check for correct security owners.
2656 A dictionary mapping an OWNER file to the list of OWNERS rules it must
2657 contain to cover IPC-related files with noparent reviewer rules.
2659 # Whether or not a file affects IPC is (mostly) determined by a simple list
2660 # of filename patterns.
# Glob patterns for IPC-related files (traits, converters, etc.).
# NOTE(review): further patterns (e.g. *_messages*.h, *.mojom) exist in the
# elided lines of this listing.
2665 '*_param_traits*.*',
2668 '*_mojom_traits*.*',
2669 '*_struct_traits*.*',
2670 '*_type_converter*.*',
2672 # Android native IPC:
2674 # Blink uses a different file naming convention:
2678 '*TypeConverter*.*',
2681 # These third_party directories do not contain IPCs, but contain files
2682 # matching the above patterns, which trigger false positives.
2684 'third_party/crashpad/*',
2685 'third_party/blink/renderer/platform/bindings/*',
2686 'third_party/protobuf/benchmarks/python/*',
2687 'third_party/win_build_output/*',
2688 'third_party/feed_library/*',
2689 # These files are just used to communicate between class loaders running
2690 # in the same process.
2691 'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
2692 'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',
2696 # Dictionary mapping an OWNERS file path to Patterns.
2697 # Patterns is a dictionary mapping glob patterns (suitable for use in per-file
2698 # rules ) to a PatternEntry.
2699 # PatternEntry is a dictionary with two keys:
2700 # - 'files': the files that are matched by this pattern
2701 # - 'rules': the per-file rules needed for this pattern
2702 # For example, if we expect OWNERS file to contain rules for *.mojom and
2703 # *_struct_traits*.*, Patterns might look like this:
2708 # 'per-file *.mojom=set noparent',
2709 # 'per-file *.mojom=file://ipc/SECURITY_OWNERS',
2712 # '*_struct_traits*.*': {
2715 # 'per-file *_struct_traits*.*=set noparent',
2716 # 'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
# Records that |input_file|'s sibling OWNERS file must carry the noparent +
# security-owners per-file rules for |pattern|.
2722 def AddPatternToCheck(input_file, pattern):
2723 owners_file = input_api.os_path.join(
2724 input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
2725 if owners_file not in to_check:
2726 to_check[owners_file] = {}
2727 if pattern not in to_check[owners_file]:
2728 to_check[owners_file][pattern] = {
2731 'per-file %s=set noparent' % pattern,
2732 'per-file %s=file://ipc/SECURITY_OWNERS' % pattern,
2735 to_check[owners_file][pattern]['files'].append(input_file)
2737 # Iterate through the affected files to see what we actually need to check
2738 # for. We should only nag patch authors about per-file rules if a file in that
2739 # directory would match that pattern. If a directory only contains *.mojom
2740 # files and no *_messages*.h files, we should only nag about rules for
2742 for f in input_api.AffectedFiles(include_deletes=False):
2743 # Manifest files don't have a strong naming convention. Instead, try to find
2744 # affected .cc and .h files which look like they contain a manifest
2746 manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
2747 test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
2748 if (manifest_pattern.search(f.LocalPath()) and not
2749 test_manifest_pattern.search(f.LocalPath())):
2750 # We expect all actual service manifest files to contain at least one
2751 # qualified reference to service_manager::Manifest.
2752 if 'service_manager::Manifest' in '\n'.join(f.NewContents()):
2753 AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
2754 for pattern in file_patterns:
2755 if input_api.fnmatch.fnmatch(
2756 input_api.os_path.basename(f.LocalPath()), pattern):
# Excluded paths (third_party false positives above) are skipped before a
# pattern is recorded.
2758 for exclude in exclude_paths:
2759 if input_api.fnmatch.fnmatch(f.LocalPath(), exclude):
2764 AddPatternToCheck(f, pattern)
2770 def _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check):
2771 """Adds OWNERS files to check for correct Fuchsia security owners."""
2774 # Component specifications.
2775 '*.cml', # Component Framework v2.
2776 '*.cmx', # Component Framework v1.
2778 # Fuchsia IDL protocol specifications.
2782 # Don't check for owners files for changes in these directories.
2784 'third_party/crashpad/*',
# Same shape as the IPC variant above, but the required reviewer rule points
# at fuchsia/SECURITY_OWNERS instead of ipc/SECURITY_OWNERS.
2787 def AddPatternToCheck(input_file, pattern):
2788 owners_file = input_api.os_path.join(
2789 input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
2790 if owners_file not in to_check:
2791 to_check[owners_file] = {}
2792 if pattern not in to_check[owners_file]:
2793 to_check[owners_file][pattern] = {
2796 'per-file %s=set noparent' % pattern,
2797 'per-file %s=file://fuchsia/SECURITY_OWNERS' % pattern,
2800 to_check[owners_file][pattern]['files'].append(input_file)
2802 # Iterate through the affected files to see what we actually need to check
2803 # for. We should only nag patch authors about per-file rules if a file in that
2804 # directory would match that pattern.
2805 for f in input_api.AffectedFiles(include_deletes=False):
2807 for exclude in exclude_paths:
2808 if input_api.fnmatch.fnmatch(f.LocalPath(), exclude):
2813 for pattern in file_patterns:
2814 if input_api.fnmatch.fnmatch(
2815 input_api.os_path.basename(f.LocalPath()), pattern):
2816 AddPatternToCheck(f, pattern)
2822 def CheckSecurityOwners(input_api, output_api):
2823 """Checks that affected files involving IPC have an IPC OWNERS rule."""
2824 to_check = _GetOwnersFilesToCheckForIpcOwners(input_api)
2825 _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check)
2828 # If there are any OWNERS files to check, there are IPC-related changes in
2829 # this CL. Auto-CC the review list.
2830 output_api.AppendCC('ipc-security-reviews@chromium.org')
2832 # Go through the OWNERS files to check, filtering out rules that are already
2833 # present in that OWNERS file.
2834 for owners_file, patterns in to_check.items():
2836 with open(owners_file) as f:
2837 lines = set(f.read().splitlines())
2838 for entry in patterns.values():
# Keep only rules that the OWNERS file does not already contain.
2839 entry['rules'] = [rule for rule in entry['rules'] if rule not in lines
2842 # No OWNERS file, so all the rules are definitely missing.
2845 # All the remaining lines weren't found in OWNERS files, so emit an error.
2847 for owners_file, patterns in to_check.items():
2850 for _, entry in patterns.items():
2851 missing_lines.extend(entry['rules'])
2852 files.extend([' %s' % f.LocalPath() for f in entry['files']])
2855 'Because of the presence of files:\n%s\n\n'
2856 '%s needs the following %d lines added:\n\n%s' %
2857 ('\n'.join(files), owners_file, len(missing_lines),
2858 '\n'.join(missing_lines)))
# Hard failure at commit time, warning on upload.
2862 if input_api.is_committing:
2863 output = output_api.PresubmitError
2865 output = output_api.PresubmitPromptWarning
2866 results.append(output(
2867 'Found OWNERS files that need to be updated for IPC security ' +
2868 'review coverage.\nPlease update the OWNERS files below:',
2869 long_text='\n\n'.join(errors)))
2874 def _GetFilesUsingSecurityCriticalFunctions(input_api):
2875 """Checks affected files for changes to security-critical calls. This
2876 function checks the full change diff, to catch both additions/changes
2879 Returns a dict keyed by file name, and the value is a set of detected
2882 # Map of function pretty name (displayed in an error) to the pattern to
2884 _PATTERNS_TO_CHECK = {
2885 'content::GetServiceSandboxType<>()':
2886 'GetServiceSandboxType\\<'
# Pre-compile each pattern once, keeping the display-name keys.
2888 _PATTERNS_TO_CHECK = {
2889 k: input_api.re.compile(v)
2890 for k, v in _PATTERNS_TO_CHECK.items()
2893 # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
2894 files_to_functions = {}
2895 for f in input_api.AffectedFiles():
2896 diff = f.GenerateScmDiff()
2897 for line in diff.split('\n'):
2898 # Not using just RightHandSideLines() because removing a
2899 # call to a security-critical function can be just as important
2900 # as adding or changing the arguments.
# '+'/'-' prefixed diff lines only; '++'/'--' would be diff headers.
2901 if line.startswith('-') or (line.startswith('+') and
2902 not line.startswith('++')):
2903 for name, pattern in _PATTERNS_TO_CHECK.items():
2904 if pattern.search(line):
2905 path = f.LocalPath()
2906 if not path in files_to_functions:
2907 files_to_functions[path] = set()
2908 files_to_functions[path].add(name)
2909 return files_to_functions
2912 def CheckSecurityChanges(input_api, output_api):
2913 """Checks that changes involving security-critical functions are reviewed
2914 by the security team.
2916 files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
# Nothing security-sensitive touched: nothing to enforce.
2917 if not len(files_to_functions):
2920 owner_email, reviewers = (
2921 input_api.canned_checks.GetCodereviewOwnerAndReviewers(
2924 approval_needed=input_api.is_committing))
2926 # Load the OWNERS file for security changes.
2927 owners_file = 'ipc/SECURITY_OWNERS'
2928 security_owners = input_api.owners_client.ListOwners(owners_file)
# Satisfied as soon as any listed security owner is among the reviewers.
2929 has_security_owner = any([owner in reviewers for owner in security_owners])
2930 if has_security_owner:
2933 msg = 'The following files change calls to security-sensive functions\n' \
2934 'that need to be reviewed by {}.\n'.format(owners_file)
2935 for path, names in files_to_functions.items():
2936 msg += ' {}\n'.format(path)
2938 msg += ' {}\n'.format(name)
# Error when committing, informational otherwise.
2941 if input_api.is_committing:
2942 output = output_api.PresubmitError
2944 output = output_api.PresubmitNotifyResult
2945 return [output(msg)]
2948 def CheckSetNoParent(input_api, output_api):
2949 """Checks that set noparent is only used together with an OWNERS file in
2950 //build/OWNERS.setnoparent (see also
2951 //docs/code_reviews.md#owners-files-details)
# Read the allowlist of owners files that may accompany 'set noparent';
# blank lines and '#' comments are ignored.
2955 allowed_owners_files_file = 'build/OWNERS.setnoparent'
2956 allowed_owners_files = set()
2957 with open(allowed_owners_files_file, 'r') as f:
2960 if not line or line.startswith('#'):
2962 allowed_owners_files.add(line)
2964 per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')
2966 for f in input_api.AffectedFiles(include_deletes=False):
2967 if not f.LocalPath().endswith('OWNERS'):
# Maps a per-file glob ('' for file-level directives) to where it was seen.
2970 found_owners_files = set()
2971 found_set_noparent_lines = dict()
2973 # Parse the OWNERS file.
2974 for lineno, line in enumerate(f.NewContents(), 1):
2976 if line.startswith('set noparent'):
2977 found_set_noparent_lines[''] = lineno
2978 if line.startswith('file://'):
2979 if line in allowed_owners_files:
2980 found_owners_files.add('')
2981 if line.startswith('per-file'):
2982 match = per_file_pattern.match(line)
2984 glob = match.group(1).strip()
2985 directive = match.group(2).strip()
2986 if directive == 'set noparent':
2987 found_set_noparent_lines[glob] = lineno
2988 if directive.startswith('file://'):
2989 if directive in allowed_owners_files:
2990 found_owners_files.add(glob)
2992 # Check that every set noparent line has a corresponding file:// line
2993 # listed in build/OWNERS.setnoparent. An exception is made for top level
2994 # directories since src/OWNERS shouldn't review them.
2995 if (f.LocalPath().count('/') != 1 and
2996 (not f.LocalPath() in _EXCLUDED_SET_NO_PARENT_PATHS)):
2997 for set_noparent_line in found_set_noparent_lines:
2998 if set_noparent_line in found_owners_files:
3000 errors.append(' %s:%d' % (f.LocalPath(),
3001 found_set_noparent_lines[set_noparent_line]))
# Error when committing, warning on upload.
3005 if input_api.is_committing:
3006 output = output_api.PresubmitError
3008 output = output_api.PresubmitPromptWarning
3009 results.append(output(
3010 'Found the following "set noparent" restrictions in OWNERS files that '
3011 'do not include owners from build/OWNERS.setnoparent:',
3012 long_text='\n\n'.join(errors)))
3016 def CheckUselessForwardDeclarations(input_api, output_api):
3017 """Checks that added or removed lines in non third party affected
3018 header files do not lead to new useless class or struct forward
# Forward declarations at column 0: 'class Foo;' / 'struct Foo;'.
3022 class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
3023 input_api.re.MULTILINE)
3024 struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
3025 input_api.re.MULTILINE)
3026 for f in input_api.AffectedFiles(include_deletes=False):
# Skip third_party, except Blink which follows Chromium conventions.
3027 if (f.LocalPath().startswith('third_party') and
3028 not f.LocalPath().startswith('third_party/blink') and
3029 not f.LocalPath().startswith('third_party\\blink')):
3032 if not f.LocalPath().endswith('.h'):
3035 contents = input_api.ReadFile(f)
3036 fwd_decls = input_api.re.findall(class_pattern, contents)
3037 fwd_decls.extend(input_api.re.findall(struct_pattern, contents))
3039 useless_fwd_decls = []
3040 for decl in fwd_decls:
# Count whole-word occurrences of the name in the file; the declaration
# itself accounts for one, so the threshold check (elided here) presumably
# treats a count of 1 as "declared but never used".
3041 count = sum(1 for _ in input_api.re.finditer(
3042 r'\b%s\b' % input_api.re.escape(decl), contents))
3044 useless_fwd_decls.append(decl)
3046 if not useless_fwd_decls:
# Only warn when this CL's diff actually touched a line mentioning the
# now-useless declaration, so pre-existing cruft isn't blamed on the author.
3049 for line in f.GenerateScmDiff().splitlines():
3050 if (line.startswith('-') and not line.startswith('--') or
3051 line.startswith('+') and not line.startswith('++')):
3052 for decl in useless_fwd_decls:
3053 if input_api.re.search(r'\b%s\b' % decl, line[1:]):
3054 results.append(output_api.PresubmitPromptWarning(
3055 '%s: %s forward declaration is no longer needed' %
3056 (f.LocalPath(), decl)))
3057 useless_fwd_decls.remove(decl)
3061 def _CheckAndroidDebuggableBuild(input_api, output_api):
3062 """Checks that code uses BuildInfo.isDebugAndroid() instead of
3063 Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
3064 this is a debuggable build of Android.
# Matches both argument orders: Build.TYPE.equals(...) and
# ...equals(Build.TYPE).
3066 build_type_check_pattern = input_api.re.compile(
3067 r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
3071 sources = lambda affected_file: input_api.FilterSourceFile(
3073 files_to_skip=(_EXCLUDED_PATHS +
3074 _TEST_CODE_EXCLUDED_PATHS +
3075 input_api.DEFAULT_FILES_TO_SKIP +
3076 (r"^android_webview[\\/]support_library[\\/]"
3077 "boundary_interfaces[\\/]",
3078 r"^chrome[\\/]android[\\/]webapk[\\/].*",
3079 r'^third_party[\\/].*',
3080 r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
3081 r"webview[\\/]chromium[\\/]License.*",)),
3082 files_to_check=[r'.*\.java$'])
3084 for f in input_api.AffectedSourceFiles(sources):
3085 for line_num, line in f.ChangedContents():
3086 if build_type_check_pattern.search(line):
3087 errors.append("%s:%d" % (f.LocalPath(), line_num))
3092 results.append(output_api.PresubmitPromptWarning(
3093 'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
3094 ' Please use BuildInfo.isDebugAndroid() instead.',
3099 # TODO: add unit tests
3100 def _CheckAndroidToastUsage(input_api, output_api):
3101 """Checks that code uses org.chromium.ui.widget.Toast instead of
3102 android.widget.Toast (Chromium Toast doesn't force hardware
3103 acceleration on low-end devices, saving memory).
# Anchored to the whole line so only actual import statements match.
3105 toast_import_pattern = input_api.re.compile(
3106 r'^import android\.widget\.Toast;$')
3110 sources = lambda affected_file: input_api.FilterSourceFile(
3112 files_to_skip=(_EXCLUDED_PATHS +
3113 _TEST_CODE_EXCLUDED_PATHS +
3114 input_api.DEFAULT_FILES_TO_SKIP +
3115 (r'^chromecast[\\/].*',
3116 r'^remoting[\\/].*')),
3117 files_to_check=[r'.*\.java$'])
3119 for f in input_api.AffectedSourceFiles(sources):
3120 for line_num, line in f.ChangedContents():
3121 if toast_import_pattern.search(line):
3122 errors.append("%s:%d" % (f.LocalPath(), line_num))
3127 results.append(output_api.PresubmitError(
3128 'android.widget.Toast usage is detected. Android toasts use hardware'
3129 ' acceleration, and can be\ncostly on low-end devices. Please use'
3130 ' org.chromium.ui.widget.Toast instead.\n'
3131 'Contact dskiba@chromium.org if you have any questions.',
def _CheckAndroidCrLogUsage(input_api, output_api):
  """Checks that new logs using org.chromium.base.Log:
    - Are using 'TAG' as variable name for the tags (warn)
    - Are using a tag that is shorter than 20 characters (error)
  """
  # NOTE(review): several lines of this function appear elided in this
  # excerpt (the list's closing bracket, 'tag_errors'/'results'
  # initialization, a few 'if'/'else' guards and result-block closers) --
  # restore them before running this check.
  # Do not check format of logs in the given files
  cr_log_check_excluded_paths = [
    # //chrome/android/webapk cannot depend on //base
    r"^chrome[\\/]android[\\/]webapk[\\/].*",
    # WebView license viewer code cannot depend on //base; used in stub APK.
    r"^android_webview[\\/]glue[\\/]java[\\/]src[\\/]com[\\/]android[\\/]"
    r"webview[\\/]chromium[\\/]License.*",
    # The customtabs_benchmark is a small app that does not depend on Chromium
    # repository.
    r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",

  # Matches files that explicitly import the Chromium Log class.
  cr_log_import_pattern = input_api.re.compile(
      r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
  # Files living in the org.chromium.base package may use Log unqualified,
  # provided they import no other Log class.
  class_in_base_pattern = input_api.re.compile(
      r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
  has_some_log_import_pattern = input_api.re.compile(
      r'^import .*\.Log;$', input_api.re.MULTILINE)
  # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
  log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
  log_decl_pattern = input_api.re.compile(
      r'static final String TAG = "(?P<name>(.*))"')
  rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')

  REF_MSG = ('See docs/android_logging.md for more info.')
  sources = lambda x: input_api.FilterSourceFile(x,
      files_to_check=[r'.*\.java$'],
      files_to_skip=cr_log_check_excluded_paths)

  tag_decl_errors = []
  tag_length_errors = []
  tag_with_dot_errors = []
  util_log_errors = []

  for f in input_api.AffectedSourceFiles(sources):
    file_content = input_api.ReadFile(f)
    has_modified_logs = False
    if (cr_log_import_pattern.search(file_content) or
        (class_in_base_pattern.search(file_content) and
            not has_some_log_import_pattern.search(file_content))):
      # Checks to run for files using cr log
      for line_num, line in f.ChangedContents():
        if rough_log_decl_pattern.search(line):
          has_modified_logs = True

        # Check if the new line is doing some logging
        match = log_call_pattern.search(line)
          has_modified_logs = True

          # Make sure it uses "TAG"
          if not match.group('tag') == 'TAG':
            tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
      # Report non cr Log function calls in changed lines
      for line_num, line in f.ChangedContents():
        if log_call_pattern.search(line):
          util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))

    if has_modified_logs:
      # Make sure the tag is using the "cr" prefix and is not too long
      match = log_decl_pattern.search(file_content)
      tag_name = match.group('name') if match else None
        tag_decl_errors.append(f.LocalPath())
      elif len(tag_name) > 20:
        tag_length_errors.append(f.LocalPath())
      elif '.' in tag_name:
        tag_with_dot_errors.append(f.LocalPath())

  results.append(output_api.PresubmitPromptWarning(
      'Please define your tags using the suggested format: .\n'
      '"private static final String TAG = "<package tag>".\n'
      'They will be prepended with "cr_" automatically.\n' + REF_MSG,
  if tag_length_errors:
    results.append(output_api.PresubmitError(
        'The tag length is restricted by the system to be at most '
        '20 characters.\n' + REF_MSG,
  results.append(output_api.PresubmitPromptWarning(
      'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
  results.append(output_api.PresubmitPromptWarning(
      'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
  if tag_with_dot_errors:
    results.append(output_api.PresubmitPromptWarning(
        'Dot in log tags cause them to be elided in crash reports.\n' + REF_MSG,
        tag_with_dot_errors))
3248 def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
3249 """Checks that junit.framework.* is no longer used."""
3250 deprecated_junit_framework_pattern = input_api.re.compile(
3251 r'^import junit\.framework\..*;',
3252 input_api.re.MULTILINE)
3253 sources = lambda x: input_api.FilterSourceFile(
3254 x, files_to_check=[r'.*\.java$'], files_to_skip=None)
3256 for f in input_api.AffectedFiles(file_filter=sources):
3257 for line_num, line in f.ChangedContents():
3258 if deprecated_junit_framework_pattern.search(line):
3259 errors.append("%s:%d" % (f.LocalPath(), line_num))
3263 results.append(output_api.PresubmitError(
3264 'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
3265 '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
3266 ' if you have any question.', errors))
3270 def _CheckAndroidTestJUnitInheritance(input_api, output_api):
3271 """Checks that if new Java test classes have inheritance.
3272 Either the new test class is JUnit3 test or it is a JUnit4 test class
3273 with a base class, either case is undesirable.
3275 class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')
3277 sources = lambda x: input_api.FilterSourceFile(
3278 x, files_to_check=[r'.*Test\.java$'], files_to_skip=None)
3280 for f in input_api.AffectedFiles(file_filter=sources):
3281 if not f.OldContents():
3282 class_declaration_start_flag = False
3283 for line_num, line in f.ChangedContents():
3284 if class_declaration_pattern.search(line):
3285 class_declaration_start_flag = True
3286 if class_declaration_start_flag and ' extends ' in line:
3287 errors.append('%s:%d' % (f.LocalPath(), line_num))
3289 class_declaration_start_flag = False
3293 results.append(output_api.PresubmitPromptWarning(
3294 'The newly created files include Test classes that inherits from base'
3295 ' class. Please do not use inheritance in JUnit4 tests or add new'
3296 ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
3297 ' questions.', errors))
3301 def _CheckAndroidTestAnnotationUsage(input_api, output_api):
3302 """Checks that android.test.suitebuilder.annotation.* is no longer used."""
3303 deprecated_annotation_import_pattern = input_api.re.compile(
3304 r'^import android\.test\.suitebuilder\.annotation\..*;',
3305 input_api.re.MULTILINE)
3306 sources = lambda x: input_api.FilterSourceFile(
3307 x, files_to_check=[r'.*\.java$'], files_to_skip=None)
3309 for f in input_api.AffectedFiles(file_filter=sources):
3310 for line_num, line in f.ChangedContents():
3311 if deprecated_annotation_import_pattern.search(line):
3312 errors.append("%s:%d" % (f.LocalPath(), line_num))
3316 results.append(output_api.PresubmitError(
3317 'Annotations in android.test.suitebuilder.annotation have been'
3318 ' deprecated since API level 24. Please use android.support.test.filters'
3319 ' from //third_party/android_support_test_runner:runner_java instead.'
3320 ' Contact yolandyan@chromium.org if you have any questions.', errors))
3324 def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
3325 """Checks if MDPI assets are placed in a correct directory."""
3326 file_filter = lambda f: (f.LocalPath().endswith('.png') and
3327 ('/res/drawable/' in f.LocalPath() or
3328 '/res/drawable-ldrtl/' in f.LocalPath()))
3330 for f in input_api.AffectedFiles(include_deletes=False,
3331 file_filter=file_filter):
3332 errors.append(' %s' % f.LocalPath())
3336 results.append(output_api.PresubmitError(
3337 'MDPI assets should be placed in /res/drawable-mdpi/ or '
3338 '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
3339 '/res/drawable-ldrtl/.\n'
3340 'Contact newt@chromium.org if you have questions.', errors))
def _CheckAndroidWebkitImports(input_api, output_api):
  """Checks that code uses org.chromium.base.Callback instead of
  android.webview.ValueCallback except in the WebView glue layer
  """
  valuecallback_import_pattern = input_api.re.compile(
      r'^import android\.webkit\.ValueCallback;$')

  # NOTE(review): 'errors = []', the lambda's 'affected_file,' argument,
  # 'results = []'/'if errors:' and the final 'return results' appear elided
  # in this excerpt -- restore before running.
  # The glue layer and weblayer are allowed to use android.webkit.* directly.
  sources = lambda affected_file: input_api.FilterSourceFile(
      files_to_skip=(_EXCLUDED_PATHS +
                     _TEST_CODE_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP +
                     (r'^android_webview[\\/]glue[\\/].*',
                      r'^weblayer[\\/].*',)),
      files_to_check=[r'.*\.java$'])

  for f in input_api.AffectedSourceFiles(sources):
    for line_num, line in f.ChangedContents():
      if valuecallback_import_pattern.search(line):
        errors.append("%s:%d" % (f.LocalPath(), line_num))

  results.append(output_api.PresubmitError(
      'android.webkit.ValueCallback usage is detected outside of the glue'
      ' layer. To stay compatible with the support library, android.webkit.*'
      ' classes should only be used inside the glue layer and'
      ' org.chromium.base.Callback should be used instead.',
def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
  """Checks Android XML styles """
  # Temporarily extend sys.path so the checkxmlstyle tool can be imported;
  # sys.path is restored below.
  # NOTE(review): the 'try:'/'finally:' keywords that normally guard this
  # restore appear elided in this excerpt -- confirm against the full file.
  original_sys_path = sys.path
  sys.path = sys.path + [input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'tools', 'android', 'checkxmlstyle')]
  import checkxmlstyle
  # Restore sys.path to what it was before.
  sys.path = original_sys_path

  if is_check_on_upload:
    return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
  return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
class PydepsChecker(object):
  # Determines which .pydeps files are affected by a change and whether they
  # are stale relative to the output of build/print_python_deps.py.
  # NOTE(review): several statements of this class appear elided in this
  # excerpt (branch keywords, dict initialization, imports, early returns);
  # restore them before executing.

  def __init__(self, input_api, pydeps_files):
    self._file_cache = {}
    self._input_api = input_api
    self._pydeps_files = pydeps_files

  def _LoadFile(self, path):
    """Returns the list of paths within a .pydeps file relative to //."""
    # Cache raw file contents so each .pydeps file is read at most once.
    if path not in self._file_cache:
      with open(path) as f:
        self._file_cache[path] = f.read()
    return self._file_cache[path]

  def _ComputeNormalizedPydepsEntries(self, pydeps_path):
    """Returns an iterable of paths within the .pydep, relativized to //."""
    pydeps_data = self._LoadFile(pydeps_path)
    # Entry format depends on whether the file was generated with --gn-paths.
    uses_gn_paths = '--gn-paths' in pydeps_data
    # Non-comment lines are the dependency entries.
    entries = (l for l in pydeps_data.splitlines() if not l.startswith('#'))
    # Paths look like: //foo/bar/baz
    return (e[2:] for e in entries)
    # Paths look like: path/relative/to/file.pydeps
    os_path = self._input_api.os_path
    pydeps_dir = os_path.dirname(pydeps_path)
    return (os_path.normpath(os_path.join(pydeps_dir, e)) for e in entries)

  def _CreateFilesToPydepsMap(self):
    """Returns a map of local_path -> list_of_pydeps."""
    for pydep_local_path in self._pydeps_files:
      for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
        ret.setdefault(path, []).append(pydep_local_path)

  def ComputeAffectedPydeps(self):
    """Returns an iterable of .pydeps files that might need regenerating."""
    affected_pydeps = set()
    file_to_pydeps_map = None  # Built lazily, only if some .py file changed.
    for f in self._input_api.AffectedFiles(include_deletes=True):
      local_path = f.LocalPath()
      # Changes to DEPS can lead to .pydeps changes if any .py files are in
      # subrepositories. We can't figure out which files change, so re-check
      # Changes to print_python_deps.py affect all .pydeps.
      if local_path in ('DEPS', 'PRESUBMIT.py') or local_path.endswith(
          'print_python_deps.py'):
        return self._pydeps_files
      elif local_path.endswith('.pydeps'):
        if local_path in self._pydeps_files:
          affected_pydeps.add(local_path)
      elif local_path.endswith('.py'):
        if file_to_pydeps_map is None:
          file_to_pydeps_map = self._CreateFilesToPydepsMap()
        affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
    return affected_pydeps

  def DetermineIfStale(self, pydeps_path):
    """Runs print_python_deps.py to see if the file is stale."""
    old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
    # The second line of a .pydeps file records the command used to
    # generate it (prefixed by a comment character).
    cmd = old_pydeps_data[1][1:].strip()
    if '--output' not in cmd:
      cmd += ' --output ' + pydeps_path
    old_contents = old_pydeps_data[2:]
    # A default cmd that should work in most cases (as long as pydeps filename
    # matches the script name) so that PRESUBMIT.py does not crash if pydeps
    # file is empty/new.
    cmd = 'build/print_python_deps.py {} --root={} --output={}'.format(
        pydeps_path[:-4], os.path.dirname(pydeps_path), pydeps_path)
    env = dict(os.environ)
    env['PYTHONDONTWRITEBYTECODE'] = '1'
    new_pydeps_data = self._input_api.subprocess.check_output(
        cmd + ' --output ""', shell=True, env=env)
    new_contents = new_pydeps_data.splitlines()[2:]
    if old_contents != new_contents:
      return cmd, '\n'.join(difflib.context_diff(old_contents, new_contents))
def _ParseGclientArgs():
  # Parses build/config/gclient_args.gni into an {attribute: value} dict of
  # stripped strings.
  # NOTE(review): the 'args' initialization, loop header, 'continue' and the
  # final 'return args' appear elided in this excerpt.
  with open('build/config/gclient_args.gni', 'r') as f:
      # Skip blank lines and comments.
      if not line or line.startswith('#'):
      attribute, value = line.split('=')
      args[attribute.strip()] = value.strip()
def CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
  """Checks if a .pydeps file needs to be regenerated."""
  # NOTE(review): 'return []' after the platform check, 'results = []', the
  # 'try:' keyword and the trailing 'return results' appear elided in this
  # excerpt -- restore before running.
  # This check is for Python dependency lists (.pydeps files), and involves
  # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
  # doesn't work on Windows and Mac, so skip it on other platforms.
  if input_api.platform != 'linux2':
  is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
  # Android-specific pydeps are only verifiable in an Android checkout.
  pydeps_to_check = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
  # First, check for new / deleted .pydeps.
  for f in input_api.AffectedFiles(include_deletes=True):
    # Check whether we are running the presubmit check for a file in src.
    # f.LocalPath is relative to repo (src, or internal repo).
    # os_path.exists is relative to src repo.
    # Therefore if os_path.exists is true, it means f.LocalPath is relative
    # to src and we can conclude that the pydeps is in src.
    if input_api.os_path.exists(f.LocalPath()):
      if f.LocalPath().endswith('.pydeps'):
        if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
          results.append(output_api.PresubmitError(
              'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
              'remove %s' % f.LocalPath()))
        elif f.Action() != 'D' and f.LocalPath() not in _ALL_PYDEPS_FILES:
          results.append(output_api.PresubmitError(
              'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
              'include %s' % f.LocalPath()))

  checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
  affected_pydeps = set(checker.ComputeAffectedPydeps())
  affected_android_pydeps = affected_pydeps.intersection(
      set(_ANDROID_SPECIFIC_PYDEPS_FILES))
  if affected_android_pydeps and not is_android:
    results.append(output_api.PresubmitPromptOrNotify(
        'You have changed python files that may affect pydeps for android\n'
        'specific scripts. However, the relevant presumbit check cannot be\n'
        'run because you are not using an Android checkout. To validate that\n'
        'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
        'use the android-internal-presubmit optional trybot.\n'
        'Possibly stale pydeps files:\n{}'.format(
            '\n'.join(affected_android_pydeps))))

  affected_pydeps_to_check = affected_pydeps.intersection(set(pydeps_to_check))
  for pydep_path in affected_pydeps_to_check:
      result = checker.DetermineIfStale(pydep_path)
          results.append(output_api.PresubmitError(
              'File is stale: %s\nDiff (apply to fix):\n%s\n'
              'To regenerate, run:\n\n    %s' %
              (pydep_path, diff, cmd)))
    except input_api.subprocess.CalledProcessError as error:
      return [output_api.PresubmitError('Error running: %s' % error.cmd,
          long_text=error.output)]
def CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    files_to_skip = (_EXCLUDED_PATHS +
                     input_api.DEFAULT_FILES_TO_SKIP +
                     (r"^base[\\/]memory[\\/]singleton\.h$",
                      r"^net[\\/]quic[\\/]platform[\\/]impl[\\/]"
                      r"quic_singleton_impl\.h$"))
    return input_api.FilterSourceFile(affected_file,
                                      files_to_skip=files_to_skip)

  # Negative lookbehind allows the 'class base::Singleton<T>' friend form.
  pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
  # NOTE(review): the 'files = []' initialization, the matching-file append/
  # break and the 'if files:' guard appear elided in this excerpt.
  for f in input_api.AffectedSourceFiles(FileFilter):
    # Only scan header-like files.
    if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
        f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
      contents = input_api.ReadFile(f)
      for line in contents.splitlines(False):
        if (not line.lstrip().startswith('//') and  # Strip C++ comment.
            pattern.search(line)):
  return [output_api.PresubmitError(
      'Found base::Singleton<T> in the following header files.\n' +
      'Please move them to an appropriate source file so that the ' +
      'template gets instantiated in a single compilation unit.',
3592 ( "-webkit-box", "flex" ),
3593 ( "-webkit-inline-box", "inline-flex" ),
3594 ( "-webkit-flex", "flex" ),
3595 ( "-webkit-inline-flex", "inline-flex" ),
3596 ( "-webkit-min-content", "min-content" ),
3597 ( "-webkit-max-content", "max-content" ),
3600 ( "-webkit-background-clip", "background-clip" ),
3601 ( "-webkit-background-origin", "background-origin" ),
3602 ( "-webkit-background-size", "background-size" ),
3603 ( "-webkit-box-shadow", "box-shadow" ),
3604 ( "-webkit-user-select", "user-select" ),
3607 ( "-webkit-gradient", "gradient" ),
3608 ( "-webkit-repeating-gradient", "repeating-gradient" ),
3609 ( "-webkit-linear-gradient", "linear-gradient" ),
3610 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
3611 ( "-webkit-radial-gradient", "radial-gradient" ),
3612 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
# TODO: add unit tests
def CheckNoDeprecatedCss(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation and iOS CSS for dom distiller
      (reader mode) are ignored by the hooks as it
      needs to be consumed by WebKit. """
  # NOTE(review): 'results = []', one excluded-path entry and the final
  # 'return results' appear elided in this excerpt.
  file_inclusion_pattern = [r".+\.css$"]
  files_to_skip = (_EXCLUDED_PATHS +
                   _TEST_CODE_EXCLUDED_PATHS +
                   input_api.DEFAULT_FILES_TO_SKIP +
                   (r"^chrome/common/extensions/docs",
                    r"^components/dom_distiller/core/css/distilledpage_ios.css",
                    r"^components/neterror/resources/neterror.css",
                    r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, files_to_check=file_inclusion_pattern, files_to_skip=files_to_skip)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      # Flag any occurrence of a deprecated value; suggest the replacement.
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if deprecated_value in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
def CheckForRelativeIncludes(input_api, output_api):
  """Checks that changed C++ files do not add relative ("../") #includes."""
  # NOTE(review): 'bad_files = {}', the 'continue' statements under each
  # guard, the 'if not bad_files:' early-out, 'results = []' and the final
  # 'return results' appear elided in this excerpt.
  for f in input_api.AffectedFiles(include_deletes=False):
    # third_party code (other than blink) is exempt.
    if (f.LocalPath().startswith('third_party') and
        not f.LocalPath().startswith('third_party/blink') and
        not f.LocalPath().startswith('third_party\\blink')):

    if not _IsCPlusPlusFile(input_api, f.LocalPath()):

    relative_includes = [line for _, line in f.ChangedContents()
                         if "#include" in line and "../" in line]
    if not relative_includes:
    bad_files[f.LocalPath()] = relative_includes

  # Build one description per file, listing its offending include lines.
  error_descriptions = []
  for file_path, bad_lines in bad_files.items():
    error_description = file_path
    for line in bad_lines:
      error_description += '\n    ' + line
    error_descriptions.append(error_description)

  results.append(output_api.PresubmitError(
      'You added one or more relative #include paths (including "../").\n'
      'These shouldn\'t be used because they can be used to include headers\n'
      'from code that\'s not correctly specified as a dependency in the\n'
      'relevant BUILD.gn file(s).',
      error_descriptions))
def CheckForCcIncludes(input_api, output_api):
  """Check that nobody tries to include a cc file. It's a relatively
  common error which results in duplicate symbols in object
  files. This may not always break the build until someone later gets
  very confusing linking errors."""
  # NOTE(review): 'results = []', the 'continue' statements under each guard
  # and the closing of the error message / final 'return results' appear
  # elided in this excerpt.
  for f in input_api.AffectedFiles(include_deletes=False):
    # We let third_party code do whatever it wants
    if (f.LocalPath().startswith('third_party') and
        not f.LocalPath().startswith('third_party/blink') and
        not f.LocalPath().startswith('third_party\\blink')):

    if not _IsCPlusPlusFile(input_api, f.LocalPath()):

    for _, line in f.ChangedContents():
      if line.startswith('#include "'):
        included_file = line.split('"')[1]
        if _IsCPlusPlusFile(input_api, included_file):
          # The most common naming for external files with C++ code,
          # apart from standard headers, is to call them foo.inc, but
          # Chromium sometimes uses foo-inc.cc so allow that as well.
          if not included_file.endswith(('.h', '-inc.cc')):
            results.append(output_api.PresubmitError(
                'Only header files or .inc files should be included in other\n'
                'C++ files. Compiling the contents of a cc file more than once\n'
                'will cause duplicate information in the build which may later\n'
                'result in strange link_errors.\n' +
                f.LocalPath() + ':\n    ' +
def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
  # Validates one WATCHLIST_DEFINITIONS entry: a string-literal key mapped to
  # a dict whose single key is the string literal 'filepath'. Returns an
  # error message string on the first violation found.
  # NOTE(review): the 'return (' opener before the last message and the
  # trailing 'return None' appear elided in this excerpt.
  if not isinstance(key, ast.Str):
    return 'Key at line %d must be a string literal' % key.lineno
  if not isinstance(value, ast.Dict):
    return 'Value at line %d must be a dict' % value.lineno
  if len(value.keys) != 1:
    return 'Dict at line %d must have single entry' % value.lineno
  if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
        'Entry at line %d must have a string literal \'filepath\' as key' %
3732 def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
3733 if not isinstance(key, ast.Str):
3734 return 'Key at line %d must be a string literal' % key.lineno
3735 if not isinstance(value, ast.List):
3736 return 'Value at line %d must be a list' % value.lineno
3737 for element in value.elts:
3738 if not isinstance(element, ast.Str):
3739 return 'Watchlist elements on line %d is not a string' % key.lineno
3740 if not email_regex.match(element.s):
3741 return ('Watchlist element on line %d doesn\'t look like a valid ' +
3742 'email: %s') % (key.lineno, element.s)
def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
  # Walks the WATCHLIST_DEFINITIONS and WATCHLISTS dicts in lockstep,
  # validating each pair of entries, that keys match, and that keys are
  # sorted lexicographically. Returns an error string on the first problem.
  # NOTE(review): the loop header, 'ast'/'i'/'last_key' setup, some 'return ('
  # openers and the closing of 'mismatch_template' appear elided in this
  # excerpt.
  mismatch_template = (
      'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '

  email_regex = input_api.re.compile(
      r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")

    if i >= len(wd_dict.keys):
      if i >= len(w_dict.keys):
      return mismatch_template % ('missing', 'line %d' % w_dict.keys[i].lineno)
    elif i >= len(w_dict.keys):
          mismatch_template % ('line %d' % wd_dict.keys[i].lineno, 'missing'))

    wd_key = wd_dict.keys[i]
    w_key = w_dict.keys[i]

    result = _CheckWatchlistDefinitionsEntrySyntax(
        wd_key, wd_dict.values[i], ast)
    if result is not None:
      return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result

    result = _CheckWatchlistsEntrySyntax(
        w_key, w_dict.values[i], ast, email_regex)
    if result is not None:
      return 'Bad entry in WATCHLISTS dict: %s' % result

    # Keys must be identical in both dicts, pairwise.
    if wd_key.s != w_key.s:
      return mismatch_template % (
          '%s at line %d' % (wd_key.s, wd_key.lineno),
          '%s at line %d' % (w_key.s, w_key.lineno))

    if wd_key.s < last_key:
          'WATCHLISTS dict is not sorted lexicographically at line %d and %d' %
          (wd_key.lineno, w_key.lineno))
def _CheckWATCHLISTSSyntax(expression, input_api):
  # Validates the overall shape of the parsed WATCHLISTS file: a single dict
  # with exactly the WATCHLIST_DEFINITIONS and WATCHLISTS entries, then
  # delegates per-entry checks to _CheckWATCHLISTSEntries.
  # NOTE(review): the 'ast' binding and a few 'return (' openers appear
  # elided in this excerpt.
  if not isinstance(expression, ast.Expression):
    return 'WATCHLISTS file must contain a valid expression'
  dictionary = expression.body
  if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
    return 'WATCHLISTS file must have single dict with exactly two entries'

  first_key = dictionary.keys[0]
  first_value = dictionary.values[0]
  second_key = dictionary.keys[1]
  second_value = dictionary.values[1]

  if (not isinstance(first_key, ast.Str) or
      first_key.s != 'WATCHLIST_DEFINITIONS' or
      not isinstance(first_value, ast.Dict)):
        'The first entry of the dict in WATCHLISTS file must be '
        'WATCHLIST_DEFINITIONS dict')

  if (not isinstance(second_key, ast.Str) or
      second_key.s != 'WATCHLISTS' or
      not isinstance(second_value, ast.Dict)):
        'The second entry of the dict in WATCHLISTS file must be '

  return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
def CheckWATCHLISTS(input_api, output_api):
  """Validates the WATCHLISTS file whenever the CL touches it."""
  # NOTE(review): the 'try:' that opens this except-chain and the final
  # 'return []' appear elided in this excerpt.
  for f in input_api.AffectedFiles(include_deletes=False):
    if f.LocalPath() == 'WATCHLISTS':
      contents = input_api.ReadFile(f, 'r')

      # First, make sure that it can be evaluated.
      input_api.ast.literal_eval(contents)
      # Get an AST tree for it and scan the tree for detailed style checking.
      expression = input_api.ast.parse(
          contents, filename='WATCHLISTS', mode='eval')
      except ValueError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]
      except SyntaxError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]
      except TypeError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]

      result = _CheckWATCHLISTSSyntax(expression, input_api)
      if result is not None:
        return [output_api.PresubmitError(result)]
def CheckGnGlobForward(input_api, output_api):
  """Checks that forward_variables_from(invoker, "*") follows best practices.

  As documented at //build/docs/writing_gn_templates.md

  Returns:
    A list with a single PresubmitPromptWarning listing every offending
    .gni file:line, or an empty list.
  """
  def gn_files(f):
    return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gni', ))

  problems = []
  for f in input_api.AffectedSourceFiles(gn_files):
    for line_num, line in f.ChangedContents():
      if 'forward_variables_from(invoker, "*")' in line:
        problems.append(
            'Bare forward_variables_from(invoker, "*") in %s:%d' % (
                f.LocalPath(), line_num))

  if problems:
    # Note: fixed the "visibilty" typo in this user-facing message.
    return [output_api.PresubmitPromptWarning(
        'forward_variables_from("*") without exclusions',
        items=sorted(problems),
        long_text=('The variables "visibility" and "test_only" should be '
                   'explicitly listed in forward_variables_from(). For more '
                   'details, see:\n'
                   'https://chromium.googlesource.com/chromium/src/+/HEAD/'
                   'build/docs/writing_gn_templates.md'
                   '#Using-forward_variables_from'))]
  return []
def CheckNewHeaderWithoutGnChangeOnUpload(input_api, output_api):
  """Checks that newly added header files have corresponding GN changes.
  Note that this is only a heuristic. To be precise, run script:
  build/check_gn_headers.py.
  """
  # NOTE(review): the nested 'def headers(f):' / 'def gn_files(f):' headers,
  # the 'new_headers'/'problems' initializations, a 'continue' and the final
  # 'return []' appear elided in this excerpt.
    return input_api.FilterSourceFile(
      f, files_to_check=(r'.+%s' % _HEADER_EXTENSIONS, ))

  for f in input_api.AffectedSourceFiles(headers):
    # Only newly added ('A') headers are of interest.
    if f.Action() != 'A':
    new_headers.append(f.LocalPath())

    return input_api.FilterSourceFile(f, files_to_check=(r'.+\.gn', ))

  all_gn_changed_contents = ''
  for f in input_api.AffectedSourceFiles(gn_files):
    for _, line in f.ChangedContents():
      all_gn_changed_contents += line

  # Heuristic: the header's basename should appear somewhere in the changed
  # GN content.
  for header in new_headers:
    basename = input_api.os_path.basename(header)
    if basename not in all_gn_changed_contents:
      problems.append(header)

  return [output_api.PresubmitPromptWarning(
      'Missing GN changes for new header files', items=sorted(problems),
      long_text='Please double check whether newly added header files need '
      'corresponding changes in gn or gni files.\nThis checking is only a '
      'heuristic. Run build/check_gn_headers.py to be precise.\n'
      'Read https://crbug.com/661774 for more info.')]
def CheckCorrectProductNameInMessages(input_api, output_api):
  """Check that Chromium-branded strings don't include "Chrome" or vice versa.

  This assumes we won't intentionally reference one product from the other
  """
  # NOTE(review): the 'test_cases = [' opener, the dict braces, the
  # 'all_problems'/'problems' initializations, 'continue' statements, the
  # 'message = (' opener and the final return appear elided in this excerpt.
      "filename_postfix": "google_chrome_strings.grd",
      "correct_name": "Chrome",
      "incorrect_name": "Chromium",
      "filename_postfix": "chromium_strings.grd",
      "correct_name": "Chromium",
      "incorrect_name": "Chrome",

  for test_case in test_cases:
    # Bind the postfix for this branding's .grd files.
    filename_filter = lambda x: x.LocalPath().endswith(
        test_case["filename_postfix"])

    # Check each new line. Can yield false positives in multiline comments, but
    # easier than trying to parse the XML because messages can have nested
    # children, and associating message elements with affected lines is hard.
    for f in input_api.AffectedSourceFiles(filename_filter):
      for line_num, line in f.ChangedContents():
        if "<message" in line or "<!--" in line or "-->" in line:
        if test_case["incorrect_name"] in line:
            "Incorrect product name in %s:%d" % (f.LocalPath(), line_num))

        "Strings in %s-branded string files should reference \"%s\", not \"%s\""
          % (test_case["correct_name"], test_case["correct_name"],
             test_case["incorrect_name"]))
      all_problems.append(
          output_api.PresubmitPromptWarning(message, items=problems))
def CheckForTooLargeFiles(input_api, output_api):
  """Avoid large files, especially binary files, in the repository since
  git doesn't scale well for those. They will be in everyone's repo
  clones forever, forever making Chromium slower to clone and work
  with.

  Returns:
    A list with a single PresubmitError listing each oversized file and its
    size in bytes, or an empty list.
  """
  # Uploading files to cloud storage is not trivial so we don't want
  # to set the limit too low, but the upper limit for "normal" large
  # files seems to be 1-2 MB, with a handful around 5-8 MB, so
  # anything over 20 MB is exceptional.
  TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024  # 20 MB

  too_large_files = []
  for f in input_api.AffectedFiles():
    # Check both added and modified files (but not deleted files).
    if f.Action() in ('A', 'M'):
      size = input_api.os_path.getsize(f.AbsoluteLocalPath())
      if size > TOO_LARGE_FILE_SIZE_LIMIT:
        too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))

  if too_large_files:
    message = (
      'Do not commit large files to git since git scales badly for those.\n' +
      'Instead put the large files in cloud storage and use DEPS to\n' +
      'fetch them.\n' + '\n'.join(too_large_files)
    )
    return [output_api.PresubmitError(
        'Too large files found in commit', long_text=message + '\n')]
  return []
def CheckFuzzTargetsOnUpload(input_api, output_api):
  """Checks specific for fuzz target sources."""
  # NOTE(review): the list's closing bracket, the lambda/filter's
  # 'affected_file,' argument, a 'continue', the early 'return []' and the
  # 'long_text = (' opener appear elided in this excerpt.
  EXPORTED_SYMBOLS = [
      'LLVMFuzzerInitialize',
      'LLVMFuzzerCustomMutator',
      'LLVMFuzzerCustomCrossOver',
  REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'

  def FilterFile(affected_file):
    """Ignore libFuzzer source code."""
    files_to_check = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
    files_to_skip = r"^third_party[\\/]libFuzzer"

    return input_api.FilterSourceFile(
        files_to_check=[files_to_check],
        files_to_skip=[files_to_skip])

  files_with_missing_header = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    contents = input_api.ReadFile(f, 'r')
    # Files already including the exports header are fine.
    if REQUIRED_HEADER in contents:
    # Only files that define one of the optional entry points need it.
    if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
      files_with_missing_header.append(f.LocalPath())

  if not files_with_missing_header:
      'If you define any of the libFuzzer optional functions (%s), it is '
      'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
      'work incorrectly on Mac (crbug.com/687076).\nNote that '
      'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
      'to access command line arguments passed to the fuzzer. Instead, prefer '
      'static initialization and shared resources as documented in '
      'https://chromium.googlesource.com/chromium/src/+/main/testing/'
      'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n' % (
          ', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER)

  return [output_api.PresubmitPromptWarning(
      message="Missing '%s' in:" % REQUIRED_HEADER,
      items=files_with_missing_header,
      long_text=long_text)]
def _CheckNewImagesWarning(input_api, output_api):
  Warns authors who add images into the repo to make sure their images are
  optimized before committing.
  images_added = False
  # Skip anything test-related and junit resources; only inspect Android
  # drawable/mipmap resource directories.
  filter_lambda = lambda x: input_api.FilterSourceFile(
      files_to_skip=(('(?i).*test', r'.*\/junit\/')
                     + input_api.DEFAULT_FILES_TO_SKIP),
      files_to_check=[r'.*\/(drawable|mipmap)' ]
  for f in input_api.AffectedFiles(
      include_deletes=False, file_filter=filter_lambda):
    local_path = f.LocalPath().lower()
    # _IMAGE_EXTENSIONS is defined elsewhere in this file.
    if any(local_path.endswith(extension) for extension in _IMAGE_EXTENSIONS):
      image_paths.append(f)
  # FYI-only: the message itself states this does not block the CQ.
  errors.append(output_api.PresubmitPromptWarning(
      'It looks like you are trying to commit some images. If these are '
      'non-test-only images, please make sure to read and apply the tips in '
      'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
      'binary_size/optimization_advice.md#optimizing-images\nThis check is '
      'FYI only and will not block your CL on the CQ.', image_paths))
def ChecksAndroidSpecificOnUpload(input_api, output_api):
  """Groups upload checks that target android code.

  Returns:
    The concatenated list of presubmit results from every Android-specific
    upload-time check.
  """
  results = []
  results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
  results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
  results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
  results.extend(_CheckAndroidToastUsage(input_api, output_api))
  results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
  results.extend(_CheckAndroidTestJUnitFrameworkImport(input_api, output_api))
  results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
  results.extend(_CheckAndroidWebkitImports(input_api, output_api))
  # True selects the upload-time variant; the commit-time grouping below
  # passes False -- TODO confirm the parameter's exact meaning.
  results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
  results.extend(_CheckNewImagesWarning(input_api, output_api))
  results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
  return results
def ChecksAndroidSpecificOnCommit(input_api, output_api):
  """Groups commit checks that target android code.

  Returns:
    The list of presubmit results from the Android-specific commit-time
    checks (currently only the XML style check).
  """
  results = []
  # False selects the commit-time variant (upload grouping passes True) --
  # TODO confirm the parameter's exact meaning.
  results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
  return results
# TODO(chrishall): could we additionally match on any path owned by
# ui/accessibility/OWNERS ?
# Path regexes whose changes count as accessibility-related; consumed by
# CheckAccessibilityRelnotesField below.
_ACCESSIBILITY_PATHS = (
    r"^chrome[\\/]browser.*[\\/]accessibility[\\/]",
    r"^chrome[\\/]browser[\\/]extensions[\\/]api[\\/]automation.*[\\/]",
    r"^chrome[\\/]renderer[\\/]extensions[\\/]accessibility_.*",
    r"^chrome[\\/]tests[\\/]data[\\/]accessibility[\\/]",
    r"^content[\\/]browser[\\/]accessibility[\\/]",
    r"^content[\\/]renderer[\\/]accessibility[\\/]",
    r"^content[\\/]tests[\\/]data[\\/]accessibility[\\/]",
    r"^extensions[\\/]renderer[\\/]api[\\/]automation[\\/]",
    r"^ui[\\/]accessibility[\\/]",
    r"^ui[\\/]views[\\/]accessibility[\\/]",
def CheckAccessibilityRelnotesField(input_api, output_api):
  """Checks that commits to accessibility code contain an AX-Relnotes field in
  their commit message."""
  def FileFilter(affected_file):
    # Restrict the check to files under the accessibility paths above.
    paths = _ACCESSIBILITY_PATHS
    return input_api.FilterSourceFile(affected_file, files_to_check=paths)

  # Only consider changes affecting accessibility paths.
  if not any(input_api.AffectedFiles(file_filter=FileFilter)):

  # AX-Relnotes can appear in either the description or the footer.
  # When searching the description, require 'AX-Relnotes:' to appear at the
  # beginning of a line.
  ax_regex = input_api.re.compile('ax-relnotes[:=]')
  # The description is lower-cased before matching, so the regex is all
  # lower-case too.
  description_has_relnotes = any(ax_regex.match(line)
      for line in input_api.change.DescriptionText().lower().splitlines())

  footer_relnotes = input_api.change.GitFootersFromDescription().get(
  if description_has_relnotes or footer_relnotes:

  # TODO(chrishall): link to Relnotes documentation in message.
  message = ("Missing 'AX-Relnotes:' field required for accessibility changes"
             "\n please add 'AX-Relnotes: [release notes].' to describe any "
             "user-facing changes"
             "\n otherwise add 'AX-Relnotes: n/a.' if this change has no "
             "user-facing effects"
             "\n if this is confusing or annoying then please contact members "
             "of ui/accessibility/OWNERS.")

  # Notify (not error): the author is reminded but not blocked.
  return [output_api.PresubmitNotifyResult(message)]
# string pattern, sequence of strings to show when pattern matches,
# error flag. True if match is a presubmit error, otherwise it's a warning.
# Consumed by the CheckInclusiveLanguage canned check in ChecksCommon.
_NON_INCLUSIVE_TERMS = (
    # Note that \b pattern in python re is pretty particular. In this
    # regexp, 'class WhiteList ...' will match, but 'class FooWhiteList
    # ...' will not. This may require some tweaking to catch these cases
    # without triggering a lot of false positives. Leaving it naive and
    # less matchy for now.
    r'/\b(?i)((black|white)list|slave)\b', # nocheck
    'Please don\'t use blacklist, whitelist, ' # nocheck
    'or slave in your', # nocheck
    'code and make every effort to use other terms. Using "// nocheck"',
    '"# nocheck" or "<!-- nocheck -->"',
    'at the end of the offending line will bypass this PRESUBMIT error',
    'but avoid using this whenever possible. Reach out to',
    'community@chromium.org if you have questions'),
def ChecksCommon(input_api, output_api):
  """Checks common to both upload and commit."""
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS))

  author = input_api.change.author_email
  # Known robot accounts (e.g. rollers) skip the authorized-author check.
  if author and author not in _KNOWN_ROBOTS:
      input_api.canned_checks.CheckAuthorizedAuthor(input_api, output_api))

    input_api.canned_checks.CheckChangeHasNoTabs(
        # Only .grd files are checked for tabs here.
        source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(input_api.RunTests(
      input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))

  dirmd_bin = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'third_party', 'depot_tools', 'dirmd')
  results.extend(input_api.RunTests(
      input_api.canned_checks.CheckDirMetadataFormat(
          input_api, output_api, dirmd_bin)))
      input_api.canned_checks.CheckOwnersDirMetadataExclusive(
          input_api, output_api))
      input_api.canned_checks.CheckNoNewMetadataInOwners(
          input_api, output_api))
  results.extend(input_api.canned_checks.CheckInclusiveLanguage(
      input_api, output_api,
      excluded_directories_relative_path = [
          'inclusive_language_presubmit_exempt_dirs.txt'
      non_inclusive_terms=_NON_INCLUSIVE_TERMS))

  # Run the PRESUBMIT_test.py next to any modified PRESUBMIT.py.
  for f in input_api.AffectedFiles():
    path, name = input_api.os_path.split(f.LocalPath())
    if name == 'PRESUBMIT.py':
      full_path = input_api.os_path.join(input_api.PresubmitLocalPath(), path)
      test_file = input_api.os_path.join(path, 'PRESUBMIT_test.py')
      if f.Action() != 'D' and input_api.os_path.exists(test_file):
        # The PRESUBMIT.py file (and the directory containing it) might
        # have been affected by being moved or removed, so only try to
        # run the tests if they still exist.
        with open(f.LocalPath()) as fp:
          # The 'USE_PYTHON3 = True' marker in the nested PRESUBMIT.py
          # selects which interpreter its tests run under.
          use_python3 = any(line.startswith('USE_PYTHON3 = True')
                            for line in fp.readlines())
        results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
            input_api, output_api, full_path,
            files_to_check=[r'^PRESUBMIT_test\.py$'],
            run_on_python2=not use_python3,
            run_on_python3=use_python3))
def CheckPatchFiles(input_api, output_api):
  """Warns about leftover .orig/.rej files produced by patch tools.

  Args:
    input_api: the presubmit InputApi.
    output_api: the presubmit OutputApi.

  Returns:
    A one-element list with a PresubmitError naming the offending files,
    or an empty list when none are present.
  """
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
              if f.LocalPath().endswith(('.orig', '.rej'))]
  if problems:
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
  # No leftover patch artifacts in this change.
  return []
def CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
  """Warns when build_config.h macros are tested without including the
  header first (matching is via the regexes below)."""
  # Excludes OS_CHROMEOS, which is not defined in build_config.h.
  macro_re = input_api.re.compile(r'^\s*#(el)?if.*\bdefined\(((OS_(?!CHROMEOS)|'
                                  'COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
  include_re = input_api.re.compile(
      r'^#include\s+"build/build_config.h"', input_api.re.MULTILINE)
  # Used to map a .cc/.mm path to its primary header path.
  extension_re = input_api.re.compile(r'\.[a-z]+$')
  for f in input_api.AffectedFiles(include_deletes=False):
    # Only C/C++/Objective-C sources and headers are relevant.
    if not f.LocalPath().endswith(('.h', '.c', '.cc', '.cpp', '.m', '.mm')):
    found_line_number = None
    all_lines = input_api.ReadFile(f, 'r').splitlines()
    for line_num, line in enumerate(all_lines):
      match = macro_re.search(line)
        found_line_number = line_num
        found_macro = match.group(2)
    if not found_line_number:
    found_include_line = -1
    for line_num, line in enumerate(all_lines):
      if include_re.search(line):
        found_include_line = line_num
    # The include must appear before the first macro use to be effective.
    if found_include_line >= 0 and found_include_line < found_line_number:
    if not f.LocalPath().endswith('.h'):
      # For a non-header, the include may instead live in the file's
      # primary header (foo.cc -> foo.h).
      primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
      content = input_api.ReadFile(primary_header_path, 'r')
      if include_re.search(content):
    errors.append('%s:%d %s macro is used without first including build/'
                  % (f.LocalPath(), found_line_number, found_macro))
  return [output_api.PresubmitPromptWarning('\n'.join(errors))]
def CheckForSuperfluousStlIncludesInHeaders(input_api, output_api):
  """Warns about C++ headers that include STL headers without referencing
  std:: anywhere."""
  stl_include_re = input_api.re.compile(
  std_namespace_re = input_api.re.compile(r'std::')
  for f in input_api.AffectedFiles():
    # _IsCPlusPlusHeaderFile is defined elsewhere in this file.
    if not _IsCPlusPlusHeaderFile(input_api, f.LocalPath()):
    uses_std_namespace = False
    has_stl_include = False
    for line in f.NewContents():
      # Both facts established; no need to scan further.
      if has_stl_include and uses_std_namespace:
      if not has_stl_include and stl_include_re.search(line):
        has_stl_include = True
      if not uses_std_namespace and std_namespace_re.search(line):
        uses_std_namespace = True
    # STL include with no std:: reference is likely superfluous.
    if has_stl_include and not uses_std_namespace:
      errors.append('%s: Includes STL header(s) but does not reference std::'
  return [output_api.PresubmitPromptWarning('\n'.join(errors))]
4331 def _DidYouMeanOSMacro(bad_macro):
4333 return {'A': 'OS_ANDROID',
4344 'W': 'OS_WIN'}[bad_macro[3].upper()]
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  for lnum, line in f.ChangedContents():
    # Only lines that are preprocessor statements can use these macros.
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        # _VALID_OS_MACROS is defined elsewhere in this file.
        if not match.group(1) in _VALID_OS_MACROS:
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append(' %s:%d %s%s' % (f.LocalPath(),
def CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  for f in input_api.AffectedSourceFiles(None):
    # Skip file types where OS_* tokens are not C preprocessor macros.
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
  """Check all affected files for invalid "if defined" macros."""
  # Macros that are always defined, so an #ifdef/defined() test on them is
  # always true and almost certainly a bug (the fix is '#if MACRO').
  ALWAYS_DEFINED_MACROS = (
      "TARGET_IPHONE_SIMULATOR",
      "TARGET_OS_EMBEDDED",
  # Captures the macro name tested by '#ifdef X' or '#if defined(X)'.
  ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
  for lnum, line in f.ChangedContents():
    for match in ifdef_macro.finditer(line):
      if match.group(1) in ALWAYS_DEFINED_MACROS:
        always_defined = ' %s is always defined. ' % match.group(1)
        did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
        results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
def CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  # Imported third-party code keeps its own conventions; skip it.
  skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
  for f in input_api.AffectedFiles():
    if any([f.LocalPath().startswith(path) for path in skipped_paths]):
    # Only C-family sources/headers can contain preprocessor checks.
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
def CheckForIPCRules(input_api, output_api):
  """Check for same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  base_pattern = r'IPC_ENUM_TRAITS\('
  # inclusion_pattern finds uses; comment_pattern excludes ones that only
  # appear inside a // comment on the same line.
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    # IPC traits live in headers; skip everything else.
    if not local_path.endswith('.h'):
    for line_number, line in f.ChangedContents():
      if inclusion_pattern.search(line) and not comment_pattern.search(line):
          '%s:%d\n %s' % (local_path, line_number, line.strip()))
  return [output_api.PresubmitPromptWarning(
      _IPC_ENUM_TRAITS_DEPRECATED, problems)]
def CheckForLongPathnames(input_api, output_api):
  """Check to make sure no files being submitted have long paths.
  This causes issues on Windows.

  Returns:
    A one-element list with a PresubmitError listing over-long paths, or
    an empty list when all paths are within the limit.
  """
  problems = []
  for f in input_api.AffectedTestableFiles():
    local_path = f.LocalPath()
    # Windows has a path limit of 260 characters. Limit path length to 200 so
    # that we have some extra for the prefix on dev machines and the bots.
    if len(local_path) > 200:
      problems.append(local_path)

  if problems:
    # _LONG_PATH_ERROR is a module-level message defined elsewhere.
    return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
  return []
def CheckForIncludeGuards(input_api, output_api):
  """Check that header files have proper guards against multiple inclusion.
  If a file should not have such guards (and it probably should) then it
  should include the string "no-include-guard-because-multiply-included".
  def is_chromium_header_file(f):
    # We only check header files under the control of the Chromium
    # project. That is, those outside third_party apart from
    # third_party/blink.
    # We also exclude *_message_generator.h headers as they use
    # include guards in a special, non-typical way.
    file_with_path = input_api.os_path.normpath(f.LocalPath())
    return (file_with_path.endswith('.h') and
            not file_with_path.endswith('_message_generator.h') and
            (not file_with_path.startswith('third_party') or
             file_with_path.startswith(
                 input_api.os_path.join('third_party', 'blink'))))

  def replace_special_with_underscore(string):
    # Guard names may only contain identifier characters.
    return input_api.re.sub(r'[+\\/.-]', '_', string)

  for f in input_api.AffectedSourceFiles(is_chromium_header_file):
    guard_line_number = None
    seen_guard_end = False
    file_with_path = input_api.os_path.normpath(f.LocalPath())
    base_file_name = input_api.os_path.splitext(
        input_api.os_path.basename(file_with_path))[0]
    upper_base_file_name = base_file_name.upper()
    # Style-guide guard: full path, upper-cased, with a trailing '_'.
    expected_guard = replace_special_with_underscore(
        file_with_path.upper() + '_')

    # For "path/elem/file_name.h" we should really only accept
    # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
    # are too many (1000+) files with slight deviations from the
    # coding style. The most important part is that the include guard
    # is there, and that it's unique, not the name so this check is
    # forgiving for existing files.
    # As code becomes more uniform, this could be made stricter.
    guard_name_pattern_list = [
        # Anything with the right suffix (maybe with an extra _).
        # To cover include guards with old Blink style.
        # Anything including the uppercase name of the file.
        r'\w*' + input_api.re.escape(replace_special_with_underscore(
            upper_base_file_name)) + r'\w*',
    guard_name_pattern = '|'.join(guard_name_pattern_list)
    guard_pattern = input_api.re.compile(
        r'#ifndef\s+(' + guard_name_pattern + ')')

    for line_number, line in enumerate(f.NewContents()):
      if 'no-include-guard-because-multiply-included' in line:
        guard_name = 'DUMMY'  # To not trigger check outside the loop.
      if guard_name is None:
        match = guard_pattern.match(line)
          guard_name = match.group(1)
          guard_line_number = line_number
          # We allow existing files to use include guards whose names
          # don't match the chromium style guide, but new files should
          if not f.OldContents():
            if guard_name != expected_guard:
              errors.append(output_api.PresubmitPromptWarning(
                  'Header using the wrong include guard name %s' % guard_name,
                  ['%s:%d' % (f.LocalPath(), line_number + 1)],
                  'Expected: %r\nFound: %r' % (expected_guard, guard_name)))
        # The line after #ifndef should have a #define of the same name.
        if line_number == guard_line_number + 1:
          expected_line = '#define %s' % guard_name
          if line != expected_line:
            errors.append(output_api.PresubmitPromptWarning(
                'Missing "%s" for include guard' % expected_line,
                ['%s:%d' % (f.LocalPath(), line_number + 1)],
                'Expected: %r\nGot: %r' % (expected_line, line)))
        if not seen_guard_end and line == '#endif // %s' % guard_name:
          seen_guard_end = True
        elif seen_guard_end:
          # Any non-blank line after #endif means the guard does not cover
          # the whole file.
          if line.strip() != '':
            errors.append(output_api.PresubmitPromptWarning(
                'Include guard %s not covering the whole file' % (
                    guard_name), [f.LocalPath()]))
            break  # Nothing else to check and enough to warn once.

    if guard_name is None:
      errors.append(output_api.PresubmitPromptWarning(
          'Missing include guard %s' % expected_guard,
          'Missing include guard in %s\n'
          'Recommended name: %s\n'
          'This check can be disabled by having the string\n'
          'no-include-guard-because-multiply-included in the header.' %
          (f.LocalPath(), expected_guard)))
def CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate|icon)$'

  file_inclusion_pattern = (
      r'.+%s' % _IMPLEMENTATION_EXTENSIONS,
      r'.+%s' % _HEADER_EXTENSIONS
  source_file_filter = lambda f: input_api.FilterSourceFile(
      f, files_to_check=file_inclusion_pattern, files_to_skip=None)
  for f in input_api.AffectedSourceFiles(source_file_filter):
    include_file = False
    # splitlines(True) keeps the line terminators so CRLF is detectable.
    for line in input_api.ReadFile(f, 'r').splitlines(True):
      if line.endswith('\r\n'):
      problems.append(f.LocalPath())
  return [output_api.PresubmitPromptWarning('Are you sure that you want '
      'these files to contain Windows style line endings?\n' +
      '\n'.join(problems))]
def CheckIconFilesForLicenseHeaders(input_api, output_api):
  """Check that .icon files (which are fragments of C++) have license headers.

  Delegates to the CheckLicense canned check, restricted to *.icon files.
  """
  icon_files = (r'.*\.icon$',)
  icons = lambda x: input_api.FilterSourceFile(x, files_to_check=icon_files)
  return input_api.canned_checks.CheckLicense(
      input_api, output_api, source_file_filter=icons)
def CheckForUseOfChromeAppsDeprecations(input_api, output_api):
  """Check source code for use of Chrome App technologies being
  def _CheckForDeprecatedTech(input_api, output_api,
                              detection_list, files_to_check = None, files_to_skip = None):
    # Build a source filter only when the caller narrowed the file set;
    # otherwise fall back to the default filtering.
    if (files_to_check or files_to_skip):
      source_file_filter = lambda f: input_api.FilterSourceFile(
          f, files_to_check=files_to_check,
          files_to_skip=files_to_skip)
      source_file_filter = None
    for f in input_api.AffectedSourceFiles(source_file_filter):
      # Deletions are progress, not a problem.
      if f.Action() == 'D':
      for _, line in f.ChangedContents():
        if any( detect in line for detect in detection_list ):
          problems.append(f.LocalPath())

  # to avoid this presubmit script triggering warnings
  files_to_skip = ['PRESUBMIT.py','PRESUBMIT_test.py']

  # NMF: any files with extensions .nmf or NMF
  _NMF_FILES = r'\.(nmf|NMF)$'
  problems += _CheckForDeprecatedTech(input_api, output_api,
      detection_list = [''], # any change to the file will trigger warning
      files_to_check = [ r'.+%s' % _NMF_FILES ])

  # MANIFEST: any manifest.json that in its diff includes "app":
  _MANIFEST_FILES = r'(manifest\.json)$'
  problems += _CheckForDeprecatedTech(input_api, output_api,
      detection_list = ['"app":'],
      files_to_check = [ r'.*%s' % _MANIFEST_FILES ])

  # NaCl / PNaCl: any file that in its diff contains the strings in the list
  problems += _CheckForDeprecatedTech(input_api, output_api,
      detection_list = ['config=nacl','enable-nacl','cpu=pnacl', 'nacl_io'],
      files_to_skip = files_to_skip + [ r"^native_client_sdk[\\/]"])

  # PPAPI: any C/C++ file that in its diff includes a ppappi library
  problems += _CheckForDeprecatedTech(input_api, output_api,
      detection_list = ['#include "ppapi','#include <ppapi'],
          r'.+%s' % _HEADER_EXTENSIONS,
          r'.+%s' % _IMPLEMENTATION_EXTENSIONS ),
      files_to_skip = [r"^ppapi[\\/]"] )

  return [output_api.PresubmitPromptWarning('You are adding/modifying code'
      'related to technologies which will soon be deprecated (Chrome Apps, NaCl,'
      ' PNaCl, PPAPI). See this blog post for more details:\n'
      'https://blog.chromium.org/2020/08/changes-to-chrome-app-support-timeline.html\n'
      'and this documentation for options to replace these technologies:\n'
      'https://developer.chrome.com/docs/apps/migration/\n'+
      '\n'.join(problems))]
def CheckSyslogUseWarningOnUpload(input_api, output_api, src_file_filter=None):
  """Checks that all source files use SYSLOG properly.

  Args:
    input_api: the presubmit InputApi.
    output_api: the presubmit OutputApi.
    src_file_filter: optional filter forwarded to AffectedSourceFiles.

  Returns:
    A one-element list with a PresubmitPromptWarning listing 'path:line'
    entries for changed lines containing SYSLOG, or an empty list.
  """
  syslog_files = []
  for f in input_api.AffectedSourceFiles(src_file_filter):
    for line_number, line in f.ChangedContents():
      if 'SYSLOG' in line:
        syslog_files.append(f.LocalPath() + ':' + str(line_number))

  if syslog_files:
    return [output_api.PresubmitPromptWarning(
        'Please make sure there are no privacy sensitive bits of data in SYSLOG'
        ' calls.\nFiles to check:\n', items=syslog_files)]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Entry point for upload-time checks; requires presubmit API >= 2.0.0.

  Returns:
    A version-mismatch PresubmitError, or the results of the upload-time
    canned checks.
  """
  if input_api.version < [2, 0, 0]:
    return [output_api.PresubmitError("Your depot_tools is out of date. "
        "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
        "but your version is %d.%d.%d" % tuple(input_api.version))]
  results = []
  results.extend(
      input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
  return results
def CheckChangeOnCommit(input_api, output_api):
  """Entry point for commit-time checks; requires presubmit API >= 2.0.0."""
  if input_api.version < [2, 0, 0]:
    return [output_api.PresubmitError("Your depot_tools is out of date. "
        "This PRESUBMIT.py requires at least presubmit_support version 2.0.0, "
        "but your version is %d.%d.%d" % tuple(input_api.version))]

  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      json_url='http://chromium-status.appspot.com/current?format=json'))
      input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasNoUnwantedTags(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
4746 def CheckStrings(input_api, output_api):
4747 """Check string ICU syntax validity and if translation screenshots exist."""
4748 # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
4749 # footer is set to true.
4750 git_footers = input_api.change.GitFootersFromDescription()
4751 skip_screenshot_check_footer = [
4753 for footer in git_footers.get(u'Skip-Translation-Screenshots-Check', [])]
4754 run_screenshot_check = u'true' not in skip_screenshot_check_footer
4759 from io import StringIO
4761 new_or_added_paths = set(f.LocalPath()
4762 for f in input_api.AffectedFiles()
4763 if (f.Action() == 'A' or f.Action() == 'M'))
4764 removed_paths = set(f.LocalPath()
4765 for f in input_api.AffectedFiles(include_deletes=True)
4766 if f.Action() == 'D')
4769 f for f in input_api.AffectedFiles()
4770 if f.LocalPath().endswith(('.grd', '.grdp'))
4772 affected_grds = [f for f in affected_grds if not 'testdata' in f.LocalPath()]
4773 if not affected_grds:
4776 affected_png_paths = [f.AbsoluteLocalPath()
4777 for f in input_api.AffectedFiles()
4778 if (f.LocalPath().endswith('.png'))]
4780 # Check for screenshots. Developers can upload screenshots using
4781 # tools/translation/upload_screenshots.py which finds and uploads
4782 # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
4783 # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
4784 # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
4786 # The logic here is as follows:
4788 # - If the CL has a .png file under the screenshots directory for a grd
4789 # file, warn the developer. Actual images should never be checked into the
4792 # - If the CL contains modified or new messages in grd files and doesn't
4793 # contain the corresponding .sha1 files, warn the developer to add images
4794 # and upload them via tools/translation/upload_screenshots.py.
4796 # - If the CL contains modified or new messages in grd files and the
4797 # corresponding .sha1 files, everything looks good.
4799 # - If the CL contains removed messages in grd files but the corresponding
4800 # .sha1 files aren't removed, warn the developer to remove them.
4801 unnecessary_screenshots = []
4803 unnecessary_sha1_files = []
4805 # This checks verifies that the ICU syntax of messages this CL touched is
4806 # valid, and reports any found syntax errors.
4807 # Without this presubmit check, ICU syntax errors in Chromium strings can land
4808 # without developers being aware of them. Later on, such ICU syntax errors
4809 # break message extraction for translation, hence would block Chromium
4810 # translations until they are fixed.
4811 icu_syntax_errors = []
  def _CheckScreenshotAdded(screenshots_dir, message_id):
    # A new/changed message must come with an uploaded screenshot, which is
    # represented in the tree by a <message_id>.png.sha1 file.
    sha1_path = input_api.os_path.join(
        screenshots_dir, message_id + '.png.sha1')
    if sha1_path not in new_or_added_paths:
      missing_sha1.append(sha1_path)
  def _CheckScreenshotRemoved(screenshots_dir, message_id):
    # When a message is removed, its .png.sha1 file should be removed too.
    sha1_path = input_api.os_path.join(
        screenshots_dir, message_id + '.png.sha1')
    if input_api.os_path.exists(sha1_path) and sha1_path not in removed_paths:
      unnecessary_sha1_files.append(sha1_path)
  def _ValidateIcuSyntax(text, level, signatures):
    """Validates ICU syntax of a text string.

    Check if text looks similar to ICU and checks for ICU syntax correctness
    in this case. Reports various issues with ICU syntax and values of
    variants. Supports checking of nested messages. Accumulate information of
    each ICU messages found in the text for further checking.

      text: a string to check.
      level: a number of current nesting level.
      signatures: an accumulator, a list of tuple of (level, variable,

      None if a string is not ICU or no issue detected.
      A tuple of (message, start index, end index) if an issue detected.
        # For each type: (known variant keywords, required variant keywords).
        'plural': (frozenset(
            ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
            frozenset(['=1', 'other'])),
        'selectordinal': (frozenset(
            ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
            frozenset(['one', 'other'])),
        'select': (frozenset(), frozenset(['other'])),

    # Check if the message looks like an attempt to use ICU
    # plural. If yes - check if its syntax strictly matches ICU format.
    like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b', text)
      signatures.append((level, None, None, None))

    # Check for valid prefix and suffix
        r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
        r'(plural|selectordinal|select),\s*'
        r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
      return (('This message looks like an ICU plural, '
               'but does not follow ICU syntax.'), like.start(), like.end())
    starting, variable, kind, variant_pairs = m.groups()
    variants, depth, last_pos = _ParseIcuVariants(variant_pairs, m.start(4))
      return ('Invalid ICU format. Unbalanced opening bracket', last_pos,
    ending = text[last_pos:]
      return ('Invalid ICU format. No initial opening bracket', last_pos - 1,
    if not ending or '}' not in ending:
      return ('Invalid ICU format. No final closing bracket', last_pos - 1,
        ('Invalid ICU format. Extra characters at the start of a complex '
         'message (go/icu-message-migration): "%s"') %
        starting, 0, len(starting))
      return (('Invalid ICU format. Extra characters at the end of a complex '
               'message (go/icu-message-migration): "%s"')
              % ending, last_pos - 1, len(text) - 1)
    if kind not in valid_types:
      return (('Unknown ICU message type %s. '
               'Valid types are: plural, select, selectordinal') % kind, 0, 0)
    known, required = valid_types[kind]
    defined_variants = set()
    for variant, variant_range, value, value_range in variants:
      start, end = variant_range
      # Duplicate variant names are invalid.
      if variant in defined_variants:
        return ('Variant "%s" is defined more than once' % variant,
      elif known and variant not in known:
        return ('Variant "%s" is not valid for %s message' % (variant, kind),
      defined_variants.add(variant)
      # Check for nested structure
      # value[1:-1] strips the braces; offsets are rebased to the full text.
      res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
        return (res[0], res[1] + value_range[0] + 1,
                res[2] + value_range[0] + 1)
    missing = required - defined_variants
      return ('Required variants missing: %s' % ', '.join(missing), 0,
    signatures.append((level, variable, kind, defined_variants))
  def _ParseIcuVariants(text, offset=0):
    """Parse variants part of ICU complex message.

    Builds a tuple of variant names and values, as well as
    their offsets in the input string.

      text: a string to parse
      offset: additional offset to add to positions in the text to get correct
        position in the complete ICU string.

      List of tuples, each tuple consist of four fields: variant name,
      variant name span (tuple of two integers), variant value, value
      span (tuple of two integers).
    # depth tracks brace nesting; start/end bracket the current value.
    depth, start, end = 0, -1, -1
    for idx, char in enumerate(text):
        # The variant key is the text between the previous value and the
        # current opening brace.
        chunk = text[end + 1:start]
        pos = offset + end + 1 + chunk.find(key)
        span = (pos, pos + len(key))
        # Unbalanced brackets: report current depth and position.
        return variants, depth, offset + idx
      variants.append((key, span, text[start:end + 1], (offset + start,
    return variants, depth, offset + end + 1
4957 old_sys_path = sys.path
4958 sys.path = sys.path + [input_api.os_path.join(
4959 input_api.PresubmitLocalPath(), 'tools', 'translation')]
4960 from helper import grd_helper
4962 sys.path = old_sys_path
4964 for f in affected_grds:
4965 file_path = f.LocalPath()
4966 old_id_to_msg_map = {}
4967 new_id_to_msg_map = {}
4968 # Note that this code doesn't check if the file has been deleted. This is
4969 # OK because it only uses the old and new file contents and doesn't load
4970 # the file via its path.
4971 # It's also possible that a file's content refers to a renamed or deleted
4972 # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
4973 # is OK as well, because grd_helper ignores <part> tags when loading .grd or
4975 if file_path.endswith('.grdp'):
4977 old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
4978 '\n'.join(f.OldContents()))
4980 new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
4981 '\n'.join(f.NewContents()))
4983 file_dir = input_api.os_path.dirname(file_path) or '.'
4985 old_id_to_msg_map = grd_helper.GetGrdMessages(
4986 StringIO('\n'.join(f.OldContents())), file_dir)
4988 new_id_to_msg_map = grd_helper.GetGrdMessages(
4989 StringIO('\n'.join(f.NewContents())), file_dir)
4991 grd_name, ext = input_api.os_path.splitext(
4992 input_api.os_path.basename(file_path))
4993 screenshots_dir = input_api.os_path.join(
4994 input_api.os_path.dirname(file_path), grd_name + ext.replace('.', '_'))
4996 # Compute added, removed and modified message IDs.
4997 old_ids = set(old_id_to_msg_map)
4998 new_ids = set(new_id_to_msg_map)
4999 added_ids = new_ids - old_ids
5000 removed_ids = old_ids - new_ids
5001 modified_ids = set([])
5002 for key in old_ids.intersection(new_ids):
5003 if (old_id_to_msg_map[key].ContentsAsXml('', True)
5004 != new_id_to_msg_map[key].ContentsAsXml('', True)):
5005 # The message content itself changed. Require an updated screenshot.
5006 modified_ids.add(key)
5007 elif old_id_to_msg_map[key].attrs['meaning'] != \
5008 new_id_to_msg_map[key].attrs['meaning']:
5009 # The message meaning changed. Ensure there is a screenshot for it.
5010 sha1_path = input_api.os_path.join(screenshots_dir, key + '.png.sha1')
5011 if sha1_path not in new_or_added_paths and not \
5012 input_api.os_path.exists(sha1_path):
5013 # There is neither a previous screenshot nor is a new one added now.
5014 # Require a screenshot.
5015 modified_ids.add(key)
5017 if run_screenshot_check:
5018 # Check the screenshot directory for .png files. Warn if there is any.
5019 for png_path in affected_png_paths:
5020 if png_path.startswith(screenshots_dir):
5021 unnecessary_screenshots.append(png_path)
5023 for added_id in added_ids:
5024 _CheckScreenshotAdded(screenshots_dir, added_id)
5026 for modified_id in modified_ids:
5027 _CheckScreenshotAdded(screenshots_dir, modified_id)
5029 for removed_id in removed_ids:
5030 _CheckScreenshotRemoved(screenshots_dir, removed_id)
5032 # Check new and changed strings for ICU syntax errors.
5033 for key in added_ids.union(modified_ids):
5034 msg = new_id_to_msg_map[key].ContentsAsXml('', True)
5035 err = _ValidateIcuSyntax(msg, 0, [])
5037 icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
5040 if run_screenshot_check:
5041 if unnecessary_screenshots:
5042 results.append(output_api.PresubmitError(
5043 'Do not include actual screenshots in the changelist. Run '
5044 'tools/translate/upload_screenshots.py to upload them instead:',
5045 sorted(unnecessary_screenshots)))
5048 results.append(output_api.PresubmitError(
5049 'You are adding or modifying UI strings.\n'
5050 'To ensure the best translations, take screenshots of the relevant UI '
5051 '(https://g.co/chrome/translation) and add these files to your '
5052 'changelist:', sorted(missing_sha1)))
5054 if unnecessary_sha1_files:
5055 results.append(output_api.PresubmitError(
5056 'You removed strings associated with these files. Remove:',
5057 sorted(unnecessary_sha1_files)))
5059 results.append(output_api.PresubmitPromptOrNotify('Skipping translation '
5060 'screenshots check.'))
5062 if icu_syntax_errors:
5063 results.append(output_api.PresubmitPromptWarning(
5064 'ICU syntax errors were found in the following strings (problems or '
5065 'feedback? Contact rainhard@chromium.org):', items=icu_syntax_errors))
# Presubmit check: when any .grd/.grdp file is modified, verify that the
# translation expectations file still parses and yields the list of
# translatable grd files in the repository.
# NOTE(review): this numbered dump omits some original source lines (the
# embedded line numbers skip, e.g. the 'try:' that must precede line 5110),
# so the annotations below describe only the visible code.
5070 def CheckTranslationExpectations(input_api, output_api,
5072 translation_expectations_path=None,
# Only run when a .grd or .grdp file is part of the change.
5075 affected_grds = [f for f in input_api.AffectedFiles()
5076 if (f.LocalPath().endswith('.grd') or
5077 f.LocalPath().endswith('.grdp'))]
5078 if not affected_grds:
# Temporarily extend sys.path so the helper modules under tools/translation
# can be imported; the original path is restored immediately afterwards.
5082 old_sys_path = sys.path
5083 sys.path = sys.path + [
5084 input_api.os_path.join(
5085 input_api.PresubmitLocalPath(), 'tools', 'translation')]
5086 from helper import git_helper
5087 from helper import translation_helper
5089 sys.path = old_sys_path
5091 # Check that translation expectations can be parsed and we can get a list of
5092 # translatable grd files. |repo_root| and |translation_expectations_path| are
5093 # only passed by tests.
5095 repo_root = input_api.PresubmitLocalPath()
5096 if not translation_expectations_path:
# Default to the checked-in expectations file.
5097 translation_expectations_path = input_api.os_path.join(
5098 repo_root, 'tools', 'gritsettings',
5099 'translation_expectations.pyl')
5101 grd_files = git_helper.list_grds_in_repository(repo_root)
5103 # Ignore bogus grd files used only for testing
5104 # ui/webui/resources/tools/generate_grd.py.
5105 ignore_path = input_api.os_path.join(
5106 'ui', 'webui', 'resources', 'tools', 'tests')
5107 grd_files = [p for p in grd_files if ignore_path not in p]
# Any exception below means either a modified grd/grdp file cannot be
# parsed or the expectations file is stale; surface it as a notify result
# (not a hard error) with the stack trace for debugging.
5110 translation_helper.get_translatable_grds(repo_root, grd_files,
5111 translation_expectations_path)
5112 except Exception as e:
5113 return [output_api.PresubmitNotifyResult(
5114 'Failed to get a list of translatable grd files. This happens when:\n'
5115 ' - One of the modified grd or grdp files cannot be parsed or\n'
5116 ' - %s is not updated.\n'
5117 'Stack:\n%s' % (translation_expectations_path, str(e)))]
# Presubmit check: runs mojo/public/tools/mojom/check_stable_mojom_compatibility.py
# over every changed .mojom file (deletions included) and reports an error if
# a [Stable] type changed in a non-backward-compatible way.
# NOTE(review): this numbered dump omits some original source lines (the
# embedded line numbers skip); the annotations below cover only visible code.
5121 def CheckStableMojomChanges(input_api, output_api):
5122 """Changes to [Stable] mojom types must preserve backward-compatibility."""
5123 changed_mojoms = input_api.AffectedFiles(
5124 include_deletes=True,
5125 file_filter=lambda f: f.LocalPath().endswith(('.mojom')))
5127 for mojom in changed_mojoms:
# NOTE(review): old_contents/new_contents are not read by any visible line;
# presumably used by code omitted from this dump — verify before relying on
# this comment.
5128 old_contents = ''.join(mojom.OldContents()) or None
5129 new_contents = ''.join(mojom.NewContents()) or None
# Dict entry describing one mojom's before/after text; None marks an added
# (no old contents) or deleted (no new contents) file. The append into
# |delta| surrounding these keys is in an omitted line.
5131 'filename': mojom.LocalPath(),
5132 'old': '\n'.join(mojom.OldContents()) or None,
5133 'new': '\n'.join(mojom.NewContents()) or None,
# Launch the compatibility checker and feed it the JSON-encoded delta on
# stdin (see process.communicate below).
5136 process = input_api.subprocess.Popen(
5137 [input_api.python_executable,
5138 input_api.os_path.join(input_api.PresubmitLocalPath(), 'mojo',
5139 'public', 'tools', 'mojom',
5140 'check_stable_mojom_compatibility.py'),
5141 '--src-root', input_api.PresubmitLocalPath()],
5142 stdin=input_api.subprocess.PIPE,
5143 stdout=input_api.subprocess.PIPE,
5144 stderr=input_api.subprocess.PIPE,
5145 universal_newlines=True)
5146 (x, error) = process.communicate(input=input_api.json.dumps(delta))
# A non-zero exit code from the checker signals an incompatible change.
5147 if process.returncode:
5148 return [output_api.PresubmitError(
5149 'One or more [Stable] mojom definitions appears to have been changed '
5150 'in a way that is not backward-compatible.',
# Presubmit check: warns when a Register*Pref call is deleted without the
# developer touching the MigrateObsolete*Prefs() functions in
# chrome/browser/prefs/browser_prefs.cc.
# NOTE(review): this numbered dump omits some original source lines (the
# embedded line numbers skip, e.g. the assignments binding range_1/range_2
# around lines 5211/5214 and the 'try:' before line 5248); the annotations
# below cover only the visible code.
5154 def CheckDeprecationOfPreferences(input_api, output_api):
5155 """Removing a preference should come with a deprecation."""
# Restrict the scan to C++/Obj-C++ implementation files, excluding test code
# and the global exclusion lists defined at the top of this file.
5157 def FilterFile(affected_file):
5158 """Accept only .cc files and the like."""
5159 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
5160 files_to_skip = (_EXCLUDED_PATHS +
5161 _TEST_CODE_EXCLUDED_PATHS +
5162 input_api.DEFAULT_FILES_TO_SKIP)
5163 return input_api.FilterSourceFile(
5165 files_to_check=file_inclusion_pattern,
5166 files_to_skip=files_to_skip)
# Parse the scm diff to recover (new-file line number, diff line) pairs for
# every added ('+') and removed ('-') line in the file.
5168 def ModifiedLines(affected_file):
5169 """Returns a list of tuples (line number, line text) of added and removed
5172 Deleted lines share the same line number as the previous line.
5174 This relies on the scm diff output describing each changed code section
5175 with a line of the form
5177 ^@@ <old line num>,<old size> <new line num>,<new size> @@$
5181 for line in affected_file.GenerateScmDiff().splitlines():
5182 # Extract <new line num> of the patch fragment (see format above).
5183 m = input_api.re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@', line)
5185 line_num = int(m.groups(1)[0])
# '+'/'-' prefixed lines are content changes; '++'/'--' prefixed lines are
# diff file headers and are deliberately excluded.
5187 if ((line.startswith('+') and not line.startswith('++')) or
5188 (line.startswith('-') and not line.startswith('--'))):
5189 modified_lines.append((line_num, line))
# Deleted lines do not advance the new-file line counter.
5191 if not line.startswith('-'):
5193 return modified_lines
5195 def FindLineWith(lines, needle):
5196 """Returns the line number (i.e. index + 1) in `lines` containing `needle`.
5198 If 0 or >1 lines contain `needle`, -1 is returned.
5200 matching_line_numbers = [
5201 # + 1 for 1-based counting of line numbers.
# Exactly one match is required; ambiguity or absence yields -1.
5205 return matching_line_numbers[0] if len(matching_line_numbers) == 1 else -1
5207 def ModifiedPrefMigration(affected_file):
5208 """Returns whether the MigrateObsolete.*Pref functions were modified."""
5209 # Determine first and last lines of MigrateObsolete.*Pref functions.
5210 new_contents = affected_file.NewContents();
5212 FindLineWith(new_contents, 'BEGIN_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'),
5213 FindLineWith(new_contents, 'END_MIGRATE_OBSOLETE_LOCAL_STATE_PREFS'))
5215 FindLineWith(new_contents, 'BEGIN_MIGRATE_OBSOLETE_PROFILE_PREFS'),
5216 FindLineWith(new_contents, 'END_MIGRATE_OBSOLETE_PROFILE_PREFS'))
# -1 in either range means a marker comment is missing or duplicated.
5217 if (-1 in range_1 + range_2):
5219 'Broken .*MIGRATE_OBSOLETE_.*_PREFS markers in browser_prefs.cc.')
5221 # Check whether any of the modified lines are part of the
5222 # MigrateObsolete.*Pref functions.
5223 for line_nr, line in ModifiedLines(affected_file):
5224 if (range_1[0] <= line_nr <= range_1[1] or
5225 range_2[0] <= line_nr <= range_2[1]):
5229 register_pref_pattern = input_api.re.compile(r'Register.+Pref')
5230 browser_prefs_file_pattern = input_api.re.compile(
5231 r'chrome/browser/prefs/browser_prefs.cc')
5233 changes = input_api.AffectedFiles(include_deletes=True,
5234 file_filter=FilterFile)
5235 potential_problems = []
5237 for line in f.GenerateScmDiff().splitlines():
5238 # Check deleted lines for pref registrations.
5239 if (line.startswith('-') and not line.startswith('--') and
5240 register_pref_pattern.search(line)):
5241 potential_problems.append('%s: %s' % (f.LocalPath(), line))
5243 if browser_prefs_file_pattern.search(f.LocalPath()):
5244 # If the developer modified the MigrateObsolete.*Prefs() functions, we
5245 # assume that they knew that they have to deprecate preferences and don't
5248 if ModifiedPrefMigration(f):
# ModifiedPrefMigration may raise (broken markers); report that directly.
5250 except Exception as e:
5251 return [output_api.PresubmitError(str(e))]
# Only a prompt warning: deleted registrations can be legitimate (e.g. moved
# elsewhere), so do not hard-fail the presubmit.
5253 if potential_problems:
5254 return [output_api.PresubmitPromptWarning(
5255 'Discovered possible removal of preference registrations.\n\n'
5256 'Please make sure to properly deprecate preferences by clearing their\n'
5257 'value for a couple of milestones before finally removing the code.\n'
5258 'Otherwise data may stay in the preferences files forever. See\n'
5259 'Migrate*Prefs() in chrome/browser/prefs/browser_prefs.cc and\n'
5260 'chrome/browser/prefs/README.md for examples.\n'
5261 'This may be a false positive warning (e.g. if you move preference\n'
5262 'registrations to a different place).\n',
5267 def CheckConsistentGrdChanges(input_api, output_api):
5268 """Changes to GRD files must be consistent for tools to read them."""
5269 changed_grds = input_api.AffectedFiles(
5270 include_deletes=False,
5271 file_filter=lambda f: f.LocalPath().endswith(('.grd')))
5273 invalid_file_regexes = [(input_api.re.compile(matcher), msg) for matcher, msg in _INVALID_GRD_FILE_LINE]
5274 for grd in changed_grds:
5275 for i, line in enumerate(grd.NewContents()):
5276 for matcher, msg in invalid_file_regexes:
5277 if matcher.search(line):
5278 errors.append(output_api.PresubmitError('Problem on {grd}:{i} - {msg}'.format(grd=grd.LocalPath(), i=i + 1, msg=msg)))