# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for Chromium.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
# NOTE(review): fragment of a path-exclusion list whose assignment line is not
# visible in this excerpt (fused original numbering jumps 16->18 and 21->26,
# so entries are missing, and each line carries a fused line-number artifact).
# Kept byte-identical; do not restructure until the full definition is in view.
13 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_rules.py",
14 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_simple.py",
15 r"^native_client_sdk[\\/]src[\\/]tools[\\/].*.mk",
16 r"^net[\\/]tools[\\/]spdyshark[\\/].*",
18 r"^third_party[\\/]blink[\\/].*",
19 r"^third_party[\\/]breakpad[\\/].*",
20 # sqlite is an imported third party dependency.
21 r"^third_party[\\/]sqlite[\\/].*",
26 r".+[\\/]pnacl_shim\.c$",
27 r"^gpu[\\/]config[\\/].*_list_json\.cc$",
28 r"tools[\\/]md_browser[\\/].*\.css$",
29 # Test pages for Maps telemetry tests.
30 r"tools[\\/]perf[\\/]page_sets[\\/]maps_perf_test.*",
31 # Test pages for WebRTC telemetry tests.
32 r"tools[\\/]perf[\\/]page_sets[\\/]webrtc_cases.*",
36 # Fragment of a regular expression that matches C++ and Objective-C++
37 # implementation files.
38 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
41 # Fragment of a regular expression that matches C++ and Objective-C++
43 _HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
# NOTE(review): this definition is incomplete in this excerpt — the fused
# original numbering jumps (46->48, 63->65) and the closing parenthesis is not
# visible; each line also carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definition is in view.
46 # Regular expression that matches code only used for test binaries
48 _TEST_CODE_EXCLUDED_PATHS = (
49 r'.*[\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
50 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
51 r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
52 _IMPLEMENTATION_EXTENSIONS,
53 r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
54 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
55 r'.*[\\/](test|tool(s)?)[\\/].*',
56 # content_shell is used for running layout tests.
57 r'content[\\/]shell[\\/].*',
58 # Non-production example code.
59 r'mojo[\\/]examples[\\/].*',
60 # Launcher for running iOS tests on the simulator.
61 r'testing[\\/]iossim[\\/]iossim\.mm$',
62 # EarlGrey app side code for tests.
63 r'ios[\\/].*_app_interface\.mm$',
65 r'ui[\\/]views[\\/]examples[\\/].*',
68 _THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
70 _TEST_ONLY_WARNING = (
71 'You might be calling functions intended only for testing from\n'
72 'production code. It is OK to ignore this warning if you know what\n'
73 'you are doing, as the heuristics used to detect the situation are\n'
74 'not perfect. The commit queue will not block on this warning,\n'
75 'however the android-binary-size trybot will block if the method\n'
76 'exists in the release apk.')
79 _INCLUDE_ORDER_WARNING = (
80 'Your #include order seems to be broken. Remember to use the right '
81 'collation (LC_COLLATE=C) and check\nhttps://google.github.io/styleguide/'
82 'cppguide.html#Names_and_Order_of_Includes')
# NOTE(review): banned-API tables (_BANNED_JAVA_IMPORTS, _BANNED_JAVA_FUNCTIONS,
# _BANNED_OBJC_FUNCTIONS, _BANNED_IOS_OBJC_FUNCTIONS,
# _BANNED_IOS_EGTEST_FUNCTIONS). This excerpt is missing many lines (the fused
# original numbering jumps, e.g. 111->117, 136->141) including the tuples'
# closing delimiters, and each line carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definitions are in view.
84 # Format: Sequence of tuples containing:
86 # * Sequence of strings to show when the pattern matches.
87 # * Sequence of path or filename exceptions to this rule
88 _BANNED_JAVA_IMPORTS = (
92 'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
95 'net/android/javatests/src/org/chromium/net/'
96 'AndroidProxySelectorTest.java',
98 'third_party/robolectric/local/',
103 # Format: Sequence of tuples containing:
104 # * String pattern or, if starting with a slash, a regular expression.
105 # * Sequence of strings to show when the pattern matches.
106 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
107 _BANNED_JAVA_FUNCTIONS = (
109 'StrictMode.allowThreadDiskReads()',
111 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
117 'StrictMode.allowThreadDiskWrites()',
119 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
126 # Format: Sequence of tuples containing:
127 # * String pattern or, if starting with a slash, a regular expression.
128 # * Sequence of strings to show when the pattern matches.
129 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
130 _BANNED_OBJC_FUNCTIONS = (
# NOTE(review): the two adjacent string literals below concatenate to
# "...assumeInside:] isprohibited." — a space is missing at the end of the
# first literal. String text cannot be changed in a comment-only pass; fix
# when the full definition is editable.
134 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
135 'prohibited. Please use CrTrackingArea instead.',
136 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
141 r'/NSTrackingArea\W',
143 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
145 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
150 'convertPointFromBase:',
152 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
153 'Please use |convertPoint:(point) fromView:nil| instead.',
154 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
159 'convertPointToBase:',
161 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
162 'Please use |convertPoint:(point) toView:nil| instead.',
163 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
168 'convertRectFromBase:',
170 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
171 'Please use |convertRect:(point) fromView:nil| instead.',
172 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
177 'convertRectToBase:',
179 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
180 'Please use |convertRect:(point) toView:nil| instead.',
181 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
186 'convertSizeFromBase:',
188 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
189 'Please use |convertSize:(point) fromView:nil| instead.',
190 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
195 'convertSizeToBase:',
197 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
198 'Please use |convertSize:(point) toView:nil| instead.',
199 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
204 r"/\s+UTF8String\s*]",
206 'The use of -[NSString UTF8String] is dangerous as it can return null',
207 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
208 'Please use |SysNSStringToUTF8| instead.',
213 r'__unsafe_unretained',
215 'The use of __unsafe_unretained is almost certainly wrong, unless',
216 'when interacting with NSFastEnumeration or NSInvocation.',
217 'Please use __weak in files build with ARC, nothing otherwise.',
224 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
225 'Foundation types is prohibited.',
231 # Format: Sequence of tuples containing:
232 # * String pattern or, if starting with a slash, a regular expression.
233 # * Sequence of strings to show when the pattern matches.
234 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
235 _BANNED_IOS_OBJC_FUNCTIONS = (
239 'TEST() macro should not be used in Objective-C++ code as it does not ',
240 'drain the autorelease pool at the end of the test. Use TEST_F() ',
241 'macro instead with a fixture inheriting from PlatformTest (or a ',
247 r'/\btesting::Test\b',
249 'testing::Test should not be used in Objective-C++ code as it does ',
250 'not drain the autorelease pool at the end of the test. Use ',
251 'PlatformTest instead.'
257 # Format: Sequence of tuples containing:
258 # * String pattern or, if starting with a slash, a regular expression.
259 # * Sequence of strings to show when the pattern matches.
260 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
261 _BANNED_IOS_EGTEST_FUNCTIONS = (
263 r'/\bEXPECT_OCMOCK_VERIFY\b',
265 'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
266 'it is meant for GTests. Use [mock verify] instead.'
# NOTE(review): this excerpt of the legacy Bind/Callback path list is missing
# lines (fused original numbering jumps, e.g. 285->288, 378->390) including the
# closing ')), and each line carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definition is in view.
272 # Directories that contain deprecated Bind() or Callback types.
273 # Find sub-directories from a given directory by running:
274 # for i in `find . -maxdepth 1 -type d|sort`; do
276 # (cd $i; git grep -nP 'base::(Bind\(|(Callback<|Closure))'|wc -l)
279 # TODO(crbug.com/714018): Remove (or narrow the scope of) paths from this list
280 # when they have been converted to modern callback types (OnceCallback,
281 # RepeatingCallback, BindOnce, BindRepeating) in order to enable presubmit
282 # checks for them and prevent regressions.
283 _NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK = '|'.join((
285 '^base/callback.h', # Intentional.
288 '^chrome/chrome_elf/',
290 '^chrome/installer/',
296 '^chromecast/media/',
297 '^chromecast/metrics/',
299 '^chromeos/attestation/',
300 '^chromeos/components/',
301 '^chromeos/network/',
302 '^chromeos/services/',
304 '^components/assist_ranker/',
305 '^components/autofill/',
306 '^components/autofill_assistant/',
307 '^components/browser_watcher/',
308 '^components/cast_channel/',
309 '^components/chromeos_camera/',
310 '^components/component_updater/',
311 '^components/content_settings/',
312 '^components/cronet/',
313 '^components/data_reduction_proxy/',
314 '^components/domain_reliability/',
315 '^components/dom_distiller/',
316 '^components/download/internal/common/',
317 '^components/drive/',
319 '^components/feature_engagement/',
320 '^components/feedback/',
321 '^components/flags_ui/',
322 '^components/gcm_driver/',
323 '^components/guest_view/',
324 '^components/heap_profiling/',
325 '^components/history/',
326 '^components/image_fetcher/',
327 '^components/invalidation/',
328 '^components/keyed_service/',
329 '^components/login/',
330 '^components/metrics/',
331 '^components/metrics_services_manager/',
333 '^components/navigation_interception/',
334 '^components/net_log/',
335 '^components/network_time/',
336 '^components/ntp_snippets/',
337 '^components/ntp_tiles/',
338 '^components/offline_pages/',
339 '^components/omnibox/',
340 '^components/ownership/',
341 '^components/password_manager/',
342 '^components/payments/',
343 '^components/plugins/',
344 '^components/policy/',
345 '^components/proxy_config/',
346 '^components/quirks/',
347 '^components/remote_cocoa/',
349 '^components/safe_browsing/',
350 '^components/search_engines/',
351 '^components/search_provider_logos/',
352 '^components/security_interstitials/',
353 '^components/security_state/',
354 '^components/services/',
355 '^components/sessions/',
356 '^components/signin/',
357 '^components/ssl_errors/',
358 '^components/storage_monitor/',
359 '^components/subresource_filter/',
360 '^components/suggestions/',
362 '^components/sync_preferences/',
363 '^components/sync_sessions/',
365 '^components/tracing/',
366 '^components/translate/',
368 '^components/update_client/',
369 '^components/upload_list/',
370 '^components/variations/',
371 '^components/visitedlink/',
372 '^components/webcrypto/',
373 '^components/webdata/',
374 '^components/webdata_services/',
375 '^device/bluetooth/',
376 '^extensions/browser/',
377 '^extensions/renderer/',
378 '^google_apis/dive/',
390 '^media/device_monitors/',
398 '^ppapi/shared_impl/',
404 '^remoting/internal/',
405 '^remoting/protocol/',
407 '^third_party/blink/',
408 '^tools/clang/base_bind_rewriters/', # Intentional.
409 '^tools/gdb/gdb_chrome.py', # Intentional.
# NOTE(review): _BANNED_CPP_FUNCTIONS is heavily truncated in this excerpt —
# the fused original numbering jumps throughout (e.g. 421->427, 661->670), the
# pattern entries for many tuples are missing, and the closing delimiter is
# not visible; each line also carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definition is in view.
412 # Format: Sequence of tuples containing:
413 # * String pattern or, if starting with a slash, a regular expression.
414 # * Sequence of strings to show when the pattern matches.
415 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
416 # * Sequence of paths to *not* check (regexps).
417 _BANNED_CPP_FUNCTIONS = (
421 'New code should not use NULL. Use nullptr instead.',
427 r'/\busing namespace ',
429 'Using directives ("using namespace x") are banned by the Google Style',
430 'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
431 'Explicitly qualify symbols or use using declarations ("using x::foo").',
434 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
436 # Make sure that gtest's FRIEND_TEST() macro is not used; the
437 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
438 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
442 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
443 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
449 r'/XSelectInput|CWEventMask|XCB_CW_EVENT_MASK',
451 'Chrome clients wishing to select events on X windows should use',
452 'ui::XScopedEventSelector. It is safe to ignore this warning only if',
453 'you are selecting events from the GPU process, or if you are using',
454 'an XDisplay other than gfx::GetXDisplay().',
458 r"^ui[\\/]events[\\/]x[\\/].*\.cc$",
459 r"^ui[\\/]gl[\\/].*\.cc$",
460 r"^media[\\/]gpu[\\/].*\.cc$",
462 r"^ui[\\/]base[\\/]x[\\/]xwmstartupcheck[\\/]xwmstartupcheck\.cc$",
466 r'/\WX?(((Width|Height)(MM)?OfScreen)|(Display(Width|Height)))\(',
468 'Use the corresponding fields in x11::Screen instead.',
474 r'/XInternAtom|xcb_intern_atom',
476 'Use gfx::GetAtom() instead of interning atoms directly.',
480 r"^gpu[\\/]ipc[\\/]service[\\/]gpu_watchdog_thread\.cc$",
481 r"^remoting[\\/]host[\\/]linux[\\/]x_server_clipboard\.cc$",
482 r"^ui[\\/]gfx[\\/]x[\\/]x11_atom_cache\.cc$",
488 'Overriding setMatrixClip() is prohibited; ',
489 'the base function is deprecated. ',
497 'The use of SkRefPtr is prohibited. ',
498 'Please use sk_sp<> instead.'
506 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
507 'Please use sk_sp<> instead.'
515 'The use of SkAutoTUnref is dangerous because it implicitly ',
516 'converts to a raw pointer. Please use sk_sp<> instead.'
524 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
525 'because it implicitly converts to a raw pointer. ',
526 'Please use sk_sp<> instead.'
532 r'/HANDLE_EINTR\(.*close',
534 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
535 'descriptor will be closed, and it is incorrect to retry the close.',
536 'Either call close directly and ignore its return value, or wrap close',
537 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
543 r'/IGNORE_EINTR\((?!.*close)',
545 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
546 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
550 # Files that #define IGNORE_EINTR.
551 r'^base[\\/]posix[\\/]eintr_wrapper\.h$',
552 r'^ppapi[\\/]tests[\\/]test_broker\.cc$',
558 'Do not introduce new v8::Extensions into the code base, use',
559 'gin::Wrappable instead. See http://crbug.com/334679',
563 r'extensions[\\/]renderer[\\/]safe_builtins\.*',
567 '#pragma comment(lib,',
569 'Specify libraries to link with in build files and not in the source.',
573 r'^base[\\/]third_party[\\/]symbolize[\\/].*',
574 r'^third_party[\\/]abseil-cpp[\\/].*',
578 r'/base::SequenceChecker\b',
580 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
586 r'/base::ThreadChecker\b',
588 'Consider using THREAD_CHECKER macros instead of the class directly.',
594 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
596 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
597 'deprecated (http://crbug.com/634507). Please avoid converting away',
598 'from the Time types in Chromium code, especially if any math is',
599 'being done on time values. For interfacing with platform/library',
600 'APIs, use FromMicroseconds() or InMicroseconds(), or one of the other',
601 'type converter methods instead. For faking TimeXXX values (for unit',
602 'testing only), use TimeXXX() + TimeDelta::FromMicroseconds(N). For',
603 'other use cases, please contact base/time/OWNERS.',
609 'CallJavascriptFunctionUnsafe',
611 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
612 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
613 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
617 r'^content[\\/]browser[\\/]webui[\\/]web_ui_impl\.(cc|h)$',
618 r'^content[\\/]public[\\/]browser[\\/]web_ui\.h$',
619 r'^content[\\/]public[\\/]test[\\/]test_web_ui\.(cc|h)$',
625 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
626 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
627 "Chrome's tracing, making their memory usage visible.",
631 r'^third_party/leveldatabase/.*\.(cc|h)$',
635 'leveldb::NewMemEnv',
637 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
638 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
639 "to Chrome's tracing, making their memory usage visible.",
643 r'^third_party/leveldatabase/.*\.(cc|h)$',
647 'RunLoop::QuitCurrent',
649 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
650 'methods of a specific RunLoop instance instead.',
656 'base::ScopedMockTimeMessageLoopTaskRunner',
658 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
659 'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
660 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
661 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
662 'with gab@ first if you think you need it)',
670 'Using std::regex adds unnecessary binary size to Chrome. Please use',
671 're2::RE2 instead (crbug.com/755321)',
679 'std::stoi uses exceptions to communicate results. ',
680 'Use base::StringToInt() instead.',
683 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
688 'std::stol uses exceptions to communicate results. ',
689 'Use base::StringToInt() instead.',
692 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
697 'std::stoul uses exceptions to communicate results. ',
698 'Use base::StringToUint() instead.',
701 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
706 'std::stoll uses exceptions to communicate results. ',
707 'Use base::StringToInt64() instead.',
710 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
715 'std::stoull uses exceptions to communicate results. ',
716 'Use base::StringToUint64() instead.',
719 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
724 'std::stof uses exceptions to communicate results. ',
725 'For locale-independent values, e.g. reading numbers from disk',
726 'profiles, use base::StringToDouble().',
727 'For user-visible values, parse using ICU.',
730 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
735 'std::stod uses exceptions to communicate results. ',
736 'For locale-independent values, e.g. reading numbers from disk',
737 'profiles, use base::StringToDouble().',
738 'For user-visible values, parse using ICU.',
741 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
746 'std::stold uses exceptions to communicate results. ',
747 'For locale-independent values, e.g. reading numbers from disk',
748 'profiles, use base::StringToDouble().',
749 'For user-visible values, parse using ICU.',
752 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
755 r'/\bstd::to_string\b',
757 'std::to_string is locale dependent and slower than alternatives.',
758 'For locale-independent strings, e.g. writing numbers to disk',
759 'profiles, use base::NumberToString().',
760 'For user-visible strings, use base::FormatNumber() and',
761 'the related functions in base/i18n/number_formatting.h.',
763 False, # Only a warning since it is already used.
764 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
767 r'/\bstd::shared_ptr\b',
769 'std::shared_ptr should not be used. Use scoped_refptr instead.',
772 [_THIRD_PARTY_EXCEPT_BLINK,
773 '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
774 'array_buffer_contents\.(cc|h)'],
777 r'/\bstd::weak_ptr\b',
779 'std::weak_ptr should not be used. Use base::WeakPtr instead.',
782 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
787 'long long is banned. Use stdint.h if you need a 64 bit number.',
789 False, # Only a warning since it is already used.
790 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
795 'std::bind is banned because of lifetime risks.',
796 'Use base::BindOnce or base::BindRepeating instead.',
799 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
802 r'/\b#include <chrono>\b',
804 '<chrono> overlaps with Time APIs in base. Keep using',
808 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
811 r'/\b#include <exception>\b',
813 'Exceptions are banned and disabled in Chromium.',
816 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
819 r'/\bstd::function\b',
821 'std::function is banned. Instead use base::Callback which directly',
822 'supports Chromium\'s weak pointers, ref counting and more.',
824 False, # Only a warning since it is already used.
825 [_THIRD_PARTY_EXCEPT_BLINK], # Do not warn in third_party folders.
828 r'/\b#include <random>\b',
830 'Do not use any random number engines from <random>. Instead',
831 'use base::RandomBitGenerator.',
834 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
839 'std::ratio is banned by the Google Style Guide.',
842 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
845 (r'/base::ThreadRestrictions::(ScopedAllowIO|AssertIOAllowed|'
846 r'DisallowWaiting|AssertWaitAllowed|SetWaitAllowed|ScopedAllowWait)'),
848 'Use the new API in base/threading/thread_restrictions.h.',
856 'Please use base::Bind{Once,Repeating} instead',
857 'of base::Bind. (crbug.com/714018)',
860 (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
863 r'/\bbase::Callback[<:]',
865 'Please use base::{Once,Repeating}Callback instead',
866 'of base::Callback. (crbug.com/714018)',
869 (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
872 r'/\bbase::Closure\b',
874 'Please use base::{Once,Repeating}Closure instead',
875 'of base::Closure. (crbug.com/714018)',
878 (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
881 r'/\bRunMessageLoop\b',
883 'RunMessageLoop is deprecated, use RunLoop instead.',
891 'RunThisRunLoop is deprecated, use RunLoop directly instead.',
897 'RunAllPendingInMessageLoop()',
899 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
900 "if you're convinced you need this.",
906 'RunAllPendingInMessageLoop(BrowserThread',
908 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
909 'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
910 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
911 'async events instead of flushing threads.',
917 r'MessageLoopRunner',
919 'MessageLoopRunner is deprecated, use RunLoop instead.',
925 'GetDeferredQuitTaskForRunLoop',
927 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
928 "gab@ if you found a use case where this is the only solution.",
934 'sqlite3_initialize(',
936 'Instead of calling sqlite3_initialize(), depend on //sql, ',
937 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
941 r'^sql/initialization\.(cc|h)$',
942 r'^third_party/sqlite/.*\.(c|cc|h)$',
946 'std::random_shuffle',
948 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
949 'base::RandomShuffle instead.'
955 'ios/web/public/test/http_server',
957 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
965 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
966 'implicated in a few leaks. Use operator& instead. See ',
967 'http://crbug.com/914910 for more conversion guidance.'
975 'DEFINE_TYPE_CASTS is deprecated. Instead, use downcast helpers from ',
976 '//third_party/blink/renderer/platform/casting.h.'
980 r'^third_party/blink/renderer/.*\.(cc|h)$',
984 r'/\bIsHTML.+Element\(\b',
986 'Function IsHTMLXXXXElement is deprecated. Instead, use downcast ',
987 ' helpers IsA<HTMLXXXXElement> from ',
988 '//third_party/blink/renderer/platform/casting.h.'
992 r'^third_party/blink/renderer/.*\.(cc|h)$',
996 r'/\bToHTML.+Element(|OrNull)\(\b',
998 'Function ToHTMLXXXXElement and ToHTMLXXXXElementOrNull are '
999 'deprecated. Instead, use downcast helpers To<HTMLXXXXElement> '
1000 'and DynamicTo<HTMLXXXXElement> from ',
1001 '//third_party/blink/renderer/platform/casting.h.'
1002 'auto* html_xxxx_ele = To<HTMLXXXXElement>(n)'
1003 'auto* html_xxxx_ele_or_null = DynamicTo<HTMLXXXXElement>(n)'
1007 r'^third_party/blink/renderer/.*\.(cc|h)$',
1011 r'/\bmojo::DataPipe\b',
1013 'mojo::DataPipe is deprecated. Use mojo::CreateDataPipe instead.',
1021 'SHFileOperation was deprecated in Windows Vista, and there are less ',
1022 'complex functions to achieve the same goals. Use IFileOperation for ',
1023 'any esoteric actions instead.'
1031 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
1032 'Use base::win::String16FromGUID instead.'
1036 r'/base/win/win_util_unittest.cc'
1042 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
1043 'Use base::win::String16FromGUID instead.'
1047 r'/base/win/win_util_unittest.cc'
1053 'The use of kCFAllocatorNull with the NoCopy creation of ',
1054 'CoreFoundation types is prohibited.',
1062 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
1063 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
1064 'StringTraits if you would like to convert between custom types and',
1065 'the wire format of mojom types.'
1069 r'^fuchsia/engine/browser/url_request_rewrite_rules_manager\.cc$',
1070 r'^fuchsia/engine/url_request_rewrite_type_converters\.cc$',
1071 r'^third_party/blink/.*\.(cc|h)$',
1072 r'^content/renderer/.*\.(cc|h)$',
1076 'GetInterfaceProvider',
1078 'InterfaceProvider is deprecated.',
1079 'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
1080 'or Platform::GetBrowserInterfaceBroker.'
1088 'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
1089 'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
1096 r'/\b(IFACE|STD)METHOD_?\(',
1098 'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
1099 'Instead, always use IFACEMETHODIMP in the declaration.'
1102 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
1105 'set_owned_by_client',
1107 'set_owned_by_client is deprecated.',
1108 'views::View already owns the child views by default. This introduces ',
1109 'a competing ownership model which makes the code difficult to reason ',
1110 'about. See http://crbug.com/1044687 for more details.'
1116 r'/\bTRACE_EVENT_ASYNC_',
1118 'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
1119 'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
1123 r'^base/trace_event/.*',
1124 r'^base/tracing/.*',
# NOTE(review): _DEPRECATED_MOJO_TYPES is missing lines in this excerpt (the
# fused original numbering jumps, e.g. 1137->1141) including the closing
# delimiter; each line also carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definition is in view.
1129 # Format: Sequence of tuples containing:
1130 # * String pattern or, if starting with a slash, a regular expression.
1131 # * Sequence of strings to show when the pattern matches.
1132 _DEPRECATED_MOJO_TYPES = (
1134 r'/\bmojo::AssociatedBinding\b',
1136 'mojo::AssociatedBinding<Interface> is deprecated.',
1137 'Use mojo::AssociatedReceiver<Interface> instead.',
1141 r'/\bmojo::AssociatedBindingSet\b',
1143 'mojo::AssociatedBindingSet<Interface> is deprecated.',
1144 'Use mojo::AssociatedReceiverSet<Interface> instead.',
1148 r'/\bmojo::AssociatedInterfacePtr\b',
1150 'mojo::AssociatedInterfacePtr<Interface> is deprecated.',
1151 'Use mojo::AssociatedRemote<Interface> instead.',
1155 r'/\bmojo::AssociatedInterfacePtrInfo\b',
1157 'mojo::AssociatedInterfacePtrInfo<Interface> is deprecated.',
1158 'Use mojo::PendingAssociatedRemote<Interface> instead.',
1162 r'/\bmojo::AssociatedInterfaceRequest\b',
1164 'mojo::AssociatedInterfaceRequest<Interface> is deprecated.',
1165 'Use mojo::PendingAssociatedReceiver<Interface> instead.',
1169 r'/\bmojo::Binding\b',
1171 'mojo::Binding<Interface> is deprecated.',
1172 'Use mojo::Receiver<Interface> instead.',
1176 r'/\bmojo::BindingSet\b',
1178 'mojo::BindingSet<Interface> is deprecated.',
1179 'Use mojo::ReceiverSet<Interface> instead.',
1183 r'/\bmojo::InterfacePtr\b',
1185 'mojo::InterfacePtr<Interface> is deprecated.',
1186 'Use mojo::Remote<Interface> instead.',
1190 r'/\bmojo::InterfacePtrInfo\b',
1192 'mojo::InterfacePtrInfo<Interface> is deprecated.',
1193 'Use mojo::PendingRemote<Interface> instead.',
1197 r'/\bmojo::InterfaceRequest\b',
1199 'mojo::InterfaceRequest<Interface> is deprecated.',
1200 'Use mojo::PendingReceiver<Interface> instead.',
1204 r'/\bmojo::MakeRequest\b',
1206 'mojo::MakeRequest is deprecated.',
1207 'Use mojo::Remote::BindNewPipeAndPassReceiver() instead.',
1211 r'/\bmojo::MakeRequestAssociatedWithDedicatedPipe\b',
1213 'mojo::MakeRequest is deprecated.',
1214 'Use mojo::AssociatedRemote::'
1215 'BindNewEndpointAndPassDedicatedReceiverForTesting() instead.',
1219 r'/\bmojo::MakeStrongBinding\b',
1221 'mojo::MakeStrongBinding is deprecated.',
1222 'Either migrate to mojo::UniqueReceiverSet, if possible, or use',
1223 'mojo::MakeSelfOwnedReceiver() instead.',
1227 r'/\bmojo::MakeStrongAssociatedBinding\b',
1229 'mojo::MakeStrongAssociatedBinding is deprecated.',
1230 'Either migrate to mojo::UniqueAssociatedReceiverSet, if possible, or',
1231 'use mojo::MakeSelfOwnedAssociatedReceiver() instead.',
1235 r'/\bmojo::StrongAssociatedBindingSet\b',
1237 'mojo::StrongAssociatedBindingSet<Interface> is deprecated.',
1238 'Use mojo::UniqueAssociatedReceiverSet<Interface> instead.',
1242 r'/\bmojo::StrongBindingSet\b',
1244 'mojo::StrongBindingSet<Interface> is deprecated.',
1245 'Use mojo::UniqueReceiverSet<Interface> instead.',
1250 _IPC_ENUM_TRAITS_DEPRECATED = (
1251 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
1252 'See http://www.chromium.org/Home/chromium-security/education/'
1253 'security-tips-for-ipc')
# NOTE(review): _LONG_PATH_ERROR's closing parenthesis and the closing bracket
# of _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS are not visible in this excerpt,
# and each line carries a fused line-number artifact. Kept byte-identical; do
# not restructure until the full definitions are in view.
1255 _LONG_PATH_ERROR = (
1256 'Some files included in this CL have file names that are too long (> 200'
1257 ' characters). If committed, these files will cause issues on Windows. See'
1258 ' https://crbug.com/612667 for more details.'
1261 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1262 r".*[\\/]BuildHooksAndroidImpl\.java",
1263 r".*[\\/]LicenseContentProvider\.java",
1264 r".*[\\/]PlatformServiceBridgeImpl.java",
1265 r".*chrome[\\\/]android[\\\/]feed[\\\/]dummy[\\\/].*\.java",
1268 # List of image extensions that are used as resources in chromium.
1269 _IMAGE_EXTENSIONS = ['.svg', '.png', '.webp']
# NOTE(review): the closing bracket of this list is not visible in this
# excerpt and each line carries a fused line-number artifact. Kept
# byte-identical; do not restructure until the full definition is in view.
1271 # These paths contain test data and other known invalid JSON files.
1272 _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
1273 r'test[\\/]data[\\/]',
1274 r'testing[\\/]buildbot[\\/]',
1275 r'^components[\\/]policy[\\/]resources[\\/]policy_templates\.json$',
1276 r'^third_party[\\/]protobuf[\\/]',
1277 r'^third_party[\\/]blink[\\/]renderer[\\/]devtools[\\/]protocol\.json$',
1278 r'^third_party[\\/]blink[\\/]web_tests[\\/]external[\\/]wpt[\\/]',
# NOTE(review): _VALID_OS_MACROS is heavily gapped here (fused original
# numbering jumps 1283->1288->1290->1308, closing paren not visible) and the
# closing bracket of _ANDROID_SPECIFIC_PYDEPS_FILES is also not visible; each
# line carries a fused line-number artifact. Kept byte-identical; do not
# restructure until the full definitions are in view.
1282 _VALID_OS_MACROS = (
1283 # Please keep sorted.
1288 'OS_CAT', # For testing.
1290 'OS_CYGWIN', # third_party code.
1308 # These are not checked on the public chromium-presubmit trybot.
1309 # Add files here that rely on .py files that exists only for target_os="android"
1310 # checkouts (e.g. //third_party/catapult).
1311 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1312 'android_webview/tools/run_cts.pydeps',
1313 'build/android/devil_chromium.pydeps',
1314 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1315 'build/android/gyp/jinja_template.pydeps',
1316 'build/android/resource_sizes.pydeps',
1317 'build/android/test_runner.pydeps',
1318 'build/android/test_wrapper/logdog_wrapper.pydeps',
1319 'chrome/android/features/create_stripped_java_factory.pydeps',
1320 'testing/scripts/run_android_wpt.pydeps',
1321 'third_party/android_platform/development/scripts/stack.pydeps',
# NOTE(review): _GENERIC_PYDEPS_FILES continues beyond the end of this excerpt
# (its closing bracket is not visible) and each line carries a fused
# line-number artifact. Kept byte-identical; do not restructure until the full
# definition is in view.
1325 _GENERIC_PYDEPS_FILES = [
1326 'base/android/jni_generator/jni_generator.pydeps',
1327 'base/android/jni_generator/jni_registration_generator.pydeps',
1328 'build/android/gyp/aar.pydeps',
1329 'build/android/gyp/aidl.pydeps',
1330 'build/android/gyp/allot_native_libraries.pydeps',
1331 'build/android/gyp/apkbuilder.pydeps',
1332 'build/android/gyp/assert_static_initializers.pydeps',
1333 'build/android/gyp/bytecode_processor.pydeps',
1334 'build/android/gyp/compile_java.pydeps',
1335 'build/android/gyp/compile_resources.pydeps',
1336 'build/android/gyp/copy_ex.pydeps',
1337 'build/android/gyp/create_apk_operations_script.pydeps',
1338 'build/android/gyp/create_app_bundle_apks.pydeps',
1339 'build/android/gyp/create_app_bundle.pydeps',
1340 'build/android/gyp/create_java_binary_script.pydeps',
1341 'build/android/gyp/create_size_info_files.pydeps',
1342 'build/android/gyp/create_ui_locale_resources.pydeps',
1343 'build/android/gyp/desugar.pydeps',
1344 'build/android/gyp/dexsplitter.pydeps',
1345 'build/android/gyp/dex.pydeps',
1346 'build/android/gyp/dex_jdk_libs.pydeps',
1347 'build/android/gyp/dist_aar.pydeps',
1348 'build/android/gyp/filter_zip.pydeps',
1349 'build/android/gyp/gcc_preprocess.pydeps',
1350 'build/android/gyp/generate_linker_version_script.pydeps',
1351 'build/android/gyp/ijar.pydeps',
1352 'build/android/gyp/jacoco_instr.pydeps',
1353 'build/android/gyp/java_cpp_enum.pydeps',
1354 'build/android/gyp/java_cpp_strings.pydeps',
1355 'build/android/gyp/jetify_jar.pydeps',
1356 'build/android/gyp/lint.pydeps',
1357 'build/android/gyp/main_dex_list.pydeps',
1358 'build/android/gyp/merge_manifest.pydeps',
1359 'build/android/gyp/prepare_resources.pydeps',
1360 'build/android/gyp/proguard.pydeps',
1361 'build/android/gyp/turbine.pydeps',
1362 'build/android/gyp/validate_static_library_dex_references.pydeps',
1363 'build/android/gyp/write_build_config.pydeps',
1364 'build/android/gyp/write_native_libraries_java.pydeps',
1365 'build/android/gyp/zip.pydeps',
1366 'build/android/incremental_install/generate_android_manifest.pydeps',
1367 'build/android/incremental_install/write_installer_json.pydeps',
1368 'build/protoc_java.pydeps',
1369 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1370 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1371 'components/cronet/tools/generate_javadoc.pydeps',
1372 'components/cronet/tools/jar_src.pydeps',
1373 'components/module_installer/android/module_desc_java.pydeps',
1374 'content/public/android/generate_child_service.pydeps',
1375 'net/tools/testserver/testserver.pydeps',
1376 'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
1377 'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
1378 'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
1379 ('third_party/blink/renderer/bindings/scripts/'
1380 'generate_high_entropy_list.pydeps'),
1381 'tools/binary_size/sizes.pydeps',
1382 'tools/binary_size/supersize.pydeps',
1386 _ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
1389 # Bypass the AUTHORS check for these accounts.
1390 _KNOWN_ROBOTS = set(
1391 ) | set('%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)
1392 ) | set('%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)
1393 ) | set('%s@chops-service-accounts.iam.gserviceaccount.com' % s
1394 for s in ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
1396 ) | set('%s@skia-public.iam.gserviceaccount.com' % s
1397 for s in ('chromium-autoroll', 'chromium-release-autoroll')
1398 ) | set('%s@skia-corp.google.com.iam.gserviceaccount.com' % s
1399 for s in ('chromium-internal-autoroll',))
1402 def _IsCPlusPlusFile(input_api, file_path):
1403 """Returns True if this file contains C++-like code (and not Python,
1404 Go, Java, MarkDown, ...)"""
1406 ext = input_api.os_path.splitext(file_path)[1]
1407 # This list is compatible with CppChecker.IsCppFile but we should
1408 # consider adding ".c" to it. If we do that we can use this function
1409 # at more places in the code.
1418 def _IsCPlusPlusHeaderFile(input_api, file_path):
1419 return input_api.os_path.splitext(file_path)[1] == ".h"
1422 def _IsJavaFile(input_api, file_path):
1423 return input_api.os_path.splitext(file_path)[1] == ".java"
1426 def _IsProtoFile(input_api, file_path):
1427 return input_api.os_path.splitext(file_path)[1] == ".proto"
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]

  # Matches calls such as "FooForTesting(" or " test::Bar(".
  base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  # Occurrences inside a // comment are not real calls.
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # Qualified names ("::FooForTesting") and definitions (pattern followed by a
  # "{" before any ";") are not call sites either.
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    # Skip excluded paths, known test code, and the default black list.
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=file_inclusion_pattern,
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  return []
1472 def _CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
1473 """This is a simplified version of
1474 _CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
1476 javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
1477 javadoc_end_re = input_api.re.compile(r'^\s*\*/')
1478 name_pattern = r'ForTest(s|ing)?'
1479 # Describes an occurrence of "ForTest*" inside a // comment.
1480 comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
1482 inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
1483 # Ignore definitions. (Comments are ignored separately.)
1484 exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)
1487 sources = lambda x: input_api.FilterSourceFile(
1489 black_list=(('(?i).*test', r'.*\/junit\/')
1490 + input_api.DEFAULT_BLACK_LIST),
1491 white_list=[r'.*\.java$']
1493 for f in input_api.AffectedFiles(include_deletes=False, file_filter=sources):
1494 local_path = f.LocalPath()
1495 is_inside_javadoc = False
1496 for line_number, line in f.ChangedContents():
1497 if is_inside_javadoc and javadoc_end_re.search(line):
1498 is_inside_javadoc = False
1499 if not is_inside_javadoc and javadoc_start_re.search(line):
1500 is_inside_javadoc = True
1501 if is_inside_javadoc:
1503 if (inclusion_re.search(line) and
1504 not comment_re.search(line) and
1505 not exclusion_re.search(line)):
1507 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1510 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
1515 def _CheckNoIOStreamInHeaders(input_api, output_api):
1516 """Checks to make sure no .h files include <iostream>."""
1518 pattern = input_api.re.compile(r'^#include\s*<iostream>',
1519 input_api.re.MULTILINE)
1520 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1521 if not f.LocalPath().endswith('.h'):
1523 contents = input_api.ReadFile(f)
1524 if pattern.search(contents):
1528 return [output_api.PresubmitError(
1529 'Do not #include <iostream> in header files, since it inserts static '
1530 'initialization into every file including the header. Instead, '
1531 '#include <ostream>. See http://crbug.com/94794',
1535 def _CheckNoStrCatRedefines(input_api, output_api):
1536 """Checks no windows headers with StrCat redefined are included directly."""
1538 pattern_deny = input_api.re.compile(
1539 r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
1540 input_api.re.MULTILINE)
1541 pattern_allow = input_api.re.compile(
1542 r'^#include\s"base/win/windows_defines.inc"',
1543 input_api.re.MULTILINE)
1544 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1545 contents = input_api.ReadFile(f)
1546 if pattern_deny.search(contents) and not pattern_allow.search(contents):
1547 files.append(f.LocalPath())
1550 return [output_api.PresubmitError(
1551 'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
1552 'directly since they pollute code with StrCat macro. Instead, '
1553 'include matching header from base/win. See http://crbug.com/856536',
1558 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
1559 """Checks to make sure no source files use UNIT_TEST."""
1561 for f in input_api.AffectedFiles():
1562 if (not f.LocalPath().endswith(('.cc', '.mm'))):
1565 for line_num, line in f.ChangedContents():
1566 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
1567 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1571 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
1572 '\n'.join(problems))]
1574 def _CheckNoDISABLETypoInTests(input_api, output_api):
1575 """Checks to prevent attempts to disable tests with DISABLE_ prefix.
1577 This test warns if somebody tries to disable a test with the DISABLE_ prefix
1578 instead of DISABLED_. To filter false positives, reports are only generated
1579 if a corresponding MAYBE_ line exists.
1583 # The following two patterns are looked for in tandem - is a test labeled
1584 # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED)
1585 maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
1586 disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')
1588 # This is for the case that a test is disabled on all platforms.
1589 full_disable_pattern = input_api.re.compile(
1590 r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
1591 input_api.re.MULTILINE)
1593 for f in input_api.AffectedFiles(False):
1594 if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
1597 # Search for MABYE_, DISABLE_ pairs.
1598 disable_lines = {} # Maps of test name to line number.
1600 for line_num, line in f.ChangedContents():
1601 disable_match = disable_pattern.search(line)
1603 disable_lines[disable_match.group(1)] = line_num
1604 maybe_match = maybe_pattern.search(line)
1606 maybe_lines[maybe_match.group(1)] = line_num
1608 # Search for DISABLE_ occurrences within a TEST() macro.
1609 disable_tests = set(disable_lines.keys())
1610 maybe_tests = set(maybe_lines.keys())
1611 for test in disable_tests.intersection(maybe_tests):
1612 problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))
1614 contents = input_api.ReadFile(f)
1615 full_disable_match = full_disable_pattern.search(contents)
1616 if full_disable_match:
1617 problems.append(' %s' % f.LocalPath())
1622 output_api.PresubmitPromptWarning(
1623 'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
1624 '\n'.join(problems))
1628 def _CheckDCHECK_IS_ONHasBraces(input_api, output_api):
1629 """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
1631 pattern = input_api.re.compile(r'DCHECK_IS_ON\b(?!\(\))',
1632 input_api.re.MULTILINE)
1633 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1634 if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
1636 for lnum, line in f.ChangedContents():
1637 if input_api.re.search(pattern, line):
1638 errors.append(output_api.PresubmitError(
1639 ('%s:%d: Use of DCHECK_IS_ON() must be written as "#if ' +
1640 'DCHECK_IS_ON()", not forgetting the parentheses.')
1641 % (f.LocalPath(), lnum)))
1645 def _FindHistogramNameInChunk(histogram_name, chunk):
1646 """Tries to find a histogram name or prefix in a line.
1648 Returns the existence of the histogram name, or None if it needs more chunk
1650 # A histogram_suffixes tag type has an affected-histogram name as a prefix of
1651 # the histogram_name.
1652 if '<affected-histogram' in chunk:
1653 # If the tag is not completed, needs more chunk to get the name.
1654 if not '>' in chunk:
1656 if not 'name="' in chunk:
1658 # Retrieve the first portion of the chunk wrapped by double-quotations. We
1659 # expect the only attribute is the name.
1660 histogram_prefix = chunk.split('"')[1]
1661 return histogram_prefix in histogram_name
1662 # Typically the whole histogram name should in the line.
1663 return histogram_name in chunk
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself."""
  # NOTE(review): several statements appear to have been lost from this
  # revision (guards around the regex matches, the `chunk` accumulator used
  # below, and the `problems` list initialization) -- restore them from
  # upstream history before relying on this check.
  touched_histograms = []
  histograms_xml_modifications = []
  # C++/ObjC histogram macro call site, e.g. UMA_HISTOGRAM_BOOLEAN(.
  call_pattern_c = r'\bUMA_HISTOGRAM.*\('
  # Java equivalent, e.g. RecordHistogram.recordBooleanHistogram(.
  call_pattern_java = r'\bRecordHistogram\.record[a-zA-Z]+Histogram\('
  # Captures the quoted histogram name argument.
  name_pattern = r'"(.*?)"'
  single_line_c_re = input_api.re.compile(call_pattern_c + name_pattern)
  single_line_java_re = input_api.re.compile(call_pattern_java + name_pattern)
  # Prefix/suffix pair handles calls split across two lines.
  split_line_c_prefix_re = input_api.re.compile(call_pattern_c)
  split_line_java_prefix_re = input_api.re.compile(call_pattern_java)
  split_line_suffix_re = input_api.re.compile(r'^\s*' + name_pattern)
  last_line_matched_prefix = False
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
    if f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      single_line_re = single_line_c_re
      split_line_prefix_re = split_line_c_prefix_re
    elif f.LocalPath().endswith(('java')):
      single_line_re = single_line_java_re
      split_line_prefix_re = split_line_java_prefix_re
    for line_num, line in f.ChangedContents():
      if last_line_matched_prefix:
        # Previous line opened a histogram call; the name is expected here.
        suffix_found = split_line_suffix_re.search(line)
        touched_histograms.append([suffix_found.group(1), f, line_num])
        last_line_matched_prefix = False
      found = single_line_re.search(line)
      touched_histograms.append([found.group(1), f, line_num])
      last_line_matched_prefix = split_line_prefix_re.search(line)
  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, on the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      # NOTE(review): `chunk` is undefined in this span -- presumably built
      # from `line` on an elided line above; confirm against history.
      histogram_name_found = _FindHistogramNameInChunk(histogram_info[0], chunk)
      if histogram_name_found is None:
      if histogram_name_found:
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)
  histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
  if unmatched_histograms:
    with open(histograms_xml_path) as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        # Rewind so each histogram scans the whole file.
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInChunk(histogram_name,
          if histogram_name_found is None:
          if histogram_name_found:
        if not histogram_name_found:
          problems.append('  [%s:%d] %s' %
              (f.LocalPath(), line_num, histogram_name))
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
      'been modified and the associated histogram name has no match in either '
      '%s or the modifications of it:' % (histograms_xml_path), problems)]
1752 def _CheckFlakyTestUsage(input_api, output_api):
1753 """Check that FlakyTest annotation is our own instead of the android one"""
1754 pattern = input_api.re.compile(r'import android.test.FlakyTest;')
1756 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1757 if f.LocalPath().endswith('Test.java'):
1758 if pattern.search(input_api.ReadFile(f)):
1761 return [output_api.PresubmitError(
1762 'Use org.chromium.base.test.util.FlakyTest instead of '
1763 'android.test.FlakyTest',
1768 def _CheckNoNewWStrings(input_api, output_api):
1769 """Checks to make sure we don't introduce use of wstrings."""
1771 for f in input_api.AffectedFiles():
1772 if (not f.LocalPath().endswith(('.cc', '.h')) or
1773 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
1774 '/win/' in f.LocalPath() or
1775 'chrome_elf' in f.LocalPath() or
1776 'install_static' in f.LocalPath()):
1779 allowWString = False
1780 for line_num, line in f.ChangedContents():
1781 if 'presubmit: allow wstring' in line:
1783 elif not allowWString and 'wstring' in line:
1784 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1785 allowWString = False
1787 allowWString = False
1791 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
1792 ' If you are calling a cross-platform API that accepts a wstring, '
1794 '\n'.join(problems))]
1797 def _CheckNoDEPSGIT(input_api, output_api):
1798 """Make sure .DEPS.git is never modified manually."""
1799 if any(f.LocalPath().endswith('.DEPS.git') for f in
1800 input_api.AffectedFiles()):
1801 return [output_api.PresubmitError(
1802 'Never commit changes to .DEPS.git. This file is maintained by an\n'
1803 'automated system based on what\'s in DEPS and your changes will be\n'
1805 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
1806 'get-the-code#Rolling_DEPS\n'
1807 'for more information')]
1811 def _CheckValidHostsInDEPS(input_api, output_api):
1812 """Checks that DEPS file deps are from allowed_hosts."""
1813 # Run only if DEPS file has been modified to annoy fewer bystanders.
1814 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
1816 # Outsource work to gclient verify
1818 gclient_path = input_api.os_path.join(
1819 input_api.PresubmitLocalPath(),
1820 'third_party', 'depot_tools', 'gclient.py')
1821 input_api.subprocess.check_output(
1822 [input_api.python_executable, gclient_path, 'verify'],
1823 stderr=input_api.subprocess.STDOUT)
1825 except input_api.subprocess.CalledProcessError as error:
1826 return [output_api.PresubmitError(
1827 'DEPS file must have only git dependencies.',
1828 long_text=error.output)]
1831 def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
1832 type_name, message):
1833 """Helper method for _CheckNoBannedFunctions and _CheckNoDeprecatedMojoTypes.
1835 Returns an string composed of the name of the file, the line number where the
1836 match has been found and the additional text passed as |message| in case the
1837 target type name matches the text inside the line passed as parameter.
1841 if line.endswith(" nocheck"):
1845 if type_name[0:1] == '/':
1846 regex = type_name[1:]
1847 if input_api.re.search(regex, line):
1849 elif type_name in line:
1853 result.append(' %s:%d:' % (affected_file.LocalPath(), line_number))
1854 for message_line in message:
1855 result.append(' %s' % message_line)
1860 def _CheckNoBannedFunctions(input_api, output_api):
1861 """Make sure that banned functions are not used."""
1865 def IsBlacklisted(affected_file, blacklist):
1866 local_path = affected_file.LocalPath()
1867 for item in blacklist:
1868 if input_api.re.match(item, local_path):
1872 def IsIosObjcFile(affected_file):
1873 local_path = affected_file.LocalPath()
1874 if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m', '.h'):
1876 basename = input_api.os_path.basename(local_path)
1877 if 'ios' in basename.split('_'):
1879 for sep in (input_api.os_path.sep, input_api.os_path.altsep):
1880 if sep and 'ios' in local_path.split(sep):
1884 def CheckForMatch(affected_file, line_num, line, func_name, message, error):
1885 problems = _GetMessageForMatchingType(input_api, f, line_num, line,
1889 errors.extend(problems)
1891 warnings.extend(problems)
1893 file_filter = lambda f: f.LocalPath().endswith(('.java'))
1894 for f in input_api.AffectedFiles(file_filter=file_filter):
1895 for line_num, line in f.ChangedContents():
1896 for func_name, message, error in _BANNED_JAVA_FUNCTIONS:
1897 CheckForMatch(f, line_num, line, func_name, message, error)
1899 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
1900 for f in input_api.AffectedFiles(file_filter=file_filter):
1901 for line_num, line in f.ChangedContents():
1902 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
1903 CheckForMatch(f, line_num, line, func_name, message, error)
1905 for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
1906 for line_num, line in f.ChangedContents():
1907 for func_name, message, error in _BANNED_IOS_OBJC_FUNCTIONS:
1908 CheckForMatch(f, line_num, line, func_name, message, error)
1910 egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
1911 for f in input_api.AffectedFiles(file_filter=egtest_filter):
1912 for line_num, line in f.ChangedContents():
1913 for func_name, message, error in _BANNED_IOS_EGTEST_FUNCTIONS:
1914 CheckForMatch(f, line_num, line, func_name, message, error)
1916 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
1917 for f in input_api.AffectedFiles(file_filter=file_filter):
1918 for line_num, line in f.ChangedContents():
1919 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
1920 if IsBlacklisted(f, excluded_paths):
1922 CheckForMatch(f, line_num, line, func_name, message, error)
1926 result.append(output_api.PresubmitPromptWarning(
1927 'Banned functions were used.\n' + '\n'.join(warnings)))
1929 result.append(output_api.PresubmitError(
1930 'Banned functions were used.\n' + '\n'.join(errors)))
1934 def _CheckAndroidNoBannedImports(input_api, output_api):
1935 """Make sure that banned java imports are not used."""
1938 def IsException(path, exceptions):
1939 for exception in exceptions:
1940 if (path.startswith(exception)):
1944 file_filter = lambda f: f.LocalPath().endswith(('.java'))
1945 for f in input_api.AffectedFiles(file_filter=file_filter):
1946 for line_num, line in f.ChangedContents():
1947 for import_name, message, exceptions in _BANNED_JAVA_IMPORTS:
1948 if IsException(f.LocalPath(), exceptions):
1950 problems = _GetMessageForMatchingType(input_api, f, line_num, line,
1951 'import ' + import_name, message)
1953 errors.extend(problems)
1956 result.append(output_api.PresubmitError(
1957 'Banned imports were used.\n' + '\n'.join(errors)))
1961 def _CheckNoDeprecatedMojoTypes(input_api, output_api):
1962 """Make sure that old Mojo types are not used."""
1966 # For any path that is not an "ok" or an "error" path, a warning will be
1967 # raised if deprecated mojo types are found.
1968 ok_paths = ['components/arc']
1969 error_paths = ['third_party/blink', 'content']
1971 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
1972 for f in input_api.AffectedFiles(file_filter=file_filter):
1973 # Don't check //components/arc, not yet migrated (see crrev.com/c/1868870).
1974 if any(map(lambda path: f.LocalPath().startswith(path), ok_paths)):
1977 for line_num, line in f.ChangedContents():
1978 for func_name, message in _DEPRECATED_MOJO_TYPES:
1979 problems = _GetMessageForMatchingType(input_api, f, line_num, line,
1983 # Raise errors inside |error_paths| and warnings everywhere else.
1984 if any(map(lambda path: f.LocalPath().startswith(path), error_paths)):
1985 errors.extend(problems)
1987 warnings.extend(problems)
1991 result.append(output_api.PresubmitPromptWarning(
1992 'Banned Mojo types were used.\n' + '\n'.join(warnings)))
1994 result.append(output_api.PresubmitError(
1995 'Banned Mojo types were used.\n' + '\n'.join(errors)))
1999 def _CheckNoPragmaOnce(input_api, output_api):
2000 """Make sure that banned functions are not used."""
2002 pattern = input_api.re.compile(r'^#pragma\s+once',
2003 input_api.re.MULTILINE)
2004 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
2005 if not f.LocalPath().endswith('.h'):
2007 contents = input_api.ReadFile(f)
2008 if pattern.search(contents):
2012 return [output_api.PresubmitError(
2013 'Do not use #pragma once in header files.\n'
2014 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
2019 def _CheckNoTrinaryTrueFalse(input_api, output_api):
2020 """Checks to make sure we don't introduce use of foo ? true : false."""
2022 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
2023 for f in input_api.AffectedFiles():
2024 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
2027 for line_num, line in f.ChangedContents():
2028 if pattern.match(line):
2029 problems.append(' %s:%d' % (f.LocalPath(), line_num))
2033 return [output_api.PresubmitPromptWarning(
2034 'Please consider avoiding the "? true : false" pattern if possible.\n' +
2035 '\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include and import statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # NOTE(review): several statements appear to have been lost from this
  # revision (the try/finally around the sys.path mutation, the
  # `import checkdeps` line, the `added_includes`/`added_imports`
  # initializations, the call arguments to the CheckAdded* methods, and the
  # `results` list initialization/return) -- restore from upstream history.
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  sys.path = sys.path + [input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
  from rules import Rule
  # Restore sys.path to what it was before.
  sys.path = original_sys_path
  added_java_imports = []
  for f in input_api.AffectedFiles():
    # Bucket each affected file's changed lines by language so the matching
    # checkdeps pass can analyze them.
    if _IsCPlusPlusFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_includes.append([f.AbsoluteLocalPath(), changed_lines])
    elif _IsProtoFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_imports.append([f.AbsoluteLocalPath(), changed_lines])
    elif _IsJavaFile(input_api, f.LocalPath()):
      changed_lines = [line for _, line in f.ChangedContents()]
      added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
  error_descriptions = []
  warning_descriptions = []
  error_subjects = set()
  warning_subjects = set()
  # C++ #includes: DISALLOW rules are errors, TEMP_ALLOW rules are warnings.
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("#includes")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("#includes")
  # Proto imports, same classification.
  for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("imports")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("imports")
  # Java imports, same classification.
  for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
      added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
    path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
      error_subjects.add("imports")
      warning_descriptions.append(description_with_path)
      warning_subjects.add("imports")
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more %s that violate checkdeps rules.'
        % " and ".join(error_subjects),
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more %s of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '%s? See relevant DEPS file(s) for details and contacts.' %
        (" and ".join(warning_subjects), "/".join(warning_subjects)),
        warning_descriptions))
2126 def _CheckFilePermissions(input_api, output_api):
2127 """Check that all files have their permissions properly set."""
2128 if input_api.platform == 'win32':
2130 checkperms_tool = input_api.os_path.join(
2131 input_api.PresubmitLocalPath(),
2132 'tools', 'checkperms', 'checkperms.py')
2133 args = [input_api.python_executable, checkperms_tool,
2134 '--root', input_api.change.RepositoryRoot()]
2135 with input_api.CreateTemporaryFile() as file_list:
2136 for f in input_api.AffectedFiles():
2137 # checkperms.py file/directory arguments must be relative to the
2139 file_list.write(f.LocalPath() + '\n')
2141 args += ['--file-list', file_list.name]
2143 input_api.subprocess.check_output(args)
2145 except input_api.subprocess.CalledProcessError as error:
2146 return [output_api.PresubmitError(
2147 'checkperms.py failed:',
2148 long_text=error.output)]
def _CheckTeamTags(input_api, output_api):
  """Checks that OWNERS files have consistent TEAM and COMPONENT tags."""
  checkteamtags_tool = input_api.os_path.join(
      input_api.PresubmitLocalPath(),
      'tools', 'checkteamtags', 'checkteamtags.py')
  args = [input_api.python_executable, checkteamtags_tool,
          '--root', input_api.change.RepositoryRoot()]
  # Only OWNERS files carry TEAM/COMPONENT tags.
  files = [f.LocalPath() for f in input_api.AffectedFiles(include_deletes=False)
           if input_api.os_path.basename(f.AbsoluteLocalPath()).upper() ==
           'OWNERS']
  try:
    warnings = input_api.subprocess.check_output(args + files).splitlines()
    if warnings:
      return [output_api.PresubmitPromptWarning(warnings[0], warnings[1:])]
    return []
  except input_api.subprocess.CalledProcessError as error:
    return [output_api.PresubmitError(
        'checkteamtags.py failed:',
        long_text=error.output)]
2173 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
2174 """Makes sure we don't include ui/aura/window_property.h
2177 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
2179 for f in input_api.AffectedFiles():
2180 if not f.LocalPath().endswith('.h'):
2182 for line_num, line in f.ChangedContents():
2183 if pattern.match(line):
2184 errors.append(' %s:%d' % (f.LocalPath(), line_num))
2188 results.append(output_api.PresubmitError(
2189 'Header files should not include ui/aura/window_property.h', errors))
2193 def _CheckForVersionControlConflictsInFile(input_api, f):
2194 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
2196 for line_num, line in f.ChangedContents():
2197 if f.LocalPath().endswith(('.md', '.rst', '.txt')):
2198 # First-level headers in markdown look a lot like version control
2199 # conflict markers. http://daringfireball.net/projects/markdown/basics
2201 if pattern.match(line):
2202 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
2206 def _CheckForVersionControlConflicts(input_api, output_api):
2207 """Usually this is not intentional and will cause a compile failure."""
2209 for f in input_api.AffectedFiles():
2210 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
2214 results.append(output_api.PresubmitError(
2215 'Version control conflict markers found, please resolve.', errors))
2219 def _CheckGoogleSupportAnswerUrl(input_api, output_api):
2220 pattern = input_api.re.compile('support\.google\.com\/chrome.*/answer')
2222 for f in input_api.AffectedFiles():
2223 for line_num, line in f.ChangedContents():
2224 if pattern.search(line):
2225 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
2229 results.append(output_api.PresubmitPromptWarning(
2230 'Found Google support URL addressed by answer number. Please replace '
2231 'with a p= identifier instead. See crbug.com/679462\n', errors))
2235 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
2236 def FilterFile(affected_file):
2237 """Filter function for use with input_api.AffectedSourceFiles,
2238 below. This filters out everything except non-test files from
2239 top-level directories that generally speaking should not hard-code
2240 service URLs (e.g. src/android_webview/, src/content/ and others).
2242 return input_api.FilterSourceFile(
2244 white_list=[r'^(android_webview|base|content|net)[\\/].*'],
2245 black_list=(_EXCLUDED_PATHS +
2246 _TEST_CODE_EXCLUDED_PATHS +
2247 input_api.DEFAULT_BLACK_LIST))
2249 base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
2250 '\.(com|net)[^"]*"')
2251 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
2252 pattern = input_api.re.compile(base_pattern)
2253 problems = [] # items are (filename, line_number, line)
2254 for f in input_api.AffectedSourceFiles(FilterFile):
2255 for line_num, line in f.ChangedContents():
2256 if not comment_pattern.search(line) and pattern.search(line):
2257 problems.append((f.LocalPath(), line_num, line))
2260 return [output_api.PresubmitPromptOrNotify(
2261 'Most layers below src/chrome/ should not hardcode service URLs.\n'
2262 'Are you sure this is correct?',
2264 problem[0], problem[1], problem[2]) for problem in problems])]
2269 def _CheckChromeOsSyncedPrefRegistration(input_api, output_api):
2270 """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""
2271 def FileFilter(affected_file):
2272 """Includes directories known to be Chrome OS only."""
2273 return input_api.FilterSourceFile(
2275 white_list=('^ash/',
2276 '^chromeos/', # Top-level src/chromeos.
2277 '/chromeos/', # Any path component.
2280 black_list=(input_api.DEFAULT_BLACK_LIST))
2284 for f in input_api.AffectedFiles(file_filter=FileFilter):
2285 for line_num, line in f.ChangedContents():
2286 if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF', line):
2287 prefs.append(' %s:%d:' % (f.LocalPath(), line_num))
2288 prefs.append(' %s' % line)
2289 if input_api.re.search(
2290 'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
2291 priority_prefs.append(' %s:%d' % (f.LocalPath(), line_num))
2292 priority_prefs.append(' %s' % line)
2296 results.append(output_api.PresubmitPromptWarning(
2297 'Preferences were registered as SYNCABLE_PREF and will be controlled '
2298 'by browser sync settings. If these prefs should be controlled by OS '
2299 'sync settings use SYNCABLE_OS_PREF instead.\n' + '\n'.join(prefs)))
2300 if (priority_prefs):
2301 results.append(output_api.PresubmitPromptWarning(
2302 'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
2303 'controlled by browser sync settings. If these prefs should be '
2304 'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
2305 'instead.\n' + '\n'.join(prefs)))
2309 # TODO: add unit tests.
2310 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
2311 """Makes sure there are no abbreviations in the name of PNG files.
2312 The native_client_sdk directory is excluded because it has auto-generated PNG
2313 files for documentation.
2316 white_list = [r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$']
2317 black_list = [r'^native_client_sdk[\\/]']
2318 file_filter = lambda f: input_api.FilterSourceFile(
2319 f, white_list=white_list, black_list=black_list)
2320 for f in input_api.AffectedFiles(include_deletes=False,
2321 file_filter=file_filter):
2322 errors.append(' %s' % f.LocalPath())
2326 results.append(output_api.PresubmitError(
2327 'The name of PNG files should not have abbreviations. \n'
2328 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
2329 'Contact oshima@chromium.org if you have questions.', errors))
2333 def _ExtractAddRulesFromParsedDeps(parsed_deps):
2334 """Extract the rules that add dependencies from a parsed DEPS file.
2337 parsed_deps: the locals dictionary from evaluating the DEPS file."""
2340 rule[1:] for rule in parsed_deps.get('include_rules', [])
2341 if rule.startswith('+') or rule.startswith('!')
2343 for _, rules in parsed_deps.get('specific_include_rules',
2346 rule[1:] for rule in rules
2347 if rule.startswith('+') or rule.startswith('!')
2352 def _ParseDeps(contents):
2353 """Simple helper for parsing DEPS files."""
2354 # Stubs for handling special syntax in the root DEPS file.
2357 def __init__(self, local_scope):
2358 self._local_scope = local_scope
2360 def Lookup(self, var_name):
2361 """Implements the Var syntax."""
2363 return self._local_scope['vars'][var_name]
2365 raise Exception('Var is not defined: %s' % var_name)
2369 'Var': _VarImpl(local_scope).Lookup,
2371 exec contents in global_scope, local_scope
def _CalculateAddedDeps(os_path, old_contents, new_contents):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
  new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

  added_deps = new_deps.difference(old_deps)

  results = set()
  for added_dep in added_deps:
    if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
      continue
    # Assume that a rule that ends in .h is a rule for a specific file.
    if added_dep.endswith('.h'):
      results.add(added_dep)
    else:
      results.add(os_path.join(added_dep, 'DEPS'))
  return results
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  virtual_depended_on_files = set()

  # blink/ DEPS changes are reviewed by blink owners; skip them here.
  file_filter = lambda f: not input_api.re.match(
      r"^third_party[\\/]blink[\\/].*", f.LocalPath())
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      virtual_depended_on_files.update(_CalculateAddedDeps(
          input_api.os_path,
          '\n'.join(f.OldContents()),
          '\n'.join(f.NewContents())))

  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if input_api.dry_run:
      return [output_api.PresubmitNotifyResult(
          'This is a dry run, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no change number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = (
      input_api.canned_checks.GetCodereviewOwnerAndReviewers(
          input_api,
          owners_db.email_regexp,
          approval_needed=input_api.is_committing))

  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('You need LGTM from owners of depends-on paths in DEPS that were '
             'modified in this CL:\n    %s' %
                 '\n    '.join(sorted(unapproved_dependencies)))]
    suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
    output_list.append(output(
        'Suggested missing target path OWNERS:\n    %s' %
            '\n    '.join(suggested_owners or [])))
    return output_list

  return []
# TODO: add unit tests.
def _CheckSpamLogging(input_api, output_api):
  """Flags LOG(INFO)/printf-style logging added outside the allowed set of
  tools and logging-infrastructure files."""
  file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\/]logging\.h$",
                 r"^base[\\/]logging\.cc$",
                 r"^base[\\/]task[\\/]thread_pool[\\/]task_tracker\.cc$",
                 r"^chrome[\\/]app[\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\/]browser[\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\/]browser[\\/]ui[\\/]startup[\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\/]browser[\\/]browser_switcher[\\/]bho[\\/].*",
                 r"^chrome[\\/]browser[\\/]diagnostics[\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome[\\/]chrome_cleaner[\\/].*",
                 r"^chrome[\\/]chrome_elf[\\/]dll_hash[\\/]dll_hash_main\.cc$",
                 r"^chrome[\\/]installer[\\/]setup[\\/].*",
                 r"^chromecast[\\/]",
                 r"^cloud_print[\\/]",
                 r"^components[\\/]browser_watcher[\\/]"
                     r"dump_stability_report_main_win.cc$",
                 r"^components[\\/]media_control[\\/]renderer[\\/]"
                     r"media_playback_options\.cc$",
                 r"^components[\\/]zucchini[\\/].*",
                 # TODO(peter): Remove this exception. https://crbug.com/534537
                 r"^content[\\/]browser[\\/]notifications[\\/]"
                     r"notification_event_dispatcher_impl\.cc$",
                 r"^content[\\/]common[\\/]gpu[\\/]client[\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\/]courgette_minimal_tool\.cc$",
                 r"^courgette[\\/]courgette_tool\.cc$",
                 r"^extensions[\\/]renderer[\\/]logging_native_handler\.cc$",
                 r"^fuchsia[\\/]engine[\\/]browser[\\/]frame_impl.cc$",
                 r"^fuchsia[\\/]engine[\\/]context_provider_main.cc$",
                 r"^headless[\\/]app[\\/]headless_shell\.cc$",
                 r"^ipc[\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\/]",
                 r"^remoting[\\/]base[\\/]logging\.h$",
                 r"^remoting[\\/]host[\\/].*",
                 r"^sandbox[\\/]linux[\\/].*",
                 r"^storage[\\/]browser[\\/]file_system[\\/]" +
                     r"dump_file_system.cc$",
                 r"^tools[\\/]",
                 r"^ui[\\/]base[\\/]resource[\\/]data_pack.cc$",
                 r"^ui[\\/]aura[\\/]bench[\\/]bench_main\.cc$",
                 r"^ui[\\/]ozone[\\/]platform[\\/]cast[\\/]",
                 r"^ui[\\/]base[\\/]x[\\/]xwmstartupcheck[\\/]"
                     r"xwmstartupcheck\.cc$"))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=file_inclusion_pattern, black_list=black_list)

  log_info = set()
  printf = set()

  for f in input_api.AffectedSourceFiles(source_file_filter):
    for _, line in f.ChangedContents():
      if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
        log_info.add(f.LocalPath())
      elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
        log_info.add(f.LocalPath())

      if input_api.re.search(r"\bprintf\(", line):
        printf.add(f.LocalPath())
      elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
        printf.add(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
      'These files spam the console log with LOG(INFO):',
      items=log_info)]
  elif printf:
    return [output_api.PresubmitError(
      'These files spam the console log with printf/fprintf:',
      items=printf)]
  return []
2560 def _CheckForAnonymousVariables(input_api, output_api):
2561 """These types are all expected to hold locks while in scope and
2562 so should never be anonymous (which causes them to be immediately
2564 they_who_must_be_named = [
2568 'SkAutoAlphaRestore',
2569 'SkAutoBitmapShaderInstall',
2570 'SkAutoBlitterChoose',
2571 'SkAutoBounderCommit',
2573 'SkAutoCanvasRestore',
2574 'SkAutoCommentBlock',
2576 'SkAutoDisableDirectionCheck',
2577 'SkAutoDisableOvalCheck',
2584 'SkAutoMaskFreeImage',
2585 'SkAutoMutexAcquire',
2586 'SkAutoPathBoundsUpdate',
2588 'SkAutoRasterClipValidate',
2594 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
2595 # bad: base::AutoLock(lock.get());
2596 # not bad: base::AutoLock lock(lock.get());
2597 bad_pattern = input_api.re.compile(anonymous)
2598 # good: new base::AutoLock(lock.get())
2599 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
2602 for f in input_api.AffectedFiles():
2603 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
2605 for linenum, line in f.ChangedContents():
2606 if bad_pattern.search(line) and not good_pattern.search(line):
2607 errors.append('%s:%d' % (f.LocalPath(), linenum))
2610 return [output_api.PresubmitError(
2611 'These lines create anonymous variables that need to be named:',
2616 def _CheckUniquePtr(input_api, output_api):
2617 # Returns whether |template_str| is of the form <T, U...> for some types T
2618 # and U. Assumes that |template_str| is already in the form <...>.
2619 def HasMoreThanOneArg(template_str):
2620 # Level of <...> nesting.
2622 for c in template_str:
2627 elif c == ',' and nesting == 1:
2631 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
2632 sources = lambda affected_file: input_api.FilterSourceFile(
2634 black_list=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
2635 input_api.DEFAULT_BLACK_LIST),
2636 white_list=file_inclusion_pattern)
2638 # Pattern to capture a single "<...>" block of template arguments. It can
2639 # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
2640 # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
2641 # latter would likely require counting that < and > match, which is not
2642 # expressible in regular languages. Should the need arise, one can introduce
2643 # limited counting (matching up to a total number of nesting depth), which
2644 # should cover all practical cases for already a low nesting limit.
2645 template_arg_pattern = (
2646 r'<[^>]*' # Opening block of <.
2647 r'>([^<]*>)?') # Closing block of >.
2648 # Prefix expressing that whatever follows is not already inside a <...>
2650 not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
2651 null_construct_pattern = input_api.re.compile(
2652 not_inside_template_arg_pattern
2653 + r'\bstd::unique_ptr'
2654 + template_arg_pattern
2657 # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
2658 template_arg_no_array_pattern = (
2659 r'<[^>]*[^]]' # Opening block of <.
2660 r'>([^(<]*[^]]>)?') # Closing block of >.
2661 # Prefix saying that what follows is the start of an expression.
2662 start_of_expr_pattern = r'(=|\breturn|^)\s*'
2663 # Suffix saying that what follows are call parentheses with a non-empty list
2665 nonempty_arg_list_pattern = r'\(([^)]|$)'
2666 # Put the template argument into a capture group for deeper examination later.
2667 return_construct_pattern = input_api.re.compile(
2668 start_of_expr_pattern
2669 + r'std::unique_ptr'
2670 + '(?P<template_arg>'
2671 + template_arg_no_array_pattern
2673 + nonempty_arg_list_pattern)
2675 problems_constructor = []
2676 problems_nullptr = []
2677 for f in input_api.AffectedSourceFiles(sources):
2678 for line_number, line in f.ChangedContents():
2680 # return std::unique_ptr<T>(foo);
2681 # bar = std::unique_ptr<T>(foo);
2683 # return std::unique_ptr<T[]>(foo);
2684 # bar = std::unique_ptr<T[]>(foo);
2685 # And also allow cases when the second template argument is present. Those
2686 # cases cannot be handled by std::make_unique:
2687 # return std::unique_ptr<T, U>(foo);
2688 # bar = std::unique_ptr<T, U>(foo);
2689 local_path = f.LocalPath()
2690 return_construct_result = return_construct_pattern.search(line)
2691 if return_construct_result and not HasMoreThanOneArg(
2692 return_construct_result.group('template_arg')):
2693 problems_constructor.append(
2694 '%s:%d\n %s' % (local_path, line_number, line.strip()))
2696 # std::unique_ptr<T>()
2697 if null_construct_pattern.search(line):
2698 problems_nullptr.append(
2699 '%s:%d\n %s' % (local_path, line_number, line.strip()))
2702 if problems_nullptr:
2703 errors.append(output_api.PresubmitError(
2704 'The following files use std::unique_ptr<T>(). Use nullptr instead.',
2706 if problems_constructor:
2707 errors.append(output_api.PresubmitError(
2708 'The following files use explicit std::unique_ptr constructor.'
2709 'Use std::make_unique<T>() instead.',
2710 problems_constructor))
2714 def _CheckUserActionUpdate(input_api, output_api):
2715 """Checks if any new user action has been added."""
2716 if any('actions.xml' == input_api.os_path.basename(f) for f in
2717 input_api.LocalPaths()):
2718 # If actions.xml is already included in the changelist, the PRESUBMIT
2719 # for actions.xml will do a more complete presubmit check.
2722 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
2723 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
2724 current_actions = None
2725 for f in input_api.AffectedFiles(file_filter=file_filter):
2726 for line_num, line in f.ChangedContents():
2727 match = input_api.re.search(action_re, line)
2729 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
2731 if not current_actions:
2732 with open('tools/metrics/actions/actions.xml') as actions_f:
2733 current_actions = actions_f.read()
2734 # Search for the matched user action name in |current_actions|.
2735 for action_name in match.groups():
2736 action = 'name="{0}"'.format(action_name)
2737 if action not in current_actions:
2738 return [output_api.PresubmitPromptWarning(
2739 'File %s line %d: %s is missing in '
2740 'tools/metrics/actions/actions.xml. Please run '
2741 'tools/metrics/actions/extract_actions.py to update.'
2742 % (f.LocalPath(), line_num, action_name))]
def _ImportJSONCommentEater(input_api):
  """Imports and returns the json_comment_eater module from
  //tools/json_comment_eater by extending sys.path.

  The module strips JavaScript-style comments from JSON-ish text.
  NOTE(review): sys.path is intentionally left extended, matching the
  original behavior; callers rely on the module staying importable.
  """
  import sys
  sys.path = sys.path + [input_api.os_path.join(
      input_api.PresubmitLocalPath(),
      'tools', 'json_comment_eater')]
  import json_comment_eater
  return json_comment_eater
2755 def _GetJSONParseError(input_api, filename, eat_comments=True):
2757 contents = input_api.ReadFile(filename)
2759 json_comment_eater = _ImportJSONCommentEater(input_api)
2760 contents = json_comment_eater.Nom(contents)
2762 input_api.json.loads(contents)
2763 except ValueError as e:
def _GetIDLParseError(input_api, filename):
  """Returns the parse error (string or ValueError) produced by running
  tools/json_schema_compiler/idl_schema.py over |filename|, or None."""
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    # The schema compiler reads the IDL text on stdin and reports problems
    # on stderr; an empty stderr means a clean parse.
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\/]common[\\/]extensions[\\/]api[\\/]',
    r'^extensions[\\/]common[\\/]api[\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if _MatchesFile(input_api,
                    _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS,
                    path):
      return False

    if (action == _GetIDLParseError and
        not _MatchesFile(input_api, idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        _MatchesFile(input_api, json_no_comments_patterns,
                     affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
2857 def _CheckPythonDevilInit(input_api, output_api):
2858 """Checks to make sure devil is initialized correctly in python scripts."""
2859 script_common_initialize_pattern = input_api.re.compile(
2860 r'script_common\.InitializeEnvironment\(')
2861 devil_env_config_initialize = input_api.re.compile(
2862 r'devil_env\.config\.Initialize\(')
2866 sources = lambda affected_file: input_api.FilterSourceFile(
2868 black_list=(_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST +
2869 (r'^build[\\/]android[\\/]devil_chromium\.py',
2870 r'^third_party[\\/].*',)),
2871 white_list=[r'.*\.py$'])
2873 for f in input_api.AffectedSourceFiles(sources):
2874 for line_num, line in f.ChangedContents():
2875 if (script_common_initialize_pattern.search(line) or
2876 devil_env_config_initialize.search(line)):
2877 errors.append("%s:%d" % (f.LocalPath(), line_num))
2882 results.append(output_api.PresubmitError(
2883 'Devil initialization should always be done using '
2884 'devil_chromium.Initialize() in the chromium project, to use better '
2885 'defaults for dependencies (ex. up-to-date version of adb).',
2891 def _MatchesFile(input_api, patterns, path):
2892 for pattern in patterns:
2893 if input_api.re.search(pattern, path):
2898 def _GetOwnersFilesToCheckForIpcOwners(input_api):
2899 """Gets a list of OWNERS files to check for correct security owners.
2902 A dictionary mapping an OWNER file to the list of OWNERS rules it must
2903 contain to cover IPC-related files with noparent reviewer rules.
2905 # Whether or not a file affects IPC is (mostly) determined by a simple list
2906 # of filename patterns.
2911 '*_param_traits*.*',
2914 '*_mojom_traits*.*',
2915 '*_struct_traits*.*',
2916 '*_type_converter*.*',
2918 # Android native IPC:
2920 # Blink uses a different file naming convention:
2924 '*TypeConverter*.*',
2927 # These third_party directories do not contain IPCs, but contain files
2928 # matching the above patterns, which trigger false positives.
2930 'third_party/crashpad/*',
2931 'third_party/blink/renderer/platform/bindings/*',
2932 'third_party/protobuf/benchmarks/python/*',
2933 'third_party/win_build_output/*',
2934 'third_party/feed_library/*',
2935 # These files are just used to communicate between class loaders running
2936 # in the same process.
2937 'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
2938 'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',
2942 # Dictionary mapping an OWNERS file path to Patterns.
2943 # Patterns is a dictionary mapping glob patterns (suitable for use in per-file
2944 # rules ) to a PatternEntry.
2945 # PatternEntry is a dictionary with two keys:
2946 # - 'files': the files that are matched by this pattern
2947 # - 'rules': the per-file rules needed for this pattern
2948 # For example, if we expect OWNERS file to contain rules for *.mojom and
2949 # *_struct_traits*.*, Patterns might look like this:
2954 # 'per-file *.mojom=set noparent',
2955 # 'per-file *.mojom=file://ipc/SECURITY_OWNERS',
2958 # '*_struct_traits*.*': {
2961 # 'per-file *_struct_traits*.*=set noparent',
2962 # 'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
2968 def AddPatternToCheck(input_file, pattern):
2969 owners_file = input_api.os_path.join(
2970 input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
2971 if owners_file not in to_check:
2972 to_check[owners_file] = {}
2973 if pattern not in to_check[owners_file]:
2974 to_check[owners_file][pattern] = {
2977 'per-file %s=set noparent' % pattern,
2978 'per-file %s=file://ipc/SECURITY_OWNERS' % pattern,
2981 to_check[owners_file][pattern]['files'].append(input_file)
2983 # Iterate through the affected files to see what we actually need to check
2984 # for. We should only nag patch authors about per-file rules if a file in that
2985 # directory would match that pattern. If a directory only contains *.mojom
2986 # files and no *_messages*.h files, we should only nag about rules for
2988 for f in input_api.AffectedFiles(include_deletes=False):
2989 # Manifest files don't have a strong naming convention. Instead, try to find
2990 # affected .cc and .h files which look like they contain a manifest
2992 manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
2993 test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
2994 if (manifest_pattern.search(f.LocalPath()) and not
2995 test_manifest_pattern.search(f.LocalPath())):
2996 # We expect all actual service manifest files to contain at least one
2997 # qualified reference to service_manager::Manifest.
2998 if 'service_manager::Manifest' in '\n'.join(f.NewContents()):
2999 AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
3000 for pattern in file_patterns:
3001 if input_api.fnmatch.fnmatch(
3002 input_api.os_path.basename(f.LocalPath()), pattern):
3004 for exclude in exclude_paths:
3005 if input_api.fnmatch.fnmatch(f.LocalPath(), exclude):
3010 AddPatternToCheck(f, pattern)
3016 def _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check):
3017 """Adds OWNERS files to check for correct Fuchsia security owners."""
3020 # Component specifications.
3021 '*.cml', # Component Framework v2.
3022 '*.cmx', # Component Framework v1.
3024 # Fuchsia IDL protocol specifications.
3028 def AddPatternToCheck(input_file, pattern):
3029 owners_file = input_api.os_path.join(
3030 input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
3031 if owners_file not in to_check:
3032 to_check[owners_file] = {}
3033 if pattern not in to_check[owners_file]:
3034 to_check[owners_file][pattern] = {
3037 'per-file %s=set noparent' % pattern,
3038 'per-file %s=file://fuchsia/SECURITY_OWNERS' % pattern,
3041 to_check[owners_file][pattern]['files'].append(input_file)
3043 # Iterate through the affected files to see what we actually need to check
3044 # for. We should only nag patch authors about per-file rules if a file in that
3045 # directory would match that pattern.
3046 for f in input_api.AffectedFiles(include_deletes=False):
3047 for pattern in file_patterns:
3048 if input_api.fnmatch.fnmatch(
3049 input_api.os_path.basename(f.LocalPath()), pattern):
3050 AddPatternToCheck(f, pattern)
def _CheckSecurityOwners(input_api, output_api):
  """Checks that affected files involving IPC have an IPC OWNERS rule."""
  to_check = _GetOwnersFilesToCheckForIpcOwners(input_api)
  _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check)

  if to_check:
    # If there are any OWNERS files to check, there are IPC-related changes in
    # this CL. Auto-CC the review list.
    output_api.AppendCC('ipc-security-reviews@chromium.org')

  # Go through the OWNERS files to check, filtering out rules that are already
  # present in that OWNERS file. Note: .items()/.values() and open() (instead
  # of the Python-2-only .iteritems()/.itervalues()/file()) behave the same
  # on Python 2 and also work on Python 3.
  for owners_file, patterns in to_check.items():
    try:
      with open(owners_file) as f:
        lines = set(f.read().splitlines())
        for entry in patterns.values():
          entry['rules'] = [rule for rule in entry['rules'] if rule not in lines
                           ]
    except IOError:
      # No OWNERS file, so all the rules are definitely missing.
      continue

  # All the remaining lines weren't found in OWNERS files, so emit an error.
  errors = []
  for owners_file, patterns in to_check.items():
    missing_lines = []
    files = []
    for _, entry in patterns.items():
      missing_lines.extend(entry['rules'])
      files.extend(['  %s' % f.LocalPath() for f in entry['files']])
    if missing_lines:
      errors.append(
          'Because of the presence of files:\n%s\n\n'
          '%s needs the following %d lines added:\n\n%s' %
          ('\n'.join(files), owners_file, len(missing_lines),
           '\n'.join(missing_lines)))

  results = []
  if errors:
    if input_api.is_committing:
      output = output_api.PresubmitError
    else:
      output = output_api.PresubmitPromptWarning
    results.append(output(
        'Found OWNERS files that need to be updated for IPC security ' +
        'review coverage.\nPlease update the OWNERS files below:',
        long_text='\n\n'.join(errors)))

  return results
3108 def _GetFilesUsingSecurityCriticalFunctions(input_api):
3109 """Checks affected files for changes to security-critical calls. This
3110 function checks the full change diff, to catch both additions/changes
3113 Returns a dict keyed by file name, and the value is a set of detected
3116 # Map of function pretty name (displayed in an error) to the pattern to
3118 _PATTERNS_TO_CHECK = {
3119 'content::GetServiceSandboxType<>()':
3120 'GetServiceSandboxType\\<'
3122 _PATTERNS_TO_CHECK = {
3123 k: input_api.re.compile(v)
3124 for k, v in _PATTERNS_TO_CHECK.items()
3127 # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
3128 files_to_functions = {}
3129 for f in input_api.AffectedFiles():
3130 diff = f.GenerateScmDiff()
3131 for line in diff.split('\n'):
3132 # Not using just RightHandSideLines() because removing a
3133 # call to a security-critical function can be just as important
3134 # as adding or changing the arguments.
3135 if line.startswith('-') or (line.startswith('+') and
3136 not line.startswith('++')):
3137 for name, pattern in _PATTERNS_TO_CHECK.items():
3138 if pattern.search(line):
3139 path = f.LocalPath()
3140 if not path in files_to_functions:
3141 files_to_functions[path] = set()
3142 files_to_functions[path].add(name)
3143 return files_to_functions
def _CheckSecurityChanges(input_api, output_api):
  """Checks that changes involving security-critical functions are reviewed
  by the security team.
  """
  files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
  if len(files_to_functions):
    owners_db = input_api.owners_db
    owner_email, reviewers = (
        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
            input_api,
            owners_db.email_regexp,
            approval_needed=input_api.is_committing))

    # Load the OWNERS file for security changes.
    owners_file = 'ipc/SECURITY_OWNERS'
    security_owners = owners_db.owners_rooted_at_file(owners_file)

    has_security_owner = any([owner in reviewers for owner in security_owners])
    if not has_security_owner:
      # Typo fix in the user-facing message: "sensive" -> "sensitive".
      msg = 'The following files change calls to security-sensitive functions\n' \
          'that need to be reviewed by {}.\n'.format(owners_file)
      for path, names in files_to_functions.items():
        msg += '  {}\n'.format(path)
        for name in names:
          msg += '    {}\n'.format(name)

      if input_api.is_committing:
        output = output_api.PresubmitError
      else:
        output = output_api.PresubmitNotifyResult
      return [output(msg)]
  return []
def _CheckSetNoParent(input_api, output_api):
  """Checks that set noparent is only used together with an OWNERS file in
     //build/OWNERS.setnoparent (see also
     //docs/code_reviews.md#owners-files-details)
  """
  errors = []

  # Read the set of OWNERS files that are allowed to accompany 'set noparent'.
  allowed_owners_files_file = 'build/OWNERS.setnoparent'
  allowed_owners_files = set()
  with open(allowed_owners_files_file, 'r') as f:
    for line in f:
      line = line.strip()
      if not line or line.startswith('#'):
        continue
      allowed_owners_files.add(line)

  per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')

  for f in input_api.AffectedFiles(include_deletes=False):
    if not f.LocalPath().endswith('OWNERS'):
      continue

    found_owners_files = set()
    found_set_noparent_lines = dict()

    # Parse the OWNERS file.
    for lineno, line in enumerate(f.NewContents(), 1):
      line = line.strip()
      if line.startswith('set noparent'):
        found_set_noparent_lines[''] = lineno
      if line.startswith('file://'):
        if line in allowed_owners_files:
          found_owners_files.add('')
      if line.startswith('per-file'):
        match = per_file_pattern.match(line)
        if match:
          glob = match.group(1).strip()
          directive = match.group(2).strip()
          if directive == 'set noparent':
            found_set_noparent_lines[glob] = lineno
          if directive.startswith('file://'):
            if directive in allowed_owners_files:
              found_owners_files.add(glob)

    # Check that every set noparent line has a corresponding file:// line
    # listed in build/OWNERS.setnoparent.
    for set_noparent_line in found_set_noparent_lines:
      if set_noparent_line in found_owners_files:
        continue
      errors.append('  %s:%d' % (f.LocalPath(),
                                 found_set_noparent_lines[set_noparent_line]))

  results = []
  if errors:
    if input_api.is_committing:
      output = output_api.PresubmitError
    else:
      output = output_api.PresubmitPromptWarning
    results.append(output(
        'Found the following "set noparent" restrictions in OWNERS files that '
        'do not include owners from build/OWNERS.setnoparent:',
        long_text='\n\n'.join(errors)))
  return results
# Presubmit: warn when a change leaves behind a 'class Foo;' / 'struct Foo;'
# forward declaration whose name no longer appears elsewhere in the header.
# NOTE(review): elided lines here (e.g. 3261-3262, 3264-3265, 3274, 3278-3279)
# hide the 'continue's, the count threshold test, and the results init/return.
3247 def _CheckUselessForwardDeclarations(input_api, output_api):
3248   """Checks that added or removed lines in non third party affected
3249   header files do not lead to new useless class or struct forward
# Whole-line forward declarations only (MULTILINE anchors ^/$ per line).
3253   class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
3254       input_api.re.MULTILINE)
3255   struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
3256       input_api.re.MULTILINE)
3257   for f in input_api.AffectedFiles(include_deletes=False):
# third_party is exempt, except Blink (both path-separator spellings checked).
3258     if (f.LocalPath().startswith('third_party') and
3259         not f.LocalPath().startswith('third_party/blink') and
3260         not f.LocalPath().startswith('third_party\\blink')):
3263     if not f.LocalPath().endswith('.h'):
3266     contents = input_api.ReadFile(f)
3267     fwd_decls = input_api.re.findall(class_pattern, contents)
3268     fwd_decls.extend(input_api.re.findall(struct_pattern, contents))
# A declaration is "useless" when its identifier barely occurs in the file
# (the exact count comparison is on an elided line — verify in full file).
3270     useless_fwd_decls = []
3271     for decl in fwd_decls:
3272       count = sum(1 for _ in input_api.re.finditer(
3273           r'\b%s\b' % input_api.re.escape(decl), contents))
3275         useless_fwd_decls.append(decl)
3277     if not useless_fwd_decls:
# Only flag declarations actually touched by this change: scan added ('+')
# and removed ('-') diff lines, ignoring the '---'/'+++' file headers.
3280     for line in f.GenerateScmDiff().splitlines():
3281       if (line.startswith('-') and not line.startswith('--') or
3282           line.startswith('+') and not line.startswith('++')):
3283         for decl in useless_fwd_decls:
3284           if input_api.re.search(r'\b%s\b' % decl, line[1:]):
3285             results.append(output_api.PresubmitPromptWarning(
3286                 '%s: %s forward declaration is no longer needed' %
3287                 (f.LocalPath(), decl)))
# Remove so each declaration is reported at most once per file.
3288             useless_fwd_decls.remove(decl)
# Presubmit (Java): flag direct Build.TYPE equality checks; BuildInfo's helper
# should be used instead. Elided lines (e.g. 3299-3301, 3319-3322) hide the
# errors/results initialisation and the surrounding return.
3292 def _CheckAndroidDebuggableBuild(input_api, output_api):
3293   """Checks that code uses BuildInfo.isDebugAndroid() instead of
3294   Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
3295   this is a debuggable build of Android.
# Matches both argument orders: Build.TYPE.equals(...) and ....equals(Build.TYPE).
3297   build_type_check_pattern = input_api.re.compile(
3298       r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
# Restrict to .java files, excluding tests, third_party, and a few
# directories that legitimately cannot depend on //base's BuildInfo.
3302   sources = lambda affected_file: input_api.FilterSourceFile(
3304       black_list=(_EXCLUDED_PATHS +
3305                   _TEST_CODE_EXCLUDED_PATHS +
3306                   input_api.DEFAULT_BLACK_LIST +
3307                   (r"^android_webview[\\/]support_library[\\/]"
3308                    "boundary_interfaces[\\/]",
3309                    r"^chrome[\\/]android[\\/]webapk[\\/].*",
3310                    r'^third_party[\\/].*',
3311                    r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
3312                    r"webview[\\/]chromium[\\/]License.*",)),
3313       white_list=[r'.*\.java$'])
3315   for f in input_api.AffectedSourceFiles(sources):
3316     for line_num, line in f.ChangedContents():
3317       if build_type_check_pattern.search(line):
3318         errors.append("%s:%d" % (f.LocalPath(), line_num))
3323     results.append(output_api.PresubmitPromptWarning(
3324         'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
3325         ' Please use BuildInfo.isDebugAndroid() instead.',
3330 # TODO: add unit tests
# Presubmit (Java): hard error on importing android.widget.Toast — the
# Chromium wrapper must be used instead. Errors/results init and the return
# are on elided lines (gaps after 3337 and 3353).
3331 def _CheckAndroidToastUsage(input_api, output_api):
3332   """Checks that code uses org.chromium.ui.widget.Toast instead of
3333   android.widget.Toast (Chromium Toast doesn't force hardware
3334   acceleration on low-end devices, saving memory).
# Anchored to the whole line, so only a bare import statement matches.
3336   toast_import_pattern = input_api.re.compile(
3337       r'^import android\.widget\.Toast;$')
# chromecast and remoting are exempt, along with the usual exclusions.
3341   sources = lambda affected_file: input_api.FilterSourceFile(
3343       black_list=(_EXCLUDED_PATHS +
3344                   _TEST_CODE_EXCLUDED_PATHS +
3345                   input_api.DEFAULT_BLACK_LIST +
3346                   (r'^chromecast[\\/].*',
3347                    r'^remoting[\\/].*')),
3348       white_list=[r'.*\.java$'])
3350   for f in input_api.AffectedSourceFiles(sources):
3351     for line_num, line in f.ChangedContents():
3352       if toast_import_pattern.search(line):
3353         errors.append("%s:%d" % (f.LocalPath(), line_num))
3358     results.append(output_api.PresubmitError(
3359         'android.widget.Toast usage is detected. Android toasts use hardware'
3360         ' acceleration, and can be\ncostly on low-end devices. Please use'
3361         ' org.chromium.ui.widget.Toast instead.\n'
3362         'Contact dskiba@chromium.org if you have any questions.',
# Presubmit (Java): style checks for org.chromium.base.Log call sites and the
# TAG constant. NOTE(review): numbering gaps (e.g. 3401, 3404, 3419, 3422,
# 3424, 3428, 3433-3434, 3439, 3445-3447) hide tag_errors init, match guards,
# else branches and the return — verify against the full file before editing.
3368 def _CheckAndroidCrLogUsage(input_api, output_api):
3369   """Checks that new logs using org.chromium.base.Log:
3370     - Are using 'TAG' as variable name for the tags (warn)
3371     - Are using a tag that is shorter than 20 characters (error)
3374   # Do not check format of logs in the given files
3375   cr_log_check_excluded_paths = [
3376     # //chrome/android/webapk cannot depend on //base
3377     r"^chrome[\\/]android[\\/]webapk[\\/].*",
3378     # WebView license viewer code cannot depend on //base; used in stub APK.
3379     r"^android_webview[\\/]glue[\\/]java[\\/]src[\\/]com[\\/]android[\\/]"
3380     r"webview[\\/]chromium[\\/]License.*",
3381     # The customtabs_benchmark is a small app that does not depend on Chromium
3383     r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
# Patterns distinguishing files that use the Chromium Log (directly imported,
# or in the org.chromium.base package with no other Log import).
3386   cr_log_import_pattern = input_api.re.compile(
3387       r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
3388   class_in_base_pattern = input_api.re.compile(
3389       r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
3390   has_some_log_import_pattern = input_api.re.compile(
3391       r'^import .*\.Log;$', input_api.re.MULTILINE)
3392   # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
3393   log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
3394   log_decl_pattern = input_api.re.compile(
3395       r'static final String TAG = "(?P<name>(.*))"')
3396   rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')
3398   REF_MSG = ('See docs/android_logging.md for more info.')
3399   sources = lambda x: input_api.FilterSourceFile(x, white_list=[r'.*\.java$'],
3400       black_list=cr_log_check_excluded_paths)
# One bucket per distinct problem; each is reported separately below.
3402   tag_decl_errors = []
3403   tag_length_errors = []
3405   tag_with_dot_errors = []
3406   util_log_errors = []
3408   for f in input_api.AffectedSourceFiles(sources):
3409     file_content = input_api.ReadFile(f)
3410     has_modified_logs = False
# File uses cr Log when it imports it, or lives in org.chromium.base and
# imports no other Log class.
3412     if (cr_log_import_pattern.search(file_content) or
3413         (class_in_base_pattern.search(file_content) and
3414          not has_some_log_import_pattern.search(file_content))):
3415       # Checks to run for files using cr log
3416       for line_num, line in f.ChangedContents():
3417         if rough_log_decl_pattern.search(line):
3418           has_modified_logs = True
3420         # Check if the new line is doing some logging
3421         match = log_call_pattern.search(line)
3423           has_modified_logs = True
3425           # Make sure it uses "TAG"
3426           if not match.group('tag') == 'TAG':
3427             tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
3429       # Report non cr Log function calls in changed lines
3430       for line_num, line in f.ChangedContents():
3431         if log_call_pattern.search(line):
3432           util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))
# Tag-declaration checks only run when the change touched logging.
3435     if has_modified_logs:
3436       # Make sure the tag is using the "cr" prefix and is not too long
3437       match = log_decl_pattern.search(file_content)
3438       tag_name = match.group('name') if match else None
3440         tag_decl_errors.append(f.LocalPath())
3441       elif len(tag_name) > 20:
3442         tag_length_errors.append(f.LocalPath())
3443       elif '.' in tag_name:
3444         tag_with_dot_errors.append(f.LocalPath())
3448     results.append(output_api.PresubmitPromptWarning(
3449         'Please define your tags using the suggested format: .\n'
3450         '"private static final String TAG = "<package tag>".\n'
3451         'They will be prepended with "cr_" automatically.\n' + REF_MSG,
# Over-long tags are a hard error: the Android system truncates them.
3454   if tag_length_errors:
3455     results.append(output_api.PresubmitError(
3456         'The tag length is restricted by the system to be at most '
3457         '20 characters.\n' + REF_MSG,
3461     results.append(output_api.PresubmitPromptWarning(
3462         'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
3466     results.append(output_api.PresubmitPromptWarning(
3467         'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
3470   if tag_with_dot_errors:
3471     results.append(output_api.PresubmitPromptWarning(
3472         'Dot in log tags cause them to be elided in crash reports.\n' + REF_MSG,
3473         tag_with_dot_errors))
# Presubmit (Java): hard error on any 'import junit.framework.*' — JUnit3 is
# deprecated in favour of JUnit4. errors/results init and the return sit on
# elided lines (gaps after 3484 and 3489).
3478 def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
3479   """Checks that junit.framework.* is no longer used."""
3480   deprecated_junit_framework_pattern = input_api.re.compile(
3481       r'^import junit\.framework\..*;',
3482       input_api.re.MULTILINE)
# All .java files; no blacklist.
3483   sources = lambda x: input_api.FilterSourceFile(
3484       x, white_list=[r'.*\.java$'], black_list=None)
3486   for f in input_api.AffectedFiles(file_filter=sources):
3487     for line_num, line in f.ChangedContents():
3488       if deprecated_junit_framework_pattern.search(line):
3489         errors.append("%s:%d" % (f.LocalPath(), line_num))
3493     results.append(output_api.PresubmitError(
3494         'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
3495         '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
3496         ' if you have any question.', errors))
# Presubmit (Java): warn when a newly added *Test.java class extends a base
# class. Elided lines (e.g. 3506, 3509, 3518, 3520-3522) hide errors/results
# init, a guard/continue, and the return.
3500 def _CheckAndroidTestJUnitInheritance(input_api, output_api):
3501   """Checks that if new Java test classes have inheritance.
3502   Either the new test class is JUnit3 test or it is a JUnit4 test class
3503   with a base class, either case is undesirable.
3505   class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')
3507   sources = lambda x: input_api.FilterSourceFile(
3508       x, white_list=[r'.*Test\.java$'], black_list=None)
3510   for f in input_api.AffectedFiles(file_filter=sources):
# Empty OldContents() means the file is newly added; only new files checked.
3511     if not f.OldContents():
# Flag stays set across lines because the 'extends' clause may be wrapped
# onto a line after the class declaration itself.
3512       class_declaration_start_flag = False
3513       for line_num, line in f.ChangedContents():
3514         if class_declaration_pattern.search(line):
3515           class_declaration_start_flag = True
3516         if class_declaration_start_flag and ' extends ' in line:
3517           errors.append('%s:%d' % (f.LocalPath(), line_num))
3519           class_declaration_start_flag = False
3523     results.append(output_api.PresubmitPromptWarning(
3524         'The newly created files include Test classes that inherits from base'
3525         ' class. Please do not use inheritance in JUnit4 tests or add new'
3526         ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
3527         ' questions.', errors))
# Presubmit (Java): hard error on imports from the deprecated
# android.test.suitebuilder.annotation package. errors/results init and the
# return are on elided lines (gaps after 3537 and 3542).
3531 def _CheckAndroidTestAnnotationUsage(input_api, output_api):
3532   """Checks that android.test.suitebuilder.annotation.* is no longer used."""
3533   deprecated_annotation_import_pattern = input_api.re.compile(
3534       r'^import android\.test\.suitebuilder\.annotation\..*;',
3535       input_api.re.MULTILINE)
3536   sources = lambda x: input_api.FilterSourceFile(
3537       x, white_list=[r'.*\.java$'], black_list=None)
3539   for f in input_api.AffectedFiles(file_filter=sources):
3540     for line_num, line in f.ChangedContents():
3541       if deprecated_annotation_import_pattern.search(line):
3542         errors.append("%s:%d" % (f.LocalPath(), line_num))
3546     results.append(output_api.PresubmitError(
3547         'Annotations in android.test.suitebuilder.annotation have been'
3548         ' deprecated since API level 24. Please use android.support.test.filters'
3549         ' from //third_party/android_support_test_runner:runner_java instead.'
3550         ' Contact yolandyan@chromium.org if you have any questions.', errors))
# Presubmit (Android resources): PNGs must not land in density-unqualified
# /res/drawable* directories. errors/results init and the return are elided
# (gaps after 3558 and 3562).
3554 def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
3555   """Checks if MDPI assets are placed in a correct directory."""
# Only newly present .png files in the two unqualified drawable dirs.
3556   file_filter = lambda f: (f.LocalPath().endswith('.png') and
3557                            ('/res/drawable/' in f.LocalPath() or
3558                             '/res/drawable-ldrtl/' in f.LocalPath()))
3560   for f in input_api.AffectedFiles(include_deletes=False,
3561                                    file_filter=file_filter):
3562     errors.append(' %s' % f.LocalPath())
3566     results.append(output_api.PresubmitError(
3567         'MDPI assets should be placed in /res/drawable-mdpi/ or '
3568         '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
3569         '/res/drawable-ldrtl/.\n'
3570         'Contact newt@chromium.org if you have questions.', errors))
# Presubmit (Java): android.webkit.ValueCallback may only be imported inside
# the WebView glue layer (and weblayer). errors/results init and the return
# are on elided lines (gaps after 3580 and 3596).
3574 def _CheckAndroidWebkitImports(input_api, output_api):
3575   """Checks that code uses org.chromium.base.Callback instead of
3576   android.webview.ValueCallback except in the WebView glue layer
3579   valuecallback_import_pattern = input_api.re.compile(
3580       r'^import android\.webkit\.ValueCallback;$')
# Glue layer and weblayer are exempt — they must interoperate with the
# android.webkit API surface directly.
3584   sources = lambda affected_file: input_api.FilterSourceFile(
3586       black_list=(_EXCLUDED_PATHS +
3587                   _TEST_CODE_EXCLUDED_PATHS +
3588                   input_api.DEFAULT_BLACK_LIST +
3589                   (r'^android_webview[\\/]glue[\\/].*',
3590                    r'^weblayer[\\/].*',)),
3591       white_list=[r'.*\.java$'])
3593   for f in input_api.AffectedSourceFiles(sources):
3594     for line_num, line in f.ChangedContents():
3595       if valuecallback_import_pattern.search(line):
3596         errors.append("%s:%d" % (f.LocalPath(), line_num))
3601     results.append(output_api.PresubmitError(
3602         'android.webkit.ValueCallback usage is detected outside of the glue'
3603         ' layer. To stay compatible with the support library, android.webkit.*'
3604         ' classes should only be used inside the glue layer and'
3605         ' org.chromium.base.Callback should be used instead.',
# Presubmit: delegate Android XML style checking to tools/android/checkxmlstyle.
# The try/finally that normally wraps the sys.path mutation is partly elided
# (gaps after 3614 and 3618); the restore at 3621 is visible.
3611 def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
3612   """Checks Android XML styles """
# Temporarily extend sys.path so the checker module can be imported from
# the source tree; restored below.
3614   original_sys_path = sys.path
3616   sys.path = sys.path + [input_api.os_path.join(
3617       input_api.PresubmitLocalPath(), 'tools', 'android', 'checkxmlstyle')]
3618   import checkxmlstyle
3620   # Restore sys.path to what it was before.
3621   sys.path = original_sys_path
# Upload and commit run different (typically stricter) rule sets.
3623   if is_check_on_upload:
3624     return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
3626   return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
# Helper for the .pydeps staleness presubmit: maps changed .py files to the
# .pydeps manifests that list them, and re-runs the generator to detect drift.
# NOTE(review): several lines are elided in this extract (e.g. 3634, 3641,
# 3652, 3656-3657, 3682-3684, 3695-3696) — ret initialisation/return, blank
# separators, and the try/else around DetermineIfStale's body.
3629 class PydepsChecker(object):
3630   def __init__(self, input_api, pydeps_files):
# Cache of raw file contents keyed by path, shared by all methods.
3631     self._file_cache = {}
3632     self._input_api = input_api
3633     self._pydeps_files = pydeps_files
3635   def _LoadFile(self, path):
3636     """Returns the list of paths within a .pydeps file relative to //."""
3637     if path not in self._file_cache:
3638       with open(path) as f:
3639         self._file_cache[path] = f.read()
3640     return self._file_cache[path]
3642   def _ComputeNormalizedPydepsEntries(self, pydeps_path):
3643     """Returns an iterable of paths within the .pydeps, relativized to //."""
3644     os_path = self._input_api.os_path
3645     pydeps_dir = os_path.dirname(pydeps_path)
# Lines starting with '*' are not path entries and are skipped.
3646     entries = (l.rstrip() for l in self._LoadFile(pydeps_path).splitlines()
3647                if not l.startswith('*'))
3648     return (os_path.normpath(os_path.join(pydeps_dir, e)) for e in entries)
3650   def _CreateFilesToPydepsMap(self):
3651     """Returns a map of local_path -> list_of_pydeps."""
3653     for pydep_local_path in self._pydeps_files:
3654       for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
3655         ret.setdefault(path, []).append(pydep_local_path)
3658   def ComputeAffectedPydeps(self):
3659     """Returns an iterable of .pydeps files that might need regenerating."""
3660     affected_pydeps = set()
# Built lazily below — only needed when a .py file changed.
3661     file_to_pydeps_map = None
3662     for f in self._input_api.AffectedFiles(include_deletes=True):
3663       local_path = f.LocalPath()
3664       # Changes to DEPS can lead to .pydeps changes if any .py files are in
3665       # subrepositories. We can't figure out which files change, so re-check
3667       # Changes to print_python_deps.py affect all .pydeps.
3668       if local_path in ('DEPS', 'PRESUBMIT.py') or local_path.endswith(
3669           'print_python_deps.py'):
3670         return self._pydeps_files
3671       elif local_path.endswith('.pydeps'):
3672         if local_path in self._pydeps_files:
3673           affected_pydeps.add(local_path)
3674       elif local_path.endswith('.py'):
3675         if file_to_pydeps_map is None:
3676           file_to_pydeps_map = self._CreateFilesToPydepsMap()
3677         affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
3678     return affected_pydeps
3680   def DetermineIfStale(self, pydeps_path):
3681     """Runs print_python_deps.py to see if the file is stale."""
# The generation command is embedded in the .pydeps file itself (second
# line, after the leading comment character).
3685     old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
3686     cmd = old_pydeps_data[1][1:].strip()
3687     env = dict(os.environ)
3688     env['PYTHONDONTWRITEBYTECODE'] = '1'
3689     new_pydeps_data = self._input_api.subprocess.check_output(
3690         cmd + ' --output ""', shell=True, env=env)
# Compare only the entry lists; the first two header lines may differ.
3691     old_contents = old_pydeps_data[2:]
3692     new_contents = new_pydeps_data.splitlines()[2:]
3693     if old_pydeps_data[2:] != new_pydeps_data.splitlines()[2:]:
3694       return cmd, '\n'.join(difflib.context_diff(old_contents, new_contents))
# Parse build/config/gclient_args.gni into a {name: value-string} dict.
# The args init, the per-line loop header, 'continue', and the return are on
# elided lines (gaps at 3698, 3700-3701, 3703, 3706-3708).
3697 def _ParseGclientArgs():
3699   with open('build/config/gclient_args.gni', 'r') as f:
# Skip blanks and comments; everything else is 'name = value'.
3702       if not line or line.startswith('#'):
3704       attribute, value = line.split('=')
3705       args[attribute.strip()] = value.strip()
# Presubmit: verify .pydeps manifests are regenerated when their inputs
# change, and that _ALL_PYDEPS_FILES tracks added/deleted .pydeps.
# NOTE(review): elided lines (e.g. 3715, 3718, 3736-3739, 3753, 3756,
# 3758-3759, 3767-3770) hide the early return, results init, the try around
# DetermineIfStale, the unpacking of its result, and the final return.
3709 def _CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
3710   """Checks if a .pydeps file needs to be regenerated."""
3711   # This check is for Python dependency lists (.pydeps files), and involves
3712   # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
3713   # doesn't work on Windows and Mac, so skip it on other platforms.
3714   if input_api.platform != 'linux2':
# Android checkouts validate the Android-specific manifests too.
3716   is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
3717   pydeps_to_check = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
3719   # First, check for new / deleted .pydeps.
3720   for f in input_api.AffectedFiles(include_deletes=True):
3721     # Check whether we are running the presubmit check for a file in src.
3722     # f.LocalPath is relative to repo (src, or internal repo).
3723     # os_path.exists is relative to src repo.
3724     # Therefore if os_path.exists is true, it means f.LocalPath is relative
3725     # to src and we can conclude that the pydeps is in src.
3726     if input_api.os_path.exists(f.LocalPath()):
3727       if f.LocalPath().endswith('.pydeps'):
# Deleted .pydeps must be removed from, and added ones listed in,
# _ALL_PYDEPS_FILES — both are hard errors.
3728         if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
3729           results.append(output_api.PresubmitError(
3730               'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
3731               'remove %s' % f.LocalPath()))
3732         elif f.Action() != 'D' and f.LocalPath() not in _ALL_PYDEPS_FILES:
3733           results.append(output_api.PresubmitError(
3734               'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
3735               'include %s' % f.LocalPath()))
# checker_for_tests lets unit tests inject a stub PydepsChecker.
3740   checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
3741   affected_pydeps = set(checker.ComputeAffectedPydeps())
3742   affected_android_pydeps = affected_pydeps.intersection(
3743       set(_ANDROID_SPECIFIC_PYDEPS_FILES))
# Android-only manifests cannot be validated without an Android checkout;
# emit a notify-level message instead of silently skipping.
3744   if affected_android_pydeps and not is_android:
3745     results.append(output_api.PresubmitPromptOrNotify(
3746         'You have changed python files that may affect pydeps for android\n'
3747         'specific scripts. However, the relevant presumbit check cannot be\n'
3748         'run because you are not using an Android checkout. To validate that\n'
3749         'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
3750         'use the android-internal-presubmit optional trybot.\n'
3751         'Possibly stale pydeps files:\n{}'.format(
3752             '\n'.join(affected_android_pydeps))))
3754   affected_pydeps_to_check = affected_pydeps.intersection(set(pydeps_to_check))
3755   for pydep_path in affected_pydeps_to_check:
3757       result = checker.DetermineIfStale(pydep_path)
3760         results.append(output_api.PresubmitError(
3761             'File is stale: %s\nDiff (apply to fix):\n%s\n'
3762             'To regenerate, run:\n\n    %s' %
3763             (pydep_path, diff, cmd)))
3764     except input_api.subprocess.CalledProcessError as error:
3765       return [output_api.PresubmitError('Error running: %s' % error.cmd,
3766           long_text=error.output)]
# Presubmit (C++): base::Singleton<T> must not be referenced from headers so
# the template instantiates in exactly one TU. The files list init, the
# append on match, and the empty-result return are elided (gaps at 3781,
# 3783, 3791-3794, 3799-3804).
3771 def _CheckSingletonInHeaders(input_api, output_api):
3772   """Checks to make sure no header files have |Singleton<|."""
3773   def FileFilter(affected_file):
3774     # It's ok for base/memory/singleton.h to have |Singleton<|.
3775     black_list = (_EXCLUDED_PATHS +
3776                   input_api.DEFAULT_BLACK_LIST +
3777                   (r"^base[\\/]memory[\\/]singleton\.h$",
3778                    r"^net[\\/]quic[\\/]platform[\\/]impl[\\/]"
3779                    r"quic_singleton_impl\.h$"))
3780     return input_api.FilterSourceFile(affected_file, black_list=black_list)
# Negative lookbehind excludes the declaration 'class base::Singleton<...'.
3782   pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
3784   for f in input_api.AffectedSourceFiles(FileFilter):
3785     if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
3786         f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
3787       contents = input_api.ReadFile(f)
3788       for line in contents.splitlines(False):
3789         if (not line.lstrip().startswith('//') and # Strip C++ comment.
3790             pattern.search(line)):
3795   return [output_api.PresubmitError(
3796       'Found base::Singleton<T> in the following header files.\n' +
3797       'Please move them to an appropriate source file so that the ' +
3798       'template gets instantiated in a single compilation unit.',
# (deprecated CSS token, preferred replacement) pairs consumed by
# _CheckNoDeprecatedCss below. NOTE(review): the assignment opener
# (_DEPRECATED_CSS = [ ...) and the closing bracket fall on elided lines
# around this span (gaps before 3805, at 3811-3812, 3818-3819, 3826-3828).
# Deprecated layout values.
3805   ( "-webkit-box", "flex" ),
3806   ( "-webkit-inline-box", "inline-flex" ),
3807   ( "-webkit-flex", "flex" ),
3808   ( "-webkit-inline-flex", "inline-flex" ),
3809   ( "-webkit-min-content", "min-content" ),
3810   ( "-webkit-max-content", "max-content" ),
# Deprecated prefixed properties.
3813   ( "-webkit-background-clip", "background-clip" ),
3814   ( "-webkit-background-origin", "background-origin" ),
3815   ( "-webkit-background-size", "background-size" ),
3816   ( "-webkit-box-shadow", "box-shadow" ),
3817   ( "-webkit-user-select", "user-select" ),
# Deprecated prefixed gradient functions.
3820   ( "-webkit-gradient", "gradient" ),
3821   ( "-webkit-repeating-gradient", "repeating-gradient" ),
3822   ( "-webkit-linear-gradient", "linear-gradient" ),
3823   ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
3824   ( "-webkit-radial-gradient", "radial-gradient" ),
3825   ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
3829 # TODO: add unit tests
# Presubmit (CSS): error on any deprecated token from _DEPRECATED_CSS in
# changed .css lines. results init and the return are on elided lines
# (gaps at 3836, 3842, 3855-3857).
3830 def _CheckNoDeprecatedCss(input_api, output_api):
3831   """ Make sure that we don't use deprecated CSS
3832       properties, functions or values. Our external
3833       documentation and iOS CSS for dom distiller
3834       (reader mode) are ignored by the hooks as it
3835       needs to be consumed by WebKit. """
3837   file_inclusion_pattern = [r".+\.css$"]
# Docs, distiller iOS CSS, neterror and NaCl SDK keep -webkit- forms for
# WebKit consumption (see docstring).
3838   black_list = (_EXCLUDED_PATHS +
3839                 _TEST_CODE_EXCLUDED_PATHS +
3840                 input_api.DEFAULT_BLACK_LIST +
3841                 (r"^chrome/common/extensions/docs",
3843                  r"^components/dom_distiller/core/css/distilledpage_ios.css",
3844                  r"^components/neterror/resources/neterror.css",
3845                  r"^native_client_sdk"))
3846   file_filter = lambda f: input_api.FilterSourceFile(
3847       f, white_list=file_inclusion_pattern, black_list=black_list)
3848   for fpath in input_api.AffectedFiles(file_filter=file_filter):
3849     for line_num, line in fpath.ChangedContents():
3850       for (deprecated_value, value) in _DEPRECATED_CSS:
# Plain substring match, so any occurrence in the changed line flags it.
3851         if deprecated_value in line:
3852           results.append(output_api.PresubmitError(
3853               "%s:%d: Use of deprecated CSS %s, use %s instead" %
3854               (fpath.LocalPath(), line_num, deprecated_value, value)))
# Presubmit (C++): forbid '../' in #include paths outside third_party
# (Blink excepted). bad_files init, 'continue's, results init and return are
# elided (gaps at 3859, 3864-3865, 3867-3868, 3872, 3874-3877, 3884-3885,
# 3892-3895). Note: iteritems() at 3879 is Python 2 only.
3858 def _CheckForRelativeIncludes(input_api, output_api):
3860   for f in input_api.AffectedFiles(include_deletes=False):
# third_party is exempt, except Blink (both separator spellings).
3861     if (f.LocalPath().startswith('third_party') and
3862         not f.LocalPath().startswith('third_party/blink') and
3863         not f.LocalPath().startswith('third_party\\blink')):
3866     if not _IsCPlusPlusFile(input_api, f.LocalPath()):
# Only newly changed lines are scanned, so pre-existing offenders don't fire.
3869     relative_includes = [line for _, line in f.ChangedContents()
3870                          if "#include" in line and "../" in line]
3871     if not relative_includes:
3873     bad_files[f.LocalPath()] = relative_includes
3878   error_descriptions = []
3879   for file_path, bad_lines in bad_files.iteritems():
3880     error_description = file_path
3881     for line in bad_lines:
3882       error_description += '\n    ' + line
3883     error_descriptions.append(error_description)
3886   results.append(output_api.PresubmitError(
3887       'You added one or more relative #include paths (including "../").\n'
3888       'These shouldn\'t be used because they can be used to include headers\n'
3889       'from code that\'s not correctly specified as a dependency in the\n'
3890       'relevant BUILD.gn file(s).',
3891       error_descriptions))
# Presubmit (C++): error when a changed line #includes an implementation
# (.cc-like) file. results init, 'continue's and the return are elided
# (gaps at 3901, 3907-3908, 3910-3911, 3926-3930).
3896 def _CheckForCcIncludes(input_api, output_api):
3897   """Check that nobody tries to include a cc file. It's a relatively
3898   common error which results in duplicate symbols in object
3899   files. This may not always break the build until someone later gets
3900   very confusing linking errors."""
3902   for f in input_api.AffectedFiles(include_deletes=False):
3903     # We let third_party code do whatever it wants
3904     if (f.LocalPath().startswith('third_party') and
3905         not f.LocalPath().startswith('third_party/blink') and
3906         not f.LocalPath().startswith('third_party\\blink')):
3909     if not _IsCPlusPlusFile(input_api, f.LocalPath()):
3912     for _, line in f.ChangedContents():
# Only quoted includes are checked; the path is the text between quotes.
3913       if line.startswith('#include "'):
3914         included_file = line.split('"')[1]
3915         if _IsCPlusPlusFile(input_api, included_file):
3916           # The most common naming for external files with C++ code,
3917           # apart from standard headers, is to call them foo.inc, but
3918           # Chromium sometimes uses foo-inc.cc so allow that as well.
3919           if not included_file.endswith(('.h', '-inc.cc')):
3920             results.append(output_api.PresubmitError(
3921                 'Only header files or .inc files should be included in other\n'
3922                 'C++ files. Compiling the contents of a cc file more than once\n'
3923                 'will cause duplicate information in the build which may later\n'
3924                 'result in strange link_errors.\n' +
3925                 f.LocalPath() + ':\n    ' +
# Validate one WATCHLIST_DEFINITIONS entry (AST nodes): string key, single-
# entry dict value keyed by the literal 'filepath'. Returns an error string
# or (on an elided line after 3940) presumably None on success — confirm.
3931 def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
3932   if not isinstance(key, ast.Str):
3933     return 'Key at line %d must be a string literal' % key.lineno
3934   if not isinstance(value, ast.Dict):
3935     return 'Value at line %d must be a dict' % value.lineno
3936   if len(value.keys) != 1:
3937     return 'Dict at line %d must have single entry' % value.lineno
3938   if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
3940         'Entry at line %d must have a string literal \'filepath\' as key' %
# Validate one WATCHLISTS entry (AST nodes): string key, list value whose
# elements are string literals matching email_regex. Returns an error string;
# the success return is on an elided line (gap at 3956-3958).
3945 def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
3946   if not isinstance(key, ast.Str):
3947     return 'Key at line %d must be a string literal' % key.lineno
3948   if not isinstance(value, ast.List):
3949     return 'Value at line %d must be a list' % value.lineno
3950   for element in value.elts:
3951     if not isinstance(element, ast.Str):
3952       return 'Watchlist elements on line %d is not a string' % key.lineno
3953     if not email_regex.match(element.s):
3954       return ('Watchlist element on line %d doesn\'t look like a valid ' +
3955               'email: %s') % (key.lineno, element.s)
# Cross-validate the WATCHLIST_DEFINITIONS and WATCHLISTS dicts entry by
# entry: matching keys, per-entry syntax, and lexicographic key order.
# NOTE(review): elided lines (e.g. 3962-3963, 3966-3970, 3973, 3976, 3978,
# 3981, 3986, 3991, 3996, 3998, 4001-4005) hide the loop header/index i,
# last_key bookkeeping, 'return None', and blank separators.
3959 def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
3960   mismatch_template = (
3961       'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
3964   email_regex = input_api.re.compile(
3965       r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")
# Walk both dicts in parallel; either running out before the other is a
# mismatch in the corresponding direction.
3971     if i >= len(wd_dict.keys):
3972       if i >= len(w_dict.keys):
3974       return mismatch_template % ('missing', 'line %d' % w_dict.keys[i].lineno)
3975     elif i >= len(w_dict.keys):
3977           mismatch_template % ('line %d' % wd_dict.keys[i].lineno, 'missing'))
3979     wd_key = wd_dict.keys[i]
3980     w_key = w_dict.keys[i]
3982     result = _CheckWatchlistDefinitionsEntrySyntax(
3983         wd_key, wd_dict.values[i], ast)
3984     if result is not None:
3985       return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result
3987     result = _CheckWatchlistsEntrySyntax(
3988         w_key, w_dict.values[i], ast, email_regex)
3989     if result is not None:
3990       return 'Bad entry in WATCHLISTS dict: %s' % result
# The i-th keys of the two dicts must be identical strings.
3992     if wd_key.s != w_key.s:
3993       return mismatch_template % (
3994           '%s at line %d' % (wd_key.s, wd_key.lineno),
3995           '%s at line %d' % (w_key.s, w_key.lineno))
# Keys must be sorted; last_key is maintained on an elided line.
3997     if wd_key.s < last_key:
3999           'WATCHLISTS dict is not sorted lexicographically at line %d and %d' %
4000           (wd_key.lineno, w_key.lineno))
# Validate the overall WATCHLISTS file shape: a single expression evaluating
# to a two-entry dict {'WATCHLIST_DEFINITIONS': {...}, 'WATCHLISTS': {...}},
# then delegate per-entry checks. 'ast' binding and return-string tails are
# on elided lines (gaps at 4007, 4013, 4018, 4022, 4025, 4029, 4031-4032).
4006 def _CheckWATCHLISTSSyntax(expression, input_api):
4008   if not isinstance(expression, ast.Expression):
4009     return 'WATCHLISTS file must contain a valid expression'
4010   dictionary = expression.body
4011   if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
4012     return 'WATCHLISTS file must have single dict with exactly two entries'
4014   first_key = dictionary.keys[0]
4015   first_value = dictionary.values[0]
4016   second_key = dictionary.keys[1]
4017   second_value = dictionary.values[1]
# Entry order is fixed: WATCHLIST_DEFINITIONS first, WATCHLISTS second.
4019   if (not isinstance(first_key, ast.Str) or
4020       first_key.s != 'WATCHLIST_DEFINITIONS' or
4021       not isinstance(first_value, ast.Dict)):
4023         'The first entry of the dict in WATCHLISTS file must be '
4024         'WATCHLIST_DEFINITIONS dict')
4026   if (not isinstance(second_key, ast.Str) or
4027       second_key.s != 'WATCHLISTS' or
4028       not isinstance(second_value, ast.Dict)):
4030         'The second entry of the dict in WATCHLISTS file must be '
4033   return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
# Presubmit: when the WATCHLISTS file itself changes, parse it and run the
# style checks above. 'try:' opener and the final empty return are elided
# (gaps at 4040-4041, 4056, 4060-4064).
4036 def _CheckWATCHLISTS(input_api, output_api):
4037   for f in input_api.AffectedFiles(include_deletes=False):
4038     if f.LocalPath() == 'WATCHLISTS':
4039       contents = input_api.ReadFile(f, 'r')
4042         # First, make sure that it can be evaluated.
4043         input_api.ast.literal_eval(contents)
4044         # Get an AST tree for it and scan the tree for detailed style checking.
4045         expression = input_api.ast.parse(
4046             contents, filename='WATCHLISTS', mode='eval')
# Any parse-level failure is reported uniformly, whatever the exception type.
4047       except ValueError as e:
4048         return [output_api.PresubmitError(
4049             'Cannot parse WATCHLISTS file', long_text=repr(e))]
4050       except SyntaxError as e:
4051         return [output_api.PresubmitError(
4052             'Cannot parse WATCHLISTS file', long_text=repr(e))]
4053       except TypeError as e:
4054         return [output_api.PresubmitError(
4055             'Cannot parse WATCHLISTS file', long_text=repr(e))]
4057       result = _CheckWATCHLISTSSyntax(expression, input_api)
4058       if result is not None:
4059         return [output_api.PresubmitError(result)]
# Presubmit heuristic: a newly added header should show up (by basename) in
# some changed .gn/.gni content. The headers filter's def line, new_headers
# init, 'continue', gn_files def line, problems init, and the empty return
# are elided (gaps at 4069-4071, 4074-4075, 4078, 4080-4081, 4083, 4088-4089,
# 4094-4095, 4102-4104).
4065 def _CheckNewHeaderWithoutGnChange(input_api, output_api):
4066   """Checks that newly added header files have corresponding GN changes.
4067   Note that this is only a heuristic. To be precise, run script:
4068   build/check_gn_headers.py.
4072     return input_api.FilterSourceFile(
4073         f, white_list=(r'.+%s' % _HEADER_EXTENSIONS, ))
# Only files added ('A') in this change are candidates.
4076   for f in input_api.AffectedSourceFiles(headers):
4077     if f.Action() != 'A':
4079     new_headers.append(f.LocalPath())
4082     return input_api.FilterSourceFile(f, white_list=(r'.+\.gn', ))
# Concatenate every changed GN line; a basename substring match suffices
# for this heuristic.
4084   all_gn_changed_contents = ''
4085   for f in input_api.AffectedSourceFiles(gn_files):
4086     for _, line in f.ChangedContents():
4087       all_gn_changed_contents += line
4090   for header in new_headers:
4091     basename = input_api.os_path.basename(header)
4092     if basename not in all_gn_changed_contents:
4093       problems.append(header)
4096     return [output_api.PresubmitPromptWarning(
4097         'Missing GN changes for new header files', items=sorted(problems),
4098         long_text='Please double check whether newly added header files need '
4099         'corresponding changes in gn or gni files.\nThis checking is only a '
4100         'heuristic. Run build/check_gn_headers.py to be precise.\n'
4101         'Read https://crbug.com/661774 for more info.')]
# Presubmit (.grd strings): Chrome-branded files must not say "Chromium" and
# vice versa. The test_cases list opener/dict braces, all_problems/problems
# inits, 'continue', message assignment opener, and the return are elided
# (gaps at 4109-4112, 4116, 4120-4121, 4123, 4126, 4133, 4135, 4137-4139,
# 4145-4148).
4105 def _CheckCorrectProductNameInMessages(input_api, output_api):
4106   """Check that Chromium-branded strings don't include "Chrome" or vice versa.
4108   This assumes we won't intentionally reference one product from the other
# One case per branding file flavour: wrong name in chrome strings, and the
# symmetric wrong name in chromium strings.
4113       "filename_postfix": "google_chrome_strings.grd",
4114       "correct_name": "Chrome",
4115       "incorrect_name": "Chromium",
4117       "filename_postfix": "chromium_strings.grd",
4118       "correct_name": "Chromium",
4119       "incorrect_name": "Chrome",
4122   for test_case in test_cases:
4124     filename_filter = lambda x: x.LocalPath().endswith(
4125         test_case["filename_postfix"])
4127     # Check each new line. Can yield false positives in multiline comments, but
4128     # easier than trying to parse the XML because messages can have nested
4129     # children, and associating message elements with affected lines is hard.
4130     for f in input_api.AffectedSourceFiles(filename_filter):
4131       for line_num, line in f.ChangedContents():
# Only <message> bodies matter; tag/comment lines are filtered first.
4132         if "<message" in line or "<!--" in line or "-->" in line:
4134         if test_case["incorrect_name"] in line:
4136               "Incorrect product name in %s:%d" % (f.LocalPath(), line_num))
4140           "Strings in %s-branded string files should reference \"%s\", not \"%s\""
4141               % (test_case["correct_name"], test_case["correct_name"],
4142                  test_case["incorrect_name"]))
4143       all_problems.append(
4144           output_api.PresubmitPromptWarning(message, items=problems))
# Verifies that tool revision pins duplicated between //DEPS and
# //buildtools/DEPS are changed together in one CL.
# NOTE(review): embedded line numbers 4156, 4160, 4168-4169, 4171-4173, 4180
# and 4185+ are absent from this excerpt (e.g. the revs_changes/long_text
# initializers and the trailing return are elided).
4149 def _CheckBuildtoolsRevisionsAreInSync(input_api, output_api):
4150 # TODO(crbug.com/941824): We need to make sure the entries in
4151 # //buildtools/DEPS are kept in sync with the entries in //DEPS
4152 # so that users of //buildtools in other projects get the same tooling
4153 # Chromium gets. If we ever fix the referenced bug and add 'includedeps'
4154 # support to gclient, we can eliminate the duplication and delete
4155 # this presubmit check.
4157 # Update this regexp if new revisions are added to the files.
4158 rev_regexp = input_api.re.compile(
4159 "'((clang_format|libcxx|libcxxabi|libunwind)_revision|gn_version)':")
4161 # If a user is changing one revision, they need to change the same
4162 # line in both files. This means that any given change should contain
4163 # exactly the same list of changed lines that match the regexps. The
4164 # replace(' ', '') call allows us to ignore whitespace changes to the
4165 # lines. The 'long_text' parameter to the error will contain the
4166 # list of changed lines in both files, which should make it easy enough
4167 # to spot the error without going overboard in this implementation.
4170 'buildtools/DEPS': {},
4174 for f in input_api.AffectedFiles(
4175 file_filter=lambda f: f.LocalPath() in ('DEPS', 'buildtools/DEPS')):
4176 for line_num, line in f.ChangedContents():
4177 if rev_regexp.search(line):
# Key on the whitespace-stripped line so formatting-only differences
# between the two files do not cause a mismatch.
4178 revs_changes[f.LocalPath()][line.replace(' ', '')] = line
4179 long_text += '%s:%d: %s\n' % (f.LocalPath(), line_num, line)
# A mismatch means one file got a revision bump the other did not.
4181 if set(revs_changes['DEPS']) != set(revs_changes['buildtools/DEPS']):
4182 return [output_api.PresubmitError(
4183 'Change buildtools revisions in sync in both //DEPS and '
4184 '//buildtools/DEPS.', long_text=long_text + '\n')]
# NOTE(review): embedded line numbers 4193-4194, 4200, 4208-4210, 4214 and
# 4217+ are absent from this excerpt (docstring close, 'if too_large_files:'
# guard and the empty-result return are elided).
4189 def _CheckForTooLargeFiles(input_api, output_api):
4190 """Avoid large files, especially binary files, in the repository since
4191 git doesn't scale well for those. They will be in everyone's repo
4192 clones forever, forever making Chromium slower to clone and work
4195 # Uploading files to cloud storage is not trivial so we don't want
4196 # to set the limit too low, but the upper limit for "normal" large
4197 # files seems to be 1-2 MB, with a handful around 5-8 MB, so
4198 # anything over 20 MB is exceptional.
4199 TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024 # 20 MB
4201 too_large_files = []
4202 for f in input_api.AffectedFiles():
4203 # Check both added and modified files (but not deleted files).
4204 if f.Action() in ('A', 'M'):
# Size is taken from the file on disk, not the diff.
4205 size = input_api.os_path.getsize(f.AbsoluteLocalPath())
4206 if size > TOO_LARGE_FILE_SIZE_LIMIT:
4207 too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))
4211 'Do not commit large files to git since git scales badly for those.\n' +
4212 'Instead put the large files in cloud storage and use DEPS to\n' +
4213 'fetch them.\n' + '\n'.join(too_large_files)
# Hard error (not a prompt): oversized blobs are permanent in git history.
4215 return [output_api.PresubmitError(
4216 'Too large files found in commit', long_text=message + '\n')]
# NOTE(review): embedded line numbers 4227-4229, 4231, 4236, 4238, 4241,
# 4246-4247, 4250, 4252-4254 and 4264-4265 are absent from this excerpt
# (list closer, 'continue' statements and the message assignment are elided).
4221 def _CheckFuzzTargets(input_api, output_api):
4222 """Checks specific for fuzz target sources."""
# Optional libFuzzer entry points that must be explicitly exported on some
# platforms; defining one without the exports header can silently misbehave.
4223 EXPORTED_SYMBOLS = [
4224 'LLVMFuzzerInitialize',
4225 'LLVMFuzzerCustomMutator',
4226 'LLVMFuzzerCustomCrossOver',
4230 REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'
4232 def FilterFile(affected_file):
4233 """Ignore libFuzzer source code."""
4234 white_list = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
4235 black_list = r"^third_party[\\/]libFuzzer"
4237 return input_api.FilterSourceFile(
4239 white_list=[white_list],
4240 black_list=[black_list])
4242 files_with_missing_header = []
4243 for f in input_api.AffectedSourceFiles(FilterFile):
# Whole-file scan (not just changed lines): the symbol and the include
# may live far apart.
4244 contents = input_api.ReadFile(f, 'r')
4245 if REQUIRED_HEADER in contents:
4248 if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
4249 files_with_missing_header.append(f.LocalPath())
4251 if not files_with_missing_header:
4255 'If you define any of the libFuzzer optional functions (%s), it is '
4256 'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
4257 'work incorrectly on Mac (crbug.com/687076).\nNote that '
4258 'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
4259 'to access command line arguments passed to the fuzzer. Instead, prefer '
4260 'static initialization and shared resources as documented in '
4261 'https://chromium.googlesource.com/chromium/src/+/master/testing/'
4262 'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n' % (
4263 ', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER)
4266 return [output_api.PresubmitPromptWarning(
4267 message="Missing '%s' in:" % REQUIRED_HEADER,
4268 items=files_with_missing_header,
4269 long_text=long_text)]
# NOTE(review): embedded line numbers 4273, 4276, 4278-4279, 4281, 4285,
# 4290, 4292 and 4299+ are absent from this excerpt (docstring open,
# errors/image_paths initializers, 'images_added = True' and the return
# are elided).
4272 def _CheckNewImagesWarning(input_api, output_api):
4274 Warns authors who add images into the repo to make sure their images are
4275 optimized before committing.
4277 images_added = False
# Filter targets Android resource dirs (drawable/mipmap) while excluding
# test paths and junit directories.
4280 filter_lambda = lambda x: input_api.FilterSourceFile(
4282 black_list=(('(?i).*test', r'.*\/junit\/')
4283 + input_api.DEFAULT_BLACK_LIST),
4284 white_list=[r'.*\/(drawable|mipmap)' ]
4286 for f in input_api.AffectedFiles(
4287 include_deletes=False, file_filter=filter_lambda):
4288 local_path = f.LocalPath().lower()
4289 if any(local_path.endswith(extension) for extension in _IMAGE_EXTENSIONS):
4291 image_paths.append(f)
# FYI-only warning; explicitly stated to not block the CQ.
4293 errors.append(output_api.PresubmitPromptWarning(
4294 'It looks like you are trying to commit some images. If these are '
4295 'non-test-only images, please make sure to read and apply the tips in '
4296 'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
4297 'binary_size/optimization_advice.md#optimizing-images\nThis check is '
4298 'FYI only and will not block your CL on the CQ.', image_paths))
# Aggregator: runs each Android-targeted check and concatenates results.
# NOTE(review): embedded lines 4304 (results initializer) and 4316 (return)
# are absent from this excerpt.
4302 def _AndroidSpecificOnUploadChecks(input_api, output_api):
4303 """Groups upload checks that target android code."""
4305 results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
4306 results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
4307 results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
4308 results.extend(_CheckAndroidToastUsage(input_api, output_api))
4309 results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
4310 results.extend(_CheckAndroidTestJUnitFrameworkImport(input_api, output_api))
4311 results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
4312 results.extend(_CheckAndroidWebkitImports(input_api, output_api))
4313 results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
4314 results.extend(_CheckNewImagesWarning(input_api, output_api))
4315 results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
# Aggregator for commit-time (as opposed to upload-time) Android checks.
# NOTE(review): embedded lines 4320 (results initializer) and 4322 (return)
# are absent from this excerpt.
4318 def _AndroidSpecificOnCommitChecks(input_api, output_api):
4319 """Groups commit checks that target android code."""
4321 results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
# Path regexps (Windows/POSIX separator agnostic) identifying accessibility
# code; used by _CheckAccessibilityRelnotesField as a FilterSourceFile
# white_list. NOTE(review): the tuple's closing parenthesis (embedded line
# 4337) is absent from this excerpt.
4324 # TODO(chrishall): could we additionally match on any path owned by
4325 # ui/accessibility/OWNERS ?
4326 _ACCESSIBILITY_PATHS = (
4327 r"^chrome[\\/]browser.*[\\/]accessibility[\\/]",
4328 r"^chrome[\\/]browser[\\/]extensions[\\/]api[\\/]automation.*[\\/]",
4329 r"^chrome[\\/]renderer[\\/]extensions[\\/]accessibility_.*",
4330 r"^chrome[\\/]tests[\\/]data[\\/]accessibility[\\/]",
4331 r"^content[\\/]browser[\\/]accessibility[\\/]",
4332 r"^content[\\/]renderer[\\/]accessibility[\\/]",
4333 r"^content[\\/]tests[\\/]data[\\/]accessibility[\\/]",
4334 r"^extensions[\\/]renderer[\\/]api[\\/]automation[\\/]",
4335 r"^ui[\\/]accessibility[\\/]",
4336 r"^ui[\\/]views[\\/]accessibility[\\/]",
# NOTE(review): embedded lines 4345, 4348-4349, 4356, 4358, 4360-4361 and
# 4370 are absent from this excerpt (early 'return []' paths and the footer
# key string are elided).
4339 def _CheckAccessibilityRelnotesField(input_api, output_api):
4340 """Checks that commits to accessibility code contain an AX-Relnotes field in
4341 their commit message."""
4342 def FileFilter(affected_file):
4343 paths = _ACCESSIBILITY_PATHS
4344 return input_api.FilterSourceFile(affected_file, white_list=paths)
4346 # Only consider changes affecting accessibility paths.
4347 if not any(input_api.AffectedFiles(file_filter=FileFilter)):
4350 # AX-Relnotes can appear in either the description or the footer.
4351 # When searching the description, require 'AX-Relnotes:' to appear at the
4352 # beginning of a line.
# Description is lowercased before matching, hence the lowercase pattern;
# both ':' and '=' separators are accepted.
4353 ax_regex = input_api.re.compile('ax-relnotes[:=]')
4354 description_has_relnotes = any(ax_regex.match(line)
4355 for line in input_api.change.DescriptionText().lower().splitlines())
4357 footer_relnotes = input_api.change.GitFootersFromDescription().get(
4359 if description_has_relnotes or footer_relnotes:
4362 # TODO(chrishall): link to Relnotes documentation in message.
4363 message = ("Missing 'AX-Relnotes:' field required for accessibility changes"
4364 "\n please add 'AX-Relnotes: [release notes].' to describe any "
4365 "user-facing changes"
4366 "\n otherwise add 'AX-Relnotes: n/a.' if this change has no "
4367 "user-facing effects"
4368 "\n if this is confusing or annoying then please contact members "
4369 "of ui/accessibility/OWNERS.")
# Notify-level result: informational, does not block.
4371 return [output_api.PresubmitNotifyResult(message)]
# Master list of checks run on BOTH upload and commit; CheckChangeOnUpload
# and CheckChangeOnCommit each call this first, then add their own checks.
# NOTE(review): embedded lines 4375, 4379, 4382, 4384, 4386, 4388, 4414,
# 4416-4417, 4447 and 4460-4461 are absent from this excerpt (the results
# initializer, several results.extend( openers and the final return are
# elided).
4373 def _CommonChecks(input_api, output_api):
4374 """Checks common to both upload and commit."""
4376 results.extend(input_api.canned_checks.PanProjectChecks(
4377 input_api, output_api,
4378 excluded_paths=_EXCLUDED_PATHS))
# Robot authors (e.g. autoroller accounts) skip the authorized-author check.
4380 author = input_api.change.author_email
4381 if author and author not in _KNOWN_ROBOTS:
4383 input_api.canned_checks.CheckAuthorizedAuthor(input_api, output_api))
4385 results.extend(_CheckAccessibilityRelnotesField(input_api, output_api))
4387 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
4389 _CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api))
4390 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
4391 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
4392 results.extend(_CheckNoDISABLETypoInTests(input_api, output_api))
4393 results.extend(_CheckDCHECK_IS_ONHasBraces(input_api, output_api))
4394 results.extend(_CheckNoNewWStrings(input_api, output_api))
4395 results.extend(_CheckNoDEPSGIT(input_api, output_api))
4396 results.extend(_CheckNoBannedFunctions(input_api, output_api))
4397 results.extend(_CheckNoDeprecatedMojoTypes(input_api, output_api))
4398 results.extend(_CheckNoPragmaOnce(input_api, output_api))
4399 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
4400 results.extend(_CheckUnwantedDependencies(input_api, output_api))
4401 results.extend(_CheckFilePermissions(input_api, output_api))
4402 results.extend(_CheckTeamTags(input_api, output_api))
4403 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
4404 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
4405 results.extend(_CheckPatchFiles(input_api, output_api))
4406 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
4407 results.extend(_CheckChromeOsSyncedPrefRegistration(input_api, output_api))
4408 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
4409 results.extend(_CheckBuildConfigMacrosWithoutInclude(input_api, output_api))
4410 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
4411 results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
4412 results.extend(_CheckFlakyTestUsage(input_api, output_api))
4413 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
# Tabs check restricted to .grd files only.
4415 input_api.canned_checks.CheckChangeHasNoTabs(
4418 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
4419 results.extend(_CheckSpamLogging(input_api, output_api))
4420 results.extend(_CheckForAnonymousVariables(input_api, output_api))
4421 results.extend(_CheckUserActionUpdate(input_api, output_api))
4422 results.extend(_CheckNoDeprecatedCss(input_api, output_api))
4423 results.extend(_CheckParseErrors(input_api, output_api))
4424 results.extend(_CheckForIPCRules(input_api, output_api))
4425 results.extend(_CheckForLongPathnames(input_api, output_api))
4426 results.extend(_CheckForIncludeGuards(input_api, output_api))
4427 results.extend(_CheckForWindowsLineEndings(input_api, output_api))
4428 results.extend(_CheckSingletonInHeaders(input_api, output_api))
4429 results.extend(_CheckPydepsNeedsUpdating(input_api, output_api))
4430 results.extend(_CheckJavaStyle(input_api, output_api))
4431 results.extend(_CheckSecurityOwners(input_api, output_api))
4432 results.extend(_CheckSecurityChanges(input_api, output_api))
4433 results.extend(_CheckSetNoParent(input_api, output_api))
4434 results.extend(_CheckUselessForwardDeclarations(input_api, output_api))
4435 results.extend(_CheckForRelativeIncludes(input_api, output_api))
4436 results.extend(_CheckForCcIncludes(input_api, output_api))
4437 results.extend(_CheckWATCHLISTS(input_api, output_api))
4438 results.extend(input_api.RunTests(
4439 input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
4440 results.extend(_CheckStrings(input_api, output_api))
4441 results.extend(_CheckTranslationExpectations(input_api, output_api))
4442 results.extend(_CheckCorrectProductNameInMessages(input_api, output_api))
4443 results.extend(_CheckBuildtoolsRevisionsAreInSync(input_api, output_api))
4444 results.extend(_CheckForTooLargeFiles(input_api, output_api))
4445 results.extend(_CheckPythonDevilInit(input_api, output_api))
4446 results.extend(_CheckStableMojomChanges(input_api, output_api))
# Any touched PRESUBMIT.py gets its sibling PRESUBMIT_test.py executed,
# provided both still exist after this change.
4448 for f in input_api.AffectedFiles():
4449 path, name = input_api.os_path.split(f.LocalPath())
4450 if name == 'PRESUBMIT.py':
4451 full_path = input_api.os_path.join(input_api.PresubmitLocalPath(), path)
4452 test_file = input_api.os_path.join(path, 'PRESUBMIT_test.py')
4453 if f.Action() != 'D' and input_api.os_path.exists(test_file):
4454 # The PRESUBMIT.py file (and the directory containing it) might
4455 # have been affected by being moved or removed, so only try to
4456 # run the tests if they still exist.
4457 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
4458 input_api, output_api, full_path,
4459 whitelist=[r'^PRESUBMIT_test\.py$']))
# Rejects leftover patch artifacts (.orig/.rej from patch/merge tools).
# NOTE(review): embedded lines 4466 and 4469+ are absent from this excerpt
# (the 'if problems:' guard and empty-result return are elided).
4463 def _CheckPatchFiles(input_api, output_api):
4464 problems = [f.LocalPath() for f in input_api.AffectedFiles()
4465 if f.LocalPath().endswith(('.orig', '.rej'))]
4467 return [output_api.PresubmitError(
4468 "Don't commit .rej and .orig files.", problems)]
# Warns when a build_config.h macro (OS_*, COMPILER_*, ARCH_CPU_*,
# WCHAR_T_IS_*) is tested without including build/build_config.h, either in
# the file itself or in its primary header.
# NOTE(review): embedded lines 4480, 4483, 4485, 4488, 4491, 4493-4494,
# 4499-4502, 4505, 4508-4510, 4512 and 4514 are absent from this excerpt
# (the errors initializer, 'continue' statements, try/except around
# ReadFile and the 'if errors:' guard are elided).
4473 def _CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
4474 # Excludes OS_CHROMEOS, which is not defined in build_config.h.
4475 macro_re = input_api.re.compile(r'^\s*#(el)?if.*\bdefined\(((OS_(?!CHROMEOS)|'
4476 'COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
4477 include_re = input_api.re.compile(
4478 r'^#include\s+"build/build_config.h"', input_api.re.MULTILINE)
4479 extension_re = input_api.re.compile(r'\.[a-z]+$')
4481 for f in input_api.AffectedFiles():
4482 if not f.LocalPath().endswith(('.h', '.c', '.cc', '.cpp', '.m', '.mm')):
4484 found_line_number = None
# Only changed lines are scanned for macro usage...
4486 for line_num, line in f.ChangedContents():
4487 match = macro_re.search(line)
4489 found_line_number = line_num
4490 found_macro = match.group(2)
4492 if not found_line_number:
4495 found_include = False
# ...but the whole new file content is scanned for the include.
4496 for line in f.NewContents():
4497 if include_re.search(line):
4498 found_include = True
# For a .cc/.mm file, the include may legitimately live in the
# corresponding .h (primary header), so check that too.
4503 if not f.LocalPath().endswith('.h'):
4504 primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
4506 content = input_api.ReadFile(primary_header_path, 'r')
4507 if include_re.search(content):
4511 errors.append('%s:%d %s macro is used without including build/'
4513 % (f.LocalPath(), found_line_number, found_macro))
4515 return [output_api.PresubmitPromptWarning('\n'.join(errors))]
# Suggests the valid OS_* macro for a misspelled one, keyed on the fourth
# character (e.g. 'A' -> OS_ANDROID, 'W' -> OS_WIN).
# NOTE(review): embedded lines 4520 and 4522-4530 are absent from this
# excerpt (the try: wrapper and the other dictionary entries are elided).
4519 def _DidYouMeanOSMacro(bad_macro):
4521 return {'A': 'OS_ANDROID',
4531 'W': 'OS_WIN'}[bad_macro[3].upper()]
# NOTE(review): embedded lines 4540 (results initializer) and 4548-4551
# (format arguments and return) are absent from this excerpt.
4536 def _CheckForInvalidOSMacrosInFile(input_api, f):
4537 """Check for sensible looking, totally invalid OS macros."""
4538 preprocessor_statement = input_api.re.compile(r'^\s*#')
4539 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
4541 for lnum, line in f.ChangedContents():
# Only preprocessor lines are considered; OS_* in ordinary code or
# comments is ignored.
4542 if preprocessor_statement.search(line):
4543 for match in os_macro.finditer(line):
4544 if not match.group(1) in _VALID_OS_MACROS:
4545 good = _DidYouMeanOSMacro(match.group(1))
4546 did_you_mean = ' (did you mean %s?)' % good if good else ''
4547 results.append(' %s:%d %s%s' % (f.LocalPath(),
# NOTE(review): embedded lines 4556 (bad_macros initializer) and 4560-4563
# (the 'if not bad_macros: return []' guard) are absent from this excerpt.
4554 def _CheckForInvalidOSMacros(input_api, output_api):
4555 """Check all affected files for invalid OS macros."""
4557 for f in input_api.AffectedSourceFiles(None):
# Skip non-C-family file types where OS_* has no preprocessor meaning.
4558 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')):
4559 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
4564 return [output_api.PresubmitError(
4565 'Possibly invalid OS macro[s] found. Please fix your code\n'
4566 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
# Flags '#ifdef X' / 'defined(X)' on macros that are ALWAYS defined (their
# value, not their existence, carries meaning -- e.g. TARGET_OS_* are 0/1),
# so the author almost certainly wanted '#if X'.
# NOTE(review): embedded lines 4572-4579, 4582-4586, 4588 and 4595-4599 are
# absent from this excerpt (most ALWAYS_DEFINED_MACROS entries, the results
# initializer and the return are elided).
4569 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
4570 """Check all affected files for invalid "if defined" macros."""
4571 ALWAYS_DEFINED_MACROS = (
4580 "TARGET_IPHONE_SIMULATOR",
4581 "TARGET_OS_EMBEDDED",
4587 ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
4589 for lnum, line in f.ChangedContents():
4590 for match in ifdef_macro.finditer(line):
4591 if match.group(1) in ALWAYS_DEFINED_MACROS:
4592 always_defined = ' %s is always defined. ' % match.group(1)
4593 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
4594 results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
# NOTE(review): embedded lines 4603 (bad_macros initializer), 4607
# ('continue'), 4610-4613 (empty-result guard) and 4617+ (items argument /
# closing bracket) are absent from this excerpt.
4601 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
4602 """Check all affected files for invalid "if defined" macros."""
# Imported third-party code keeps its own conventions; skip it.
4604 skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
4605 for f in input_api.AffectedFiles():
4606 if any([f.LocalPath().startswith(path) for path in skipped_paths]):
4608 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
4609 bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
4614 return [output_api.PresubmitError(
4615 'Found ifdef check on always-defined macro[s]. Please fix your code\n'
4616 'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
# NOTE(review): embedded lines 4623, 4627-4628, 4632, 4635, 4637-4638 and
# 4641+ are absent from this excerpt (docstring close, problems initializer,
# 'continue', and the empty-result return are elided).
4620 def _CheckForIPCRules(input_api, output_api):
4621 """Check for same IPC rules described in
4622 http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
4624 base_pattern = r'IPC_ENUM_TRAITS\('
4625 inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
# Separate pattern so uses inside '//' comments are not flagged.
4626 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
4629 for f in input_api.AffectedSourceFiles(None):
4630 local_path = f.LocalPath()
4631 if not local_path.endswith('.h'):
4633 for line_number, line in f.ChangedContents():
4634 if inclusion_pattern.search(line) and not comment_pattern.search(line):
4636 '%s:%d\n %s' % (local_path, line_number, line.strip()))
4639 return [output_api.PresubmitPromptWarning(
4640 _IPC_ENUM_TRAITS_DEPRECATED, problems)]
# NOTE(review): embedded lines 4648-4649 (docstring close / problems
# initializer) and 4656-4657 (empty-result guard) are absent from this
# excerpt.
4645 def _CheckForLongPathnames(input_api, output_api):
4646 """Check to make sure no files being submitted have long paths.
4647 This causes issues on Windows.
4650 for f in input_api.AffectedTestableFiles():
4651 local_path = f.LocalPath()
4652 # Windows has a path limit of 260 characters. Limit path length to 200 so
4653 # that we have some extra for the prefix on dev machines and the bots.
4654 if len(local_path) > 200:
4655 problems.append(local_path)
4658 return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
# NOTE(review): embedded lines 4667, 4680, 4683-4685, 4687, 4690, 4695,
# 4698, 4705, 4707, 4710-4711, 4713-4714, 4718, 4722, 4726-4727, 4730,
# 4733, 4736, 4743, 4752, 4761, 4765 and 4771+ are absent from this excerpt
# (errors initializer, guard_name reset, several pattern entries,
# 'continue'/'break' statements and the final return are elided).
4663 def _CheckForIncludeGuards(input_api, output_api):
4664 """Check that header files have proper guards against multiple inclusion.
4665 If a file should not have such guards (and it probably should) then it
4666 should include the string "no-include-guard-because-multiply-included".
4668 def is_chromium_header_file(f):
4669 # We only check header files under the control of the Chromium
4670 # project. That is, those outside third_party apart from
4671 # third_party/blink.
4672 # We also exclude *_message_generator.h headers as they use
4673 # include guards in a special, non-typical way.
4674 file_with_path = input_api.os_path.normpath(f.LocalPath())
4675 return (file_with_path.endswith('.h') and
4676 not file_with_path.endswith('_message_generator.h') and
4677 (not file_with_path.startswith('third_party') or
4678 file_with_path.startswith(
4679 input_api.os_path.join('third_party', 'blink'))))
# Maps path punctuation (+ \ / . -) to '_' to build a guard token.
4681 def replace_special_with_underscore(string):
4682 return input_api.re.sub(r'[+\\/.-]', '_', string)
4686 for f in input_api.AffectedSourceFiles(is_chromium_header_file):
4688 guard_line_number = None
4689 seen_guard_end = False
4691 file_with_path = input_api.os_path.normpath(f.LocalPath())
4692 base_file_name = input_api.os_path.splitext(
4693 input_api.os_path.basename(file_with_path))[0]
4694 upper_base_file_name = base_file_name.upper()
# Canonical guard per style guide: PATH_ELEM_FILE_NAME_H_.
4696 expected_guard = replace_special_with_underscore(
4697 file_with_path.upper() + '_')
4699 # For "path/elem/file_name.h" we should really only accept
4700 # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
4701 # are too many (1000+) files with slight deviations from the
4702 # coding style. The most important part is that the include guard
4703 # is there, and that it's unique, not the name so this check is
4704 # forgiving for existing files.
4706 # As code becomes more uniform, this could be made stricter.
4708 guard_name_pattern_list = [
4709 # Anything with the right suffix (maybe with an extra _).
4712 # To cover include guards with old Blink style.
4715 # Anything including the uppercase name of the file.
4716 r'\w*' + input_api.re.escape(replace_special_with_underscore(
4717 upper_base_file_name)) + r'\w*',
4719 guard_name_pattern = '|'.join(guard_name_pattern_list)
4720 guard_pattern = input_api.re.compile(
4721 r'#ifndef\s+(' + guard_name_pattern + ')')
4723 for line_number, line in enumerate(f.NewContents()):
4724 if 'no-include-guard-because-multiply-included' in line:
4725 guard_name = 'DUMMY' # To not trigger check outside the loop.
# First #ifndef matching an acceptable pattern becomes the guard.
4728 if guard_name is None:
4729 match = guard_pattern.match(line)
4731 guard_name = match.group(1)
4732 guard_line_number = line_number
4734 # We allow existing files to use include guards whose names
4735 # don't match the chromium style guide, but new files should
4737 if not f.OldContents():
4738 if guard_name != expected_guard:
4739 errors.append(output_api.PresubmitPromptWarning(
4740 'Header using the wrong include guard name %s' % guard_name,
4741 ['%s:%d' % (f.LocalPath(), line_number + 1)],
4742 'Expected: %r\nFound: %r' % (expected_guard, guard_name)))
4744 # The line after #ifndef should have a #define of the same name.
4745 if line_number == guard_line_number + 1:
4746 expected_line = '#define %s' % guard_name
4747 if line != expected_line:
4748 errors.append(output_api.PresubmitPromptWarning(
4749 'Missing "%s" for include guard' % expected_line,
4750 ['%s:%d' % (f.LocalPath(), line_number + 1)],
4751 'Expected: %r\nGot: %r' % (expected_line, line)))
# Only blank lines may follow the closing '#endif  // GUARD'.
4753 if not seen_guard_end and line == '#endif // %s' % guard_name:
4754 seen_guard_end = True
4755 elif seen_guard_end:
4756 if line.strip() != '':
4757 errors.append(output_api.PresubmitPromptWarning(
4758 'Include guard %s not covering the whole file' % (
4759 guard_name), [f.LocalPath()]))
4760 break # Nothing else to check and enough to warn once.
4762 if guard_name is None:
4763 errors.append(output_api.PresubmitPromptWarning(
4764 'Missing include guard %s' % expected_guard,
4766 'Missing include guard in %s\n'
4767 'Recommended name: %s\n'
4768 'This check can be disabled by having the string\n'
4769 'no-include-guard-because-multiply-included in the header.' %
4770 (f.LocalPath(), expected_guard)))
# NOTE(review): embedded lines 4777-4778, 4780, 4782, 4784-4786, 4793-4794,
# 4796-4797 and 4801+ are absent from this excerpt (docstring close, the
# full inclusion pattern tuple, problems initializer, 'break', and the
# empty-result return are elided).
4775 def _CheckForWindowsLineEndings(input_api, output_api):
4776 """Check source code and known ascii text files for Windows style line
# Text-like extensions checked in addition to implementation sources.
4779 known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'
4781 file_inclusion_pattern = (
4783 r'.+%s' % _IMPLEMENTATION_EXTENSIONS
4787 source_file_filter = lambda f: input_api.FilterSourceFile(
4788 f, white_list=file_inclusion_pattern, black_list=None)
4789 for f in input_api.AffectedSourceFiles(source_file_filter):
4790 include_file = False
# Only changed lines are inspected; one CRLF line flags the whole file.
4791 for _, line in f.ChangedContents():
4792 if line.endswith('\r\n'):
4795 problems.append(f.LocalPath())
4798 return [output_api.PresubmitPromptWarning('Are you sure that you want '
4799 'these files to contain Windows style line endings?\n' +
4800 '\n'.join(problems))]
# NOTE(review): embedded lines 4807 (syslog_files initializer) and 4812-4813
# (the 'if syslog_files:' guard) are absent from this excerpt.
4805 def _CheckSyslogUseWarning(input_api, output_api, source_file_filter=None):
4806 """Checks that all source files use SYSLOG properly."""
# Plain substring match on changed lines; any mention of SYSLOG is listed
# for a privacy review prompt (no attempt to parse actual call sites).
4808 for f in input_api.AffectedSourceFiles(source_file_filter):
4809 for line_number, line in f.ChangedContents():
4810 if 'SYSLOG' in line:
4811 syslog_files.append(f.LocalPath() + ':' + str(line_number))
4814 return [output_api.PresubmitPromptWarning(
4815 'Please make sure there are no privacy sensitive bits of data in SYSLOG'
4816 ' calls.\nFiles to check:\n', items=syslog_files)]
# depot_tools presubmit entry point: runs at 'git cl upload'. Executes the
# common checks plus upload-only ones.
# NOTE(review): embedded lines 4821 (results initializer), 4824 (a
# results.extend( opener) and 4833+ (the return) are absent from this
# excerpt.
4820 def CheckChangeOnUpload(input_api, output_api):
4822 results.extend(_CommonChecks(input_api, output_api))
4823 results.extend(_CheckValidHostsInDEPS(input_api, output_api))
4825 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
4826 results.extend(_CheckUmaHistogramChanges(input_api, output_api))
4827 results.extend(_AndroidSpecificOnUploadChecks(input_api, output_api))
4828 results.extend(_CheckSyslogUseWarning(input_api, output_api))
4829 results.extend(_CheckGoogleSupportAnswerUrl(input_api, output_api))
4830 results.extend(_CheckUniquePtr(input_api, output_api))
4831 results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
4832 results.extend(_CheckFuzzTargets(input_api, output_api))
# NOTE(review): embedded lines 4838, 4841, 4843, 4846, 4848, 4853 and
# 4857-4858 are absent from this excerpt (docstring close, master_map
# opener, the 'if not master:' guard, the 'win' elif condition and the
# return are elided).
4836 def GetTryServerMasterForBot(bot):
4837 """Returns the Try Server master for the given bot.
4839 It tries to guess the master from the bot name, but may still fail
4840 and return None. There is no longer a default master.
4842 # Potentially ambiguous bot names are listed explicitly.
4844 'chromium_presubmit': 'master.tryserver.chromium.linux',
4845 'tools_build_presubmit': 'master.tryserver.chromium.linux',
4847 master = master_map.get(bot)
# Fallback: infer the platform master from substrings of the bot name.
4849 if 'android' in bot:
4850 master = 'master.tryserver.chromium.android'
4851 elif 'linux' in bot or 'presubmit' in bot:
4852 master = 'master.tryserver.chromium.linux'
4854 master = 'master.tryserver.chromium.win'
4855 elif 'mac' in bot or 'ios' in bot:
4856 master = 'master.tryserver.chromium.mac'
# depot_tools presubmit entry point: runs at 'git cl land' / commit time.
# Runs the common checks plus commit-only gating (tree status, bug field,
# tags, description).
# NOTE(review): embedded lines 4861 (results initializer), 4866-4867
# (CheckTreeIsOpen keyword args), 4869-4870 (a results.extend( opener) and
# 4878+ (the return) are absent from this excerpt.
4860 def CheckChangeOnCommit(input_api, output_api):
4862 results.extend(_CommonChecks(input_api, output_api))
4863 results.extend(_AndroidSpecificOnCommitChecks(input_api, output_api))
4864 # Make sure the tree is 'open'.
4865 results.extend(input_api.canned_checks.CheckTreeIsOpen(
4868 json_url='http://chromium-status.appspot.com/current?format=json'))
4871 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
4872 results.extend(input_api.canned_checks.CheckChangeHasBugField(
4873 input_api, output_api))
4874 results.extend(input_api.canned_checks.CheckChangeHasNoUnwantedTags(
4875 input_api, output_api))
4876 results.extend(input_api.canned_checks.CheckChangeHasDescription(
4877 input_api, output_api))
4881 def _CheckStrings(input_api, output_api):
4882 """Check string ICU syntax validity and if translation screenshots exist."""
4883 # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
4884 # footer is set to true.
4885 git_footers = input_api.change.GitFootersFromDescription()
4886 skip_screenshot_check_footer = [
4888 for footer in git_footers.get(u'Skip-Translation-Screenshots-Check', [])]
4889 run_screenshot_check = u'true' not in skip_screenshot_check_footer
4894 from io import StringIO
4896 new_or_added_paths = set(f.LocalPath()
4897 for f in input_api.AffectedFiles()
4898 if (f.Action() == 'A' or f.Action() == 'M'))
4899 removed_paths = set(f.LocalPath()
4900 for f in input_api.AffectedFiles(include_deletes=True)
4901 if f.Action() == 'D')
4903 affected_grds = [f for f in input_api.AffectedFiles()
4904 if (f.LocalPath().endswith(('.grd', '.grdp')))]
4905 if not affected_grds:
4908 affected_png_paths = [f.AbsoluteLocalPath()
4909 for f in input_api.AffectedFiles()
4910 if (f.LocalPath().endswith('.png'))]
4912 # Check for screenshots. Developers can upload screenshots using
4913 # tools/translation/upload_screenshots.py which finds and uploads
4914 # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
4915 # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
4916 # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
4918 # The logic here is as follows:
4920 # - If the CL has a .png file under the screenshots directory for a grd
4921 # file, warn the developer. Actual images should never be checked into the
4924 # - If the CL contains modified or new messages in grd files and doesn't
4925 # contain the corresponding .sha1 files, warn the developer to add images
4926 # and upload them via tools/translation/upload_screenshots.py.
4928 # - If the CL contains modified or new messages in grd files and the
4929 # corresponding .sha1 files, everything looks good.
4931 # - If the CL contains removed messages in grd files but the corresponding
4932 # .sha1 files aren't removed, warn the developer to remove them.
4933 unnecessary_screenshots = []
4935 unnecessary_sha1_files = []
4937 # This checks verifies that the ICU syntax of messages this CL touched is
4938 # valid, and reports any found syntax errors.
4939 # Without this presubmit check, ICU syntax errors in Chromium strings can land
4940 # without developers being aware of them. Later on, such ICU syntax errors
4941 # break message extraction for translation, hence would block Chromium
4942 # translations until they are fixed.
4943 icu_syntax_errors = []
4945 def _CheckScreenshotAdded(screenshots_dir, message_id):
4946 sha1_path = input_api.os_path.join(
4947 screenshots_dir, message_id + '.png.sha1')
4948 if sha1_path not in new_or_added_paths:
4949 missing_sha1.append(sha1_path)
4952 def _CheckScreenshotRemoved(screenshots_dir, message_id):
4953 sha1_path = input_api.os_path.join(
4954 screenshots_dir, message_id + '.png.sha1')
4955 if input_api.os_path.exists(sha1_path) and sha1_path not in removed_paths:
4956 unnecessary_sha1_files.append(sha1_path)
# NOTE(review): this listing is elided — several guard lines (the embedded
# numbering skips, e.g. 4990, 4992-4995, 4999) are not shown. Comments below
# hedge where control flow depends on missing lines.
4959 def _ValidateIcuSyntax(text, level, signatures):
4960 """Validates ICU syntax of a text string.
4962 Check if text looks similar to ICU and checks for ICU syntax correctness
4963 in this case. Reports various issues with ICU syntax and values of
4964 variants. Supports checking of nested messages. Accumulate information of
4965 each ICU messages found in the text for further checking.
# NOTE(review): the docstring's "Args:"/"Returns:" section headers and the
# tail of the |signatures| description are elided from this listing.
4968 text: a string to check.
4969 level: a number of current nesting level.
4970 signatures: an accumulator, a list of tuple of (level, variable,
4974 None if a string is not ICU or no issue detected.
4975 A tuple of (message, start index, end index) if an issue detected.
# Per ICU type: (set of variant names accepted, set of variant names
# required). An empty "accepted" set (select) means any variant name is
# allowed. NOTE(review): the dict opener (likely "valid_types = {") is
# elided — confirm against the full file.
4978 'plural': (frozenset(
4979 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
4980 frozenset(['=1', 'other'])),
4981 'selectordinal': (frozenset(
4982 ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
4983 frozenset(['one', 'other'])),
4984 'select': (frozenset(), frozenset(['other'])),
4987 # Check if the message looks like an attempt to use ICU
4988 # plural. If yes - check if its syntax strictly matches ICU format.
4989 like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b', text)
# NOTE(review): an early-exit guard for non-ICU-looking text (line 4990) is
# elided; as shown, a sentinel (no variable/kind) is recorded for this level.
4991 signatures.append((level, None, None, None))
4994 # Check for valid prefix and suffix
# Strict ICU shape: "<prefix>{<variable>, <type>, [offset:<n>] <variants>".
# Captures: 1=prefix incl. '{', 2=variable name, 3=message type,
# 4=variant pairs. NOTE(review): the "m = re.match(" opener (line 4995) is
# elided from this listing.
4996 r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
4997 r'(plural|selectordinal|select),\s*'
4998 r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
# Looks ICU-like but failed the strict match above.
5000 return (('This message looks like an ICU plural, '
5001 'but does not follow ICU syntax.'), like.start(), like.end())
5002 starting, variable, kind, variant_pairs = m.groups()
# Parse the "name {value} ..." pairs; |depth| detects unbalanced braces and
# |last_pos| locates where parsing stopped (for error spans).
5003 variants, depth, last_pos = _ParseIcuVariants(variant_pairs, m.start(4))
5005 return ('Invalid ICU format. Unbalanced opening bracket', last_pos,
5008 ending = text[last_pos:]
5010 return ('Invalid ICU format. No initial opening bracket', last_pos - 1,
5012 if not ending or '}' not in ending:
5013 return ('Invalid ICU format. No final closing bracket', last_pos - 1,
# A complex ICU message must be the whole string: no extra characters are
# allowed before the opening '{' or after the closing '}'.
5017 ('Invalid ICU format. Extra characters at the start of a complex '
5018 'message (go/icu-message-migration): "%s"') %
5019 starting, 0, len(starting))
5021 return (('Invalid ICU format. Extra characters at the end of a complex '
5022 'message (go/icu-message-migration): "%s"')
5023 % ending, last_pos - 1, len(text) - 1)
5024 if kind not in valid_types:
5025 return (('Unknown ICU message type %s. '
5026 'Valid types are: plural, select, selectordinal') % kind, 0, 0)
5027 known, required = valid_types[kind]
5028 defined_variants = set()
# Validate each variant: no duplicates, and the name must be one the type
# accepts (|known| empty means unrestricted, i.e. 'select').
5029 for variant, variant_range, value, value_range in variants:
5030 start, end = variant_range
5031 if variant in defined_variants:
5032 return ('Variant "%s" is defined more than once' % variant,
5034 elif known and variant not in known:
5035 return ('Variant "%s" is not valid for %s message' % (variant, kind),
5037 defined_variants.add(variant)
5038 # Check for nested structure
# Recurse into the variant value (brackets stripped); offsets from the
# nested result are shifted back into this string's coordinates.
5039 res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
5041 return (res[0], res[1] + value_range[0] + 1,
5042 res[2] + value_range[0] + 1)
5043 missing = required - defined_variants
5045 return ('Required variants missing: %s' % ', '.join(missing), 0,
# Record (nesting level, variable, type, variants) for cross-file checking.
5047 signatures.append((level, variable, kind, defined_variants))
# NOTE(review): the loop body of this parser is heavily elided in this
# listing (the embedded numbering skips 5067-5068, 5070-5072, 5074,
# 5077-5079, 5081-5083, 5085) — the brace-depth bookkeeping and the key
# extraction lines are not shown. Comments below describe only what is
# visible.
5050 def _ParseIcuVariants(text, offset=0):
5051 """Parse variants part of ICU complex message.
5053 Builds a tuple of variant names and values, as well as
5054 their offsets in the input string.
5057 text: a string to parse
5058 offset: additional offset to add to positions in the text to get correct
5059 position in the complete ICU string.
5062 List of tuples, each tuple consist of four fields: variant name,
5063 variant name span (tuple of two integers), variant value, value
5064 span (tuple of two integers).
# depth: current '{' nesting; start/end: indices of the most recent
# variant-value brackets (-1 = none seen yet).
5066 depth, start, end = 0, -1, -1
5069 for idx, char in enumerate(text):
# The variant name lives in the text between the previous value's closing
# bracket and the current opening bracket.
5073 chunk = text[end + 1:start]
5075 pos = offset + end + 1 + chunk.find(key)
5076 span = (pos, pos + len(key))
# Early return (presumably on an unbalanced close — confirm in full file):
# caller inspects |depth| to report bracket errors.
5080 return variants, depth, offset + idx
# Append (name, name span, value incl. brackets, value span); the second
# element of the value span (line 5085) is elided from this listing.
5084 variants.append((key, span, text[start:end + 1], (offset + start,
5086 return variants, depth, offset + end + 1
# Temporarily extend sys.path so the tools/translation helper package can be
# imported, then restore the original path. NOTE(review): a line between the
# import and the restore (5093, presumably "finally:") is elided — confirm
# this is a try/finally in the full file.
5089 old_sys_path = sys.path
5090 sys.path = sys.path + [input_api.os_path.join(
5091 input_api.PresubmitLocalPath(), 'tools', 'translation')]
5092 from helper import grd_helper
5094 sys.path = old_sys_path
# For each affected grd/grdp file, build {message id -> message} maps from
# both the old and the new file contents (not from the file on disk).
5096 for f in affected_grds:
5097 file_path = f.LocalPath()
5098 old_id_to_msg_map = {}
5099 new_id_to_msg_map = {}
5100 # Note that this code doesn't check if the file has been deleted. This is
5101 # OK because it only uses the old and new file contents and doesn't load
5102 # the file via its path.
5103 # It's also possible that a file's content refers to a renamed or deleted
5104 # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
5105 # is OK as well, because grd_helper ignores <part> tags when loading .grd or
# .grdp partial files are parsed directly from their string contents.
5107 if file_path.endswith('.grdp'):
5109 old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
5110 unicode('\n'.join(f.OldContents())))
5112 new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
5113 unicode('\n'.join(f.NewContents())))
# Full .grd files need the containing directory to resolve includes.
5115 file_dir = input_api.os_path.dirname(file_path) or '.'
5117 old_id_to_msg_map = grd_helper.GetGrdMessages(
5118 StringIO(unicode('\n'.join(f.OldContents()))), file_dir)
5120 new_id_to_msg_map = grd_helper.GetGrdMessages(
5121 StringIO(unicode('\n'.join(f.NewContents()))), file_dir)
5123 # Compute added, removed and modified message IDs.
5124 old_ids = set(old_id_to_msg_map)
5125 new_ids = set(new_id_to_msg_map)
5126 added_ids = new_ids - old_ids
5127 removed_ids = old_ids - new_ids
5128 modified_ids = set([])
# An ID present in both versions is "modified" when its serialized XML
# differs between old and new contents.
5129 for key in old_ids.intersection(new_ids):
5130 if (old_id_to_msg_map[key].FormatXml()
5131 != new_id_to_msg_map[key].FormatXml()):
5132 modified_ids.add(key)
# Screenshots live in a sibling directory named after the grd file with the
# dot replaced by an underscore, e.g. foo.grd -> foo_grd/.
5134 grd_name, ext = input_api.os_path.splitext(
5135 input_api.os_path.basename(file_path))
5136 screenshots_dir = input_api.os_path.join(
5137 input_api.os_path.dirname(file_path), grd_name + ext.replace('.', '_'))
5139 if run_screenshot_check:
5140 # Check the screenshot directory for .png files. Warn if there is any.
5141 for png_path in affected_png_paths:
5142 if png_path.startswith(screenshots_dir):
5143 unnecessary_screenshots.append(png_path)
# Added and modified messages both need a screenshot checksum uploaded.
5145 for added_id in added_ids:
5146 _CheckScreenshotAdded(screenshots_dir, added_id)
5148 for modified_id in modified_ids:
5149 _CheckScreenshotAdded(screenshots_dir, modified_id)
# Removed messages should have their stale .png.sha1 files deleted too.
5151 for removed_id in removed_ids:
5152 _CheckScreenshotRemoved(screenshots_dir, removed_id)
5154 # Check new and changed strings for ICU syntax errors.
5155 for key in added_ids.union(modified_ids):
5156 msg = new_id_to_msg_map[key].ContentsAsXml('', True)
5157 err = _ValidateIcuSyntax(msg, 0, [])
# NOTE(review): the guard before this append (presumably "if err is not
# None:", line 5158) is elided from this listing.
5159 icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
# Turn the accumulated findings into presubmit results. Screenshot findings
# are notify-only; ICU syntax problems are hard errors.
5162 if run_screenshot_check:
5163 if unnecessary_screenshots:
5164 results.append(output_api.PresubmitNotifyResult(
5165 'Do not include actual screenshots in the changelist. Run '
5166 'tools/translate/upload_screenshots.py to upload them instead:',
5167 sorted(unnecessary_screenshots)))
# NOTE(review): the "if missing_sha1:" guard (around lines 5168-5169) is
# elided from this listing.
5170 results.append(output_api.PresubmitNotifyResult(
5171 'You are adding or modifying UI strings.\n'
5172 'To ensure the best translations, take screenshots of the relevant UI '
5173 '(https://g.co/chrome/translation) and add these files to your '
5174 'changelist:', sorted(missing_sha1)))
5176 if unnecessary_sha1_files:
5177 results.append(output_api.PresubmitNotifyResult(
5178 'You removed strings associated with these files. Remove:',
5179 sorted(unnecessary_sha1_files)))
# Else branch (screenshot check disabled): tell the author it was skipped.
5181 results.append(output_api.PresubmitPromptOrNotify('Skipping translation '
5182 'screenshots check.'))
5184 if icu_syntax_errors:
5185 results.append(output_api.PresubmitError(
5186 'ICU syntax errors were found in the following strings (problems or '
5187 'feedback? Contact rainhard@chromium.org):', items=icu_syntax_errors))
# Verifies that tools/gritsettings/translation_expectations.pyl stays in
# sync with the repository's grd/grdp files whenever any of them change.
# NOTE(review): parts of the signature (lines 5193, 5195-5196 — likely a
# |repo_root| parameter and the docstring) are elided from this listing.
5192 def _CheckTranslationExpectations(input_api, output_api,
5194 translation_expectations_path=None,
# Only runs when a grd/grdp file is part of the change.
5197 affected_grds = [f for f in input_api.AffectedFiles()
5198 if (f.LocalPath().endswith('.grd') or
5199 f.LocalPath().endswith('.grdp'))]
5200 if not affected_grds:
# Temporarily extend sys.path to import the tools/translation helpers, then
# restore it. NOTE(review): line 5210 (presumably "finally:") is elided.
5204 old_sys_path = sys.path
5205 sys.path = sys.path + [
5206 input_api.os_path.join(
5207 input_api.PresubmitLocalPath(), 'tools', 'translation')]
5208 from helper import git_helper
5209 from helper import translation_helper
5211 sys.path = old_sys_path
5213 # Check that translation expectations can be parsed and we can get a list of
5214 # translatable grd files. |repo_root| and |translation_expectations_path| are
5215 # only passed by tests.
5217 repo_root = input_api.PresubmitLocalPath()
5218 if not translation_expectations_path:
5219 translation_expectations_path = input_api.os_path.join(
5220 repo_root, 'tools', 'gritsettings',
5221 'translation_expectations.pyl')
5223 grd_files = git_helper.list_grds_in_repository(repo_root)
# Parsing failures are reported as a notify result rather than raising, so
# the presubmit run itself does not crash. NOTE(review): the "try:" opener
# (around line 5225) is elided from this listing.
5226 translation_helper.get_translatable_grds(repo_root, grd_files,
5227 translation_expectations_path)
5228 except Exception as e:
5229 return [output_api.PresubmitNotifyResult(
5230 'Failed to get a list of translatable grd files. This happens when:\n'
5231 ' - One of the modified grd or grdp files cannot be parsed or\n'
5232 ' - %s is not updated.\n'
5233 'Stack:\n%s' % (translation_expectations_path, str(e)))]
5237 def _CheckStableMojomChanges(input_api, output_api):
5238 """Changes to [Stable] mojom types must preserve backward-compatibility."""
5239 changed_mojoms = input_api.AffectedFiles(
5240 include_deletes=True,
5241 file_filter=lambda f: f.LocalPath().endswith(('.mojom')))
5243 for mojom in changed_mojoms:
5244 old_contents = ''.join(mojom.OldContents()) or None
5245 new_contents = ''.join(mojom.NewContents()) or None
5247 'filename': mojom.LocalPath(),
5248 'old': '\n'.join(mojom.OldContents()) or None,
5249 'new': '\n'.join(mojom.NewContents()) or None,
5252 process = input_api.subprocess.Popen(
5253 [input_api.python_executable,
5254 input_api.os_path.join(input_api.PresubmitLocalPath(), 'mojo',
5255 'public', 'tools', 'mojom',
5256 'check_stable_mojom_compatibility.py'),
5257 '--src-root', input_api.PresubmitLocalPath()],
5258 stdin=input_api.subprocess.PIPE,
5259 stdout=input_api.subprocess.PIPE,
5260 stderr=input_api.subprocess.PIPE,
5261 universal_newlines=True)
5262 (x, error) = process.communicate(input=input_api.json.dumps(delta))
5263 if process.returncode:
5264 return [output_api.PresubmitError(
5265 'One or more [Stable] mojom definitions appears to have been changed '
5266 'in a way that is not backward-compatible.',