1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
13 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_rules.py",
14 r"^native_client_sdk[\\/]src[\\/]build_tools[\\/]make_simple.py",
15 r"^native_client_sdk[\\/]src[\\/]tools[\\/].*.mk",
16 r"^net[\\/]tools[\\/]spdyshark[\\/].*",
18 r"^third_party[\\/]blink[\\/].*",
19 r"^third_party[\\/]breakpad[\\/].*",
23 r".+[\\/]pnacl_shim\.c$",
24 r"^gpu[\\/]config[\\/].*_list_json\.cc$",
25 r"^chrome[\\/]browser[\\/]resources[\\/]pdf[\\/]index.js",
26 r"tools[\\/]md_browser[\\/].*\.css$",
27 # Test pages for Maps telemetry tests.
28 r"tools[\\/]perf[\\/]page_sets[\\/]maps_perf_test.*",
29 # Test pages for WebRTC telemetry tests.
30 r"tools[\\/]perf[\\/]page_sets[\\/]webrtc_cases.*",
34 # Fragment of a regular expression that matches C++ and Objective-C++
35 # implementation files.
36 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
39 # Fragment of a regular expression that matches C++ and Objective-C++
41 _HEADER_EXTENSIONS = r'\.(h|hpp|hxx)$'
# Regular expression that matches code only used for test binaries
# (best effort). The tuple was left unterminated in this chunk; the
# closing parenthesis is restored here.
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\/](test|tool(s)?)[\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\/]shell[\\/].*',
    # Non-production example code.
    r'mojo[\\/]examples[\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\/]iossim[\\/]iossim\.mm$',
)
62 _THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
64 _TEST_ONLY_WARNING = (
65 'You might be calling functions intended only for testing from\n'
66 'production code. It is OK to ignore this warning if you know what\n'
67 'you are doing, as the heuristics used to detect the situation are\n'
68 'not perfect. The commit queue will not block on this warning.')
71 _INCLUDE_ORDER_WARNING = (
72 'Your #include order seems to be broken. Remember to use the right '
73 'collation (LC_COLLATE=C) and check\nhttps://google.github.io/styleguide/'
74 'cppguide.html#Names_and_Order_of_Includes')
77 # Format: Sequence of tuples containing:
78 # * String pattern or, if starting with a slash, a regular expression.
79 # * Sequence of strings to show when the pattern matches.
80 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
81 _BANNED_JAVA_FUNCTIONS = (
83 'StrictMode.allowThreadDiskReads()',
85 'Prefer using StrictModeContext.allowDiskReads() to using StrictMode '
91 'StrictMode.allowThreadDiskWrites()',
93 'Prefer using StrictModeContext.allowDiskWrites() to using StrictMode '
100 # Format: Sequence of tuples containing:
101 # * String pattern or, if starting with a slash, a regular expression.
102 # * Sequence of strings to show when the pattern matches.
103 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
104 _BANNED_OBJC_FUNCTIONS = (
108 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
109 'prohibited. Please use CrTrackingArea instead.',
110 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
115 r'/NSTrackingArea\W',
117 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
119 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
124 'convertPointFromBase:',
126 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
127 'Please use |convertPoint:(point) fromView:nil| instead.',
128 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
133 'convertPointToBase:',
135 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
136 'Please use |convertPoint:(point) toView:nil| instead.',
137 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
142 'convertRectFromBase:',
144 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
145 'Please use |convertRect:(point) fromView:nil| instead.',
146 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
151 'convertRectToBase:',
153 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
154 'Please use |convertRect:(point) toView:nil| instead.',
155 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
160 'convertSizeFromBase:',
162 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
163 'Please use |convertSize:(point) fromView:nil| instead.',
164 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
169 'convertSizeToBase:',
171 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
172 'Please use |convertSize:(point) toView:nil| instead.',
173 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
178 r"/\s+UTF8String\s*]",
180 'The use of -[NSString UTF8String] is dangerous as it can return null',
181 'even if |canBeConvertedToEncoding:NSUTF8StringEncoding| returns YES.',
182 'Please use |SysNSStringToUTF8| instead.',
187 r'__unsafe_unretained',
189 'The use of __unsafe_unretained is almost certainly wrong, unless',
190 'when interacting with NSFastEnumeration or NSInvocation.',
191 'Please use __weak in files build with ARC, nothing otherwise.',
198 'The use of "freeWhenDone:NO" with the NoCopy creation of ',
199 'Foundation types is prohibited.',
205 # Format: Sequence of tuples containing:
206 # * String pattern or, if starting with a slash, a regular expression.
207 # * Sequence of strings to show when the pattern matches.
208 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
209 _BANNED_IOS_OBJC_FUNCTIONS = (
213 'TEST() macro should not be used in Objective-C++ code as it does not ',
214 'drain the autorelease pool at the end of the test. Use TEST_F() ',
215 'macro instead with a fixture inheriting from PlatformTest (or a ',
221 r'/\btesting::Test\b',
223 'testing::Test should not be used in Objective-C++ code as it does ',
224 'not drain the autorelease pool at the end of the test. Use ',
225 'PlatformTest instead.'
231 # Directories that contain deprecated Bind() or Callback types.
232 # Find sub-directories from a given directory by running:
233 # for i in `find . -maxdepth 1 -type d`; do
235 # (cd $i; git grep -P 'base::(Bind\(|(Callback<|Closure))'|wc -l)
238 # TODO(crbug.com/714018): Remove (or narrow the scope of) paths from this list
239 # when they have been converted to modern callback types (OnceCallback,
240 # RepeatingCallback, BindOnce, BindRepeating) in order to enable presubmit
241 # checks for them and prevent regressions.
242 _NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK = '|'.join((
243 '^android_webview/browser/',
247 '^base/callback.h', # Intentional.
250 '^chrome/chrome_elf/',
251 '^chrome/chrome_watcher/',
253 '^chrome/installer/',
254 '^chrome/notification_helper/',
261 '^chromecast/browser/',
262 '^chromecast/crash/',
263 '^chromecast/media/',
264 '^chromecast/metrics/',
266 '^chromeos/attestation/',
268 '^chromeos/components/',
269 '^chromeos/cryptohome/',
271 '^chromeos/geolocation/',
273 '^chromeos/network/',
274 '^chromeos/printing/',
275 '^chromeos/process_proxy/',
276 '^chromeos/services/',
277 '^chromeos/settings/',
278 '^chromeos/timezone/',
281 '^components/assist_ranker/',
282 '^components/autofill/',
283 '^components/autofill_assistant/',
284 '^components/bookmarks/',
285 '^components/browser_sync/',
286 '^components/browser_watcher/',
287 '^components/browsing_data/',
288 '^components/cast_channel/',
289 '^components/certificate_transparency/',
290 '^components/chromeos_camera/',
291 '^components/component_updater/',
292 '^components/content_settings/',
293 '^components/crash/',
294 '^components/cronet/',
295 '^components/data_reduction_proxy/',
296 '^components/discardable_memory/',
297 '^components/dom_distiller/',
298 '^components/domain_reliability/',
299 '^components/download/',
300 '^components/drive/',
302 '^components/favicon/',
303 '^components/feature_engagement/',
304 '^components/feedback/',
305 '^components/flags_ui/',
306 '^components/gcm_driver/',
307 '^components/google/',
308 '^components/guest_view/',
309 '^components/heap_profiling/',
310 '^components/history/',
311 '^components/image_fetcher/',
312 '^components/invalidation/',
313 '^components/keyed_service/',
314 '^components/login/',
315 '^components/metrics/',
316 '^components/metrics_services_manager/',
318 '^components/navigation_interception/',
319 '^components/net_log/',
320 '^components/network_time/',
321 '^components/ntp_snippets/',
322 '^components/ntp_tiles/',
323 '^components/offline_items_collection/',
324 '^components/offline_pages/',
325 '^components/omnibox/',
326 '^components/ownership/',
327 '^components/pairing/',
328 '^components/password_manager/',
329 '^components/payments/',
330 '^components/plugins/',
331 '^components/policy/',
332 '^components/pref_registry/',
333 '^components/prefs/',
334 '^components/printing/',
335 '^components/proxy_config/',
336 '^components/quirks/',
337 '^components/rappor/',
338 '^components/remote_cocoa/',
339 '^components/renderer_context_menu/',
341 '^components/safe_browsing/',
342 '^components/search_engines/',
343 '^components/search_provider_logos/',
344 '^components/security_interstitials/',
345 '^components/security_state/',
346 '^components/services/',
347 '^components/sessions/',
348 '^components/signin/',
349 '^components/ssl_errors/',
350 '^components/storage_monitor/',
351 '^components/subresource_filter/',
352 '^components/suggestions/',
353 '^components/supervised_user_error_page/',
355 '^components/sync_bookmarks/',
356 '^components/sync_device_info/',
357 '^components/sync_preferences/',
358 '^components/sync_sessions/',
360 '^components/tracing/',
361 '^components/translate/',
363 '^components/update_client/',
364 '^components/upload_list/',
365 '^components/variations/',
366 '^components/visitedlink/',
367 '^components/web_cache/',
368 '^components/web_resource/',
369 '^components/web_restrictions/',
370 '^components/webcrypto/',
371 '^components/webdata/',
372 '^components/webdata_services/',
380 '^content/renderer/android/',
381 '^content/renderer/fetchers/',
382 '^content/renderer/image_downloader/',
383 '^content/renderer/input/',
384 '^content/renderer/java/',
385 '^content/renderer/media/',
386 '^content/renderer/media_capture_from_element/',
387 '^content/renderer/media_recorder/',
388 '^content/renderer/p2p/',
389 '^content/renderer/pepper/',
390 '^content/renderer/service_worker/',
391 '^content/renderer/worker/',
396 '^device/bluetooth/',
399 '^device/udev_linux/',
403 '^google_apis/dive/',
404 '^google_apis/gaia/',
418 '^media/device_monitors/',
432 '^ppapi/shared_impl/',
439 '^remoting/internal/',
441 '^remoting/protocol/',
442 '^remoting/signaling/',
448 '^testing/gmock_mutant.h',
449 '^testing/libfuzzer/',
450 '^third_party/blink/',
451 '^third_party/crashpad/crashpad/test/gtest_main.cc',
452 '^third_party/leveldatabase/leveldb_chrome.cc',
453 '^third_party/boringssl/gtest_main_chromium.cc',
454 '^third_party/cacheinvalidation/overrides/' +
455 'google/cacheinvalidation/deps/callback.h',
456 '^third_party/libaddressinput/chromium/chrome_address_validator.cc',
457 '^third_party/zlib/google/',
459 '^tools/clang/base_bind_rewriters/', # Intentional.
460 '^tools/gdb/gdb_chrome.py', # Intentional.
461 '^ui/accelerated_widget_mac/',
469 '^ui/message_center/',
472 '^ui/views_content_client/',
476 # Format: Sequence of tuples containing:
477 # * String pattern or, if starting with a slash, a regular expression.
478 # * Sequence of strings to show when the pattern matches.
479 # * Error flag. True if a match is a presubmit error, otherwise it's a warning.
480 # * Sequence of paths to *not* check (regexps).
481 _BANNED_CPP_FUNCTIONS = (
485 'New code should not use NULL. Use nullptr instead.',
490 # Make sure that gtest's FRIEND_TEST() macro is not used; the
491 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
492 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
496 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
497 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
503 r'XSelectInput|CWEventMask|XCB_CW_EVENT_MASK',
505 'Chrome clients wishing to select events on X windows should use',
506 'ui::XScopedEventSelector. It is safe to ignore this warning only if',
507 'you are selecting events from the GPU process, or if you are using',
508 'an XDisplay other than gfx::GetXDisplay().',
512 r"^ui[\\/]gl[\\/].*\.cc$",
513 r"^media[\\/]gpu[\\/].*\.cc$",
518 r'XInternAtom|xcb_intern_atom',
520 'Use gfx::GetAtom() instead of interning atoms directly.',
524 r"^gpu[\\/]ipc[\\/]service[\\/]gpu_watchdog_thread\.cc$",
525 r"^remoting[\\/]host[\\/]linux[\\/]x_server_clipboard\.cc$",
526 r"^ui[\\/]gfx[\\/]x[\\/]x11_atom_cache\.cc$",
532 'Overriding setMatrixClip() is prohibited; ',
533 'the base function is deprecated. ',
541 'The use of SkRefPtr is prohibited. ',
542 'Please use sk_sp<> instead.'
550 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
551 'Please use sk_sp<> instead.'
559 'The use of SkAutoTUnref is dangerous because it implicitly ',
560 'converts to a raw pointer. Please use sk_sp<> instead.'
568 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
569 'because it implicitly converts to a raw pointer. ',
570 'Please use sk_sp<> instead.'
576 r'/HANDLE_EINTR\(.*close',
578 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
579 'descriptor will be closed, and it is incorrect to retry the close.',
580 'Either call close directly and ignore its return value, or wrap close',
581 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
587 r'/IGNORE_EINTR\((?!.*close)',
589 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
590 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
594 # Files that #define IGNORE_EINTR.
595 r'^base[\\/]posix[\\/]eintr_wrapper\.h$',
596 r'^ppapi[\\/]tests[\\/]test_broker\.cc$',
602 'Do not introduce new v8::Extensions into the code base, use',
603 'gin::Wrappable instead. See http://crbug.com/334679',
607 r'extensions[\\/]renderer[\\/]safe_builtins\.*',
611 '#pragma comment(lib,',
613 'Specify libraries to link with in build files and not in the source.',
617 r'^base[\\/]third_party[\\/]symbolize[\\/].*',
618 r'^third_party[\\/]abseil-cpp[\\/].*',
622 r'/base::SequenceChecker\b',
624 'Consider using SEQUENCE_CHECKER macros instead of the class directly.',
630 r'/base::ThreadChecker\b',
632 'Consider using THREAD_CHECKER macros instead of the class directly.',
638 r'/(Time(|Delta|Ticks)|ThreadTicks)::FromInternalValue|ToInternalValue',
640 'base::TimeXXX::FromInternalValue() and ToInternalValue() are',
641 'deprecated (http://crbug.com/634507). Please avoid converting away',
642 'from the Time types in Chromium code, especially if any math is',
643 'being done on time values. For interfacing with platform/library',
644 'APIs, use FromMicroseconds() or InMicroseconds(), or one of the other',
645 'type converter methods instead. For faking TimeXXX values (for unit',
646 'testing only), use TimeXXX() + TimeDelta::FromMicroseconds(N). For',
647 'other use cases, please contact base/time/OWNERS.',
653 'CallJavascriptFunctionUnsafe',
655 "Don't use CallJavascriptFunctionUnsafe() in new code. Instead, use",
656 'AllowJavascript(), OnJavascriptAllowed()/OnJavascriptDisallowed(),',
657 'and CallJavascriptFunction(). See https://goo.gl/qivavq.',
661 r'^content[\\/]browser[\\/]webui[\\/]web_ui_impl\.(cc|h)$',
662 r'^content[\\/]public[\\/]browser[\\/]web_ui\.h$',
663 r'^content[\\/]public[\\/]test[\\/]test_web_ui\.(cc|h)$',
669 'Instead of leveldb::DB::Open() use leveldb_env::OpenDB() from',
670 'third_party/leveldatabase/env_chromium.h. It exposes databases to',
671 "Chrome's tracing, making their memory usage visible.",
675 r'^third_party/leveldatabase/.*\.(cc|h)$',
679 'leveldb::NewMemEnv',
681 'Instead of leveldb::NewMemEnv() use leveldb_chrome::NewMemEnv() from',
682 'third_party/leveldatabase/leveldb_chrome.h. It exposes environments',
683 "to Chrome's tracing, making their memory usage visible.",
687 r'^third_party/leveldatabase/.*\.(cc|h)$',
691 'RunLoop::QuitCurrent',
693 'Please migrate away from RunLoop::QuitCurrent*() methods. Use member',
694 'methods of a specific RunLoop instance instead.',
700 'base::ScopedMockTimeMessageLoopTaskRunner',
702 'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
703 'ScopedTaskEnvironment::MainThreadType::MOCK_TIME. There are still a',
704 'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
705 '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
706 'with gab@ first if you think you need it)',
714 'Using std::regex adds unnecessary binary size to Chrome. Please use',
715 're2::RE2 instead (crbug.com/755321)',
721 r'/\bstd::to_string\b',
723 'std::to_string is locale dependent and slower than alternatives.',
724 'For locale-independent strings, e.g. writing numbers to and from',
725 'disk profiles, use base::NumberToString().',
726 'For user-visible strings, use base::FormatNumber() and',
727 'the related functions in base/i18n/number_formatting.h.',
729 False, # Only a warning for now since it is already used,
730 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
733 r'/\bstd::shared_ptr\b',
735 'std::shared_ptr should not be used. Use scoped_refptr instead.',
738 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
743 'long long is banned. Use stdint.h if you need a 64 bit number.',
745 False, # Only a warning since it is already used.
746 [_THIRD_PARTY_EXCEPT_BLINK], # Don't warn in third_party folders.
751 'std::bind is banned because of lifetime risks.',
752 'Use base::BindOnce or base::BindRepeating instead.',
755 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
758 r'/\b#include <chrono>\b',
760 '<chrono> overlaps with Time APIs in base. Keep using',
764 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
767 r'/\b#include <exception>\b',
769 'Exceptions are banned and disabled in Chromium.',
772 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
775 r'/\bstd::function\b',
777 'std::function is banned. Instead use base::Callback which directly',
778 'supports Chromium\'s weak pointers, ref counting and more.',
780 False, # Only a warning since there are dozens of uses already.
781 [_THIRD_PARTY_EXCEPT_BLINK], # Do not warn in third_party folders.
784 r'/\b#include <random>\b',
786 'Do not use any random number engines from <random>. Instead',
787 'use base::RandomBitGenerator.',
790 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
795 'std::ratio is banned by the Google Style Guide.',
798 [_THIRD_PARTY_EXCEPT_BLINK], # Not an error in third_party folders.
801 (r'/base::ThreadRestrictions::(ScopedAllowIO|AssertIOAllowed|'
802 r'DisallowWaiting|AssertWaitAllowed|SetWaitAllowed|ScopedAllowWait)'),
804 'Use the new API in base/threading/thread_restrictions.h.',
812 'Please use base::Bind{Once,Repeating} instead',
813 'of base::Bind. (crbug.com/714018)',
816 _NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,
819 r'/\bbase::Callback[<:]',
821 'Please use base::{Once,Repeating}Callback instead',
822 'of base::Callback. (crbug.com/714018)',
825 _NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,
828 r'/\bbase::Closure\b',
830 'Please use base::{Once,Repeating}Closure instead',
831 'of base::Closure. (crbug.com/714018)',
834 _NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,
837 r'/base::SharedMemory(|Handle)',
839 'base::SharedMemory is deprecated. Please use',
840 '{Writable,ReadOnly}SharedMemoryRegion instead.',
846 r'/\bRunMessageLoop\b',
848 'RunMessageLoop is deprecated, use RunLoop instead.',
856 'RunThisRunLoop is deprecated, use RunLoop directly instead.',
862 r'RunAllPendingInMessageLoop()',
864 "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
865 "if you're convinced you need this.",
871 r'RunAllPendingInMessageLoop(BrowserThread',
873 'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
874 'BrowserThread::UI, TestBrowserThreadBundle::RunIOThreadUntilIdle',
875 'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
876 'async events instead of flushing threads.',
882 r'MessageLoopRunner',
884 'MessageLoopRunner is deprecated, use RunLoop instead.',
890 r'GetDeferredQuitTaskForRunLoop',
892 "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
893 "gab@ if you found a use case where this is the only solution.",
899 'sqlite3_initialize(',
901 'Instead of calling sqlite3_initialize(), depend on //sql, ',
902 '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
906 r'^sql/initialization\.(cc|h)$',
907 r'^third_party/sqlite/.*\.(c|cc|h)$',
913 'net::URLFetcher should no longer be used in content embedders. ',
914 'Instead, use network::SimpleURLLoader instead, which supports ',
915 'an out-of-process network stack. ',
916 'net::URLFetcher may still be used in binaries that do not embed',
921 r'^ios[\\/].*\.(cc|h)$',
922 r'.*[\\/]ios[\\/].*\.(cc|h)$',
924 r'^net[\\/].*\.(cc|h)$',
925 r'.*[\\/]tools[\\/].*\.(cc|h)$',
929 r'std::random_shuffle',
931 'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
932 'base::RandomShuffle instead.'
938 'ios/web/public/test/http_server',
940 'web::HTTPserver is deprecated use net::EmbeddedTestServer instead.',
948 'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
949 'implicated in a few leaks. Use operator& instead.'
957 'DEFINE_TYPE_CASTS is deprecated. Instead, use downcast helpers from ',
958 '//third_party/blink/renderer/platform/casting.h.'
962 r'^third_party/blink/renderer/.*\.(cc|h)$',
966 r'/\bmojo::DataPipe\b',
968 'mojo::DataPipe is deprecated. Use mojo::CreateDataPipe instead.',
976 'SHFileOperation was deprecated in Windows Vista, and there are less ',
977 'complex functions to achieve the same goals. Use IFileOperation for ',
978 'any esoteric actions instead.'
986 'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
987 'Use base::win::String16FromGUID instead.'
991 r'/base/win/win_util_unittest.cc'
997 'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
998 'Use base::win::String16FromGUID instead.'
1002 r'/base/win/win_util_unittest.cc'
1008 'The use of kCFAllocatorNull with the NoCopy creation of ',
1009 'CoreFoundation types is prohibited.',
1017 'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
1018 'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
1019 'StringTraits if you would like to convert between custom types and',
1020 'the wire format of mojom types.'
1024 r'^third_party/blink/.*\.(cc|h)$',
1025 r'^content/renderer/.*\.(cc|h)$',
1031 _IPC_ENUM_TRAITS_DEPRECATED = (
1032 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
1033 'See http://www.chromium.org/Home/chromium-security/education/'
1034 'security-tips-for-ipc')
1036 _LONG_PATH_ERROR = (
1037 'Some files included in this CL have file names that are too long (> 200'
1038 ' characters). If committed, these files will cause issues on Windows. See'
1039 ' https://crbug.com/612667 for more details.'
1042 _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
1043 r".*[\\/]BuildHooksAndroidImpl\.java",
1044 r".*[\\/]LicenseContentProvider\.java",
1045 r".*[\\/]PlatformServiceBridgeImpl.java",
1046 r".*chrome[\\\/]android[\\\/]feed[\\\/]dummy[\\\/].*\.java",
1049 # These paths contain test data and other known invalid JSON files.
1050 _KNOWN_INVALID_JSON_FILE_PATTERNS = [
1051 r'test[\\/]data[\\/]',
1052 r'^components[\\/]policy[\\/]resources[\\/]policy_templates\.json$',
1053 r'^third_party[\\/]protobuf[\\/]',
1054 r'^third_party[\\/]blink[\\/]renderer[\\/]devtools[\\/]protocol\.json$',
1055 r'^third_party[\\/]blink[\\/]web_tests[\\/]external[\\/]wpt[\\/]',
1059 _VALID_OS_MACROS = (
1060 # Please keep sorted.
1065 'OS_CAT', # For testing.
1067 'OS_CYGWIN', # third_party code.
1085 _ANDROID_SPECIFIC_PYDEPS_FILES = [
1086 'android_webview/tools/run_cts.pydeps',
1087 'base/android/jni_generator/jni_generator.pydeps',
1088 'base/android/jni_generator/jni_registration_generator.pydeps',
1089 'build/android/devil_chromium.pydeps',
1090 'build/android/gyp/aar.pydeps',
1091 'build/android/gyp/aidl.pydeps',
1092 'build/android/gyp/apkbuilder.pydeps',
1093 'build/android/gyp/assert_static_initializers.pydeps',
1094 'build/android/gyp/bytecode_processor.pydeps',
1095 'build/android/gyp/compile_resources.pydeps',
1096 'build/android/gyp/create_app_bundle_minimal_apks.pydeps',
1097 'build/android/gyp/create_bundle_wrapper_script.pydeps',
1098 'build/android/gyp/copy_ex.pydeps',
1099 'build/android/gyp/create_app_bundle.pydeps',
1100 'build/android/gyp/create_apk_operations_script.pydeps',
1101 'build/android/gyp/create_java_binary_script.pydeps',
1102 'build/android/gyp/create_size_info_files.pydeps',
1103 'build/android/gyp/create_tool_wrapper.pydeps',
1104 'build/android/gyp/desugar.pydeps',
1105 'build/android/gyp/dexsplitter.pydeps',
1106 'build/android/gyp/dex.pydeps',
1107 'build/android/gyp/dist_aar.pydeps',
1108 'build/android/gyp/filter_zip.pydeps',
1109 'build/android/gyp/gcc_preprocess.pydeps',
1110 'build/android/gyp/generate_linker_version_script.pydeps',
1111 'build/android/gyp/ijar.pydeps',
1112 'build/android/gyp/jacoco_instr.pydeps',
1113 'build/android/gyp/java_cpp_enum.pydeps',
1114 'build/android/gyp/java_cpp_strings.pydeps',
1115 'build/android/gyp/javac.pydeps',
1116 'build/android/gyp/jinja_template.pydeps',
1117 'build/android/gyp/lint.pydeps',
1118 'build/android/gyp/main_dex_list.pydeps',
1119 'build/android/gyp/merge_manifest.pydeps',
1120 'build/android/gyp/prepare_resources.pydeps',
1121 'build/android/gyp/proguard.pydeps',
1122 'build/android/gyp/write_build_config.pydeps',
1123 'build/android/gyp/write_ordered_libraries.pydeps',
1124 'build/android/gyp/zip.pydeps',
1125 'build/android/incremental_install/generate_android_manifest.pydeps',
1126 'build/android/incremental_install/write_installer_json.pydeps',
1127 'build/android/resource_sizes.pydeps',
1128 'build/android/test_runner.pydeps',
1129 'build/android/test_wrapper/logdog_wrapper.pydeps',
1130 'build/protoc_java.pydeps',
1131 'chrome/android/features/create_stripped_java_factory.pydeps',
1132 'net/tools/testserver/testserver.pydeps',
1133 'third_party/android_platform/development/scripts/stack.pydeps',
1137 _GENERIC_PYDEPS_FILES = [
1138 'chrome/test/chromedriver/test/run_py_tests.pydeps',
1139 'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
1140 'tools/binary_size/sizes.pydeps',
1141 'tools/binary_size/supersize.pydeps',
1145 _ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
1148 # Bypass the AUTHORS check for these accounts.
1149 _KNOWN_ROBOTS = set(
1150 '%s-chromium-autoroll@skia-buildbots.google.com.iam.gserviceaccount.com' % s
1151 for s in ('afdo', 'angle', 'catapult', 'chromite', 'depot-tools',
1152 'fuchsia-sdk', 'nacl', 'pdfium', 'perfetto', 'skia',
1153 'spirv', 'src-internal', 'webrtc')
1154 ) | set('%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)
1155 ) | set('%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)
1156 ) | set('%s@chops-service-accounts.iam.gserviceaccount.com' % s
1157 for s in ('v8-ci-autoroll-builder', 'wpt-autoroller',)
1158 ) | set('%s@skia-public.iam.gserviceaccount.com' % s
1159 for s in ('chromium-autoroll',)
1160 ) | set('%s@skia-corp.google.com.iam.gserviceaccount.com' % s
1161 for s in ('chromium-internal-autoroll',))
1164 def _IsCPlusPlusFile(input_api, file_path):
1165 """Returns True if this file contains C++-like code (and not Python,
1166 Go, Java, MarkDown, ...)"""
1168 ext = input_api.os_path.splitext(file_path)[1]
1169 # This list is compatible with CppChecker.IsCppFile but we should
1170 # consider adding ".c" to it. If we do that we can use this function
1171 # at more places in the code.
1180 def _IsCPlusPlusHeaderFile(input_api, file_path):
1181 return input_api.os_path.splitext(file_path)[1] == ".h"
1184 def _IsJavaFile(input_api, file_path):
1185 return input_api.os_path.splitext(file_path)[1] == ".java"
1188 def _IsProtoFile(input_api, file_path):
1189 return input_api.os_path.splitext(file_path)[1] == ".proto"
1191 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
1192 """Attempts to prevent use of functions intended only for testing in
1193 non-testing code. For now this is just a best-effort implementation
1194 that ignores header files and may have some false positives. A
1195 better implementation would probably need a proper C++ parser.
1197 # We only scan .cc files and the like, as the declaration of
1198 # for-testing functions in header files are hard to distinguish from
1199 # calls to such functions without a proper C++ parser.
1200 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
1202 base_function_pattern = r'[ :]test::[^\s]+|ForTest(s|ing)?|for_test(s|ing)?'
1203 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
1204 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
1205 exclusion_pattern = input_api.re.compile(
1206 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
1207 base_function_pattern, base_function_pattern))
1209 def FilterFile(affected_file):
1210 black_list = (_EXCLUDED_PATHS +
1211 _TEST_CODE_EXCLUDED_PATHS +
1212 input_api.DEFAULT_BLACK_LIST)
1213 return input_api.FilterSourceFile(
1215 white_list=file_inclusion_pattern,
1216 black_list=black_list)
1219 for f in input_api.AffectedSourceFiles(FilterFile):
1220 local_path = f.LocalPath()
1221 for line_number, line in f.ChangedContents():
1222 if (inclusion_pattern.search(line) and
1223 not comment_pattern.search(line) and
1224 not exclusion_pattern.search(line)):
1226 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1229 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
1234 def _CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api):
1235 """This is a simplified version of
1236 _CheckNoProductionCodeUsingTestOnlyFunctions for Java files.
1238 javadoc_start_re = input_api.re.compile(r'^\s*/\*\*')
1239 javadoc_end_re = input_api.re.compile(r'^\s*\*/')
1240 name_pattern = r'ForTest(s|ing)?'
1241 # Describes an occurrence of "ForTest*" inside a // comment.
1242 comment_re = input_api.re.compile(r'//.*%s' % name_pattern)
1244 inclusion_re = input_api.re.compile(r'(%s)\s*\(' % name_pattern)
1245 # Ignore definitions. (Comments are ignored separately.)
1246 exclusion_re = input_api.re.compile(r'(%s)[^;]+\{' % name_pattern)
1249 sources = lambda x: input_api.FilterSourceFile(
1251 black_list=(('(?i).*test', r'.*\/junit\/')
1252 + input_api.DEFAULT_BLACK_LIST),
1253 white_list=[r'.*\.java$']
1255 for f in input_api.AffectedFiles(include_deletes=False, file_filter=sources):
1256 local_path = f.LocalPath()
1257 is_inside_javadoc = False
1258 for line_number, line in f.ChangedContents():
1259 if is_inside_javadoc and javadoc_end_re.search(line):
1260 is_inside_javadoc = False
1261 if not is_inside_javadoc and javadoc_start_re.search(line):
1262 is_inside_javadoc = True
1263 if is_inside_javadoc:
1265 if (inclusion_re.search(line) and
1266 not comment_re.search(line) and
1267 not exclusion_re.search(line)):
1269 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1272 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
1277 def _CheckNoIOStreamInHeaders(input_api, output_api):
1278 """Checks to make sure no .h files include <iostream>."""
1280 pattern = input_api.re.compile(r'^#include\s*<iostream>',
1281 input_api.re.MULTILINE)
1282 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1283 if not f.LocalPath().endswith('.h'):
1285 contents = input_api.ReadFile(f)
1286 if pattern.search(contents):
1290 return [output_api.PresubmitError(
1291 'Do not #include <iostream> in header files, since it inserts static '
1292 'initialization into every file including the header. Instead, '
1293 '#include <ostream>. See http://crbug.com/94794',
1297 def _CheckNoStrCatRedefines(input_api, output_api):
1298 """Checks no windows headers with StrCat redefined are included directly."""
1300 pattern_deny = input_api.re.compile(
1301 r'^#include\s*[<"](shlwapi|atlbase|propvarutil|sphelper).h[">]',
1302 input_api.re.MULTILINE)
1303 pattern_allow = input_api.re.compile(
1304 r'^#include\s"base/win/windows_defines.inc"',
1305 input_api.re.MULTILINE)
1306 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1307 contents = input_api.ReadFile(f)
1308 if pattern_deny.search(contents) and not pattern_allow.search(contents):
1309 files.append(f.LocalPath())
1312 return [output_api.PresubmitError(
1313 'Do not #include shlwapi.h, atlbase.h, propvarutil.h or sphelper.h '
1314 'directly since they pollute code with StrCat macro. Instead, '
1315 'include matching header from base/win. See http://crbug.com/856536',
1320 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
1321 """Checks to make sure no source files use UNIT_TEST."""
1323 for f in input_api.AffectedFiles():
1324 if (not f.LocalPath().endswith(('.cc', '.mm'))):
1327 for line_num, line in f.ChangedContents():
1328 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
1329 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1333 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
1334 '\n'.join(problems))]
1336 def _CheckNoDISABLETypoInTests(input_api, output_api):
1337 """Checks to prevent attempts to disable tests with DISABLE_ prefix.
1339 This test warns if somebody tries to disable a test with the DISABLE_ prefix
1340 instead of DISABLED_. To filter false positives, reports are only generated
1341 if a corresponding MAYBE_ line exists.
1345 # The following two patterns are looked for in tandem - is a test labeled
1346 # as MAYBE_ followed by a DISABLE_ (instead of the correct DISABLED)
1347 maybe_pattern = input_api.re.compile(r'MAYBE_([a-zA-Z0-9_]+)')
1348 disable_pattern = input_api.re.compile(r'DISABLE_([a-zA-Z0-9_]+)')
1350 # This is for the case that a test is disabled on all platforms.
1351 full_disable_pattern = input_api.re.compile(
1352 r'^\s*TEST[^(]*\([a-zA-Z0-9_]+,\s*DISABLE_[a-zA-Z0-9_]+\)',
1353 input_api.re.MULTILINE)
1355 for f in input_api.AffectedFiles(False):
1356 if not 'test' in f.LocalPath() or not f.LocalPath().endswith('.cc'):
1359 # Search for MABYE_, DISABLE_ pairs.
1360 disable_lines = {} # Maps of test name to line number.
1362 for line_num, line in f.ChangedContents():
1363 disable_match = disable_pattern.search(line)
1365 disable_lines[disable_match.group(1)] = line_num
1366 maybe_match = maybe_pattern.search(line)
1368 maybe_lines[maybe_match.group(1)] = line_num
1370 # Search for DISABLE_ occurrences within a TEST() macro.
1371 disable_tests = set(disable_lines.keys())
1372 maybe_tests = set(maybe_lines.keys())
1373 for test in disable_tests.intersection(maybe_tests):
1374 problems.append(' %s:%d' % (f.LocalPath(), disable_lines[test]))
1376 contents = input_api.ReadFile(f)
1377 full_disable_match = full_disable_pattern.search(contents)
1378 if full_disable_match:
1379 problems.append(' %s' % f.LocalPath())
1384 output_api.PresubmitPromptWarning(
1385 'Attempt to disable a test with DISABLE_ instead of DISABLED_?\n' +
1386 '\n'.join(problems))
1390 def _CheckDCHECK_IS_ONHasBraces(input_api, output_api):
1391 """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
1393 pattern = input_api.re.compile(r'DCHECK_IS_ON(?!\(\))',
1394 input_api.re.MULTILINE)
1395 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1396 if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
1398 for lnum, line in f.ChangedContents():
1399 if input_api.re.search(pattern, line):
1400 errors.append(output_api.PresubmitError(
1401 ('%s:%d: Use of DCHECK_IS_ON() must be written as "#if ' +
1402 'DCHECK_IS_ON()", not forgetting the parentheses.')
1403 % (f.LocalPath(), lnum)))
1407 def _FindHistogramNameInChunk(histogram_name, chunk):
1408 """Tries to find a histogram name or prefix in a line.
1410 Returns the existence of the histogram name, or None if it needs more chunk
1412 # A histogram_suffixes tag type has an affected-histogram name as a prefix of
1413 # the histogram_name.
1414 if '<affected-histogram' in chunk:
1415 # If the tag is not completed, needs more chunk to get the name.
1416 if not '>' in chunk:
1418 if not 'name="' in chunk:
1420 # Retrieve the first portion of the chunk wrapped by double-quotations. We
1421 # expect the only attribute is the name.
1422 histogram_prefix = chunk.split('"')[1]
1423 return histogram_prefix in histogram_name
1424 # Typically the whole histogram name should in the line.
1425 return histogram_name in chunk
1428 def _CheckUmaHistogramChanges(input_api, output_api):
1429 """Check that UMA histogram names in touched lines can still be found in other
1430 lines of the patch or in histograms.xml. Note that this check would not catch
1431 the reverse: changes in histograms.xml not matched in the code itself."""
1432 touched_histograms = []
1433 histograms_xml_modifications = []
1434 call_pattern_c = r'\bUMA_HISTOGRAM.*\('
1435 call_pattern_java = r'\bRecordHistogram\.record[a-zA-Z]+Histogram\('
1436 name_pattern = r'"(.*?)"'
1437 single_line_c_re = input_api.re.compile(call_pattern_c + name_pattern)
1438 single_line_java_re = input_api.re.compile(call_pattern_java + name_pattern)
1439 split_line_c_prefix_re = input_api.re.compile(call_pattern_c)
1440 split_line_java_prefix_re = input_api.re.compile(call_pattern_java)
1441 split_line_suffix_re = input_api.re.compile(r'^\s*' + name_pattern)
1442 last_line_matched_prefix = False
1443 for f in input_api.AffectedFiles():
1444 # If histograms.xml itself is modified, keep the modified lines for later.
1445 if f.LocalPath().endswith(('histograms.xml')):
1446 histograms_xml_modifications = f.ChangedContents()
1448 if f.LocalPath().endswith(('cc', 'mm', 'cpp')):
1449 single_line_re = single_line_c_re
1450 split_line_prefix_re = split_line_c_prefix_re
1451 elif f.LocalPath().endswith(('java')):
1452 single_line_re = single_line_java_re
1453 split_line_prefix_re = split_line_java_prefix_re
1456 for line_num, line in f.ChangedContents():
1457 if last_line_matched_prefix:
1458 suffix_found = split_line_suffix_re.search(line)
1460 touched_histograms.append([suffix_found.group(1), f, line_num])
1461 last_line_matched_prefix = False
1463 found = single_line_re.search(line)
1465 touched_histograms.append([found.group(1), f, line_num])
1467 last_line_matched_prefix = split_line_prefix_re.search(line)
1469 # Search for the touched histogram names in the local modifications to
1470 # histograms.xml, and, if not found, on the base histograms.xml file.
1471 unmatched_histograms = []
1472 for histogram_info in touched_histograms:
1473 histogram_name_found = False
1475 for line_num, line in histograms_xml_modifications:
1477 histogram_name_found = _FindHistogramNameInChunk(histogram_info[0], chunk)
1478 if histogram_name_found is None:
1481 if histogram_name_found:
1483 if not histogram_name_found:
1484 unmatched_histograms.append(histogram_info)
1486 histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
1488 if unmatched_histograms:
1489 with open(histograms_xml_path) as histograms_xml:
1490 for histogram_name, f, line_num in unmatched_histograms:
1491 histograms_xml.seek(0)
1492 histogram_name_found = False
1494 for line in histograms_xml:
1496 histogram_name_found = _FindHistogramNameInChunk(histogram_name,
1498 if histogram_name_found is None:
1501 if histogram_name_found:
1503 if not histogram_name_found:
1504 problems.append(' [%s:%d] %s' %
1505 (f.LocalPath(), line_num, histogram_name))
1509 return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
1510 'been modified and the associated histogram name has no match in either '
1511 '%s or the modifications of it:' % (histograms_xml_path), problems)]
1514 def _CheckFlakyTestUsage(input_api, output_api):
1515 """Check that FlakyTest annotation is our own instead of the android one"""
1516 pattern = input_api.re.compile(r'import android.test.FlakyTest;')
1518 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1519 if f.LocalPath().endswith('Test.java'):
1520 if pattern.search(input_api.ReadFile(f)):
1523 return [output_api.PresubmitError(
1524 'Use org.chromium.base.test.util.FlakyTest instead of '
1525 'android.test.FlakyTest',
1530 def _CheckNoNewWStrings(input_api, output_api):
1531 """Checks to make sure we don't introduce use of wstrings."""
1533 for f in input_api.AffectedFiles():
1534 if (not f.LocalPath().endswith(('.cc', '.h')) or
1535 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
1536 '/win/' in f.LocalPath() or
1537 'chrome_elf' in f.LocalPath() or
1538 'install_static' in f.LocalPath()):
1541 allowWString = False
1542 for line_num, line in f.ChangedContents():
1543 if 'presubmit: allow wstring' in line:
1545 elif not allowWString and 'wstring' in line:
1546 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1547 allowWString = False
1549 allowWString = False
1553 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
1554 ' If you are calling a cross-platform API that accepts a wstring, '
1556 '\n'.join(problems))]
1559 def _CheckNoDEPSGIT(input_api, output_api):
1560 """Make sure .DEPS.git is never modified manually."""
1561 if any(f.LocalPath().endswith('.DEPS.git') for f in
1562 input_api.AffectedFiles()):
1563 return [output_api.PresubmitError(
1564 'Never commit changes to .DEPS.git. This file is maintained by an\n'
1565 'automated system based on what\'s in DEPS and your changes will be\n'
1567 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/'
1568 'get-the-code#Rolling_DEPS\n'
1569 'for more information')]
1573 def _CheckValidHostsInDEPS(input_api, output_api):
1574 """Checks that DEPS file deps are from allowed_hosts."""
1575 # Run only if DEPS file has been modified to annoy fewer bystanders.
1576 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
1578 # Outsource work to gclient verify
1580 gclient_path = input_api.os_path.join(
1581 input_api.PresubmitLocalPath(),
1582 'third_party', 'depot_tools', 'gclient.py')
1583 input_api.subprocess.check_output(
1584 [input_api.python_executable, gclient_path, 'verify'],
1585 stderr=input_api.subprocess.STDOUT)
1587 except input_api.subprocess.CalledProcessError as error:
1588 return [output_api.PresubmitError(
1589 'DEPS file must have only git dependencies.',
1590 long_text=error.output)]
1593 def _CheckNoBannedFunctions(input_api, output_api):
1594 """Make sure that banned functions are not used."""
1598 def IsBlacklisted(affected_file, blacklist):
1599 local_path = affected_file.LocalPath()
1600 for item in blacklist:
1601 if input_api.re.match(item, local_path):
1605 def IsIosObcjFile(affected_file):
1606 local_path = affected_file.LocalPath()
1607 if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m', '.h'):
1609 basename = input_api.os_path.basename(local_path)
1610 if 'ios' in basename.split('_'):
1612 for sep in (input_api.os_path.sep, input_api.os_path.altsep):
1613 if sep and 'ios' in local_path.split(sep):
1617 def CheckForMatch(affected_file, line_num, line, func_name, message, error):
1619 if func_name[0:1] == '/':
1620 regex = func_name[1:]
1621 if input_api.re.search(regex, line):
1623 elif func_name in line:
1629 problems.append(' %s:%d:' % (affected_file.LocalPath(), line_num))
1630 for message_line in message:
1631 problems.append(' %s' % message_line)
1633 file_filter = lambda f: f.LocalPath().endswith(('.java'))
1634 for f in input_api.AffectedFiles(file_filter=file_filter):
1635 for line_num, line in f.ChangedContents():
1636 for func_name, message, error in _BANNED_JAVA_FUNCTIONS:
1637 CheckForMatch(f, line_num, line, func_name, message, error)
1639 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
1640 for f in input_api.AffectedFiles(file_filter=file_filter):
1641 for line_num, line in f.ChangedContents():
1642 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
1643 CheckForMatch(f, line_num, line, func_name, message, error)
1645 for f in input_api.AffectedFiles(file_filter=IsIosObcjFile):
1646 for line_num, line in f.ChangedContents():
1647 for func_name, message, error in _BANNED_IOS_OBJC_FUNCTIONS:
1648 CheckForMatch(f, line_num, line, func_name, message, error)
1650 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
1651 for f in input_api.AffectedFiles(file_filter=file_filter):
1652 for line_num, line in f.ChangedContents():
1653 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
1654 if IsBlacklisted(f, excluded_paths):
1656 CheckForMatch(f, line_num, line, func_name, message, error)
1660 result.append(output_api.PresubmitPromptWarning(
1661 'Banned functions were used.\n' + '\n'.join(warnings)))
1663 result.append(output_api.PresubmitError(
1664 'Banned functions were used.\n' + '\n'.join(errors)))
1668 def _CheckNoPragmaOnce(input_api, output_api):
1669 """Make sure that banned functions are not used."""
1671 pattern = input_api.re.compile(r'^#pragma\s+once',
1672 input_api.re.MULTILINE)
1673 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
1674 if not f.LocalPath().endswith('.h'):
1676 contents = input_api.ReadFile(f)
1677 if pattern.search(contents):
1681 return [output_api.PresubmitError(
1682 'Do not use #pragma once in header files.\n'
1683 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
1688 def _CheckNoTrinaryTrueFalse(input_api, output_api):
1689 """Checks to make sure we don't introduce use of foo ? true : false."""
1691 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
1692 for f in input_api.AffectedFiles():
1693 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1696 for line_num, line in f.ChangedContents():
1697 if pattern.match(line):
1698 problems.append(' %s:%d' % (f.LocalPath(), line_num))
1702 return [output_api.PresubmitPromptWarning(
1703 'Please consider avoiding the "? true : false" pattern if possible.\n' +
1704 '\n'.join(problems))]
1707 def _CheckUnwantedDependencies(input_api, output_api):
1708 """Runs checkdeps on #include and import statements added in this
1709 change. Breaking - rules is an error, breaking ! rules is a
1713 # We need to wait until we have an input_api object and use this
1714 # roundabout construct to import checkdeps because this file is
1715 # eval-ed and thus doesn't have __file__.
1716 original_sys_path = sys.path
1718 sys.path = sys.path + [input_api.os_path.join(
1719 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
1721 from rules import Rule
1723 # Restore sys.path to what it was before.
1724 sys.path = original_sys_path
1728 added_java_imports = []
1729 for f in input_api.AffectedFiles():
1730 if _IsCPlusPlusFile(input_api, f.LocalPath()):
1731 changed_lines = [line for _, line in f.ChangedContents()]
1732 added_includes.append([f.AbsoluteLocalPath(), changed_lines])
1733 elif _IsProtoFile(input_api, f.LocalPath()):
1734 changed_lines = [line for _, line in f.ChangedContents()]
1735 added_imports.append([f.AbsoluteLocalPath(), changed_lines])
1736 elif _IsJavaFile(input_api, f.LocalPath()):
1737 changed_lines = [line for _, line in f.ChangedContents()]
1738 added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
1740 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
1742 error_descriptions = []
1743 warning_descriptions = []
1744 error_subjects = set()
1745 warning_subjects = set()
1746 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
1748 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
1749 description_with_path = '%s\n %s' % (path, rule_description)
1750 if rule_type == Rule.DISALLOW:
1751 error_descriptions.append(description_with_path)
1752 error_subjects.add("#includes")
1754 warning_descriptions.append(description_with_path)
1755 warning_subjects.add("#includes")
1757 for path, rule_type, rule_description in deps_checker.CheckAddedProtoImports(
1759 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
1760 description_with_path = '%s\n %s' % (path, rule_description)
1761 if rule_type == Rule.DISALLOW:
1762 error_descriptions.append(description_with_path)
1763 error_subjects.add("imports")
1765 warning_descriptions.append(description_with_path)
1766 warning_subjects.add("imports")
1768 for path, rule_type, rule_description in deps_checker.CheckAddedJavaImports(
1769 added_java_imports, _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS):
1770 path = input_api.os_path.relpath(path, input_api.PresubmitLocalPath())
1771 description_with_path = '%s\n %s' % (path, rule_description)
1772 if rule_type == Rule.DISALLOW:
1773 error_descriptions.append(description_with_path)
1774 error_subjects.add("imports")
1776 warning_descriptions.append(description_with_path)
1777 warning_subjects.add("imports")
1780 if error_descriptions:
1781 results.append(output_api.PresubmitError(
1782 'You added one or more %s that violate checkdeps rules.'
1783 % " and ".join(error_subjects),
1784 error_descriptions))
1785 if warning_descriptions:
1786 results.append(output_api.PresubmitPromptOrNotify(
1787 'You added one or more %s of files that are temporarily\n'
1788 'allowed but being removed. Can you avoid introducing the\n'
1789 '%s? See relevant DEPS file(s) for details and contacts.' %
1790 (" and ".join(warning_subjects), "/".join(warning_subjects)),
1791 warning_descriptions))
1795 def _CheckFilePermissions(input_api, output_api):
1796 """Check that all files have their permissions properly set."""
1797 if input_api.platform == 'win32':
1799 checkperms_tool = input_api.os_path.join(
1800 input_api.PresubmitLocalPath(),
1801 'tools', 'checkperms', 'checkperms.py')
1802 args = [input_api.python_executable, checkperms_tool,
1803 '--root', input_api.change.RepositoryRoot()]
1804 with input_api.CreateTemporaryFile() as file_list:
1805 for f in input_api.AffectedFiles():
1806 # checkperms.py file/directory arguments must be relative to the
1808 file_list.write(f.LocalPath() + '\n')
1810 args += ['--file-list', file_list.name]
1812 input_api.subprocess.check_output(args)
1814 except input_api.subprocess.CalledProcessError as error:
1815 return [output_api.PresubmitError(
1816 'checkperms.py failed:',
1817 long_text=error.output)]
1820 def _CheckTeamTags(input_api, output_api):
1821 """Checks that OWNERS files have consistent TEAM and COMPONENT tags."""
1822 checkteamtags_tool = input_api.os_path.join(
1823 input_api.PresubmitLocalPath(),
1824 'tools', 'checkteamtags', 'checkteamtags.py')
1825 args = [input_api.python_executable, checkteamtags_tool,
1826 '--root', input_api.change.RepositoryRoot()]
1827 files = [f.LocalPath() for f in input_api.AffectedFiles(include_deletes=False)
1828 if input_api.os_path.basename(f.AbsoluteLocalPath()).upper() ==
1832 input_api.subprocess.check_output(args + files)
1834 except input_api.subprocess.CalledProcessError as error:
1835 return [output_api.PresubmitError(
1836 'checkteamtags.py failed:',
1837 long_text=error.output)]
1840 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
1841 """Makes sure we don't include ui/aura/window_property.h
1844 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
1846 for f in input_api.AffectedFiles():
1847 if not f.LocalPath().endswith('.h'):
1849 for line_num, line in f.ChangedContents():
1850 if pattern.match(line):
1851 errors.append(' %s:%d' % (f.LocalPath(), line_num))
1855 results.append(output_api.PresubmitError(
1856 'Header files should not include ui/aura/window_property.h', errors))
1860 def _CheckForVersionControlConflictsInFile(input_api, f):
1861 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
1863 for line_num, line in f.ChangedContents():
1864 if f.LocalPath().endswith(('.md', '.rst', '.txt')):
1865 # First-level headers in markdown look a lot like version control
1866 # conflict markers. http://daringfireball.net/projects/markdown/basics
1868 if pattern.match(line):
1869 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
1873 def _CheckForVersionControlConflicts(input_api, output_api):
1874 """Usually this is not intentional and will cause a compile failure."""
1876 for f in input_api.AffectedFiles():
1877 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
1881 results.append(output_api.PresubmitError(
1882 'Version control conflict markers found, please resolve.', errors))
1886 def _CheckGoogleSupportAnswerUrl(input_api, output_api):
1887 pattern = input_api.re.compile('support\.google\.com\/chrome.*/answer')
1889 for f in input_api.AffectedFiles():
1890 for line_num, line in f.ChangedContents():
1891 if pattern.search(line):
1892 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
1896 results.append(output_api.PresubmitPromptWarning(
1897 'Found Google support URL addressed by answer number. Please replace '
1898 'with a p= identifier instead. See crbug.com/679462\n', errors))
1902 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
1903 def FilterFile(affected_file):
1904 """Filter function for use with input_api.AffectedSourceFiles,
1905 below. This filters out everything except non-test files from
1906 top-level directories that generally speaking should not hard-code
1907 service URLs (e.g. src/android_webview/, src/content/ and others).
1909 return input_api.FilterSourceFile(
1911 white_list=[r'^(android_webview|base|content|net)[\\/].*'],
1912 black_list=(_EXCLUDED_PATHS +
1913 _TEST_CODE_EXCLUDED_PATHS +
1914 input_api.DEFAULT_BLACK_LIST))
1916 base_pattern = ('"[^"]*(google|googleapis|googlezip|googledrive|appspot)'
1917 '\.(com|net)[^"]*"')
1918 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
1919 pattern = input_api.re.compile(base_pattern)
1920 problems = [] # items are (filename, line_number, line)
1921 for f in input_api.AffectedSourceFiles(FilterFile):
1922 for line_num, line in f.ChangedContents():
1923 if not comment_pattern.search(line) and pattern.search(line):
1924 problems.append((f.LocalPath(), line_num, line))
1927 return [output_api.PresubmitPromptOrNotify(
1928 'Most layers below src/chrome/ should not hardcode service URLs.\n'
1929 'Are you sure this is correct?',
1931 problem[0], problem[1], problem[2]) for problem in problems])]
1936 # TODO: add unit tests.
1937 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
1938 """Makes sure there are no abbreviations in the name of PNG files.
1939 The native_client_sdk directory is excluded because it has auto-generated PNG
1940 files for documentation.
1943 white_list = [r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$']
1944 black_list = [r'^native_client_sdk[\\/]']
1945 file_filter = lambda f: input_api.FilterSourceFile(
1946 f, white_list=white_list, black_list=black_list)
1947 for f in input_api.AffectedFiles(include_deletes=False,
1948 file_filter=file_filter):
1949 errors.append(' %s' % f.LocalPath())
1953 results.append(output_api.PresubmitError(
1954 'The name of PNG files should not have abbreviations. \n'
1955 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
1956 'Contact oshima@chromium.org if you have questions.', errors))
1960 def _ExtractAddRulesFromParsedDeps(parsed_deps):
1961 """Extract the rules that add dependencies from a parsed DEPS file.
1964 parsed_deps: the locals dictionary from evaluating the DEPS file."""
1967 rule[1:] for rule in parsed_deps.get('include_rules', [])
1968 if rule.startswith('+') or rule.startswith('!')
1970 for _, rules in parsed_deps.get('specific_include_rules',
1973 rule[1:] for rule in rules
1974 if rule.startswith('+') or rule.startswith('!')
1979 def _ParseDeps(contents):
1980 """Simple helper for parsing DEPS files."""
1981 # Stubs for handling special syntax in the root DEPS file.
1984 def __init__(self, local_scope):
1985 self._local_scope = local_scope
1987 def Lookup(self, var_name):
1988 """Implements the Var syntax."""
1990 return self._local_scope['vars'][var_name]
1992 raise Exception('Var is not defined: %s' % var_name)
1996 'Var': _VarImpl(local_scope).Lookup,
1998 exec contents in global_scope, local_scope
2002 def _CalculateAddedDeps(os_path, old_contents, new_contents):
2003 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
2004 a set of DEPS entries that we should look up.
2006 For a directory (rather than a specific filename) we fake a path to
2007 a specific filename by adding /DEPS. This is chosen as a file that
2008 will seldom or never be subject to per-file include_rules.
2010 # We ignore deps entries on auto-generated directories.
2011 AUTO_GENERATED_DIRS = ['grit', 'jni']
2013 old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
2014 new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))
2016 added_deps = new_deps.difference(old_deps)
2019 for added_dep in added_deps:
2020 if added_dep.split('/')[0] in AUTO_GENERATED_DIRS:
2022 # Assume that a rule that ends in .h is a rule for a specific file.
2023 if added_dep.endswith('.h'):
2024 results.add(added_dep)
2026 results.add(os_path.join(added_dep, 'DEPS'))
2030 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
2031 """When a dependency prefixed with + is added to a DEPS file, we
2032 want to make sure that the change is reviewed by an OWNER of the
2033 target file or directory, to avoid layering violations from being
2034 introduced. This check verifies that this happens.
2036 virtual_depended_on_files = set()
2038 file_filter = lambda f: not input_api.re.match(
2039 r"^third_party[\\/]blink[\\/].*", f.LocalPath())
2040 for f in input_api.AffectedFiles(include_deletes=False,
2041 file_filter=file_filter):
2042 filename = input_api.os_path.basename(f.LocalPath())
2043 if filename == 'DEPS':
2044 virtual_depended_on_files.update(_CalculateAddedDeps(
2046 '\n'.join(f.OldContents()),
2047 '\n'.join(f.NewContents())))
2049 if not virtual_depended_on_files:
2052 if input_api.is_committing:
2054 return [output_api.PresubmitNotifyResult(
2055 '--tbr was specified, skipping OWNERS check for DEPS additions')]
2056 if input_api.dry_run:
2057 return [output_api.PresubmitNotifyResult(
2058 'This is a dry run, skipping OWNERS check for DEPS additions')]
2059 if not input_api.change.issue:
2060 return [output_api.PresubmitError(
2061 "DEPS approval by OWNERS check failed: this change has "
2062 "no change number, so we can't check it for approvals.")]
2063 output = output_api.PresubmitError
2065 output = output_api.PresubmitNotifyResult
2067 owners_db = input_api.owners_db
2068 owner_email, reviewers = (
2069 input_api.canned_checks.GetCodereviewOwnerAndReviewers(
2071 owners_db.email_regexp,
2072 approval_needed=input_api.is_committing))
2074 owner_email = owner_email or input_api.change.author_email
2076 reviewers_plus_owner = set(reviewers)
2078 reviewers_plus_owner.add(owner_email)
2079 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
2080 reviewers_plus_owner)
2082 # We strip the /DEPS part that was added by
2083 # _FilesToCheckForIncomingDeps to fake a path to a file in a
2085 def StripDeps(path):
2086 start_deps = path.rfind('/DEPS')
2087 if start_deps != -1:
2088 return path[:start_deps]
2091 unapproved_dependencies = ["'+%s'," % StripDeps(path)
2092 for path in missing_files]
2094 if unapproved_dependencies:
2096 output('You need LGTM from owners of depends-on paths in DEPS that were '
2097 'modified in this CL:\n %s' %
2098 '\n '.join(sorted(unapproved_dependencies)))]
2099 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
2100 output_list.append(output(
2101 'Suggested missing target path OWNERS:\n %s' %
2102 '\n '.join(suggested_owners or [])))
2108 # TODO: add unit tests.
2109 def _CheckSpamLogging(input_api, output_api):
2110 file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
2111 black_list = (_EXCLUDED_PATHS +
2112 _TEST_CODE_EXCLUDED_PATHS +
2113 input_api.DEFAULT_BLACK_LIST +
2114 (r"^base[\\/]logging\.h$",
2115 r"^base[\\/]logging\.cc$",
2116 r"^chrome[\\/]app[\\/]chrome_main_delegate\.cc$",
2117 r"^chrome[\\/]browser[\\/]chrome_browser_main\.cc$",
2118 r"^chrome[\\/]browser[\\/]ui[\\/]startup[\\/]"
2119 r"startup_browser_creator\.cc$",
2120 r"^chrome[\\/]browser[\\/]browser_switcher[\\/]bho[\\/].*",
2121 r"^chrome[\\/]browser[\\/]diagnostics[\\/]" +
2122 r"diagnostics_writer\.cc$",
2123 r"^chrome[\\/]chrome_cleaner[\\/].*",
2124 r"^chrome[\\/]chrome_elf[\\/]dll_hash[\\/]dll_hash_main\.cc$",
2125 r"^chrome[\\/]installer[\\/]setup[\\/].*",
2126 r"^chromecast[\\/]",
2127 r"^cloud_print[\\/]",
2128 r"^components[\\/]browser_watcher[\\/]"
2129 r"dump_stability_report_main_win.cc$",
2130 r"^components[\\/]html_viewer[\\/]"
2131 r"web_test_delegate_impl\.cc$",
2132 r"^components[\\/]zucchini[\\/].*",
2133 # TODO(peter): Remove this exception. https://crbug.com/534537
2134 r"^content[\\/]browser[\\/]notifications[\\/]"
2135 r"notification_event_dispatcher_impl\.cc$",
2136 r"^content[\\/]common[\\/]gpu[\\/]client[\\/]"
2137 r"gl_helper_benchmark\.cc$",
2138 r"^courgette[\\/]courgette_minimal_tool\.cc$",
2139 r"^courgette[\\/]courgette_tool\.cc$",
2140 r"^extensions[\\/]renderer[\\/]logging_native_handler\.cc$",
2141 r"^fuchsia[\\/]engine[\\/]browser[\\/]frame_impl.cc$",
2142 r"^headless[\\/]app[\\/]headless_shell\.cc$",
2143 r"^ipc[\\/]ipc_logging\.cc$",
2144 r"^native_client_sdk[\\/]",
2145 r"^remoting[\\/]base[\\/]logging\.h$",
2146 r"^remoting[\\/]host[\\/].*",
2147 r"^sandbox[\\/]linux[\\/].*",
2148 r"^storage[\\/]browser[\\/]fileapi[\\/]" +
2149 r"dump_file_system.cc$",
2151 r"^ui[\\/]base[\\/]resource[\\/]data_pack.cc$",
2152 r"^ui[\\/]aura[\\/]bench[\\/]bench_main\.cc$",
2153 r"^ui[\\/]ozone[\\/]platform[\\/]cast[\\/]"))
2154 source_file_filter = lambda x: input_api.FilterSourceFile(
2155 x, white_list=file_inclusion_pattern, black_list=black_list)
2160 for f in input_api.AffectedSourceFiles(source_file_filter):
2161 for _, line in f.ChangedContents():
2162 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", line):
2163 log_info.add(f.LocalPath())
2164 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", line):
2165 log_info.add(f.LocalPath())
2167 if input_api.re.search(r"\bprintf\(", line):
2168 printf.add(f.LocalPath())
2169 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", line):
2170 printf.add(f.LocalPath())
2173 return [output_api.PresubmitError(
2174 'These files spam the console log with LOG(INFO):',
2177 return [output_api.PresubmitError(
2178 'These files spam the console log with printf/fprintf:',
# _CheckForAnonymousVariables: flags scoped-lock-style RAII types (base::AutoLock,
# Skia SkAuto* helpers) constructed as anonymous temporaries, which are destroyed
# at the end of the full expression instead of the end of the scope.
# NOTE(review): the embedded line numbers jump (e.g. 2185->2187, 2194->2196),
# so parts of the docstring, the type list and the error-reporting tail are
# elided in this listing; visible code kept byte-identical.
2183 def _CheckForAnonymousVariables(input_api, output_api):
2184   """These types are all expected to hold locks while in scope and
2185   so should never be anonymous (which causes them to be immediately
2187   they_who_must_be_named = [
2191     'SkAutoAlphaRestore',
2192     'SkAutoBitmapShaderInstall',
2193     'SkAutoBlitterChoose',
2194     'SkAutoBounderCommit',
2196     'SkAutoCanvasRestore',
2197     'SkAutoCommentBlock',
2199     'SkAutoDisableDirectionCheck',
2200     'SkAutoDisableOvalCheck',
2207     'SkAutoMaskFreeImage',
2208     'SkAutoMutexAcquire',
2209     'SkAutoPathBoundsUpdate',
2211     'SkAutoRasterClipValidate',
# 'anonymous' matches any listed type immediately followed by '(' or '{',
# i.e. a constructor call with no variable name in between.
2217   anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
2218   # bad: base::AutoLock(lock.get());
2219   # not bad: base::AutoLock lock(lock.get());
2220   bad_pattern = input_api.re.compile(anonymous)
2221   # good: new base::AutoLock(lock.get())
2222   good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
2225   for f in input_api.AffectedFiles():
# Only C++/Objective-C implementation-ish files are scanned.
2226     if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
2228     for linenum, line in f.ChangedContents():
# A line is an error only when it matches the anonymous form and is NOT a
# heap allocation ('new Type(...)'), which legitimately has no variable name.
2229       if bad_pattern.search(line) and not good_pattern.search(line):
2230         errors.append('%s:%d' % (f.LocalPath(), linenum))
2233   return [output_api.PresubmitError(
2234       'These lines create anonymous variables that need to be named:',
# _CheckUniquePtr: warns on two std::unique_ptr anti-patterns in changed lines:
#   1. 'return/assign std::unique_ptr<T>(foo)' -> should be std::make_unique<T>,
#      except for array types (<T[]>) and two-argument forms (<T, Deleter>),
#      which make_unique cannot express.
#   2. 'std::unique_ptr<T>()' -> should be nullptr.
# NOTE(review): interior lines are elided (embedded numbering jumps, e.g.
# 2243->2245, 2277->2280), including parts of HasMoreThanOneArg and the final
# error assembly; visible code kept byte-identical.
2239 def _CheckUniquePtr(input_api, output_api):
2240   # Returns whether |template_str| is of the form <T, U...> for some types T
2241   # and U. Assumes that |template_str| is already in the form <...>.
2242   def HasMoreThanOneArg(template_str):
2243     # Level of <...> nesting.
2245     for c in template_str:
# A top-level comma (nesting == 1 means inside the outermost <...> only)
# separates two template arguments.
2250       elif c == ',' and nesting == 1:
2254   file_inclusion_pattern = [r'.+%s' % _IMPLEMENTATION_EXTENSIONS]
2255   sources = lambda affected_file: input_api.FilterSourceFile(
2257       black_list=(_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
2258                   input_api.DEFAULT_BLACK_LIST),
2259       white_list=file_inclusion_pattern)
2261   # Pattern to capture a single "<...>" block of template arguments. It can
2262   # handle linearly nested blocks, such as "<std::vector<std::set<T>>>", but
2263   # cannot handle branching structures, such as "<pair<set<T>,set<U>>". The
2264   # latter would likely require counting that < and > match, which is not
2265   # expressible in regular languages. Should the need arise, one can introduce
2266   # limited counting (matching up to a total number of nesting depth), which
2267   # should cover all practical cases for already a low nesting limit.
2268   template_arg_pattern = (
2269       r'<[^>]*' # Opening block of <.
2270       r'>([^<]*>)?') # Closing block of >.
2271   # Prefix expressing that whatever follows is not already inside a <...>
2273   not_inside_template_arg_pattern = r'(^|[^<,\s]\s*)'
2274   null_construct_pattern = input_api.re.compile(
2275       not_inside_template_arg_pattern
2276       + r'\bstd::unique_ptr'
2277       + template_arg_pattern
2280   # Same as template_arg_pattern, but excluding type arrays, e.g., <T[]>.
2281   template_arg_no_array_pattern = (
2282       r'<[^>]*[^]]' # Opening block of <.
2283       r'>([^(<]*[^]]>)?') # Closing block of >.
2284   # Prefix saying that what follows is the start of an expression.
2285   start_of_expr_pattern = r'(=|\breturn|^)\s*'
2286   # Suffix saying that what follows are call parentheses with a non-empty list
2288   nonempty_arg_list_pattern = r'\(([^)]|$)'
2289   # Put the template argument into a capture group for deeper examination later.
2290   return_construct_pattern = input_api.re.compile(
2291       start_of_expr_pattern
2292       + r'std::unique_ptr'
2293       + '(?P<template_arg>'
2294       + template_arg_no_array_pattern
2296       + nonempty_arg_list_pattern)
2298   problems_constructor = []
2299   problems_nullptr = []
2300   for f in input_api.AffectedSourceFiles(sources):
2301     for line_number, line in f.ChangedContents():
2303       # return std::unique_ptr<T>(foo);
2304       # bar = std::unique_ptr<T>(foo);
2306       # return std::unique_ptr<T[]>(foo);
2307       # bar = std::unique_ptr<T[]>(foo);
2308       # And also allow cases when the second template argument is present. Those
2309       # cases cannot be handled by std::make_unique:
2310       # return std::unique_ptr<T, U>(foo);
2311       # bar = std::unique_ptr<T, U>(foo);
2312       local_path = f.LocalPath()
2313       return_construct_result = return_construct_pattern.search(line)
2314       if return_construct_result and not HasMoreThanOneArg(
2315           return_construct_result.group('template_arg')):
2316         problems_constructor.append(
2317           '%s:%d\n    %s' % (local_path, line_number, line.strip()))
2319       # std::unique_ptr<T>()
2320       if null_construct_pattern.search(line):
2321         problems_nullptr.append(
2322           '%s:%d\n    %s' % (local_path, line_number, line.strip()))
2325   if problems_nullptr:
2326     errors.append(output_api.PresubmitError(
2327         'The following files use std::unique_ptr<T>(). Use nullptr instead.',
2329   if problems_constructor:
2330     errors.append(output_api.PresubmitError(
2331         'The following files use explicit std::unique_ptr constructor.'
2332         'Use std::make_unique<T>() instead.',
2333         problems_constructor))
# _CheckUserActionUpdate: warns when a changed .cc/.mm line adds a
# UserMetricsAction("...") whose name is not yet listed in
# tools/metrics/actions/actions.xml.
# NOTE(review): embedded numbering jumps (2342->2345, 2350->2352) show elided
# lines — likely the early 'return []' when actions.xml is in the CL and the
# 'if match:' guard; visible code kept byte-identical.
2337 def _CheckUserActionUpdate(input_api, output_api):
2338   """Checks if any new user action has been added."""
2339   if any('actions.xml' == input_api.os_path.basename(f) for f in
2340          input_api.LocalPaths()):
2341     # If actions.xml is already included in the changelist, the PRESUBMIT
2342     # for actions.xml will do a more complete presubmit check.
2345   file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
2346   action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
2347   current_actions = None
2348   for f in input_api.AffectedFiles(file_filter=file_filter):
2349     for line_num, line in f.ChangedContents():
2350       match = input_api.re.search(action_re, line)
2352         # Loads contents in tools/metrics/actions/actions.xml to memory. It's
# NOTE(review): actions.xml is opened lazily (once) and its raw text is
# searched for a name="..." attribute rather than parsed as XML. The relative
# path assumes the presubmit runs from the src checkout root — verify.
2354         if not current_actions:
2355           with open('tools/metrics/actions/actions.xml') as actions_f:
2356             current_actions = actions_f.read()
2357         # Search for the matched user action name in |current_actions|.
2358         for action_name in match.groups():
2359           action = 'name="{0}"'.format(action_name)
2360           if action not in current_actions:
2361             return [output_api.PresubmitPromptWarning(
2362               'File %s line %d: %s is missing in '
2363               'tools/metrics/actions/actions.xml. Please run '
2364               'tools/metrics/actions/extract_actions.py to update.'
2365               % (f.LocalPath(), line_num, action_name))]
# _ImportJSONCommentEater: appends //tools/json_comment_eater to sys.path and
# imports the json_comment_eater module, returning it to the caller.
# NOTE(review): one interior line (2370) is elided — possibly a try: that pairs
# with restoring sys.path; as visible, the sys.path mutation is permanent.
2369 def _ImportJSONCommentEater(input_api):
2371   sys.path = sys.path + [input_api.os_path.join(
2372       input_api.PresubmitLocalPath(),
2373       'tools', 'json_comment_eater')]
2374   import json_comment_eater
2375   return json_comment_eater
# _GetJSONParseError: reads |filename|, optionally strips //-style comments via
# json_comment_eater, and attempts a strict JSON parse. The except clause's body
# is elided in this listing; presumably it returns the ValueError so callers can
# report it — TODO confirm against the full file.
2378 def _GetJSONParseError(input_api, filename, eat_comments=True):
2380     contents = input_api.ReadFile(filename)
2382       json_comment_eater = _ImportJSONCommentEater(input_api)
# Nom() removes comments so the preprocessed JSON dialect parses strictly.
2383       contents = json_comment_eater.Nom(contents)
2385     input_api.json.loads(contents)
2386   except ValueError as e:
# _GetIDLParseError: feeds |filename|'s contents to
# tools/json_schema_compiler/idl_schema.py via a subprocess and returns its
# stderr output (or None on success). The except clause's body is elided in
# this listing — TODO confirm what it returns.
2392 def _GetIDLParseError(input_api, filename):
2393     contents = input_api.ReadFile(filename)
2394     idl_schema = input_api.os_path.join(
2395         input_api.PresubmitLocalPath(),
2396         'tools', 'json_schema_compiler', 'idl_schema.py')
2397     process = input_api.subprocess.Popen(
2398         [input_api.python_executable, idl_schema],
2399         stdin=input_api.subprocess.PIPE,
2400         stdout=input_api.subprocess.PIPE,
2401         stderr=input_api.subprocess.PIPE,
2402         universal_newlines=True)
# The IDL text is piped on stdin; only stderr matters (stdout is discarded).
2403     (_, error) = process.communicate(input=contents)
2404     return error or None
2405   except ValueError as e:
# _CheckParseErrors: dispatches each affected .idl/.json file to the matching
# parser helper and emits a PresubmitError for any file that fails to parse.
# NOTE(review): several interior lines are elided (e.g. the json_no_comments
# pattern list contents, 'kwargs = {}', the early returns inside FilterFile),
# per the jumps in the embedded numbering; visible code kept byte-identical.
2409 def _CheckParseErrors(input_api, output_api):
2410   """Check that IDL and JSON files do not contain syntax errors."""
2412       '.idl': _GetIDLParseError,
2413       '.json': _GetJSONParseError,
2415   # Most JSON files are preprocessed and support comments, but these do not.
2416   json_no_comments_patterns = [
2419   # Only run IDL checker on files in these directories.
2420   idl_included_patterns = [
2421     r'^chrome[\\/]common[\\/]extensions[\\/]api[\\/]',
2422     r'^extensions[\\/]common[\\/]api[\\/]',
# Maps a file to its parser function by extension, or None if unhandled.
2425   def get_action(affected_file):
2426     filename = affected_file.LocalPath()
2427     return actions.get(input_api.os_path.splitext(filename)[1])
2429   def FilterFile(affected_file):
2430     action = get_action(affected_file)
2433     path = affected_file.LocalPath()
# Skip files already known to be invalid JSON (checked elsewhere).
2435     if _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS, path):
2438     if (action == _GetIDLParseError and
2439         not _MatchesFile(input_api, idl_included_patterns, path)):
2444   for affected_file in input_api.AffectedFiles(
2445       file_filter=FilterFile, include_deletes=False):
2446     action = get_action(affected_file)
# Strict-JSON files get eat_comments=False so stray comments are reported.
2448     if (action == _GetJSONParseError and
2449         _MatchesFile(input_api, json_no_comments_patterns,
2450                      affected_file.LocalPath())):
2451       kwargs['eat_comments'] = False
2452     parse_error = action(input_api,
2453                          affected_file.AbsoluteLocalPath(),
2456       results.append(output_api.PresubmitError('%s could not be parsed: %s' %
2457                                                (affected_file.LocalPath(), parse_error)))
# _CheckJavaStyle: temporarily extends sys.path to import the checkstyle wrapper
# from //tools/android/checkstyle, restores sys.path, and delegates to
# checkstyle.RunCheckstyle with the Chromium style config.
# NOTE(review): the try/finally lines around the sys.path swap are elided here
# (numbering jumps 2464->2466->2470); visible code kept byte-identical.
2461 def _CheckJavaStyle(input_api, output_api):
2462   """Runs checkstyle on changed java files and returns errors if any exist."""
2464   original_sys_path = sys.path
2466     sys.path = sys.path + [input_api.os_path.join(
2467         input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
2470     # Restore sys.path to what it was before.
2471     sys.path = original_sys_path
2473   return checkstyle.RunCheckstyle(
2474       input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
2475       black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
2478 def _MatchesFile(input_api, patterns, path):
2479 for pattern in patterns:
2480 if input_api.re.search(pattern, path):
# _GetOwnersFilesToCheckForIpcOwners: builds the map of OWNERS files that must
# carry 'set noparent' + SECURITY_OWNERS per-file rules for IPC-related files
# touched by this CL (by filename pattern, plus content-sniffed manifests).
# NOTE(review): many interior lines are elided (file_patterns entries like
# '*.mojom', 'to_check = {}', the 'continue' statements, and the final
# 'return to_check'); visible code kept byte-identical.
2485 def _GetOwnersFilesToCheckForIpcOwners(input_api):
2486   """Gets a list of OWNERS files to check for correct security owners.
2489     A dictionary mapping an OWNER file to the list of OWNERS rules it must
2490     contain to cover IPC-related files with noparent reviewer rules.
2492   # Whether or not a file affects IPC is (mostly) determined by a simple list
2493   # of filename patterns.
2498     '*_param_traits*.*',
2501     '*_mojom_traits*.*',
2502     '*_struct_traits*.*',
2503     '*_type_converter*.*',
2505     # Android native IPC:
2507     # Blink uses a different file naming convention:
2511     '*TypeConverter*.*',
2514   # These third_party directories do not contain IPCs, but contain files
2515   # matching the above patterns, which trigger false positives.
2517     'third_party/crashpad/*',
2518     'third_party/protobuf/benchmarks/python/*',
2519     'third_party/third_party/blink/renderer/platform/bindings/*',
2520     'third_party/win_build_output/*',
2523   # Dictionary mapping an OWNERS file path to Patterns.
2524   # Patterns is a dictionary mapping glob patterns (suitable for use in per-file
2525   # rules ) to a PatternEntry.
2526   # PatternEntry is a dictionary with two keys:
2527   # - 'files': the files that are matched by this pattern
2528   # - 'rules': the per-file rules needed for this pattern
2529   # For example, if we expect OWNERS file to contain rules for *.mojom and
2530   # *_struct_traits*.*, Patterns might look like this:
2535   #       'per-file *.mojom=set noparent',
2536   #       'per-file *.mojom=file://ipc/SECURITY_OWNERS',
2539   #   '*_struct_traits*.*': {
2542   #       'per-file *_struct_traits*.*=set noparent',
2543   #       'per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS',
# Registers |pattern| against the OWNERS file in |input_file|'s directory,
# creating the entry (with its two required rules) on first sight.
2549   def AddPatternToCheck(input_file, pattern):
2550     owners_file = input_api.os_path.join(
2551         input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
2552     if owners_file not in to_check:
2553       to_check[owners_file] = {}
2554     if pattern not in to_check[owners_file]:
2555       to_check[owners_file][pattern] = {
2558           'per-file %s=set noparent' % pattern,
2559           'per-file %s=file://ipc/SECURITY_OWNERS' % pattern,
2562     to_check[owners_file][pattern]['files'].append(input_file)
2564   # Iterate through the affected files to see what we actually need to check
2565   # for. We should only nag patch authors about per-file rules if a file in that
2566   # directory would match that pattern. If a directory only contains *.mojom
2567   # files and no *_messages*.h files, we should only nag about rules for
2569   for f in input_api.AffectedFiles(include_deletes=False):
2570     # Manifest files don't have a strong naming convention. Instead, scan
2571     # affected files for .json, .cc, and .h files which look like they contain
2572     # a manifest definition.
2573     if (f.LocalPath().endswith('.json') and
2574         not _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS,
2576       json_comment_eater = _ImportJSONCommentEater(input_api)
2577       mostly_json_lines = '\n'.join(f.NewContents())
2578       # Comments aren't allowed in strict JSON, so filter them out.
2579       json_lines = json_comment_eater.Nom(mostly_json_lines)
2581         json_content = input_api.json.loads(json_lines)
2583         # There's another PRESUBMIT check that already verifies that JSON files
2584         # are not invalid, so no need to emit another warning here.
2586       if 'interface_provider_specs' in json_content:
2587         AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
# NOTE(review): these two patterns are non-raw strings containing '\.' — a
# Python DeprecationWarning-in-waiting; should be raw strings (r'...').
2589   manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
2590   test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
2591   if (manifest_pattern.search(f.LocalPath()) and not
2592       test_manifest_pattern.search(f.LocalPath())):
2593     # We expect all actual service manifest files to contain at least one
2594     # qualified reference to service_manager::Manifest.
2595     if 'service_manager::Manifest' in '\n'.join(f.NewContents()):
2596       AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
2597   for pattern in file_patterns:
2598     if input_api.fnmatch.fnmatch(
2599         input_api.os_path.basename(f.LocalPath()), pattern):
2601       for exclude in exclude_paths:
2602         if input_api.fnmatch.fnmatch(f.LocalPath(), exclude):
2607         AddPatternToCheck(f, pattern)
# _CheckIpcOwners: verifies every OWNERS file flagged by
# _GetOwnersFilesToCheckForIpcOwners already contains the required per-file
# security rules; missing rules become an error on commit, a warning on upload.
# NOTE(review): uses Python-2-only iteritems()/itervalues()/file(); interior
# lines (the IOError handler around 2632, 'if errors:', 'return results') are
# elided per the numbering jumps; visible code kept byte-identical.
2613 def _CheckIpcOwners(input_api, output_api):
2614   """Checks that affected files involving IPC have an IPC OWNERS rule."""
2615   to_check = _GetOwnersFilesToCheckForIpcOwners(input_api)
2618     # If there are any OWNERS files to check, there are IPC-related changes in
2619     # this CL. Auto-CC the review list.
2620     output_api.AppendCC('ipc-security-reviews@chromium.org')
2622     # Go through the OWNERS files to check, filtering out rules that are already
2623     # present in that OWNERS file.
2624     for owners_file, patterns in to_check.iteritems():
2626         with file(owners_file) as f:
2627           lines = set(f.read().splitlines())
2628           for entry in patterns.itervalues():
2629             entry['rules'] = [rule for rule in entry['rules'] if rule not in lines
2632         # No OWNERS file, so all the rules are definitely missing.
2635   # All the remaining lines weren't found in OWNERS files, so emit an error.
2637   for owners_file, patterns in to_check.iteritems():
2640     for _, entry in patterns.iteritems():
2641       missing_lines.extend(entry['rules'])
2642       files.extend(['  %s' % f.LocalPath() for f in entry['files']])
2645           'Because of the presence of files:\n%s\n\n'
2646           '%s needs the following %d lines added:\n\n%s' %
2647           ('\n'.join(files), owners_file, len(missing_lines),
2648            '\n'.join(missing_lines)))
# Severity depends on phase: hard error at commit, prompt warning at upload.
2652     if input_api.is_committing:
2653       output = output_api.PresubmitError
2655       output = output_api.PresubmitPromptWarning
2656     results.append(output(
2657         'Found OWNERS files that need to be updated for IPC security ' +
2658         'review coverage.\nPlease update the OWNERS files below:',
2659         long_text='\n\n'.join(errors)))
# _CheckUselessForwardDeclarations: in changed non-third_party headers, finds
# 'class Foo;' / 'struct Foo;' forward declarations whose name appears only in
# the declaration itself, then warns only when a changed diff line mentions the
# name (i.e. this CL plausibly made it useless).
# NOTE(review): interior lines are elided ('continue' after the path checks,
# the count threshold comparison at 2691, 'return results'); the count
# heuristic presumably treats count == 1 as unused — TODO confirm.
2664 def _CheckUselessForwardDeclarations(input_api, output_api):
2665   """Checks that added or removed lines in non third party affected
2666   header files do not lead to new useless class or struct forward
2670   class_pattern = input_api.re.compile(r'^class\s+(\w+);$',
2671                                        input_api.re.MULTILINE)
2672   struct_pattern = input_api.re.compile(r'^struct\s+(\w+);$',
2673                                         input_api.re.MULTILINE)
2674   for f in input_api.AffectedFiles(include_deletes=False):
# third_party is exempt, except Blink (both path-separator spellings checked).
2675     if (f.LocalPath().startswith('third_party') and
2676         not f.LocalPath().startswith('third_party/blink') and
2677         not f.LocalPath().startswith('third_party\\blink')):
2680     if not f.LocalPath().endswith('.h'):
2683     contents = input_api.ReadFile(f)
2684     fwd_decls = input_api.re.findall(class_pattern, contents)
2685     fwd_decls.extend(input_api.re.findall(struct_pattern, contents))
2687     useless_fwd_decls = []
2688     for decl in fwd_decls:
# Count every whole-word occurrence of the name in the file, including the
# forward declaration itself.
2689       count = sum(1 for _ in input_api.re.finditer(
2690         r'\b%s\b' % input_api.re.escape(decl), contents))
2692         useless_fwd_decls.append(decl)
2694     if not useless_fwd_decls:
# Scan only added/removed diff lines (skipping the '---'/'+++' file headers)
# so the warning fires only when this CL touched a line naming the decl.
2697     for line in f.GenerateScmDiff().splitlines():
2698       if (line.startswith('-') and not line.startswith('--') or
2699           line.startswith('+') and not line.startswith('++')):
2700         for decl in useless_fwd_decls:
2701           if input_api.re.search(r'\b%s\b' % decl, line[1:]):
2702             results.append(output_api.PresubmitPromptWarning(
2703               '%s: %s forward declaration is no longer needed' %
2704               (f.LocalPath(), decl)))
2705             useless_fwd_decls.remove(decl)
# _CheckAndroidDebuggableBuild: warns when changed Java lines compare
# Build.TYPE with .equals() instead of calling BuildInfo.isDebugAndroid().
# NOTE(review): 'errors = []', 'results = []' and the 'if errors:' /
# 'return results' framing lines are elided per the numbering jumps.
2709 def _CheckAndroidDebuggableBuild(input_api, output_api):
2710   """Checks that code uses BuildInfo.isDebugAndroid() instead of
2711      Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
2712      this is a debuggable build of Android.
# Matches both argument orders: Build.TYPE.equals(x) and x.equals(Build.TYPE).
2714   build_type_check_pattern = input_api.re.compile(
2715       r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
2719   sources = lambda affected_file: input_api.FilterSourceFile(
2721       black_list=(_EXCLUDED_PATHS +
2722                   _TEST_CODE_EXCLUDED_PATHS +
2723                   input_api.DEFAULT_BLACK_LIST +
2724                   (r"^android_webview[\\/]support_library[\\/]"
2725                       "boundary_interfaces[\\/]",
2726                    r"^chrome[\\/]android[\\/]webapk[\\/].*",
2727                    r'^third_party[\\/].*',
2728                    r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
2729                    r"webview[\\/]chromium[\\/]License.*",)),
2730       white_list=[r'.*\.java$'])
2732   for f in input_api.AffectedSourceFiles(sources):
2733     for line_num, line in f.ChangedContents():
2734       if build_type_check_pattern.search(line):
2735         errors.append("%s:%d" % (f.LocalPath(), line_num))
2740     results.append(output_api.PresubmitPromptWarning(
2741         'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
2742         ' Please use BuildInfo.isDebugAndroid() instead.',
2747 # TODO: add unit tests
# _CheckAndroidToastUsage: errors when a changed Java line imports
# android.widget.Toast instead of the Chromium wrapper
# org.chromium.ui.widget.Toast. Chromecast and remoting are exempt.
# NOTE(review): list initializations and the 'if errors:' / return framing
# are elided per the numbering jumps; visible code kept byte-identical.
2748 def _CheckAndroidToastUsage(input_api, output_api):
2749   """Checks that code uses org.chromium.ui.widget.Toast instead of
2750      android.widget.Toast (Chromium Toast doesn't force hardware
2751      acceleration on low-end devices, saving memory).
# Anchored to the whole line so only the exact import statement matches.
2753   toast_import_pattern = input_api.re.compile(
2754       r'^import android\.widget\.Toast;$')
2758   sources = lambda affected_file: input_api.FilterSourceFile(
2760       black_list=(_EXCLUDED_PATHS +
2761                   _TEST_CODE_EXCLUDED_PATHS +
2762                   input_api.DEFAULT_BLACK_LIST +
2763                   (r'^chromecast[\\/].*',
2764                    r'^remoting[\\/].*')),
2765       white_list=[r'.*\.java$'])
2767   for f in input_api.AffectedSourceFiles(sources):
2768     for line_num, line in f.ChangedContents():
2769       if toast_import_pattern.search(line):
2770         errors.append("%s:%d" % (f.LocalPath(), line_num))
2775     results.append(output_api.PresubmitError(
2776         'android.widget.Toast usage is detected. Android toasts use hardware'
2777         ' acceleration, and can be\ncostly on low-end devices. Please use'
2778         ' org.chromium.ui.widget.Toast instead.\n'
2779         'Contact dskiba@chromium.org if you have any questions.',
# _CheckAndroidCrLogUsage: style checks for org.chromium.base.Log usage in
# changed Java files — TAG variable naming, tag length (<= 20 chars, an Android
# system limit), no dots in tags, and preferring base.Log over other Log
# classes. NOTE(review): several framing lines ('errors = []'-style inits,
# 'if match:', the final return) are elided per the numbering jumps.
2785 def _CheckAndroidCrLogUsage(input_api, output_api):
2786   """Checks that new logs using org.chromium.base.Log:
2787     - Are using 'TAG' as variable name for the tags (warn)
2788     - Are using a tag that is shorter than 20 characters (error)
2791   # Do not check format of logs in the given files
2792   cr_log_check_excluded_paths = [
2793     # //chrome/android/webapk cannot depend on //base
2794     r"^chrome[\\/]android[\\/]webapk[\\/].*",
2795     # WebView license viewer code cannot depend on //base; used in stub APK.
2796     r"^android_webview[\\/]glue[\\/]java[\\/]src[\\/]com[\\/]android[\\/]"
2797     r"webview[\\/]chromium[\\/]License.*",
2798     # The customtabs_benchmark is a small app that does not depend on Chromium
2800     r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
2803   cr_log_import_pattern = input_api.re.compile(
2804       r'^import org\.chromium\.base\.Log;$', input_api.re.MULTILINE)
# Files in package org.chromium.base use Log unqualified (no import needed),
# so a package declaration with no other Log import also counts as "uses cr Log".
2805   class_in_base_pattern = input_api.re.compile(
2806       r'^package org\.chromium\.base;$', input_api.re.MULTILINE)
2807   has_some_log_import_pattern = input_api.re.compile(
2808       r'^import .*\.Log;$', input_api.re.MULTILINE)
2809   # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
2810   log_call_pattern = input_api.re.compile(r'^\s*Log\.\w\((?P<tag>\"?\w+\"?)\,')
2811   log_decl_pattern = input_api.re.compile(
2812       r'^\s*private static final String TAG = "(?P<name>(.*))";',
2813       input_api.re.MULTILINE)
2815   REF_MSG = ('See docs/android_logging.md '
2816             'or contact dgn@chromium.org for more info.')
2817   sources = lambda x: input_api.FilterSourceFile(x, white_list=[r'.*\.java$'],
2818       black_list=cr_log_check_excluded_paths)
2820   tag_decl_errors = []
2821   tag_length_errors = []
2823   tag_with_dot_errors = []
2824   util_log_errors = []
2826   for f in input_api.AffectedSourceFiles(sources):
2827     file_content = input_api.ReadFile(f)
2828     has_modified_logs = False
2831     if (cr_log_import_pattern.search(file_content) or
2832        (class_in_base_pattern.search(file_content) and
2833           not has_some_log_import_pattern.search(file_content))):
2834       # Checks to run for files using cr log
2835       for line_num, line in f.ChangedContents():
2837         # Check if the new line is doing some logging
2838         match = log_call_pattern.search(line)
2840           has_modified_logs = True
2842           # Make sure it uses "TAG"
2843           if not match.group('tag') == 'TAG':
2844             tag_errors.append("%s:%d" % (f.LocalPath(), line_num))
2846       # Report non cr Log function calls in changed lines
2847       for line_num, line in f.ChangedContents():
2848         if log_call_pattern.search(line):
2849           util_log_errors.append("%s:%d" % (f.LocalPath(), line_num))
2852     if has_modified_logs:
2853       # Make sure the tag is using the "cr" prefix and is not too long
2854       match = log_decl_pattern.search(file_content)
2855       tag_name = match.group('name') if match else None
2857         tag_decl_errors.append(f.LocalPath())
2858       elif len(tag_name) > 20:
2859         tag_length_errors.append(f.LocalPath())
2860       elif '.' in tag_name:
2861         tag_with_dot_errors.append(f.LocalPath())
2865     results.append(output_api.PresubmitPromptWarning(
2866         'Please define your tags using the suggested format: .\n'
2867         '"private static final String TAG = "<package tag>".\n'
2868         'They will be prepended with "cr_" automatically.\n' + REF_MSG,
2871   if tag_length_errors:
2872     results.append(output_api.PresubmitError(
2873         'The tag length is restricted by the system to be at most '
2874         '20 characters.\n' + REF_MSG,
2878     results.append(output_api.PresubmitPromptWarning(
2879         'Please use a variable named "TAG" for your log tags.\n' + REF_MSG,
2883     results.append(output_api.PresubmitPromptWarning(
2884         'Please use org.chromium.base.Log for new logs.\n' + REF_MSG,
2887   if tag_with_dot_errors:
2888     results.append(output_api.PresubmitPromptWarning(
2889         'Dot in log tags cause them to be elided in crash reports.\n' + REF_MSG,
2890         tag_with_dot_errors))
# _CheckAndroidTestJUnitFrameworkImport: errors on changed Java lines that
# import the deprecated junit.framework.* (JUnit 3) API.
# NOTE(review): 'errors = []' and the 'if errors:' / return framing are elided.
# NOTE(review): AffectedFiles(sources) passes the filter positionally; whether
# that lands on the intended parameter depends on AffectedFiles' signature —
# other checks here use AffectedSourceFiles(sources) instead; verify.
2895 def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
2896   """Checks that junit.framework.* is no longer used."""
2897   deprecated_junit_framework_pattern = input_api.re.compile(
2898       r'^import junit\.framework\..*;',
2899       input_api.re.MULTILINE)
2900   sources = lambda x: input_api.FilterSourceFile(
2901       x, white_list=[r'.*\.java$'], black_list=None)
2903   for f in input_api.AffectedFiles(sources):
2904     for line_num, line in f.ChangedContents():
2905       if deprecated_junit_framework_pattern.search(line):
2906         errors.append("%s:%d" % (f.LocalPath(), line_num))
2910     results.append(output_api.PresubmitError(
2911       'APIs from junit.framework.* are deprecated, please use JUnit4 framework'
2912       '(org.junit.*) from //third_party/junit. Contact yolandyan@chromium.org'
2913       ' if you have any question.', errors))
# _CheckAndroidTestJUnitInheritance: warns when a newly added *Test.java class
# declaration uses 'extends' (JUnit4 tests should not inherit; JUnit3 tests are
# unwanted). Only brand-new files are checked (empty OldContents()).
# NOTE(review): interior lines elided — e.g. the statement that clears the flag
# when the declaration ends (line 2936's guard) and the framing init/return.
2917 def _CheckAndroidTestJUnitInheritance(input_api, output_api):
2918   """Checks that if new Java test classes have inheritance.
2919   Either the new test class is JUnit3 test or it is a JUnit4 test class
2920   with a base class, either case is undesirable.
2922   class_declaration_pattern = input_api.re.compile(r'^public class \w*Test ')
2924   sources = lambda x: input_api.FilterSourceFile(
2925       x, white_list=[r'.*Test\.java$'], black_list=None)
2927   for f in input_api.AffectedFiles(sources):
# Only newly created files: existing content means this is not a new test class.
2928     if not f.OldContents():
# Tracks whether we are inside a (possibly line-wrapped) class declaration so
# that 'extends' on a continuation line is still attributed to it.
2929       class_declaration_start_flag = False
2930       for line_num, line in f.ChangedContents():
2931         if class_declaration_pattern.search(line):
2932           class_declaration_start_flag = True
2933         if class_declaration_start_flag and ' extends ' in line:
2934           errors.append('%s:%d' % (f.LocalPath(), line_num))
2936           class_declaration_start_flag = False
2940     results.append(output_api.PresubmitPromptWarning(
2941       'The newly created files include Test classes that inherits from base'
2942       ' class. Please do not use inheritance in JUnit4 tests or add new'
2943       ' JUnit3 tests. Contact yolandyan@chromium.org if you have any'
2944       ' questions.', errors))
# _CheckAndroidTestAnnotationUsage: errors on changed Java lines importing the
# android.test.suitebuilder.annotation.* package (deprecated since API 24).
# NOTE(review): 'errors = []' and the 'if errors:' / return framing are elided;
# also passes |sources| positionally to AffectedFiles (cf. note on the JUnit
# import check above).
2948 def _CheckAndroidTestAnnotationUsage(input_api, output_api):
2949   """Checks that android.test.suitebuilder.annotation.* is no longer used."""
2950   deprecated_annotation_import_pattern = input_api.re.compile(
2951       r'^import android\.test\.suitebuilder\.annotation\..*;',
2952       input_api.re.MULTILINE)
2953   sources = lambda x: input_api.FilterSourceFile(
2954       x, white_list=[r'.*\.java$'], black_list=None)
2956   for f in input_api.AffectedFiles(sources):
2957     for line_num, line in f.ChangedContents():
2958       if deprecated_annotation_import_pattern.search(line):
2959         errors.append("%s:%d" % (f.LocalPath(), line_num))
2963     results.append(output_api.PresubmitError(
2964       'Annotations in android.test.suitebuilder.annotation have been'
2965       ' deprecated since API level 24. Please use android.support.test.filters'
2966       ' from //third_party/android_support_test_runner:runner_java instead.'
2967       ' Contact yolandyan@chromium.org if you have any questions.', errors))
# _CheckAndroidNewMdpiAssetLocation: errors when new .png assets land in
# /res/drawable/ or /res/drawable-ldrtl/ instead of the -mdpi variants.
# NOTE(review): 'errors = []', 'results = []', 'if errors:' and 'return results'
# are elided per the numbering jumps; visible code kept byte-identical.
2971 def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
2972   """Checks if MDPI assets are placed in a correct directory."""
2973   file_filter = lambda f: (f.LocalPath().endswith('.png') and
2974                            ('/res/drawable/' in f.LocalPath() or
2975                             '/res/drawable-ldrtl/' in f.LocalPath()))
# include_deletes=False: removing a misplaced asset should not trigger this.
2977   for f in input_api.AffectedFiles(include_deletes=False,
2978                                    file_filter=file_filter):
2979     errors.append('    %s' % f.LocalPath())
2983     results.append(output_api.PresubmitError(
2984         'MDPI assets should be placed in /res/drawable-mdpi/ or '
2985         '/res/drawable-ldrtl-mdpi/\ninstead of /res/drawable/ and'
2986         '/res/drawable-ldrtl/.\n'
2987         'Contact newt@chromium.org if you have questions.', errors))
# _CheckAndroidWebkitImports: errors when changed Java code outside the WebView
# glue layer imports android.webkit.ValueCallback (org.chromium.base.Callback
# should be used instead for support-library compatibility).
# NOTE(review): list initializations and the 'if errors:' / return framing are
# elided per the numbering jumps; visible code kept byte-identical.
2991 def _CheckAndroidWebkitImports(input_api, output_api):
2992   """Checks that code uses org.chromium.base.Callback instead of
2993      android.widget.ValueCallback except in the WebView glue layer.
# Anchored to the exact import line.
2995   valuecallback_import_pattern = input_api.re.compile(
2996       r'^import android\.webkit\.ValueCallback;$')
3000   sources = lambda affected_file: input_api.FilterSourceFile(
3002       black_list=(_EXCLUDED_PATHS +
3003                   _TEST_CODE_EXCLUDED_PATHS +
3004                   input_api.DEFAULT_BLACK_LIST +
# The glue layer is the one place android.webkit.* is allowed.
3005                   (r'^android_webview[\\/]glue[\\/].*',)),
3006       white_list=[r'.*\.java$'])
3008   for f in input_api.AffectedSourceFiles(sources):
3009     for line_num, line in f.ChangedContents():
3010       if valuecallback_import_pattern.search(line):
3011         errors.append("%s:%d" % (f.LocalPath(), line_num))
3016     results.append(output_api.PresubmitError(
3017         'android.webkit.ValueCallback usage is detected outside of the glue'
3018         ' layer. To stay compatible with the support library, android.webkit.*'
3019         ' classes should only be used inside the glue layer and'
3020         ' org.chromium.base.Callback should be used instead.',
# _CheckAndroidXmlStyle: imports //tools/android/checkxmlstyle and dispatches
# to its upload or commit entry point depending on |is_check_on_upload|.
# NOTE(review): the try/finally lines around the sys.path swap and the 'else:'
# before the commit branch are elided (numbering jumps 3029->3031, 3039->3041).
3026 def _CheckAndroidXmlStyle(input_api, output_api, is_check_on_upload):
3027   """Checks Android XML styles """
3029   original_sys_path = sys.path
3031     sys.path = sys.path + [input_api.os_path.join(
3032         input_api.PresubmitLocalPath(), 'tools', 'android', 'checkxmlstyle')]
3033     import checkxmlstyle
3035     # Restore sys.path to what it was before.
3036     sys.path = original_sys_path
3038   if is_check_on_upload:
3039     return checkxmlstyle.CheckStyleOnUpload(input_api, output_api)
3041     return checkxmlstyle.CheckStyleOnCommit(input_api, output_api)
# PydepsChecker: helper for validating .pydeps files (generated Python
# dependency lists). Caches file contents, maps .py files back to the .pydeps
# that list them, computes which .pydeps a CL may have invalidated, and re-runs
# the generator command embedded in each .pydeps to detect staleness.
# NOTE(review): some interior lines are elided per the numbering jumps
# (e.g. 3048->3050, 3095->3099), including DetermineIfStale's docstring tail
# and the implicit 'return None' path; visible code kept byte-identical.
3044 class PydepsChecker(object):
3045   def __init__(self, input_api, pydeps_files):
3046     self._file_cache = {}
3047     self._input_api = input_api
3048     self._pydeps_files = pydeps_files
3050   def _LoadFile(self, path):
3051     """Returns the list of paths within a .pydeps file relative to //."""
3052     if path not in self._file_cache:
3053       with open(path) as f:
3054         self._file_cache[path] = f.read()
3055     return self._file_cache[path]
3057   def _ComputeNormalizedPydepsEntries(self, pydeps_path):
3058     """Returns an interable of paths within the .pydep, relativized to //."""
3059     os_path = self._input_api.os_path
3060     pydeps_dir = os_path.dirname(pydeps_path)
# Lines starting with '*' are generator metadata, not dependency entries.
3061     entries = (l.rstrip() for l in self._LoadFile(pydeps_path).splitlines()
3062                if not l.startswith('*'))
3063     return (os_path.normpath(os_path.join(pydeps_dir, e)) for e in entries)
3065   def _CreateFilesToPydepsMap(self):
3066     """Returns a map of local_path -> list_of_pydeps."""
3068     for pydep_local_path in self._pydeps_files:
3069       for path in self._ComputeNormalizedPydepsEntries(pydep_local_path):
3070         ret.setdefault(path, []).append(pydep_local_path)
3073   def ComputeAffectedPydeps(self):
3074     """Returns an iterable of .pydeps files that might need regenerating."""
3075     affected_pydeps = set()
3076     file_to_pydeps_map = None
3077     for f in self._input_api.AffectedFiles(include_deletes=True):
3078       local_path = f.LocalPath()
3079       # Changes to DEPS can lead to .pydeps changes if any .py files are in
3080       # subrepositories. We can't figure out which files change, so re-check
3082       # Changes to print_python_deps.py affect all .pydeps.
3083       if local_path == 'DEPS' or local_path.endswith('print_python_deps.py'):
3084         return self._pydeps_files
3085       elif local_path.endswith('.pydeps'):
3086         if local_path in self._pydeps_files:
3087           affected_pydeps.add(local_path)
3088       elif local_path.endswith('.py'):
# The reverse map is built lazily — only CLs touching .py files pay the cost.
3089         if file_to_pydeps_map is None:
3090           file_to_pydeps_map = self._CreateFilesToPydepsMap()
3091         affected_pydeps.update(file_to_pydeps_map.get(local_path, ()))
3092     return affected_pydeps
3094   def DetermineIfStale(self, pydeps_path):
3095     """Runs print_python_deps.py to see if the files is stale."""
# Line 2 of a .pydeps file holds the generator command (after a leading '#').
3099     old_pydeps_data = self._LoadFile(pydeps_path).splitlines()
3100     cmd = old_pydeps_data[1][1:].strip()
3101     env = dict(os.environ)
3102     env['PYTHONDONTWRITEBYTECODE'] = '1'
3103     new_pydeps_data = self._input_api.subprocess.check_output(
3104         cmd + ' --output ""', shell=True, env=env)
3105     old_contents = old_pydeps_data[2:]
3106     new_contents = new_pydeps_data.splitlines()[2:]
# NOTE(review): this recomputes the slices instead of comparing the
# old_contents/new_contents variables assigned just above — redundant.
3107     if old_pydeps_data[2:] != new_pydeps_data.splitlines()[2:]:
3108       return cmd, '\n'.join(difflib.context_diff(old_contents, new_contents))
# _CheckPydepsNeedsUpdating: Linux-only check that (1) added/deleted .pydeps
# files are reflected in _ALL_PYDEPS_FILES, and (2) every affected .pydeps is
# still in sync with what its generator command would produce.
# NOTE(review): framing lines are elided per the numbering jumps — the early
# 'return []' on non-Linux, 'results = []', the unpacking of |result| into
# (cmd, diff), and the final 'return results'; visible code kept byte-identical.
3111 def _CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
3112   """Checks if a .pydeps file needs to be regenerated."""
3113   # This check is for Python dependency lists (.pydeps files), and involves
3114   # paths not only in the PRESUBMIT.py, but also in the .pydeps files. It
3115   # doesn't work on Windows and Mac, so skip it on other platforms.
3116   if input_api.platform != 'linux2':
3118   # TODO(agrieve): Update when there's a better way to detect
3119   # this: crbug.com/570091
3120   is_android = input_api.os_path.exists('third_party/android_tools')
3121   pydeps_files = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
3123   # First, check for new / deleted .pydeps.
3124   for f in input_api.AffectedFiles(include_deletes=True):
3125     # Check whether we are running the presubmit check for a file in src.
3126     # f.LocalPath is relative to repo (src, or internal repo).
3127     # os_path.exists is relative to src repo.
3128     # Therefore if os_path.exists is true, it means f.LocalPath is relative
3129     # to src and we can conclude that the pydeps is in src.
3130     if input_api.os_path.exists(f.LocalPath()):
3131       if f.LocalPath().endswith('.pydeps'):
3132         if f.Action() == 'D' and f.LocalPath() in _ALL_PYDEPS_FILES:
3133           results.append(output_api.PresubmitError(
3134               'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
3135               'remove %s' % f.LocalPath()))
3136         elif f.Action() != 'D' and f.LocalPath() not in _ALL_PYDEPS_FILES:
3137           results.append(output_api.PresubmitError(
3138               'Please update _ALL_PYDEPS_FILES within //PRESUBMIT.py to '
3139               'include %s' % f.LocalPath()))
# |checker_for_tests| lets unit tests inject a fake; production uses a real one.
3144   checker = checker_for_tests or PydepsChecker(input_api, pydeps_files)
3146   for pydep_path in checker.ComputeAffectedPydeps():
3148       result = checker.DetermineIfStale(pydep_path)
3151         results.append(output_api.PresubmitError(
3152             'File is stale: %s\nDiff (apply to fix):\n%s\n'
3153             'To regenerate, run:\n\n    %s' %
3154             (pydep_path, diff, cmd)))
3155     except input_api.subprocess.CalledProcessError as error:
3156       return [output_api.PresubmitError('Error running: %s' % error.cmd,
3157           long_text=error.output)]
def _CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    black_list = (_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r"^base[\\/]memory[\\/]singleton\.h$",
                   r"^net[\\/]quic[\\/]platform[\\/]impl[\\/]"
                   r"quic_singleton_impl\.h$"))
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  # Negative lookbehind: 'class base::Singleton<' (e.g. a friend declaration)
  # is intentionally not matched.
  pattern = input_api.re.compile(r'(?<!class\sbase::)Singleton\s*<')
  for f in input_api.AffectedSourceFiles(FileFilter):
    # Only scan header-like files; .inl is included alongside .h/.hpp/.hxx.
    if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
        f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
      contents = input_api.ReadFile(f)
      for line in contents.splitlines(False):
        if (not line.lstrip().startswith('//') and # Strip C++ comment.
            pattern.search(line)):
  return [output_api.PresubmitError(
      'Found base::Singleton<T> in the following header files.\n' +
      'Please move them to an appropriate source file so that the ' +
      'template gets instantiated in a single compilation unit.',
3196 ( "-webkit-box", "flex" ),
3197 ( "-webkit-inline-box", "inline-flex" ),
3198 ( "-webkit-flex", "flex" ),
3199 ( "-webkit-inline-flex", "inline-flex" ),
3200 ( "-webkit-min-content", "min-content" ),
3201 ( "-webkit-max-content", "max-content" ),
3204 ( "-webkit-background-clip", "background-clip" ),
3205 ( "-webkit-background-origin", "background-origin" ),
3206 ( "-webkit-background-size", "background-size" ),
3207 ( "-webkit-box-shadow", "box-shadow" ),
3208 ( "-webkit-user-select", "user-select" ),
3211 ( "-webkit-gradient", "gradient" ),
3212 ( "-webkit-repeating-gradient", "repeating-gradient" ),
3213 ( "-webkit-linear-gradient", "linear-gradient" ),
3214 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
3215 ( "-webkit-radial-gradient", "radial-gradient" ),
3216 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
# TODO: add unit tests
def _CheckNoDeprecatedCss(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation and iOS CSS for dom distiller
      (reader mode) are ignored by the hooks as it
      needs to be consumed by WebKit. """
  file_inclusion_pattern = [r".+\.css$"]
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^components/dom_distiller/core/css/distilledpage_ios.css",
                 r"^components/neterror/resources/neterror.css",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        # Plain substring match — no word-boundary check, so this can flag
        # occurrences inside longer tokens as well.
        if deprecated_value in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
3250 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
3251 ( "__defineGetter__", "Object.defineProperty" ),
3252 ( "__defineSetter__", "Object.defineProperty" ),
# TODO: add unit tests
def _CheckNoDeprecatedJs(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  file_inclusion_pattern = [r".+\.js$"] # TODO(dbeam): .html?
  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for lnum, line in fpath.ChangedContents():
      for (deprecated, replacement) in _DEPRECATED_JS:
        # Substring match only; suggests the modern replacement in the error.
        if deprecated in line:
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (fpath.LocalPath(), lnum, deprecated, replacement)))
def _CheckForRelativeIncludes(input_api, output_api):
  """Flags changed #include lines that use '../' relative paths."""
  for f in input_api.AffectedFiles(include_deletes=False):
    # third_party code (other than blink) is exempt; both path separators
    # are checked so this works on Windows checkouts too.
    if (f.LocalPath().startswith('third_party') and
        not f.LocalPath().startswith('third_party/blink') and
        not f.LocalPath().startswith('third_party\\blink')):

    if not _IsCPlusPlusFile(input_api, f.LocalPath()):

    # Only newly changed lines are scanned, not the whole file.
    relative_includes = [line for _, line in f.ChangedContents()
                         if "#include" in line and "../" in line]
    if not relative_includes:
    bad_files[f.LocalPath()] = relative_includes

  error_descriptions = []
  # NOTE(review): dict.iteritems() is Python 2 only.
  for file_path, bad_lines in bad_files.iteritems():
    error_description = file_path
    for line in bad_lines:
      error_description += '\n ' + line
    error_descriptions.append(error_description)

  results.append(output_api.PresubmitError(
      'You added one or more relative #include paths (including "../").\n'
      'These shouldn\'t be used because they can be used to include headers\n'
      'from code that\'s not correctly specified as a dependency in the\n'
      'relevant BUILD.gn file(s).',
      error_descriptions))
def _CheckForCcIncludes(input_api, output_api):
  """Check that nobody tries to include a cc file. It's a relatively
  common error which results in duplicate symbols in object
  files. This may not always break the build until someone later gets
  very confusing linking errors."""
  for f in input_api.AffectedFiles(include_deletes=False):
    # We let third_party code do whatever it wants
    if (f.LocalPath().startswith('third_party') and
        not f.LocalPath().startswith('third_party/blink') and
        not f.LocalPath().startswith('third_party\\blink')):

    if not _IsCPlusPlusFile(input_api, f.LocalPath()):

    for _, line in f.ChangedContents():
      # Only quoted includes are checked; angle-bracket includes are ignored.
      if line.startswith('#include "'):
        included_file = line.split('"')[1]
        if _IsCPlusPlusFile(input_api, included_file):
          # The most common naming for external files with C++ code,
          # apart from standard headers, is to call them foo.inc, but
          # Chromium sometimes uses foo-inc.cc so allow that as well.
          if not included_file.endswith(('.h', '-inc.cc')):
            results.append(output_api.PresubmitError(
                'Only header files or .inc files should be included in other\n'
                'C++ files. Compiling the contents of a cc file more than once\n'
                'will cause duplicate information in the build which may later\n'
                'result in strange link_errors.\n' +
                f.LocalPath() + ':\n ' +
def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
  """Validates one WATCHLIST_DEFINITIONS entry (AST nodes).

  Returns an error message string describing the first problem found;
  a well-formed entry is a string-literal key mapping to a single-entry
  dict whose only key is the string literal 'filepath'.
  """
  if not isinstance(key, ast.Str):
    return 'Key at line %d must be a string literal' % key.lineno
  if not isinstance(value, ast.Dict):
    return 'Value at line %d must be a dict' % value.lineno
  if len(value.keys) != 1:
    return 'Dict at line %d must have single entry' % value.lineno
  if not isinstance(value.keys[0], ast.Str) or value.keys[0].s != 'filepath':
        'Entry at line %d must have a string literal \'filepath\' as key' %
3362 def _CheckWatchlistsEntrySyntax(key, value, ast, email_regex):
3363 if not isinstance(key, ast.Str):
3364 return 'Key at line %d must be a string literal' % key.lineno
3365 if not isinstance(value, ast.List):
3366 return 'Value at line %d must be a list' % value.lineno
3367 for element in value.elts:
3368 if not isinstance(element, ast.Str):
3369 return 'Watchlist elements on line %d is not a string' % key.lineno
3370 if not email_regex.match(element.s):
3371 return ('Watchlist element on line %d doesn\'t look like a valid ' +
3372 'email: %s') % (key.lineno, element.s)
def _CheckWATCHLISTSEntries(wd_dict, w_dict, input_api):
  """Cross-checks the WATCHLIST_DEFINITIONS and WATCHLISTS dict AST nodes.

  Returns an error message string for the first mismatch found.
  """
  mismatch_template = (
      'Mismatch between WATCHLIST_DEFINITIONS entry (%s) and WATCHLISTS '
  email_regex = input_api.re.compile(
      r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]+$")
    # Entries are compared positionally: index i of one dict must match
    # index i of the other.
    if i >= len(wd_dict.keys):
      if i >= len(w_dict.keys):
      return mismatch_template % ('missing', 'line %d' % w_dict.keys[i].lineno)
    elif i >= len(w_dict.keys):
          mismatch_template % ('line %d' % wd_dict.keys[i].lineno, 'missing'))
    wd_key = wd_dict.keys[i]
    w_key = w_dict.keys[i]
    result = _CheckWatchlistDefinitionsEntrySyntax(
        wd_key, wd_dict.values[i], ast)
    if result is not None:
      return 'Bad entry in WATCHLIST_DEFINITIONS dict: %s' % result
    result = _CheckWatchlistsEntrySyntax(
        w_key, w_dict.values[i], ast, email_regex)
    if result is not None:
      return 'Bad entry in WATCHLISTS dict: %s' % result
    # Keys must agree pairwise and be lexicographically sorted.
    if wd_key.s != w_key.s:
      return mismatch_template % (
          '%s at line %d' % (wd_key.s, wd_key.lineno),
          '%s at line %d' % (w_key.s, w_key.lineno))
    if wd_key.s < last_key:
          'WATCHLISTS dict is not sorted lexicographically at line %d and %d' %
          (wd_key.lineno, w_key.lineno))
def _CheckWATCHLISTSSyntax(expression, input_api):
  """Checks the overall shape of the parsed WATCHLISTS expression.

  Returns an error message string, or delegates per-entry checking to
  _CheckWATCHLISTSEntries.
  """
  if not isinstance(expression, ast.Expression):
    return 'WATCHLISTS file must contain a valid expression'
  dictionary = expression.body
  if not isinstance(dictionary, ast.Dict) or len(dictionary.keys) != 2:
    return 'WATCHLISTS file must have single dict with exactly two entries'

  first_key = dictionary.keys[0]
  first_value = dictionary.values[0]
  second_key = dictionary.keys[1]
  second_value = dictionary.values[1]

  # Entry order is significant: WATCHLIST_DEFINITIONS first, WATCHLISTS second.
  if (not isinstance(first_key, ast.Str) or
      first_key.s != 'WATCHLIST_DEFINITIONS' or
      not isinstance(first_value, ast.Dict)):
        'The first entry of the dict in WATCHLISTS file must be '
        'WATCHLIST_DEFINITIONS dict')

  if (not isinstance(second_key, ast.Str) or
      second_key.s != 'WATCHLISTS' or
      not isinstance(second_value, ast.Dict)):
        'The second entry of the dict in WATCHLISTS file must be '

  return _CheckWATCHLISTSEntries(first_value, second_value, input_api)
def _CheckWATCHLISTS(input_api, output_api):
  """Parses and style-checks the top-level WATCHLISTS file when it changes."""
  for f in input_api.AffectedFiles(include_deletes=False):
    if f.LocalPath() == 'WATCHLISTS':
      contents = input_api.ReadFile(f, 'r')

        # First, make sure that it can be evaluated.
        input_api.ast.literal_eval(contents)
        # Get an AST tree for it and scan the tree for detailed style checking.
        expression = input_api.ast.parse(
            contents, filename='WATCHLISTS', mode='eval')
      # Each parse failure mode maps to the same presubmit error.
      except ValueError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]
      except SyntaxError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]
      except TypeError as e:
        return [output_api.PresubmitError(
            'Cannot parse WATCHLISTS file', long_text=repr(e))]

      result = _CheckWATCHLISTSSyntax(expression, input_api)
      if result is not None:
        return [output_api.PresubmitError(result)]
def _CheckNewHeaderWithoutGnChange(input_api, output_api):
  """Checks that newly added header files have corresponding GN changes.
  Note that this is only a heuristic. To be precise, run script:
  build/check_gn_headers.py.
    return input_api.FilterSourceFile(
        f, white_list=(r'.+%s' % _HEADER_EXTENSIONS, ))

  # Collect newly added ('A') header files.
  for f in input_api.AffectedSourceFiles(headers):
    if f.Action() != 'A':
    new_headers.append(f.LocalPath())

    return input_api.FilterSourceFile(f, white_list=(r'.+\.gn', ))

  # Concatenate every changed line from all affected GN files into one
  # string; a header's basename merely appearing anywhere in it counts.
  all_gn_changed_contents = ''
  for f in input_api.AffectedSourceFiles(gn_files):
    for _, line in f.ChangedContents():
      all_gn_changed_contents += line

  for header in new_headers:
    basename = input_api.os_path.basename(header)
    if basename not in all_gn_changed_contents:
      problems.append(header)

    return [output_api.PresubmitPromptWarning(
      'Missing GN changes for new header files', items=sorted(problems),
      long_text='Please double check whether newly added header files need '
      'corresponding changes in gn or gni files.\nThis checking is only a '
      'heuristic. Run build/check_gn_headers.py to be precise.\n'
      'Read https://crbug.com/661774 for more info.')]
def _CheckCorrectProductNameInMessages(input_api, output_api):
  """Check that Chromium-branded strings don't include "Chrome" or vice versa.

  This assumes we won't intentionally reference one product from the other
      # One test case per branding .grd file: the branded file must not
      # mention the other product's name.
      "filename_postfix": "google_chrome_strings.grd",
      "correct_name": "Chrome",
      "incorrect_name": "Chromium",
      "filename_postfix": "chromium_strings.grd",
      "correct_name": "Chromium",
      "incorrect_name": "Chrome",

  for test_case in test_cases:
    filename_filter = lambda x: x.LocalPath().endswith(
        test_case["filename_postfix"])

    # Check each new line. Can yield false positives in multiline comments, but
    # easier than trying to parse the XML because messages can have nested
    # children, and associating message elements with affected lines is hard.
    for f in input_api.AffectedSourceFiles(filename_filter):
      for line_num, line in f.ChangedContents():
        # Lines containing message tags or comment markers are skipped.
        if "<message" in line or "<!--" in line or "-->" in line:
        if test_case["incorrect_name"] in line:
              "Incorrect product name in %s:%d" % (f.LocalPath(), line_num))

          "Strings in %s-branded string files should reference \"%s\", not \"%s\""
          % (test_case["correct_name"], test_case["correct_name"],
             test_case["incorrect_name"]))
      all_problems.append(
          output_api.PresubmitPromptWarning(message, items=problems))
def _CheckBuildtoolsRevisionsAreInSync(input_api, output_api):
  """Warns when //DEPS and //buildtools/DEPS revision pins diverge."""
  # TODO(crbug.com/941824): We need to make sure the entries in
  # //buildtools/DEPS are kept in sync with the entries in //DEPS
  # so that users of //buildtools in other projects get the same tooling
  # Chromium gets. If we ever fix the referenced bug and add 'includedeps'
  # support to gclient, we can eliminate the duplication and delete
  # this presubmit check.

  # Update this regexp if new revisions are added to the files.
  rev_regexp = input_api.re.compile(
      "'((clang_format|libcxx|libcxxabi|libunwind)_revision|gn_version)':")

  # If a user is changing one revision, they need to change the same
  # line in both files. This means that any given change should contain
  # exactly the same list of changed lines that match the regexps. The
  # replace(' ', '') call allows us to ignore whitespace changes to the
  # lines. The 'long_text' parameter to the error will contain the
  # list of changed lines in both files, which should make it easy enough
  # to spot the error without going overboard in this implementation.
      'buildtools/DEPS': {},
  for f in input_api.AffectedFiles(
      file_filter=lambda f: f.LocalPath() in ('DEPS', 'buildtools/DEPS')):
    for line_num, line in f.ChangedContents():
      if rev_regexp.search(line):
        # Key on the whitespace-stripped line so formatting-only diffs match.
        revs_changes[f.LocalPath()][line.replace(' ', '')] = line
        long_text += '%s:%d: %s\n' % (f.LocalPath(), line_num, line)

  if set(revs_changes['DEPS']) != set(revs_changes['buildtools/DEPS']):
    return [output_api.PresubmitError(
        'Change buildtools revisions in sync in both //DEPS and '
        '//buildtools/DEPS.', long_text=long_text + '\n')]
def _CheckForTooLargeFiles(input_api, output_api):
  """Avoid large files, especially binary files, in the repository since
  git doesn't scale well for those. They will be in everyone's repo
  clones forever, forever making Chromium slower to clone and work
  # Uploading files to cloud storage is not trivial so we don't want
  # to set the limit too low, but the upper limit for "normal" large
  # files seems to be 1-2 MB, with a handful around 5-8 MB, so
  # anything over 20 MB is exceptional.
  TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024  # 20 MB
  too_large_files = []
  for f in input_api.AffectedFiles():
    # Check both added and modified files (but not deleted files).
    if f.Action() in ('A', 'M'):
      size = input_api.os_path.getsize(f.AbsoluteLocalPath())
      if size > TOO_LARGE_FILE_SIZE_LIMIT:
        too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))
      'Do not commit large files to git since git scales badly for those.\n' +
      'Instead put the large files in cloud storage and use DEPS to\n' +
      'fetch them.\n' + '\n'.join(too_large_files)
    return [output_api.PresubmitError(
        'Too large files found in commit', long_text=message + '\n')]
def _AndroidSpecificOnUploadChecks(input_api, output_api):
  """Groups upload checks that target android code."""
  results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
  results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
  results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
  results.extend(_CheckAndroidToastUsage(input_api, output_api))
  results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
  results.extend(_CheckAndroidTestJUnitFrameworkImport(input_api, output_api))
  results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
  results.extend(_CheckAndroidWebkitImports(input_api, output_api))
  # is_check_on_upload=True variant of the XML style check.
  results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
def _AndroidSpecificOnCommitChecks(input_api, output_api):
  """Groups commit checks that target android code."""
  # is_check_on_upload=False variant of the XML style check.
  results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  # Canned project-wide checks from depot_tools.
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS))

  # Robot authors (e.g. autorollers) skip the authorized-author check.
  author = input_api.change.author_email
  if author and author not in _KNOWN_ROBOTS:
        input_api.canned_checks.CheckAuthorizedAuthor(input_api, output_api))

      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
      _CheckNoProductionCodeUsingTestOnlyFunctionsJava(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoDISABLETypoInTests(input_api, output_api))
  results.extend(_CheckDCHECK_IS_ONHasBraces(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckTeamTags(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckBuildConfigMacrosWithoutInclude(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  results.extend(_CheckFlakyTestUsage(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
      # Tabs check is limited to .grd files here.
      input_api.canned_checks.CheckChangeHasNoTabs(
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCss(input_api, output_api))
  results.extend(_CheckNoDeprecatedJs(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForLongPathnames(input_api, output_api))
  results.extend(_CheckForIncludeGuards(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))
  results.extend(_CheckPydepsNeedsUpdating(input_api, output_api))
  results.extend(_CheckJavaStyle(input_api, output_api))
  results.extend(_CheckIpcOwners(input_api, output_api))
  results.extend(_CheckUselessForwardDeclarations(input_api, output_api))
  results.extend(_CheckForRelativeIncludes(input_api, output_api))
  results.extend(_CheckForCcIncludes(input_api, output_api))
  results.extend(_CheckWATCHLISTS(input_api, output_api))
  results.extend(input_api.RunTests(
      input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
  results.extend(_CheckTranslationScreenshots(input_api, output_api))
  results.extend(_CheckCorrectProductNameInMessages(input_api, output_api))
  results.extend(_CheckBuildtoolsRevisionsAreInSync(input_api, output_api))
  results.extend(_CheckForTooLargeFiles(input_api, output_api))

  # When a PRESUBMIT.py anywhere in the tree is modified, also run its
  # sibling PRESUBMIT_test.py (if any) as part of this presubmit.
  for f in input_api.AffectedFiles():
    path, name = input_api.os_path.split(f.LocalPath())
    if name == 'PRESUBMIT.py':
      full_path = input_api.os_path.join(input_api.PresubmitLocalPath(), path)
      test_file = input_api.os_path.join(path, 'PRESUBMIT_test.py')
      if f.Action() != 'D' and input_api.os_path.exists(test_file):
        # The PRESUBMIT.py file (and the directory containing it) might
        # have been affected by being moved or removed, so only try to
        # run the tests if they still exist.
        results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
            input_api, output_api, full_path,
            whitelist=[r'^PRESUBMIT_test\.py$']))
def _CheckPatchFiles(input_api, output_api):
  """Flags leftover .orig/.rej files produced by patch tools."""
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
              if f.LocalPath().endswith(('.orig', '.rej'))]
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
def _CheckBuildConfigMacrosWithoutInclude(input_api, output_api):
  """Warns when build_config.h macros are tested without including the header."""
  # Excludes OS_CHROMEOS, which is not defined in build_config.h.
  macro_re = input_api.re.compile(r'^\s*#(el)?if.*\bdefined\(((OS_(?!CHROMEOS)|'
                                  'COMPILER_|ARCH_CPU_|WCHAR_T_IS_)[^)]*)')
  include_re = input_api.re.compile(
      r'^#include\s+"build/build_config.h"', input_api.re.MULTILINE)
  extension_re = input_api.re.compile(r'\.[a-z]+$')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.h', '.c', '.cc', '.cpp', '.m', '.mm')):
    found_line_number = None
    # Remember the last changed line that tests one of the macros.
    for line_num, line in f.ChangedContents():
      match = macro_re.search(line)
        found_line_number = line_num
        found_macro = match.group(2)
    if not found_line_number:

    # The include may appear anywhere in the file, not just in the diff.
    found_include = False
    for line in f.NewContents():
      if include_re.search(line):
        found_include = True

    # For a non-header, the matching primary .h including build_config.h
    # also satisfies the check.
    if not f.LocalPath().endswith('.h'):
      primary_header_path = extension_re.sub('.h', f.AbsoluteLocalPath())
        content = input_api.ReadFile(primary_header_path, 'r')
        if include_re.search(content):

    errors.append('%s:%d %s macro is used without including build/'
                  % (f.LocalPath(), found_line_number, found_macro))
    return [output_api.PresubmitPromptWarning('\n'.join(errors))]
def _DidYouMeanOSMacro(bad_macro):
  # Maps the fourth character of a bad OS_* macro to a likely intended
  # valid macro (e.g. OS_A... -> OS_ANDROID).
    return {'A': 'OS_ANDROID',
            'W': 'OS_WIN'}[bad_macro[3].upper()]
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  for lnum, line in f.ChangedContents():
    # Only preprocessor lines are scanned for defined(OS_*) uses.
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          # Suggest the probable intended macro when one can be guessed.
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append(' %s:%d %s%s' % (f.LocalPath(),
def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  for f in input_api.AffectedSourceFiles(None):
    # Non-C-family files (scripts, docs, styles) are skipped.
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css', '.md')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
  """Check one affected file for ifdef/defined() tests of always-defined
  macros (which should use '#if <macro>' instead)."""
  ALWAYS_DEFINED_MACROS = (
      "TARGET_IPHONE_SIMULATOR",
      "TARGET_OS_EMBEDDED",
  # Matches both '#ifdef NAME' and 'defined(NAME)' forms.
  ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
  for lnum, line in f.ChangedContents():
    for match in ifdef_macro.finditer(line):
      if match.group(1) in ALWAYS_DEFINED_MACROS:
        always_defined = ' %s is always defined. ' % match.group(1)
        did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
        results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
def _CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  # These third-party trees legitimately test such macros.
  skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
  for f in input_api.AffectedFiles():
    if any([f.LocalPath().startswith(path) for path in skipped_paths]):
    if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))

  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
def _CheckForIPCRules(input_api, output_api):
  """Check for same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  # Occurrences inside // comments are excluded via this second pattern.
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    # Only header files are checked for the deprecated macro.
    if not local_path.endswith('.h'):
    for line_number, line in f.ChangedContents():
      if inclusion_pattern.search(line) and not comment_pattern.search(line):
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

    return [output_api.PresubmitPromptWarning(
        _IPC_ENUM_TRAITS_DEPRECATED, problems)]
def _CheckForLongPathnames(input_api, output_api):
  """Check to make sure no files being submitted have long paths.
  This causes issues on Windows.
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    # Windows has a path limit of 260 characters. Limit path length to 200 so
    # that we have some extra for the prefix on dev machines and the bots.
    if len(local_path) > 200:
      problems.append(local_path)

    return [output_api.PresubmitError(_LONG_PATH_ERROR, problems)]
def _CheckForIncludeGuards(input_api, output_api):
  """Check that header files have proper guards against multiple inclusion.
  If a file should not have such guards (and it probably should) then it
  should include the string "no-include-guard-because-multiply-included".
  def is_chromium_header_file(f):
    # We only check header files under the control of the Chromium
    # project. That is, those outside third_party apart from
    # third_party/blink.
    file_with_path = input_api.os_path.normpath(f.LocalPath())
    return (file_with_path.endswith('.h') and
            (not file_with_path.startswith('third_party') or
             file_with_path.startswith(
               input_api.os_path.join('third_party', 'blink'))))

  def replace_special_with_underscore(string):
    return input_api.re.sub(r'[+\\/.-]', '_', string)

  for f in input_api.AffectedSourceFiles(is_chromium_header_file):
    guard_line_number = None
    seen_guard_end = False

    file_with_path = input_api.os_path.normpath(f.LocalPath())
    base_file_name = input_api.os_path.splitext(
      input_api.os_path.basename(file_with_path))[0]
    upper_base_file_name = base_file_name.upper()

    # Canonical guard: full path, uppercased, specials -> '_', trailing '_'.
    expected_guard = replace_special_with_underscore(
        file_with_path.upper() + '_')

    # For "path/elem/file_name.h" we should really only accept
    # PATH_ELEM_FILE_NAME_H_ per coding style. Unfortunately there
    # are too many (1000+) files with slight deviations from the
    # coding style. The most important part is that the include guard
    # is there, and that it's unique, not the name so this check is
    # forgiving for existing files.
    #
    # As code becomes more uniform, this could be made stricter.

    guard_name_pattern_list = [
      # Anything with the right suffix (maybe with an extra _).
      # To cover include guards with old Blink style.
      # Anything including the uppercase name of the file.
      r'\w*' + input_api.re.escape(replace_special_with_underscore(
          upper_base_file_name)) + r'\w*',
    guard_name_pattern = '|'.join(guard_name_pattern_list)
    guard_pattern = input_api.re.compile(
        r'#ifndef\s+(' + guard_name_pattern + ')')

    for line_number, line in enumerate(f.NewContents()):
      if 'no-include-guard-because-multiply-included' in line:
        guard_name = 'DUMMY' # To not trigger check outside the loop.

      if guard_name is None:
        match = guard_pattern.match(line)
          guard_name = match.group(1)
          guard_line_number = line_number

          # We allow existing files to use include guards whose names
          # don't match the chromium style guide, but new files should
          if not f.OldContents():
            if guard_name != expected_guard:
              errors.append(output_api.PresubmitPromptWarning(
                'Header using the wrong include guard name %s' % guard_name,
                ['%s:%d' % (f.LocalPath(), line_number + 1)],
                'Expected: %r\nFound: %r' % (expected_guard, guard_name)))
        # The line after #ifndef should have a #define of the same name.
        if line_number == guard_line_number + 1:
          expected_line = '#define %s' % guard_name
          if line != expected_line:
            errors.append(output_api.PresubmitPromptWarning(
              'Missing "%s" for include guard' % expected_line,
              ['%s:%d' % (f.LocalPath(), line_number + 1)],
              'Expected: %r\nGot: %r' % (expected_line, line)))

        if not seen_guard_end and line == '#endif // %s' % guard_name:
          seen_guard_end = True
        elif seen_guard_end:
          # Any non-blank line after #endif means the guard does not span
          # the whole file.
          if line.strip() != '':
            errors.append(output_api.PresubmitPromptWarning(
              'Include guard %s not covering the whole file' % (
                guard_name), [f.LocalPath()]))
            break # Nothing else to check and enough to warn once.

    if guard_name is None:
      errors.append(output_api.PresubmitPromptWarning(
        'Missing include guard %s' % expected_guard,
        'Missing include guard in %s\n'
        'Recommended name: %s\n'
        'This check can be disabled by having the string\n'
        'no-include-guard-because-multiply-included in the header.' %
        (f.LocalPath(), expected_guard)))
def _CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'

  file_inclusion_pattern = (
      # Implementation files are also scanned, via the extensions fragment.
      r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  source_file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=None)
  for f in input_api.AffectedSourceFiles(source_file_filter):
    include_file = False
    for _, line in f.ChangedContents():
      # A single CRLF-terminated changed line flags the whole file.
      if line.endswith('\r\n'):
      problems.append(f.LocalPath())

    return [output_api.PresubmitPromptWarning('Are you sure that you want '
        'these files to contain Windows style line endings?\n' +
        '\n'.join(problems))]
4080 def _CheckSyslogUseWarning(input_api, output_api, source_file_filter=None):
4081 """Checks that all source files use SYSLOG properly."""
# NOTE(review): the `syslog_files` initializer and the guard before the
# return are elided from this view; only the visible scan logic is described.
# Collect every changed line that mentions SYSLOG, recorded as "path:line"
# so the reviewer can jump straight to each call site.
4083 for f in input_api.AffectedSourceFiles(source_file_filter):
4084 for line_number, line in f.ChangedContents():
4085 if 'SYSLOG' in line:
4086 syslog_files.append(f.LocalPath() + ':' + str(line_number))
# Non-blocking prompt: SYSLOG entries may be uploaded off-device, so flag
# them for a manual privacy review rather than rejecting the change.
4089 return [output_api.PresubmitPromptWarning(
4090 'Please make sure there are no privacy sensitive bits of data in SYSLOG'
4091 ' calls.\nFiles to check:\n', items=syslog_files)]
4095 def CheckChangeOnUpload(input_api, output_api):
# Presubmit entry point run by depot_tools when a CL is uploaded (as opposed
# to CheckChangeOnCommit, which runs at commit time). Aggregates the shared
# checks plus upload-only checks into a single results list.
# NOTE(review): the `results` initializer, the `results.extend(` wrapper for
# the canned format check, and the final return are elided from this view.
4097 results.extend(_CommonChecks(input_api, output_api))
4098 results.extend(_CheckValidHostsInDEPS(input_api, output_api))
4100 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
4101 results.extend(_CheckUmaHistogramChanges(input_api, output_api))
4102 results.extend(_AndroidSpecificOnUploadChecks(input_api, output_api))
4103 results.extend(_CheckSyslogUseWarning(input_api, output_api))
4104 results.extend(_CheckGoogleSupportAnswerUrl(input_api, output_api))
4105 results.extend(_CheckUniquePtr(input_api, output_api))
4106 results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
4110 def GetTryServerMasterForBot(bot):
4111 """Returns the Try Server master for the given bot.
4113 It tries to guess the master from the bot name, but may still fail
4114 and return None. There is no longer a default master.
# NOTE(review): the `master_map` dict opener, the fallback guard around the
# substring heuristics, the 'win' branch, and the return are elided from
# this view; comments describe only what is visible.
4116 # Potentially ambiguous bot names are listed explicitly.
4118 'chromium_presubmit': 'master.tryserver.chromium.linux',
4119 'tools_build_presubmit': 'master.tryserver.chromium.linux',
# Exact-name lookup first; substring guessing below is only the fallback.
4121 master = master_map.get(bot)
# Order matters: 'android' is checked before 'linux' because Android bot
# names frequently also contain platform-like substrings.
4123 if 'android' in bot:
4124 master = 'master.tryserver.chromium.android'
4125 elif 'linux' in bot or 'presubmit' in bot:
4126 master = 'master.tryserver.chromium.linux'
4128 master = 'master.tryserver.chromium.win'
4129 elif 'mac' in bot or 'ios' in bot:
4130 master = 'master.tryserver.chromium.mac'
4134 def CheckChangeOnCommit(input_api, output_api):
# Presubmit entry point run by depot_tools at commit time. Runs the shared
# checks plus commit-only gates (tree-open status, BUG field, description).
# NOTE(review): the `results` initializer, part of the CheckTreeIsOpen
# argument list, a `results.extend(` wrapper, and the final return are
# elided from this view.
4136 results.extend(_CommonChecks(input_api, output_api))
4137 results.extend(_AndroidSpecificOnCommitChecks(input_api, output_api))
4138 # Make sure the tree is 'open'.
4139 results.extend(input_api.canned_checks.CheckTreeIsOpen(
4142 json_url='http://chromium-status.appspot.com/current?format=json'))
4145 input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
4146 results.extend(input_api.canned_checks.CheckChangeHasBugField(
4147 input_api, output_api))
4148 results.extend(input_api.canned_checks.CheckChangeHasDescription(
4149 input_api, output_api))
4153 def _CheckTranslationScreenshots(input_api, output_api):
# Verifies that changes to translatable strings in .grd/.grdp files come
# with the expected screenshot .sha1 files (and no raw .png files) so
# translators get UI context. Emits only non-blocking notify results.
# NOTE(review): many original lines are elided from this view (the sys
# import, the try/finally around the sys.path mutation, list initializers
# such as `missing_sha1` and `results`, parts of the WRAPPER template, and
# the final return). Comments describe only the visible logic.
4154 PART_FILE_TAG = "part"
4157 from io import StringIO
# Temporarily extend sys.path so the bundled GRIT tool (tools/grit) can be
# imported; the original path is restored afterwards (restore at 4167).
4160 old_sys_path = sys.path
4161 sys.path = sys.path + [input_api.os_path.join(
4162 input_api.PresubmitLocalPath(), 'tools', 'grit')]
4163 import grit.grd_reader
4164 import grit.node.message
4167 sys.path = old_sys_path
4169 def _GetGrdMessages(grd_path_or_string, dir_path='.'):
4170 """Load the grd file and return a dict of message ids to messages.
4172 Ignores any nested grdp files pointed by <part> tag.
4174 doc = grit.grd_reader.Parse(grd_path_or_string, dir_path,
4175 stop_after=None, first_ids_file=None,
4176 debug=False, defines=None,
# <part> tags are ignored so each file's messages are attributed to it
# alone; nested .grdp files are diffed separately when they are affected.
4177 tags_to_ignore=set([PART_FILE_TAG]))
4179 msg.attrs['name']:msg for msg in doc.GetChildrenOfType(
4180 grit.node.message.MessageNode)
4183 def _GetGrdpMessagesFromString(grdp_string):
4184 """Parses the contents of a grdp file given in grdp_string.
4186 grd_reader can't parse grdp files directly. Instead, this creates a
4187 temporary directory with a grd file pointing to the grdp file, and loads the
4188 grd from there. Any nested grdp files (pointed by <part> tag) are ignored.
4190 WRAPPER = """<?xml version="1.0" encoding="utf-8"?>
4191 <grit latest_public_release="1" current_release="1">
4194 <part file="sub.grdp" />
# Write the wrapper grd and the grdp content into a temp dir, then reuse the
# regular grd loader on the wrapper.
4199 with grit.util.TempDir({'main.grd': WRAPPER,
4200 'sub.grdp': grdp_string}) as temp_dir:
4201 return _GetGrdMessages(temp_dir.GetPath('main.grd'), temp_dir.GetPath())
# Paths added or modified by this CL (used to confirm .sha1 uploads exist)
# and paths deleted by it (used to confirm stale .sha1 files are removed).
4203 new_or_added_paths = set(f.LocalPath()
4204 for f in input_api.AffectedFiles()
4205 if (f.Action() == 'A' or f.Action() == 'M'))
4206 removed_paths = set(f.LocalPath()
4207 for f in input_api.AffectedFiles(include_deletes=True)
4208 if f.Action() == 'D')
4210 affected_grds = [f for f in input_api.AffectedFiles()
4211 if (f.LocalPath().endswith('.grd') or
4212 f.LocalPath().endswith('.grdp'))]
4213 affected_png_paths = [f.AbsoluteLocalPath()
4214 for f in input_api.AffectedFiles()
4215 if (f.LocalPath().endswith('.png'))]
4217 # Check for screenshots. Developers can upload screenshots using
4218 # tools/translation/upload_screenshots.py which finds and uploads
4219 # images associated with .grd files (e.g. test_grd/IDS_STRING.png for the
4220 # message named IDS_STRING in test.grd) and produces a .sha1 file (e.g.
4221 # test_grd/IDS_STRING.png.sha1) for each png when the upload is successful.
4223 # The logic here is as follows:
4225 # - If the CL has a .png file under the screenshots directory for a grd
4226 # file, warn the developer. Actual images should never be checked into the
4229 # - If the CL contains modified or new messages in grd files and doesn't
4230 # contain the corresponding .sha1 files, warn the developer to add images
4231 # and upload them via tools/translation/upload_screenshots.py.
4233 # - If the CL contains modified or new messages in grd files and the
4234 # corresponding .sha1 files, everything looks good.
4236 # - If the CL contains removed messages in grd files but the corresponding
4237 # .sha1 files aren't removed, warn the developer to remove them.
4238 unnecessary_screenshots = []
4240 unnecessary_sha1_files = []
# Records a missing <screenshots_dir>/<message_id>.png.sha1 for any message
# that was added or modified without an uploaded screenshot.
4243 def _CheckScreenshotAdded(screenshots_dir, message_id):
4244 sha1_path = input_api.os_path.join(
4245 screenshots_dir, message_id + '.png.sha1')
4246 if sha1_path not in new_or_added_paths:
4247 missing_sha1.append(sha1_path)
# Records a stale .sha1 for any message that was removed but whose
# screenshot hash file was not deleted in the same CL.
4250 def _CheckScreenshotRemoved(screenshots_dir, message_id):
4251 sha1_path = input_api.os_path.join(
4252 screenshots_dir, message_id + '.png.sha1')
4253 if sha1_path not in removed_paths:
4254 unnecessary_sha1_files.append(sha1_path)
4257 for f in affected_grds:
4258 file_path = f.LocalPath()
4259 old_id_to_msg_map = {}
4260 new_id_to_msg_map = {}
4261 if file_path.endswith('.grdp'):
# NOTE(review): `unicode()` is Python 2 only — this presubmit presumably
# targets the py2 depot_tools runner; verify before running under py3.
4263 old_id_to_msg_map = _GetGrdpMessagesFromString(
4264 unicode('\n'.join(f.OldContents())))
4266 new_id_to_msg_map = _GetGrdpMessagesFromString(
4267 unicode('\n'.join(f.NewContents())))
4270 old_id_to_msg_map = _GetGrdMessages(
4271 StringIO(unicode('\n'.join(f.OldContents()))))
4273 new_id_to_msg_map = _GetGrdMessages(
4274 StringIO(unicode('\n'.join(f.NewContents()))))
4276 # Compute added, removed and modified message IDs.
4277 old_ids = set(old_id_to_msg_map)
4278 new_ids = set(new_id_to_msg_map)
4279 added_ids = new_ids - old_ids
4280 removed_ids = old_ids - new_ids
4281 modified_ids = set([])
# A message present in both versions counts as modified only when its
# serialized XML differs (attribute or text change).
4282 for key in old_ids.intersection(new_ids):
4283 if (old_id_to_msg_map[key].FormatXml()
4284 != new_id_to_msg_map[key].FormatXml()):
4285 modified_ids.add(key)
# Screenshot dir convention: foo.grd -> foo_grd/ next to the grd file.
4287 grd_name, ext = input_api.os_path.splitext(
4288 input_api.os_path.basename(file_path))
4289 screenshots_dir = input_api.os_path.join(
4290 input_api.os_path.dirname(file_path), grd_name + ext.replace('.', '_'))
4292 # Check the screenshot directory for .png files. Warn if there is any.
4293 for png_path in affected_png_paths:
4294 if png_path.startswith(screenshots_dir):
4295 unnecessary_screenshots.append(png_path)
4297 for added_id in added_ids:
4298 _CheckScreenshotAdded(screenshots_dir, added_id)
4300 for modified_id in modified_ids:
4301 _CheckScreenshotAdded(screenshots_dir, modified_id)
4303 for removed_id in removed_ids:
4304 _CheckScreenshotRemoved(screenshots_dir, removed_id)
# All three findings below are notify-level (informational), not blocking.
4307 if unnecessary_screenshots:
4308 results.append(output_api.PresubmitNotifyResult(
4309 'Do not include actual screenshots in the changelist. Run '
4310 'tools/translate/upload_screenshots.py to upload them instead:',
4311 sorted(unnecessary_screenshots)))
4314 results.append(output_api.PresubmitNotifyResult(
4315 'You are adding or modifying UI strings.\n'
4316 'To ensure the best translations, take screenshots of the relevant UI '
4317 '(https://g.co/chrome/translation) and add these files to your '
4318 'changelist:', sorted(missing_sha1)))
4320 if unnecessary_sha1_files:
4321 results.append(output_api.PresubmitNotifyResult(
4322 'You removed strings associated with these files. Remove:',
4323 sorted(unnecessary_sha1_files)))