fixup! Upload upstream chromium 85.0.4183.93
[platform/framework/web/chromium-efl.git] / PRESUBMIT.py
index 7f2ec1b..bc0f59a 100644 (file)
@@ -17,12 +17,14 @@ _EXCLUDED_PATHS = (
     r"^skia[\\/].*",
     r"^third_party[\\/]blink[\\/].*",
     r"^third_party[\\/]breakpad[\\/].*",
+    # sqlite is an imported third party dependency.
+    r"^third_party[\\/]sqlite[\\/].*",
     r"^v8[\\/].*",
     r".*MakeFile$",
     r".+_autogen\.h$",
+    r".+_pb2\.py$",
     r".+[\\/]pnacl_shim\.c$",
     r"^gpu[\\/]config[\\/].*_list_json\.cc$",
-    r"^chrome[\\/]browser[\\/]resources[\\/]pdf[\\/]index.js",
     r"tools[\\/]md_browser[\\/].*\.css$",
     # Test pages for Maps telemetry tests.
     r"tools[\\/]perf[\\/]page_sets[\\/]maps_perf_test.*",
@@ -48,6 +50,7 @@ _TEST_CODE_EXCLUDED_PATHS = (
     r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
     r'.+_(api|browser|eg|int|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
         _IMPLEMENTATION_EXTENSIONS,
+    r'.+_(fuzz|fuzzer)(_[a-z]+)?%s' % _IMPLEMENTATION_EXTENSIONS,
     r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
     r'.*[\\/](test|tool(s)?)[\\/].*',
     # content_shell is used for running layout tests.
@@ -56,14 +59,21 @@ _TEST_CODE_EXCLUDED_PATHS = (
     r'mojo[\\/]examples[\\/].*',
     # Launcher for running iOS tests on the simulator.
     r'testing[\\/]iossim[\\/]iossim\.mm$',
+    # EarlGrey app side code for tests.
+    r'ios[\\/].*_app_interface\.mm$',
+    # Views Examples code
+    r'ui[\\/]views[\\/]examples[\\/].*',
 )
 
+_THIRD_PARTY_EXCEPT_BLINK = 'third_party/(?!blink/)'
 
 _TEST_ONLY_WARNING = (
     'You might be calling functions intended only for testing from\n'
     'production code.  It is OK to ignore this warning if you know what\n'
     'you are doing, as the heuristics used to detect the situation are\n'
-    'not perfect.  The commit queue will not block on this warning.')
+    'not perfect.  The commit queue will not block on this warning,\n'
+    'however the android-binary-size trybot will block if the method\n'
+    'exists in the release apk.')
 
 
 _INCLUDE_ORDER_WARNING = (
@@ -71,7 +81,29 @@ _INCLUDE_ORDER_WARNING = (
     'collation (LC_COLLATE=C) and check\nhttps://google.github.io/styleguide/'
     'cppguide.html#Names_and_Order_of_Includes')
 
+# Format: Sequence of tuples containing:
+# * Full import path.
+# * Sequence of strings to show when the pattern matches.
+# * Sequence of path or filename exceptions to this rule
+_BANNED_JAVA_IMPORTS = (
+    (
+      'java.net.URI;',
+      (
+       'Use org.chromium.url.GURL instead of java.net.URI, where possible.',
+      ),
+      (
+        'net/android/javatests/src/org/chromium/net/'
+        'AndroidProxySelectorTest.java',
+        'components/cronet/',
+        'third_party/robolectric/local/',
+      ),
+    ),
+)
 
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+# * Error flag. True if a match is a presubmit error, otherwise it's a warning.
 _BANNED_JAVA_FUNCTIONS = (
     (
       'StrictMode.allowThreadDiskReads()',
@@ -91,6 +123,10 @@ _BANNED_JAVA_FUNCTIONS = (
     ),
 )
 
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+# * Error flag. True if a match is a presubmit error, otherwise it's a warning.
 _BANNED_OBJC_FUNCTIONS = (
     (
       'addTrackingRect:',
@@ -182,8 +218,20 @@ _BANNED_OBJC_FUNCTIONS = (
       ),
       False,
     ),
+    (
+      'freeWhenDone:NO',
+      (
+        'The use of "freeWhenDone:NO" with the NoCopy creation of ',
+        'Foundation types is prohibited.',
+      ),
+      True,
+    ),
 )
 
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+# * Error flag. True if a match is a presubmit error, otherwise it's a warning.
 _BANNED_IOS_OBJC_FUNCTIONS = (
     (
       r'/\bTEST[(]',
@@ -206,20 +254,189 @@ _BANNED_IOS_OBJC_FUNCTIONS = (
     ),
 )
 
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+# * Error flag. True if a match is a presubmit error, otherwise it's a warning.
+_BANNED_IOS_EGTEST_FUNCTIONS = (
+    (
+      r'/\bEXPECT_OCMOCK_VERIFY\b',
+      (
+        'EXPECT_OCMOCK_VERIFY should not be used in EarlGrey tests because ',
+        'it is meant for GTests. Use [mock verify] instead.'
+      ),
+      True,
+    ),
+)
 
+# Directories that contain deprecated Bind() or Callback types.
+# Find sub-directories from a given directory by running:
+# for i in `find . -maxdepth 1 -type d|sort`; do
+#   echo "-- $i"
+#   (cd $i; git grep -nP 'base::(Bind\(|(Callback<|Closure))'|wc -l)
+# done
+#
+# TODO(crbug.com/714018): Remove (or narrow the scope of) paths from this list
+# when they have been converted to modern callback types (OnceCallback,
+# RepeatingCallback, BindOnce, BindRepeating) in order to enable presubmit
+# checks for them and prevent regressions.
+_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK = '|'.join((
+  '^apps/',
+  '^base/callback.h',  # Intentional.
+  '^chrome/app/',
+  '^chrome/browser/',
+  '^chrome/chrome_elf/',
+  '^chrome/common/',
+  '^chrome/installer/',
+  '^chrome/renderer/',
+  '^chrome/services/',
+  '^chrome/test/',
+  '^chrome/tools/',
+  '^chrome/utility/',
+  '^chromecast/media/',
+  '^chromecast/metrics/',
+  '^chromecast/net/',
+  '^chromeos/attestation/',
+  '^chromeos/components/',
+  '^chromeos/network/',
+  '^chromeos/services/',
+  '^components/arc/',
+  '^components/assist_ranker/',
+  '^components/autofill/',
+  '^components/autofill_assistant/',
+  '^components/browser_watcher/',
+  '^components/cast_channel/',
+  '^components/chromeos_camera/',
+  '^components/component_updater/',
+  '^components/content_settings/',
+  '^components/cronet/',
+  '^components/data_reduction_proxy/',
+  '^components/domain_reliability/',
+  '^components/dom_distiller/',
+  '^components/download/internal/common/',
+  '^components/drive/',
+  '^components/exo/',
+  '^components/feature_engagement/',
+  '^components/feedback/',
+  '^components/flags_ui/',
+  '^components/gcm_driver/',
+  '^components/guest_view/',
+  '^components/heap_profiling/',
+  '^components/history/',
+  '^components/image_fetcher/',
+  '^components/invalidation/',
+  '^components/keyed_service/',
+  '^components/login/',
+  '^components/metrics/',
+  '^components/metrics_services_manager/',
+  '^components/nacl/',
+  '^components/navigation_interception/',
+  '^components/net_log/',
+  '^components/network_time/',
+  '^components/ntp_snippets/',
+  '^components/ntp_tiles/',
+  '^components/offline_pages/',
+  '^components/omnibox/',
+  '^components/ownership/',
+  '^components/password_manager/',
+  '^components/payments/',
+  '^components/plugins/',
+  '^components/policy/',
+  '^components/proxy_config/',
+  '^components/quirks/',
+  '^components/remote_cocoa/',
+  '^components/rlz/',
+  '^components/safe_browsing/',
+  '^components/search_engines/',
+  '^components/search_provider_logos/',
+  '^components/security_interstitials/',
+  '^components/security_state/',
+  '^components/services/',
+  '^components/sessions/',
+  '^components/signin/',
+  '^components/ssl_errors/',
+  '^components/storage_monitor/',
+  '^components/subresource_filter/',
+  '^components/suggestions/',
+  '^components/sync/',
+  '^components/sync_preferences/',
+  '^components/sync_sessions/',
+  '^components/test/',
+  '^components/tracing/',
+  '^components/translate/',
+  '^components/ukm/',
+  '^components/update_client/',
+  '^components/upload_list/',
+  '^components/variations/',
+  '^components/visitedlink/',
+  '^components/webcrypto/',
+  '^components/webdata/',
+  '^components/webdata_services/',
+  '^device/bluetooth/',
+  '^extensions/browser/',
+  '^extensions/renderer/',
+  '^google_apis/drive/',
+  '^google_apis/gcm/',
+  '^ios/chrome/',
+  '^ios/components/',
+  '^ios/net/',
+  '^ios/web/',
+  '^ios/web_view/',
+  '^ipc/',
+  '^media/base/',
+  '^media/blink/',
+  '^media/cast/',
+  '^media/cdm/',
+  '^media/device_monitors/',
+  '^media/filters/',
+  '^media/formats/',
+  '^media/gpu/',
+  '^media/mojo/',
+  '^media/renderers/',
+  '^net/',
+  '^ppapi/proxy/',
+  '^ppapi/shared_impl/',
+  '^ppapi/tests/',
+  '^ppapi/thunk/',
+  '^remoting/base/',
+  '^remoting/client/',
+  '^remoting/host/',
+  '^remoting/internal/',
+  '^remoting/protocol/',
+  '^services/',
+  '^third_party/blink/',
+  '^tools/clang/base_bind_rewriters/',  # Intentional.
+  '^tools/gdb/gdb_chrome.py',  # Intentional.
+))
+
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+# * Error flag. True if a match is a presubmit error, otherwise it's a warning.
+# * Sequence of paths to *not* check (regexps).
 _BANNED_CPP_FUNCTIONS = (
-    # Make sure that gtest's FRIEND_TEST() macro is not used; the
-    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
-    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
     (
-      r'\bNULL\b',
+      r'/\bNULL\b',
       (
        'New code should not use NULL. Use nullptr instead.',
       ),
-      True,
+      False,
       (),
     ),
     (
+      r'/\busing namespace ',
+      (
+       'Using directives ("using namespace x") are banned by the Google Style',
+       'Guide ( http://google.github.io/styleguide/cppguide.html#Namespaces ).',
+       'Explicitly qualify symbols or use using declarations ("using x::foo").',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    # Make sure that gtest's FRIEND_TEST() macro is not used; the
+    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
+    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
+    (
       'FRIEND_TEST(',
       (
        'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
@@ -229,7 +446,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'XSelectInput|CWEventMask|XCB_CW_EVENT_MASK',
+      r'/XSelectInput|CWEventMask|XCB_CW_EVENT_MASK',
       (
        'Chrome clients wishing to select events on X windows should use',
        'ui::XScopedEventSelector.  It is safe to ignore this warning only if',
@@ -238,13 +455,23 @@ _BANNED_CPP_FUNCTIONS = (
       ),
       True,
       (
+        r"^ui[\\/]events[\\/]x[\\/].*\.cc$",
         r"^ui[\\/]gl[\\/].*\.cc$",
         r"^media[\\/]gpu[\\/].*\.cc$",
         r"^gpu[\\/].*\.cc$",
+        r"^ui[\\/]base[\\/]x[\\/]xwmstartupcheck[\\/]xwmstartupcheck\.cc$",
+        ),
+    ),
+    (
+      r'/\WX?(((Width|Height)(MM)?OfScreen)|(Display(Width|Height)))\(',
+      (
+       'Use the corresponding fields in x11::Screen instead.',
       ),
+      True,
+      (),
     ),
     (
-      r'XInternAtom|xcb_intern_atom',
+      r'/XInternAtom|xcb_intern_atom',
       (
        'Use gfx::GetAtom() instead of interning atoms directly.',
       ),
@@ -429,7 +656,7 @@ _BANNED_CPP_FUNCTIONS = (
       'base::ScopedMockTimeMessageLoopTaskRunner',
       (
         'ScopedMockTimeMessageLoopTaskRunner is deprecated. Prefer',
-        'ScopedTaskEnvironment::MainThreadType::MOCK_TIME. There are still a',
+        'TaskEnvironment::TimeSource::MOCK_TIME. There are still a',
         'few cases that may require a ScopedMockTimeMessageLoopTaskRunner',
         '(i.e. mocking the main MessageLoopForUI in browser_tests), but check',
         'with gab@ first if you think you need it)',
@@ -438,7 +665,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'std::regex',
+      'std::regex',
       (
         'Using std::regex adds unnecessary binary size to Chrome. Please use',
         're2::RE2 instead (crbug.com/755321)',
@@ -447,6 +674,174 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
+      r'/\bstd::stoi\b',
+      (
+        'std::stoi uses exceptions to communicate results. ',
+        'Use base::StringToInt() instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stol\b',
+      (
+        'std::stol uses exceptions to communicate results. ',
+        'Use base::StringToInt() instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stoul\b',
+      (
+        'std::stoul uses exceptions to communicate results. ',
+        'Use base::StringToUint() instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stoll\b',
+      (
+        'std::stoll uses exceptions to communicate results. ',
+        'Use base::StringToInt64() instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stoull\b',
+      (
+        'std::stoull uses exceptions to communicate results. ',
+        'Use base::StringToUint64() instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stof\b',
+      (
+        'std::stof uses exceptions to communicate results. ',
+        'For locale-independent values, e.g. reading numbers from disk',
+        'profiles, use base::StringToDouble().',
+        'For user-visible values, parse using ICU.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stod\b',
+      (
+        'std::stod uses exceptions to communicate results. ',
+        'For locale-independent values, e.g. reading numbers from disk',
+        'profiles, use base::StringToDouble().',
+        'For user-visible values, parse using ICU.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::stold\b',
+      (
+        'std::stold uses exceptions to communicate results. ',
+        'For locale-independent values, e.g. reading numbers from disk',
+        'profiles, use base::StringToDouble().',
+        'For user-visible values, parse using ICU.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::to_string\b',
+      (
+        'std::to_string is locale dependent and slower than alternatives.',
+        'For locale-independent strings, e.g. writing numbers to disk',
+        'profiles, use base::NumberToString().',
+        'For user-visible strings, use base::FormatNumber() and',
+        'the related functions in base/i18n/number_formatting.h.',
+      ),
+      False,  # Only a warning since it is already used.
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::shared_ptr\b',
+      (
+        'std::shared_ptr should not be used. Use scoped_refptr instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK,
+       '^third_party/blink/renderer/core/typed_arrays/array_buffer/' +
+         'array_buffer_contents\.(cc|h)'],
+    ),
+    (
+      r'/\bstd::weak_ptr\b',
+      (
+        'std::weak_ptr should not be used. Use base::WeakPtr instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      r'/\blong long\b',
+      (
+        'long long is banned. Use stdint.h if you need a 64 bit number.',
+      ),
+      False,  # Only a warning since it is already used.
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Don't warn in third_party folders.
+    ),
+    (
+      r'/\bstd::bind\b',
+      (
+        'std::bind is banned because of lifetime risks.',
+        'Use base::BindOnce or base::BindRepeating instead.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      r'/#include <chrono>',
+      (
+        '<chrono> overlaps with Time APIs in base. Keep using',
+        'base classes.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      r'/#include <exception>',
+      (
+        'Exceptions are banned and disabled in Chromium.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      r'/\bstd::function\b',
+      (
+        'std::function is banned. Instead use base::Callback which directly',
+        'supports Chromium\'s weak pointers, ref counting and more.',
+      ),
+      False,  # Only a warning since it is already used.
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Do not warn in third_party folders.
+    ),
+    (
+      r'/#include <random>',
+      (
+        'Do not use any random number engines from <random>. Instead',
+        'use base::RandomBitGenerator.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      r'/\bstd::ratio\b',
+      (
+        'std::ratio is banned by the Google Style Guide.',
+      ),
+      True,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
       (r'/base::ThreadRestrictions::(ScopedAllowIO|AssertIOAllowed|'
        r'DisallowWaiting|AssertWaitAllowed|SetWaitAllowed|ScopedAllowWait)'),
       (
@@ -458,32 +853,32 @@ _BANNED_CPP_FUNCTIONS = (
     (
       r'/\bbase::Bind\(',
       (
-          'Please consider using base::Bind{Once,Repeating} instead',
+          'Please use base::Bind{Once,Repeating} instead',
           'of base::Bind. (crbug.com/714018)',
       ),
       False,
-      (),
+      (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
     ),
     (
-      r'/\bbase::Callback<',
+      r'/\bbase::Callback[<:]',
       (
-          'Please consider using base::{Once,Repeating}Callback instead',
+          'Please use base::{Once,Repeating}Callback instead',
           'of base::Callback. (crbug.com/714018)',
       ),
       False,
-      (),
+      (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
     ),
     (
       r'/\bbase::Closure\b',
       (
-          'Please consider using base::{Once,Repeating}Closure instead',
+          'Please use base::{Once,Repeating}Closure instead',
           'of base::Closure. (crbug.com/714018)',
       ),
       False,
-      (),
+      (_NOT_CONVERTED_TO_MODERN_BIND_AND_CALLBACK,),
     ),
     (
-      r'RunMessageLoop',
+      r'/\bRunMessageLoop\b',
       (
           'RunMessageLoop is deprecated, use RunLoop instead.',
       ),
@@ -491,7 +886,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'RunThisRunLoop',
+      'RunThisRunLoop',
       (
           'RunThisRunLoop is deprecated, use RunLoop directly instead.',
       ),
@@ -499,7 +894,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'RunAllPendingInMessageLoop()',
+      'RunAllPendingInMessageLoop()',
       (
           "Prefer RunLoop over RunAllPendingInMessageLoop, please contact gab@",
           "if you're convinced you need this.",
@@ -508,10 +903,10 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'RunAllPendingInMessageLoop(BrowserThread',
+      'RunAllPendingInMessageLoop(BrowserThread',
       (
           'RunAllPendingInMessageLoop is deprecated. Use RunLoop for',
-          'BrowserThread::UI, TestBrowserThreadBundle::RunIOThreadUntilIdle',
+          'BrowserThread::UI, BrowserTaskEnvironment::RunIOThreadUntilIdle',
           'for BrowserThread::IO, and prefer RunLoop::QuitClosure to observe',
           'async events instead of flushing threads.',
       ),
@@ -527,7 +922,7 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      r'GetDeferredQuitTaskForRunLoop',
+      'GetDeferredQuitTaskForRunLoop',
       (
           "GetDeferredQuitTaskForRunLoop shouldn't be needed, please contact",
           "gab@ if you found a use case where this is the only solution.",
@@ -536,9 +931,9 @@ _BANNED_CPP_FUNCTIONS = (
       (),
     ),
     (
-      'sqlite3_initialize',
+      'sqlite3_initialize(',
       (
-        'Instead of sqlite3_initialize, depend on //sql, ',
+        'Instead of calling sqlite3_initialize(), depend on //sql, ',
         '#include "sql/initialize.h" and use sql::EnsureSqliteInitialized().',
       ),
       True,
@@ -548,25 +943,7 @@ _BANNED_CPP_FUNCTIONS = (
       ),
     ),
     (
-      'net::URLFetcher',
-      (
-        'net::URLFetcher should no longer be used in content embedders. ',
-        'Instead, use network::SimpleURLLoader instead, which supports ',
-        'an out-of-process network stack. ',
-        'net::URLFetcher may still be used in binaries that do not embed',
-        'content.',
-      ),
-      False,
-      (
-        r'^ios[\\/].*\.(cc|h)$',
-        r'.*[\\/]ios[\\/].*\.(cc|h)$',
-        r'.*_ios\.(cc|h)$',
-        r'^net[\\/].*\.(cc|h)$',
-        r'.*[\\/]tools[\\/].*\.(cc|h)$',
-      ),
-    ),
-    (
-      r'std::random_shuffle',
+      'std::random_shuffle',
       (
         'std::random_shuffle is deprecated in C++14, and removed in C++17. Use',
         'base::RandomShuffle instead.'
@@ -586,13 +963,289 @@ _BANNED_CPP_FUNCTIONS = (
       'GetAddressOf',
       (
         'Improper use of Microsoft::WRL::ComPtr<T>::GetAddressOf() has been ',
-        'implicated in a few leaks. Use operator& instead.'
+        'implicated in a few leaks. Use operator& instead. See ',
+        'http://crbug.com/914910 for more conversion guidance.'
+      ),
+      True,
+      (),
+    ),
+    (
+      'DEFINE_TYPE_CASTS',
+      (
+        'DEFINE_TYPE_CASTS is deprecated. Instead, use downcast helpers from ',
+        '//third_party/blink/renderer/platform/casting.h.'
+      ),
+      True,
+      (
+        r'^third_party/blink/renderer/.*\.(cc|h)$',
+      ),
+    ),
+    (
+      r'/\bIsHTML.+Element\(\b',
+      (
+        'Function IsHTMLXXXXElement is deprecated. Instead, use downcast ',
+        ' helpers IsA<HTMLXXXXElement> from ',
+        '//third_party/blink/renderer/platform/casting.h.'
+      ),
+      False,
+      (
+        r'^third_party/blink/renderer/.*\.(cc|h)$',
+      ),
+    ),
+    (
+      r'/\bToHTML.+Element(|OrNull)\(\b',
+      (
+        'Function ToHTMLXXXXElement and ToHTMLXXXXElementOrNull are '
+        'deprecated. Instead, use downcast helpers To<HTMLXXXXElement> '
+        'and DynamicTo<HTMLXXXXElement> from ',
+        '//third_party/blink/renderer/platform/casting.h.',
+        'auto* html_xxxx_ele = To<HTMLXXXXElement>(n)',
+        'auto* html_xxxx_ele_or_null = DynamicTo<HTMLXXXXElement>(n)'
+      ),
+      False,
+      (
+        r'^third_party/blink/renderer/.*\.(cc|h)$',
+      ),
+    ),
+    (
+      r'/\bmojo::DataPipe\b',
+      (
+        'mojo::DataPipe is deprecated. Use mojo::CreateDataPipe instead.',
+      ),
+      True,
+      (),
+    ),
+    (
+      'SHFileOperation',
+      (
+        'SHFileOperation was deprecated in Windows Vista, and there are less ',
+        'complex functions to achieve the same goals. Use IFileOperation for ',
+        'any esoteric actions instead.'
+      ),
+      True,
+      (),
+    ),
+    (
+      'StringFromGUID2',
+      (
+        'StringFromGUID2 introduces an unnecessary dependency on ole32.dll.',
+        'Use base::win::String16FromGUID instead.'
+      ),
+      True,
+      (
+        r'/base/win/win_util_unittest.cc'
+      ),
+    ),
+    (
+      'StringFromCLSID',
+      (
+        'StringFromCLSID introduces an unnecessary dependency on ole32.dll.',
+        'Use base::win::String16FromGUID instead.'
+      ),
+      True,
+      (
+        r'/base/win/win_util_unittest.cc'
+      ),
+    ),
+    (
+      'kCFAllocatorNull',
+      (
+        'The use of kCFAllocatorNull with the NoCopy creation of ',
+        'CoreFoundation types is prohibited.',
       ),
       True,
       (),
     ),
+    (
+      'mojo::ConvertTo',
+      (
+        'mojo::ConvertTo and TypeConverter are deprecated. Please consider',
+        'StructTraits / UnionTraits / EnumTraits / ArrayTraits / MapTraits /',
+        'StringTraits if you would like to convert between custom types and',
+        'the wire format of mojom types.'
+      ),
+      False,
+      (
+        r'^fuchsia/engine/browser/url_request_rewrite_rules_manager\.cc$',
+        r'^fuchsia/engine/url_request_rewrite_type_converters\.cc$',
+        r'^third_party/blink/.*\.(cc|h)$',
+        r'^content/renderer/.*\.(cc|h)$',
+      ),
+    ),
+    (
+      'GetInterfaceProvider',
+      (
+        'InterfaceProvider is deprecated.',
+        'Please use ExecutionContext::GetBrowserInterfaceBroker and overrides',
+        'or Platform::GetBrowserInterfaceBroker.'
+      ),
+      False,
+      (),
+    ),
+    (
+      'CComPtr',
+      (
+        'New code should use Microsoft::WRL::ComPtr from wrl/client.h as a ',
+        'replacement for CComPtr from ATL. See http://crbug.com/5027 for more ',
+        'details.'
+      ),
+      False,
+      (),
+    ),
+    (
+      r'/\b(IFACE|STD)METHOD_?\(',
+      (
+        'IFACEMETHOD() and STDMETHOD() make code harder to format and read.',
+        'Instead, always use IFACEMETHODIMP in the declaration.'
+      ),
+      False,
+      [_THIRD_PARTY_EXCEPT_BLINK],  # Not an error in third_party folders.
+    ),
+    (
+      'set_owned_by_client',
+      (
+        'set_owned_by_client is deprecated.',
+        'views::View already owns the child views by default. This introduces ',
+        'a competing ownership model which makes the code difficult to reason ',
+        'about. See http://crbug.com/1044687 for more details.'
+      ),
+      False,
+      (),
+    ),
+    (
+      r'/\bTRACE_EVENT_ASYNC_',
+      (
+          'Please use TRACE_EVENT_NESTABLE_ASYNC_.. macros instead',
+          'of TRACE_EVENT_ASYNC_.. (crbug.com/1038710).',
+      ),
+      False,
+      (
+        r'^base/trace_event/.*',
+        r'^base/tracing/.*',
+      ),
+    ),
 )
 
+# Format: Sequence of tuples containing:
+# * String pattern or, if starting with a slash, a regular expression.
+# * Sequence of strings to show when the pattern matches.
+_DEPRECATED_MOJO_TYPES = (
+    (
+      r'/\bmojo::AssociatedBinding\b',
+      (
+        'mojo::AssociatedBinding<Interface> is deprecated.',
+        'Use mojo::AssociatedReceiver<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::AssociatedBindingSet\b',
+      (
+        'mojo::AssociatedBindingSet<Interface> is deprecated.',
+        'Use mojo::AssociatedReceiverSet<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::AssociatedInterfacePtr\b',
+      (
+        'mojo::AssociatedInterfacePtr<Interface> is deprecated.',
+        'Use mojo::AssociatedRemote<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::AssociatedInterfacePtrInfo\b',
+      (
+        'mojo::AssociatedInterfacePtrInfo<Interface> is deprecated.',
+        'Use mojo::PendingAssociatedRemote<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::AssociatedInterfaceRequest\b',
+      (
+        'mojo::AssociatedInterfaceRequest<Interface> is deprecated.',
+        'Use mojo::PendingAssociatedReceiver<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::Binding\b',
+      (
+        'mojo::Binding<Interface> is deprecated.',
+        'Use mojo::Receiver<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::BindingSet\b',
+      (
+        'mojo::BindingSet<Interface> is deprecated.',
+        'Use mojo::ReceiverSet<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::InterfacePtr\b',
+      (
+        'mojo::InterfacePtr<Interface> is deprecated.',
+        'Use mojo::Remote<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::InterfacePtrInfo\b',
+      (
+        'mojo::InterfacePtrInfo<Interface> is deprecated.',
+        'Use mojo::PendingRemote<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::InterfaceRequest\b',
+      (
+        'mojo::InterfaceRequest<Interface> is deprecated.',
+        'Use mojo::PendingReceiver<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::MakeRequest\b',
+      (
+        'mojo::MakeRequest is deprecated.',
+        'Use mojo::Remote::BindNewPipeAndPassReceiver() instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::MakeRequestAssociatedWithDedicatedPipe\b',
+      (
+        'mojo::MakeRequestAssociatedWithDedicatedPipe is deprecated.',
+        'Use mojo::AssociatedRemote::'
+        'BindNewEndpointAndPassDedicatedReceiverForTesting() instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::MakeStrongBinding\b',
+      (
+        'mojo::MakeStrongBinding is deprecated.',
+        'Either migrate to mojo::UniqueReceiverSet, if possible, or use',
+        'mojo::MakeSelfOwnedReceiver() instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::MakeStrongAssociatedBinding\b',
+      (
+        'mojo::MakeStrongAssociatedBinding is deprecated.',
+        'Either migrate to mojo::UniqueAssociatedReceiverSet, if possible, or',
+        'use mojo::MakeSelfOwnedAssociatedReceiver() instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::StrongAssociatedBindingSet\b',
+      (
+        'mojo::StrongAssociatedBindingSet<Interface> is deprecated.',
+        'Use mojo::UniqueAssociatedReceiverSet<Interface> instead.',
+      ),
+    ),
+    (
+      r'/\bmojo::StrongBindingSet\b',
+      (
+        'mojo::StrongBindingSet<Interface> is deprecated.',
+        'Use mojo::UniqueReceiverSet<Interface> instead.',
+      ),
+    ),
+)
 
 _IPC_ENUM_TRAITS_DEPRECATED = (
     'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
@@ -612,9 +1265,13 @@ _JAVA_MULTIPLE_DEFINITION_EXCLUDED_PATHS = [
     r".*chrome[\\\/]android[\\\/]feed[\\\/]dummy[\\\/].*\.java",
 ]
 
+# List of image extensions that are used as resources in chromium.
+_IMAGE_EXTENSIONS = ['.svg', '.png', '.webp']
+
 # These paths contain test data and other known invalid JSON files.
-_KNOWN_INVALID_JSON_FILE_PATTERNS = [
+_KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS = [
     r'test[\\/]data[\\/]',
+    r'testing[\\/]buildbot[\\/]',
     r'^components[\\/]policy[\\/]resources[\\/]policy_templates\.json$',
     r'^third_party[\\/]protobuf[\\/]',
     r'^third_party[\\/]blink[\\/]renderer[\\/]devtools[\\/]protocol\.json$',
@@ -648,59 +1305,80 @@ _VALID_OS_MACROS = (
 )
 
 
+# These are not checked on the public chromium-presubmit trybot.
+# Add files here that rely on .py files that exists only for target_os="android"
+# checkouts (e.g. //third_party/catapult).
 _ANDROID_SPECIFIC_PYDEPS_FILES = [
     'android_webview/tools/run_cts.pydeps',
+    'build/android/devil_chromium.pydeps',
+    'build/android/gyp/create_bundle_wrapper_script.pydeps',
+    'build/android/gyp/jinja_template.pydeps',
+    'build/android/resource_sizes.pydeps',
+    'build/android/test_runner.pydeps',
+    'build/android/test_wrapper/logdog_wrapper.pydeps',
+    'chrome/android/features/create_stripped_java_factory.pydeps',
+    'testing/scripts/run_android_wpt.pydeps',
+    'third_party/android_platform/development/scripts/stack.pydeps',
+]
+
+
+_GENERIC_PYDEPS_FILES = [
     'base/android/jni_generator/jni_generator.pydeps',
     'base/android/jni_generator/jni_registration_generator.pydeps',
     'build/android/gyp/aar.pydeps',
     'build/android/gyp/aidl.pydeps',
+    'build/android/gyp/allot_native_libraries.pydeps',
     'build/android/gyp/apkbuilder.pydeps',
+    'build/android/gyp/assert_static_initializers.pydeps',
     'build/android/gyp/bytecode_processor.pydeps',
+    'build/android/gyp/compile_java.pydeps',
     'build/android/gyp/compile_resources.pydeps',
-    'build/android/gyp/create_bundle_wrapper_script.pydeps',
     'build/android/gyp/copy_ex.pydeps',
-    'build/android/gyp/create_app_bundle.pydeps',
     'build/android/gyp/create_apk_operations_script.pydeps',
+    'build/android/gyp/create_app_bundle_apks.pydeps',
+    'build/android/gyp/create_app_bundle.pydeps',
     'build/android/gyp/create_java_binary_script.pydeps',
-    'build/android/gyp/create_stack_script.pydeps',
-    'build/android/gyp/create_test_runner_script.pydeps',
-    'build/android/gyp/create_tool_wrapper.pydeps',
+    'build/android/gyp/create_size_info_files.pydeps',
+    'build/android/gyp/create_ui_locale_resources.pydeps',
     'build/android/gyp/desugar.pydeps',
     'build/android/gyp/dexsplitter.pydeps',
     'build/android/gyp/dex.pydeps',
+    'build/android/gyp/dex_jdk_libs.pydeps',
     'build/android/gyp/dist_aar.pydeps',
-    'build/android/gyp/emma_instr.pydeps',
     'build/android/gyp/filter_zip.pydeps',
     'build/android/gyp/gcc_preprocess.pydeps',
     'build/android/gyp/generate_linker_version_script.pydeps',
     'build/android/gyp/ijar.pydeps',
+    'build/android/gyp/jacoco_instr.pydeps',
     'build/android/gyp/java_cpp_enum.pydeps',
-    'build/android/gyp/javac.pydeps',
-    'build/android/gyp/jinja_template.pydeps',
+    'build/android/gyp/java_cpp_strings.pydeps',
+    'build/android/gyp/jetify_jar.pydeps',
     'build/android/gyp/lint.pydeps',
     'build/android/gyp/main_dex_list.pydeps',
-    'build/android/gyp/merge_jar_info_files.pydeps',
     'build/android/gyp/merge_manifest.pydeps',
     'build/android/gyp/prepare_resources.pydeps',
     'build/android/gyp/proguard.pydeps',
+    'build/android/gyp/turbine.pydeps',
+    'build/android/gyp/validate_static_library_dex_references.pydeps',
     'build/android/gyp/write_build_config.pydeps',
-    'build/android/gyp/write_ordered_libraries.pydeps',
+    'build/android/gyp/write_native_libraries_java.pydeps',
     'build/android/gyp/zip.pydeps',
     'build/android/incremental_install/generate_android_manifest.pydeps',
     'build/android/incremental_install/write_installer_json.pydeps',
-    'build/android/resource_sizes.pydeps',
-    'build/android/test_runner.pydeps',
-    'build/android/test_wrapper/logdog_wrapper.pydeps',
     'build/protoc_java.pydeps',
-    ('build/secondary/third_party/android_platform/'
-     'development/scripts/stack.pydeps'),
-    'net/tools/testserver/testserver.pydeps',
-]
-
-
-_GENERIC_PYDEPS_FILES = [
-    'chrome/test/chromedriver/test/run_py_tests.pydeps',
     'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
+    'chrome/test/chromedriver/test/run_py_tests.pydeps',
+    'components/cronet/tools/generate_javadoc.pydeps',
+    'components/cronet/tools/jar_src.pydeps',
+    'components/module_installer/android/module_desc_java.pydeps',
+    'content/public/android/generate_child_service.pydeps',
+    'net/tools/testserver/testserver.pydeps',
+    'third_party/blink/renderer/bindings/scripts/build_web_idl_database.pydeps',
+    'third_party/blink/renderer/bindings/scripts/collect_idl_files.pydeps',
+    'third_party/blink/renderer/bindings/scripts/generate_bindings.pydeps',
+    ('third_party/blink/renderer/bindings/scripts/'
+     'generate_high_entropy_list.pydeps'),
+    'tools/binary_size/sizes.pydeps',
     'tools/binary_size/supersize.pydeps',
 ]
 
@@ -710,20 +1388,44 @@ _ALL_PYDEPS_FILES = _ANDROID_SPECIFIC_PYDEPS_FILES + _GENERIC_PYDEPS_FILES
 
 # Bypass the AUTHORS check for these accounts.
 _KNOWN_ROBOTS = set(
-    '%s-chromium-autoroll@skia-buildbots.google.com.iam.gserviceaccount.com' % s
-    for s in ('afdo', 'angle', 'catapult', 'chromite', 'depot-tools',
-              'fuchsia-sdk', 'nacl', 'pdfium', 'perfetto', 'skia',
-              'spirv', 'src-internal', 'webrtc')
   ) | set('%s@appspot.gserviceaccount.com' % s for s in ('findit-for-me',)
   ) | set('%s@developer.gserviceaccount.com' % s for s in ('3su6n15k.default',)
   ) | set('%s@chops-service-accounts.iam.gserviceaccount.com' % s
-          for s in ('v8-ci-autoroll-builder', 'wpt-autoroller',)
+          for s in ('bling-autoroll-builder', 'v8-ci-autoroll-builder',
+                    'wpt-autoroller',)
   ) | set('%s@skia-public.iam.gserviceaccount.com' % s
-          for s in ('chromium-autoroll',)
+          for s in ('chromium-autoroll', 'chromium-release-autoroll')
   ) | set('%s@skia-corp.google.com.iam.gserviceaccount.com' % s
           for s in ('chromium-internal-autoroll',))
 
 
+def _IsCPlusPlusFile(input_api, file_path):
+  """Returns True if this file contains C++-like code (and not Python,
+  Go, Java, MarkDown, ...)"""
+
+  ext = input_api.os_path.splitext(file_path)[1]
+  # This list is compatible with CppChecker.IsCppFile but we should
+  # consider adding ".c" to it. If we do that we can use this function
+  # at more places in the code.
+  return ext in (
+      '.h',
+      '.cc',
+      '.cpp',
+      '.m',
+      '.mm',
+  )
+
+def _IsCPlusPlusHeaderFile(input_api, file_path):
+  return input_api.os_path.splitext(file_path)[1] == ".h"
+
+
+def _IsJavaFile(input_api, file_path):
+  return input_api.os_path.splitext(file_path)[1] == ".java"
+
+
+def _IsProtoFile(input_api, file_path):
+  return input_api.os_path.splitext(file_path)[1] == ".proto"
+
 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
   """Attempts to prevent use of functions intended only for testing in
   non-testing code. For now this is just a best-effort implementation
@@ -926,7 +1628,7 @@ def _CheckNoDISABLETypoInTests(input_api, output_api):
 def _CheckDCHECK_IS_ONHasBraces(input_api, output_api):
   """Checks to make sure DCHECK_IS_ON() does not skip the parentheses."""
   errors = []
-  pattern = input_api.re.compile(r'DCHECK_IS_ON(?!\(\))',
+  pattern = input_api.re.compile(r'DCHECK_IS_ON\b(?!\(\))',
                                  input_api.re.MULTILINE)
   for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
     if (not f.LocalPath().endswith(('.cc', '.mm', '.h'))):
@@ -940,16 +1642,25 @@ def _CheckDCHECK_IS_ONHasBraces(input_api, output_api):
   return errors
 
 
-def _FindHistogramNameInLine(histogram_name, line):
-  """Tries to find a histogram name or prefix in a line."""
-  if not "affected-histogram" in line:
-    return histogram_name in line
+def _FindHistogramNameInChunk(histogram_name, chunk):
+  """Tries to find a histogram name or prefix in a chunk.
+
+  Returns whether the histogram name was found, or None if more of the chunk
+  is needed to determine it."""
   # A histogram_suffixes tag type has an affected-histogram name as a prefix of
   # the histogram_name.
-  if not '"' in line:
-    return False
-  histogram_prefix = line.split('\"')[1]
-  return histogram_prefix in histogram_name
+  if '<affected-histogram' in chunk:
+    # If the tag is not completed, needs more chunk to get the name.
+    if not '>' in chunk:
+      return None
+    if not 'name="' in chunk:
+      return False
+    # Retrieve the first portion of the chunk wrapped by double-quotations. We
+    # expect the only attribute is the name.
+    histogram_prefix = chunk.split('"')[1]
+    return histogram_prefix in histogram_name
+  # Typically the whole histogram name should be in the chunk.
+  return histogram_name in chunk
 
 
 def _CheckUmaHistogramChanges(input_api, output_api):
@@ -998,8 +1709,13 @@ def _CheckUmaHistogramChanges(input_api, output_api):
   unmatched_histograms = []
   for histogram_info in touched_histograms:
     histogram_name_found = False
+    chunk = ''
     for line_num, line in histograms_xml_modifications:
-      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
+      chunk += line
+      histogram_name_found = _FindHistogramNameInChunk(histogram_info[0], chunk)
+      if histogram_name_found is None:
+        continue
+      chunk = ''
       if histogram_name_found:
         break
     if not histogram_name_found:
@@ -1012,8 +1728,14 @@ def _CheckUmaHistogramChanges(input_api, output_api):
       for histogram_name, f, line_num in unmatched_histograms:
         histograms_xml.seek(0)
         histogram_name_found = False
+        chunk = ''
         for line in histograms_xml:
-          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
+          chunk += line
+          histogram_name_found = _FindHistogramNameInChunk(histogram_name,
+                                                           chunk)
+          if histogram_name_found is None:
+            continue
+          chunk = ''
           if histogram_name_found:
             break
         if not histogram_name_found:
@@ -1093,8 +1815,12 @@ def _CheckValidHostsInDEPS(input_api, output_api):
     return []
   # Outsource work to gclient verify
   try:
-    input_api.subprocess.check_output(['gclient', 'verify'],
-                                      stderr=input_api.subprocess.STDOUT)
+    gclient_path = input_api.os_path.join(
+        input_api.PresubmitLocalPath(),
+        'third_party', 'depot_tools', 'gclient.py')
+    input_api.subprocess.check_output(
+        [input_api.python_executable, gclient_path, 'verify'],
+        stderr=input_api.subprocess.STDOUT)
     return []
   except input_api.subprocess.CalledProcessError as error:
     return [output_api.PresubmitError(
@@ -1102,6 +1828,35 @@ def _CheckValidHostsInDEPS(input_api, output_api):
         long_text=error.output)]
 
 
+def _GetMessageForMatchingType(input_api, affected_file, line_number, line,
+                               type_name, message):
+  """Helper method for _CheckNoBannedFunctions and _CheckNoDeprecatedMojoTypes.
+
+  Returns a string composed of the name of the file, the line number where the
+  match has been found and the additional text passed as |message| in case the
+  target type name matches the text inside the line passed as parameter.
+  """
+  result = []
+
+  if line.endswith(" nocheck"):
+    return result
+
+  matched = False
+  if type_name[0:1] == '/':
+    regex = type_name[1:]
+    if input_api.re.search(regex, line):
+      matched = True
+  elif type_name in line:
+    matched = True
+
+  if matched:
+    result.append('    %s:%d:' % (affected_file.LocalPath(), line_number))
+    for message_line in message:
+      result.append('      %s' % message_line)
+
+  return result
+
+
 def _CheckNoBannedFunctions(input_api, output_api):
   """Make sure that banned functions are not used."""
   warnings = []
@@ -1114,7 +1869,7 @@ def _CheckNoBannedFunctions(input_api, output_api):
         return True
     return False
 
-  def IsIosObcjFile(affected_file):
+  def IsIosObjcFile(affected_file):
     local_path = affected_file.LocalPath()
     if input_api.os_path.splitext(local_path)[-1] not in ('.mm', '.m', '.h'):
       return False
@@ -1127,20 +1882,13 @@ def _CheckNoBannedFunctions(input_api, output_api):
     return False
 
   def CheckForMatch(affected_file, line_num, line, func_name, message, error):
-    matched = False
-    if func_name[0:1] == '/':
-      regex = func_name[1:]
-      if input_api.re.search(regex, line):
-        matched = True
-    elif func_name in line:
-      matched = True
-    if matched:
-      problems = warnings
+    problems = _GetMessageForMatchingType(input_api, f, line_num, line,
+                                          func_name, message)
+    if problems:
       if error:
-        problems = errors
-      problems.append('    %s:%d:' % (affected_file.LocalPath(), line_num))
-      for message_line in message:
-        problems.append('      %s' % message_line)
+        errors.extend(problems)
+      else:
+        warnings.extend(problems)
 
   file_filter = lambda f: f.LocalPath().endswith(('.java'))
   for f in input_api.AffectedFiles(file_filter=file_filter):
@@ -1154,11 +1902,17 @@ def _CheckNoBannedFunctions(input_api, output_api):
       for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
         CheckForMatch(f, line_num, line, func_name, message, error)
 
-  for f in input_api.AffectedFiles(file_filter=IsIosObcjFile):
+  for f in input_api.AffectedFiles(file_filter=IsIosObjcFile):
     for line_num, line in f.ChangedContents():
       for func_name, message, error in _BANNED_IOS_OBJC_FUNCTIONS:
         CheckForMatch(f, line_num, line, func_name, message, error)
 
+  egtest_filter = lambda f: f.LocalPath().endswith(('_egtest.mm'))
+  for f in input_api.AffectedFiles(file_filter=egtest_filter):
+    for line_num, line in f.ChangedContents():
+      for func_name, message, error in _BANNED_IOS_EGTEST_FUNCTIONS:
+        CheckForMatch(f, line_num, line, func_name, message, error)
+
   file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
   for f in input_api.AffectedFiles(file_filter=file_filter):
     for line_num, line in f.ChangedContents():
@@ -1177,6 +1931,71 @@ def _CheckNoBannedFunctions(input_api, output_api):
   return result
 
 
+def _CheckAndroidNoBannedImports(input_api, output_api):
+  """Make sure that banned java imports are not used."""
+  errors = []
+
+  def IsException(path, exceptions):
+    for exception in exceptions:
+      if (path.startswith(exception)):
+        return True
+    return False
+
+  file_filter = lambda f: f.LocalPath().endswith(('.java'))
+  for f in input_api.AffectedFiles(file_filter=file_filter):
+    for line_num, line in f.ChangedContents():
+      for import_name, message, exceptions in _BANNED_JAVA_IMPORTS:
+        if IsException(f.LocalPath(), exceptions):
+          continue
+        problems = _GetMessageForMatchingType(input_api, f, line_num, line,
+            'import ' + import_name, message)
+        if problems:
+          errors.extend(problems)
+  result = []
+  if (errors):
+    result.append(output_api.PresubmitError(
+        'Banned imports were used.\n' + '\n'.join(errors)))
+  return result
+
+
+def _CheckNoDeprecatedMojoTypes(input_api, output_api):
+  """Make sure that old Mojo types are not used."""
+  warnings = []
+  errors = []
+
+  # For any path that is not an "ok" or an "error" path, a warning will be
+  # raised if deprecated mojo types are found.
+  ok_paths = ['components/arc']
+  error_paths = ['third_party/blink', 'content']
+
+  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
+  for f in input_api.AffectedFiles(file_filter=file_filter):
+    # Don't check //components/arc, not yet migrated (see crrev.com/c/1868870).
+    if any(map(lambda path: f.LocalPath().startswith(path), ok_paths)):
+      continue
+
+    for line_num, line in f.ChangedContents():
+      for func_name, message in _DEPRECATED_MOJO_TYPES:
+        problems = _GetMessageForMatchingType(input_api, f, line_num, line,
+                                              func_name, message)
+
+        if problems:
+          # Raise errors inside |error_paths| and warnings everywhere else.
+          if any(map(lambda path: f.LocalPath().startswith(path), error_paths)):
+            errors.extend(problems)
+          else:
+            warnings.extend(problems)
+
+  result = []
+  if (warnings):
+    result.append(output_api.PresubmitPromptWarning(
+        'Banned Mojo types were used.\n' + '\n'.join(warnings)))
+  if (errors):
+    result.append(output_api.PresubmitError(
+        'Banned Mojo types were used.\n' + '\n'.join(errors)))
+  return result
+
+
 def _CheckNoPragmaOnce(input_api, output_api):
   """Make sure that banned functions are not used."""
   files = []
@@ -1230,9 +2049,6 @@ def _CheckUnwantedDependencies(input_api, output_api):
     sys.path = sys.path + [input_api.os_path.join(
         input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
     import checkdeps
-    from cpp_checker import CppChecker
-    from java_checker import JavaChecker
-    from proto_checker import ProtoChecker
     from rules import Rule
   finally:
     # Restore sys.path to what it was before.
@@ -1242,13 +2058,13 @@ def _CheckUnwantedDependencies(input_api, output_api):
   added_imports = []
   added_java_imports = []
   for f in input_api.AffectedFiles():
-    if CppChecker.IsCppFile(f.LocalPath()):
+    if _IsCPlusPlusFile(input_api, f.LocalPath()):
       changed_lines = [line for _, line in f.ChangedContents()]
       added_includes.append([f.AbsoluteLocalPath(), changed_lines])
-    elif ProtoChecker.IsProtoFile(f.LocalPath()):
+    elif _IsProtoFile(input_api, f.LocalPath()):
       changed_lines = [line for _, line in f.ChangedContents()]
       added_imports.append([f.AbsoluteLocalPath(), changed_lines])
-    elif JavaChecker.IsJavaFile(f.LocalPath()):
+    elif _IsJavaFile(input_api, f.LocalPath()):
       changed_lines = [line for _, line in f.ChangedContents()]
       added_java_imports.append([f.AbsoluteLocalPath(), changed_lines])
 
@@ -1344,7 +2160,9 @@ def _CheckTeamTags(input_api, output_api):
            'OWNERS']
   try:
     if files:
-      input_api.subprocess.check_output(args + files)
+      warnings = input_api.subprocess.check_output(args + files).splitlines()
+      if warnings:
+        return [output_api.PresubmitPromptWarning(warnings[0], warnings[1:])]
     return []
   except input_api.subprocess.CalledProcessError as error:
     return [output_api.PresubmitError(
@@ -1376,7 +2194,7 @@ def _CheckForVersionControlConflictsInFile(input_api, f):
   pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
   errors = []
   for line_num, line in f.ChangedContents():
-    if f.LocalPath().endswith('.md'):
+    if f.LocalPath().endswith(('.md', '.rst', '.txt')):
       # First-level headers in markdown look a lot like version control
       # conflict markers. http://daringfireball.net/projects/markdown/basics
       continue
@@ -1448,6 +2266,46 @@ def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
     return []
 
 
+def _CheckChromeOsSyncedPrefRegistration(input_api, output_api):
+  """Warns if Chrome OS C++ files register syncable prefs as browser prefs."""
+  def FileFilter(affected_file):
+    """Includes directories known to be Chrome OS only."""
+    return input_api.FilterSourceFile(
+      affected_file,
+      white_list=('^ash/',
+                  '^chromeos/',  # Top-level src/chromeos.
+                  '/chromeos/',  # Any path component.
+                  '^components/arc',
+                  '^components/exo'),
+      black_list=(input_api.DEFAULT_BLACK_LIST))
+
+  prefs = []
+  priority_prefs = []
+  for f in input_api.AffectedFiles(file_filter=FileFilter):
+    for line_num, line in f.ChangedContents():
+      if input_api.re.search('PrefRegistrySyncable::SYNCABLE_PREF', line):
+        prefs.append('    %s:%d:' % (f.LocalPath(), line_num))
+        prefs.append('      %s' % line)
+      if input_api.re.search(
+          'PrefRegistrySyncable::SYNCABLE_PRIORITY_PREF', line):
+        priority_prefs.append('    %s:%d:' % (f.LocalPath(), line_num))
+        priority_prefs.append('      %s' % line)
+
+  results = []
+  if (prefs):
+    results.append(output_api.PresubmitPromptWarning(
+        'Preferences were registered as SYNCABLE_PREF and will be controlled '
+        'by browser sync settings. If these prefs should be controlled by OS '
+        'sync settings use SYNCABLE_OS_PREF instead.\n' + '\n'.join(prefs)))
+  if (priority_prefs):
+    results.append(output_api.PresubmitPromptWarning(
+        'Preferences were registered as SYNCABLE_PRIORITY_PREF and will be '
+        'controlled by browser sync settings. If these prefs should be '
+        'controlled by OS sync settings use SYNCABLE_OS_PRIORITY_PREF '
+        'instead.\n' + '\n'.join(priority_prefs)))
+  return results
+
+
 # TODO: add unit tests.
 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
   """Makes sure there are no abbreviations in the name of PNG files.
@@ -1628,21 +2486,23 @@ def _CheckSpamLogging(input_api, output_api):
                 input_api.DEFAULT_BLACK_LIST +
                 (r"^base[\\/]logging\.h$",
                  r"^base[\\/]logging\.cc$",
+                 r"^base[\\/]task[\\/]thread_pool[\\/]task_tracker\.cc$",
                  r"^chrome[\\/]app[\\/]chrome_main_delegate\.cc$",
                  r"^chrome[\\/]browser[\\/]chrome_browser_main\.cc$",
                  r"^chrome[\\/]browser[\\/]ui[\\/]startup[\\/]"
                      r"startup_browser_creator\.cc$",
-                 r"^chrome[\\/]installer[\\/]setup[\\/].*",
-                 r"^chrome[\\/]chrome_cleaner[\\/].*",
-                 r"chrome[\\/]browser[\\/]diagnostics[\\/]" +
+                 r"^chrome[\\/]browser[\\/]browser_switcher[\\/]bho[\\/].*",
+                 r"^chrome[\\/]browser[\\/]diagnostics[\\/]" +
                      r"diagnostics_writer\.cc$",
-                 r"^chrome_elf[\\/]dll_hash[\\/]dll_hash_main\.cc$",
+                 r"^chrome[\\/]chrome_cleaner[\\/].*",
+                 r"^chrome[\\/]chrome_elf[\\/]dll_hash[\\/]dll_hash_main\.cc$",
+                 r"^chrome[\\/]installer[\\/]setup[\\/].*",
                  r"^chromecast[\\/]",
                  r"^cloud_print[\\/]",
                  r"^components[\\/]browser_watcher[\\/]"
                      r"dump_stability_report_main_win.cc$",
-                 r"^components[\\/]html_viewer[\\/]"
-                     r"web_test_delegate_impl\.cc$",
+                 r"^components[\\/]media_control[\\/]renderer[\\/]"
+                     r"media_playback_options\.cc$",
                  r"^components[\\/]zucchini[\\/].*",
                  # TODO(peter): Remove this exception. https://crbug.com/534537
                  r"^content[\\/]browser[\\/]notifications[\\/]"
@@ -1652,19 +2512,22 @@ def _CheckSpamLogging(input_api, output_api):
                  r"^courgette[\\/]courgette_minimal_tool\.cc$",
                  r"^courgette[\\/]courgette_tool\.cc$",
                  r"^extensions[\\/]renderer[\\/]logging_native_handler\.cc$",
-                 r"^fuchsia[\\/]browser[\\/]frame_impl.cc$",
+                 r"^fuchsia[\\/]engine[\\/]browser[\\/]frame_impl.cc$",
+                 r"^fuchsia[\\/]engine[\\/]context_provider_main.cc$",
                  r"^headless[\\/]app[\\/]headless_shell\.cc$",
                  r"^ipc[\\/]ipc_logging\.cc$",
                  r"^native_client_sdk[\\/]",
                  r"^remoting[\\/]base[\\/]logging\.h$",
                  r"^remoting[\\/]host[\\/].*",
                  r"^sandbox[\\/]linux[\\/].*",
-                 r"^storage[\\/]browser[\\/]fileapi[\\/]" +
+                 r"^storage[\\/]browser[\\/]file_system[\\/]" +
                      r"dump_file_system.cc$",
                  r"^tools[\\/]",
                  r"^ui[\\/]base[\\/]resource[\\/]data_pack.cc$",
                  r"^ui[\\/]aura[\\/]bench[\\/]bench_main\.cc$",
-                 r"^ui[\\/]ozone[\\/]platform[\\/]cast[\\/]"))
+                 r"^ui[\\/]ozone[\\/]platform[\\/]cast[\\/]",
+                 r"^ui[\\/]base[\\/]x[\\/]xwmstartupcheck[\\/]"
+                     r"xwmstartupcheck\.cc$"))
   source_file_filter = lambda x: input_api.FilterSourceFile(
       x, white_list=file_inclusion_pattern, black_list=black_list)
 
@@ -1946,7 +2809,9 @@ def _CheckParseErrors(input_api, output_api):
       return False
     path = affected_file.LocalPath()
 
-    if _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS, path):
+    if _MatchesFile(input_api,
+                    _KNOWN_TEST_DATA_AND_INVALID_JSON_FILE_PATTERNS,
+                    path):
       return False
 
     if (action == _GetIDLParseError and
@@ -1984,9 +2849,43 @@ def _CheckJavaStyle(input_api, output_api):
     # Restore sys.path to what it was before.
     sys.path = original_sys_path
 
-  return checkstyle.RunCheckstyle(
-      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
-      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
+  return checkstyle.RunCheckstyle(
+      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
+      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
+
+
+def _CheckPythonDevilInit(input_api, output_api):
+  """Checks to make sure devil is initialized correctly in python scripts."""
+  script_common_initialize_pattern = input_api.re.compile(
+      r'script_common\.InitializeEnvironment\(')
+  devil_env_config_initialize = input_api.re.compile(
+      r'devil_env\.config\.Initialize\(')
+
+  errors = []
+
+  sources = lambda affected_file: input_api.FilterSourceFile(
+      affected_file,
+      black_list=(_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST +
+                  (r'^build[\\/]android[\\/]devil_chromium\.py',
+                   r'^third_party[\\/].*',)),
+      white_list=[r'.*\.py$'])
+
+  for f in input_api.AffectedSourceFiles(sources):
+    for line_num, line in f.ChangedContents():
+      if (script_common_initialize_pattern.search(line) or
+          devil_env_config_initialize.search(line)):
+        errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+  results = []
+
+  if errors:
+    results.append(output_api.PresubmitError(
+        'Devil initialization should always be done using '
+        'devil_chromium.Initialize() in the chromium project, to use better '
+        'defaults for dependencies (ex. up-to-date version of adb).',
+        errors))
+
+  return results
 
 
 def _MatchesFile(input_api, patterns, path):
@@ -2029,9 +2928,15 @@ def _GetOwnersFilesToCheckForIpcOwners(input_api):
   # matching the above patterns, which trigger false positives.
   exclude_paths = [
       'third_party/crashpad/*',
+      'third_party/blink/renderer/platform/bindings/*',
       'third_party/protobuf/benchmarks/python/*',
-      'third_party/third_party/blink/renderer/platform/bindings/*',
       'third_party/win_build_output/*',
+      'third_party/feed_library/*',
+      # These files are just used to communicate between class loaders running
+      # in the same process.
+      'weblayer/browser/java/org/chromium/weblayer_private/interfaces/*',
+      'weblayer/browser/java/org/chromium/weblayer_private/test_interfaces/*',
+
   ]
 
   # Dictionary mapping an OWNERS file path to Patterns.
@@ -2081,33 +2986,17 @@ def _GetOwnersFilesToCheckForIpcOwners(input_api):
   # files and no *_messages*.h files, we should only nag about rules for
   # *.mojom files.
   for f in input_api.AffectedFiles(include_deletes=False):
-    # Manifest files don't have a strong naming convention. Instead, scan
-    # affected files for .json, .cc, and .h files which look like they contain
-    # a manifest definition.
-    if (f.LocalPath().endswith('.json') and
-        not _MatchesFile(input_api, _KNOWN_INVALID_JSON_FILE_PATTERNS,
-                         f.LocalPath())):
-      json_comment_eater = _ImportJSONCommentEater(input_api)
-      mostly_json_lines = '\n'.join(f.NewContents())
-      # Comments aren't allowed in strict JSON, so filter them out.
-      json_lines = json_comment_eater.Nom(mostly_json_lines)
-      try:
-        json_content = input_api.json.loads(json_lines)
-      except:
-        # There's another PRESUBMIT check that already verifies that JSON files
-        # are not invalid, so no need to emit another warning here.
-        continue
-      if 'interface_provider_specs' in json_content:
+    # Manifest files don't have a strong naming convention. Instead, try to find
+    # affected .cc and .h files which look like they contain a manifest
+    # definition.
+    manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
+    test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
+    if (manifest_pattern.search(f.LocalPath()) and not
+        test_manifest_pattern.search(f.LocalPath())):
+      # We expect all actual service manifest files to contain at least one
+      # qualified reference to service_manager::Manifest.
+      if 'service_manager::Manifest' in '\n'.join(f.NewContents()):
         AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
-    else:
-      manifest_pattern = input_api.re.compile('manifests?\.(cc|h)$')
-      test_manifest_pattern = input_api.re.compile('test_manifests?\.(cc|h)')
-      if (manifest_pattern.search(f.LocalPath()) and not
-          test_manifest_pattern.search(f.LocalPath())):
-        # We expect all actual service manifest files to contain at least one
-        # qualified reference to service_manager::Manifest.
-        if 'service_manager::Manifest' in '\n'.join(f.NewContents()):
-          AddPatternToCheck(f, input_api.os_path.basename(f.LocalPath()))
     for pattern in file_patterns:
       if input_api.fnmatch.fnmatch(
           input_api.os_path.basename(f.LocalPath()), pattern):
@@ -2124,9 +3013,50 @@ def _GetOwnersFilesToCheckForIpcOwners(input_api):
   return to_check
 
 
-def _CheckIpcOwners(input_api, output_api):
+def _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check):
+  """Adds OWNERS files to check for correct Fuchsia security owners."""
+
+  file_patterns = [
+      # Component specifications.
+      '*.cml', # Component Framework v2.
+      '*.cmx', # Component Framework v1.
+
+      # Fuchsia IDL protocol specifications.
+      '*.fidl',
+  ]
+
+  def AddPatternToCheck(input_file, pattern):
+    owners_file = input_api.os_path.join(
+        input_api.os_path.dirname(input_file.LocalPath()), 'OWNERS')
+    if owners_file not in to_check:
+      to_check[owners_file] = {}
+    if pattern not in to_check[owners_file]:
+      to_check[owners_file][pattern] = {
+          'files': [],
+          'rules': [
+              'per-file %s=set noparent' % pattern,
+              'per-file %s=file://fuchsia/SECURITY_OWNERS' % pattern,
+          ]
+      }
+    to_check[owners_file][pattern]['files'].append(input_file)
+
+  # Iterate through the affected files to see what we actually need to check
+  # for. We should only nag patch authors about per-file rules if a file in that
+  # directory would match that pattern.
+  for f in input_api.AffectedFiles(include_deletes=False):
+    for pattern in file_patterns:
+      if input_api.fnmatch.fnmatch(
+          input_api.os_path.basename(f.LocalPath()), pattern):
+        AddPatternToCheck(f, pattern)
+        break
+
+  return to_check
+
+
+def _CheckSecurityOwners(input_api, output_api):
   """Checks that affected files involving IPC have an IPC OWNERS rule."""
   to_check = _GetOwnersFilesToCheckForIpcOwners(input_api)
+  _AddOwnersFilesToCheckForFuchsiaSecurityOwners(input_api, to_check)
 
   if to_check:
     # If there are any OWNERS files to check, there are IPC-related changes in
@@ -2175,6 +3105,145 @@ def _CheckIpcOwners(input_api, output_api):
   return results
 
 
+def _GetFilesUsingSecurityCriticalFunctions(input_api):
+  """Checks affected files for changes to security-critical calls. This
+  function checks the full change diff, to catch both additions/changes
+  and removals.
+
+  Returns a dict keyed by file name, and the value is a set of detected
+  functions.
+  """
+  # Map of function pretty name (displayed in an error) to the pattern to
+  # match it with.
+  _PATTERNS_TO_CHECK = {
+      'content::GetServiceSandboxType<>()':
+          'GetServiceSandboxType\\<'
+  }
+  _PATTERNS_TO_CHECK = {
+      k: input_api.re.compile(v)
+      for k, v in _PATTERNS_TO_CHECK.items()
+  }
+
+  # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK.
+  files_to_functions = {}
+  for f in input_api.AffectedFiles():
+    diff = f.GenerateScmDiff()
+    for line in diff.split('\n'):
+      # Not using just RightHandSideLines() because removing a
+      # call to a security-critical function can be just as important
+      # as adding or changing the arguments.
+      if line.startswith('-') or (line.startswith('+') and
+          not line.startswith('++')):
+        for name, pattern in _PATTERNS_TO_CHECK.items():
+          if pattern.search(line):
+            path = f.LocalPath()
+            if not path in files_to_functions:
+              files_to_functions[path] = set()
+            files_to_functions[path].add(name)
+  return files_to_functions
+
+
+def _CheckSecurityChanges(input_api, output_api):
+  """Checks that changes involving security-critical functions are reviewed
+  by the security team.
+  """
+  files_to_functions = _GetFilesUsingSecurityCriticalFunctions(input_api)
+  if len(files_to_functions):
+    owners_db = input_api.owners_db
+    owner_email, reviewers = (
+        input_api.canned_checks.GetCodereviewOwnerAndReviewers(
+            input_api,
+            owners_db.email_regexp,
+            approval_needed=input_api.is_committing))
+
+    # Load the OWNERS file for security changes.
+    owners_file = 'ipc/SECURITY_OWNERS'
+    security_owners = owners_db.owners_rooted_at_file(owners_file)
+
+    has_security_owner = any([owner in reviewers for owner in security_owners])
+    if not has_security_owner:
+      msg = 'The following files change calls to security-sensitive functions\n' \
+          'that need to be reviewed by {}.\n'.format(owners_file)
+      for path, names in files_to_functions.items():
+        msg += '  {}\n'.format(path)
+        for name in names:
+          msg += '    {}\n'.format(name)
+        msg += '\n'
+
+      if input_api.is_committing:
+        output = output_api.PresubmitError
+      else:
+        output = output_api.PresubmitNotifyResult
+      return [output(msg)]
+
+  return []
+
+
+def _CheckSetNoParent(input_api, output_api):
+  """Checks that set noparent is only used together with an OWNERS file in
+     //build/OWNERS.setnoparent (see also
+     //docs/code_reviews.md#owners-files-details)
+  """
+  errors = []
+
+  allowed_owners_files_file = 'build/OWNERS.setnoparent'
+  allowed_owners_files = set()
+  with open(allowed_owners_files_file, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if not line or line.startswith('#'):
+        continue
+      allowed_owners_files.add(line)
+
+  per_file_pattern = input_api.re.compile('per-file (.+)=(.+)')
+
+  for f in input_api.AffectedFiles(include_deletes=False):
+    if not f.LocalPath().endswith('OWNERS'):
+      continue
+
+    found_owners_files = set()
+    found_set_noparent_lines = dict()
+
+    # Parse the OWNERS file.
+    for lineno, line in enumerate(f.NewContents(), 1):
+      line = line.strip()
+      if line.startswith('set noparent'):
+        found_set_noparent_lines[''] = lineno
+      if line.startswith('file://'):
+        if line in allowed_owners_files:
+          found_owners_files.add('')
+      if line.startswith('per-file'):
+        match = per_file_pattern.match(line)
+        if match:
+          glob = match.group(1).strip()
+          directive = match.group(2).strip()
+          if directive == 'set noparent':
+            found_set_noparent_lines[glob] = lineno
+          if directive.startswith('file://'):
+            if directive in allowed_owners_files:
+              found_owners_files.add(glob)
+
+    # Check that every set noparent line has a corresponding file:// line
+    # listed in build/OWNERS.setnoparent.
+    for set_noparent_line in found_set_noparent_lines:
+      if set_noparent_line in found_owners_files:
+        continue
+      errors.append('  %s:%d' % (f.LocalPath(),
+                                 found_set_noparent_lines[set_noparent_line]))
+
+  results = []
+  if errors:
+    if input_api.is_committing:
+      output = output_api.PresubmitError
+    else:
+      output = output_api.PresubmitPromptWarning
+    results.append(output(
+        'Found the following "set noparent" restrictions in OWNERS files that '
+        'do not include owners from build/OWNERS.setnoparent:',
+        long_text='\n\n'.join(errors)))
+  return results
+
+
 def _CheckUselessForwardDeclarations(input_api, output_api):
   """Checks that added or removed lines in non third party affected
      header files do not lead to new useless class or struct forward
@@ -2220,6 +3289,43 @@ def _CheckUselessForwardDeclarations(input_api, output_api):
 
   return results
 
+def _CheckAndroidDebuggableBuild(input_api, output_api):
+  """Checks that code uses BuildInfo.isDebugAndroid() instead of
+     Build.TYPE.equals('') or ''.equals(Build.TYPE) to check if
+     this is a debuggable build of Android.
+  """
+  build_type_check_pattern = input_api.re.compile(
+      r'\bBuild\.TYPE\.equals\(|\.equals\(\s*\bBuild\.TYPE\)')
+
+  errors = []
+
+  sources = lambda affected_file: input_api.FilterSourceFile(
+      affected_file,
+      black_list=(_EXCLUDED_PATHS +
+                  _TEST_CODE_EXCLUDED_PATHS +
+                  input_api.DEFAULT_BLACK_LIST +
+                  (r"^android_webview[\\/]support_library[\\/]"
+                      "boundary_interfaces[\\/]",
+                   r"^chrome[\\/]android[\\/]webapk[\\/].*",
+                   r'^third_party[\\/].*',
+                   r"tools[\\/]android[\\/]customtabs_benchmark[\\/].*",
+                   r"webview[\\/]chromium[\\/]License.*",)),
+      white_list=[r'.*\.java$'])
+
+  for f in input_api.AffectedSourceFiles(sources):
+    for line_num, line in f.ChangedContents():
+      if build_type_check_pattern.search(line):
+        errors.append("%s:%d" % (f.LocalPath(), line_num))
+
+  results = []
+
+  if errors:
+    results.append(output_api.PresubmitPromptWarning(
+        'Build.TYPE.equals or .equals(Build.TYPE) usage is detected.'
+        ' Please use BuildInfo.isDebugAndroid() instead.',
+        errors))
+
+  return results
 
 # TODO: add unit tests
 def _CheckAndroidToastUsage(input_api, output_api):
@@ -2284,13 +3390,12 @@ def _CheckAndroidCrLogUsage(input_api, output_api):
   has_some_log_import_pattern = input_api.re.compile(
       r'^import .*\.Log;$', input_api.re.MULTILINE)
   # Extract the tag from lines like `Log.d(TAG, "*");` or `Log.d("TAG", "*");`
-  log_call_pattern = input_api.re.compile(r'^\s*Log\.\w\((?P<tag>\"?\w+\"?)\,')
+  log_call_pattern = input_api.re.compile(r'\bLog\.\w\((?P<tag>\"?\w+)')
   log_decl_pattern = input_api.re.compile(
-      r'^\s*private static final String TAG = "(?P<name>(.*))";',
-      input_api.re.MULTILINE)
+      r'static final String TAG = "(?P<name>(.*))"')
+  rough_log_decl_pattern = input_api.re.compile(r'\bString TAG\s*=')
 
-  REF_MSG = ('See docs/android_logging.md '
-            'or contact dgn@chromium.org for more info.')
+  REF_MSG = ('See docs/android_logging.md for more info.')
   sources = lambda x: input_api.FilterSourceFile(x, white_list=[r'.*\.java$'],
       black_list=cr_log_check_excluded_paths)
 
@@ -2303,13 +3408,14 @@ def _CheckAndroidCrLogUsage(input_api, output_api):
   for f in input_api.AffectedSourceFiles(sources):
     file_content = input_api.ReadFile(f)
     has_modified_logs = False
-
     # Per line checks
     if (cr_log_import_pattern.search(file_content) or
         (class_in_base_pattern.search(file_content) and
             not has_some_log_import_pattern.search(file_content))):
       # Checks to run for files using cr log
       for line_num, line in f.ChangedContents():
+        if rough_log_decl_pattern.search(line):
+          has_modified_logs = True
 
         # Check if the new line is doing some logging
         match = log_call_pattern.search(line)
@@ -2377,7 +3483,7 @@ def _CheckAndroidTestJUnitFrameworkImport(input_api, output_api):
   sources = lambda x: input_api.FilterSourceFile(
       x, white_list=[r'.*\.java$'], black_list=None)
   errors = []
-  for f in input_api.AffectedFiles(sources):
+  for f in input_api.AffectedFiles(file_filter=sources):
     for line_num, line in f.ChangedContents():
       if deprecated_junit_framework_pattern.search(line):
         errors.append("%s:%d" % (f.LocalPath(), line_num))
@@ -2401,7 +3507,7 @@ def _CheckAndroidTestJUnitInheritance(input_api, output_api):
   sources = lambda x: input_api.FilterSourceFile(
       x, white_list=[r'.*Test\.java$'], black_list=None)
   errors = []
-  for f in input_api.AffectedFiles(sources):
+  for f in input_api.AffectedFiles(file_filter=sources):
     if not f.OldContents():
       class_declaration_start_flag = False
       for line_num, line in f.ChangedContents():
@@ -2430,7 +3536,7 @@ def _CheckAndroidTestAnnotationUsage(input_api, output_api):
   sources = lambda x: input_api.FilterSourceFile(
       x, white_list=[r'.*\.java$'], black_list=None)
   errors = []
-  for f in input_api.AffectedFiles(sources):
+  for f in input_api.AffectedFiles(file_filter=sources):
     for line_num, line in f.ChangedContents():
       if deprecated_annotation_import_pattern.search(line):
         errors.append("%s:%d" % (f.LocalPath(), line_num))
@@ -2467,7 +3573,8 @@ def _CheckAndroidNewMdpiAssetLocation(input_api, output_api):
 
 def _CheckAndroidWebkitImports(input_api, output_api):
   """Checks that code uses org.chromium.base.Callback instead of
-     android.widget.ValueCallback except in the WebView glue layer.
+     android.webkit.ValueCallback except in the WebView glue layer
+     and WebLayer.
   """
   valuecallback_import_pattern = input_api.re.compile(
       r'^import android\.webkit\.ValueCallback;$')
@@ -2479,7 +3586,8 @@ def _CheckAndroidWebkitImports(input_api, output_api):
       black_list=(_EXCLUDED_PATHS +
                   _TEST_CODE_EXCLUDED_PATHS +
                   input_api.DEFAULT_BLACK_LIST +
-                  (r'^android_webview[\\/]glue[\\/].*',)),
+                  (r'^android_webview[\\/]glue[\\/].*',
+                   r'^weblayer[\\/].*',)),
       white_list=[r'.*\.java$'])
 
   for f in input_api.AffectedSourceFiles(sources):
@@ -2553,7 +3661,12 @@ class PydepsChecker(object):
     file_to_pydeps_map = None
     for f in self._input_api.AffectedFiles(include_deletes=True):
       local_path = f.LocalPath()
-      if local_path  == 'DEPS':
+      # Changes to DEPS can lead to .pydeps changes if any .py files are in
+      # subrepositories. We can't figure out which files change, so re-check
+      # all files.
+      # Changes to print_python_deps.py affect all .pydeps.
+      if local_path in ('DEPS', 'PRESUBMIT.py') or local_path.endswith(
+          'print_python_deps.py'):
         return self._pydeps_files
       elif local_path.endswith('.pydeps'):
         if local_path in self._pydeps_files:
@@ -2581,6 +3694,18 @@ class PydepsChecker(object):
       return cmd, '\n'.join(difflib.context_diff(old_contents, new_contents))
 
 
+def _ParseGclientArgs():
+  args = {}
+  with open('build/config/gclient_args.gni', 'r') as f:
+    for line in f:
+      line = line.strip()
+      if not line or line.startswith('#'):
+        continue
+      attribute, value = line.split('=')
+      args[attribute.strip()] = value.strip()
+  return args
+
+
 def _CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
   """Checks if a .pydeps file needs to be regenerated."""
   # This check is for Python dependency lists (.pydeps files), and involves
@@ -2588,10 +3713,8 @@ def _CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
   # doesn't work on Windows and Mac, so skip it on other platforms.
   if input_api.platform != 'linux2':
     return []
-  # TODO(agrieve): Update when there's a better way to detect
-  # this: crbug.com/570091
-  is_android = input_api.os_path.exists('third_party/android_tools')
-  pydeps_files = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
+  is_android = _ParseGclientArgs().get('checkout_android', 'false') == 'true'
+  pydeps_to_check = _ALL_PYDEPS_FILES if is_android else _GENERIC_PYDEPS_FILES
   results = []
   # First, check for new / deleted .pydeps.
   for f in input_api.AffectedFiles(include_deletes=True):
@@ -2614,9 +3737,22 @@ def _CheckPydepsNeedsUpdating(input_api, output_api, checker_for_tests=None):
   if results:
     return results
 
-  checker = checker_for_tests or PydepsChecker(input_api, pydeps_files)
-
-  for pydep_path in checker.ComputeAffectedPydeps():
+  checker = checker_for_tests or PydepsChecker(input_api, _ALL_PYDEPS_FILES)
+  affected_pydeps = set(checker.ComputeAffectedPydeps())
+  affected_android_pydeps = affected_pydeps.intersection(
+      set(_ANDROID_SPECIFIC_PYDEPS_FILES))
+  if affected_android_pydeps and not is_android:
+    results.append(output_api.PresubmitPromptOrNotify(
+        'You have changed python files that may affect pydeps for android\n'
+        'specific scripts. However, the relevant presubmit check cannot be\n'
+        'run because you are not using an Android checkout. To validate that\n'
+        'the .pydeps are correct, re-run presubmit in an Android checkout, or\n'
+        'use the android-internal-presubmit optional trybot.\n'
+        'Possibly stale pydeps files:\n{}'.format(
+            '\n'.join(affected_android_pydeps))))
+
+  affected_pydeps_to_check = affected_pydeps.intersection(set(pydeps_to_check))
+  for pydep_path in affected_pydeps_to_check:
     try:
       result = checker.DetermineIfStale(pydep_path)
       if result:
@@ -2719,44 +3855,7 @@ def _CheckNoDeprecatedCss(input_api, output_api):
   return results
 
 
-_DEPRECATED_JS = [
-  ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
-  ( "__defineGetter__", "Object.defineProperty" ),
-  ( "__defineSetter__", "Object.defineProperty" ),
-]
-
-
-# TODO: add unit tests
-def _CheckNoDeprecatedJs(input_api, output_api):
-  """Make sure that we don't use deprecated JS in Chrome code."""
-  results = []
-  file_inclusion_pattern = [r".+\.js$"]  # TODO(dbeam): .html?
-  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
-                input_api.DEFAULT_BLACK_LIST)
-  file_filter = lambda f: input_api.FilterSourceFile(
-      f, white_list=file_inclusion_pattern, black_list=black_list)
-  for fpath in input_api.AffectedFiles(file_filter=file_filter):
-    for lnum, line in fpath.ChangedContents():
-      for (deprecated, replacement) in _DEPRECATED_JS:
-        if deprecated in line:
-          results.append(output_api.PresubmitError(
-              "%s:%d: Use of deprecated JS %s, use %s instead" %
-              (fpath.LocalPath(), lnum, deprecated, replacement)))
-  return results
-
-
 def _CheckForRelativeIncludes(input_api, output_api):
-  # Need to set the sys.path so PRESUBMIT_test.py runs properly
-  import sys
-  original_sys_path = sys.path
-  try:
-    sys.path = sys.path + [input_api.os_path.join(
-        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
-    from cpp_checker import CppChecker
-  finally:
-    # Restore sys.path to what it was before.
-    sys.path = original_sys_path
-
   bad_files = {}
   for f in input_api.AffectedFiles(include_deletes=False):
     if (f.LocalPath().startswith('third_party') and
@@ -2764,7 +3863,7 @@ def _CheckForRelativeIncludes(input_api, output_api):
       not f.LocalPath().startswith('third_party\\blink')):
       continue
 
-    if not CppChecker.IsCppFile(f.LocalPath()):
+    if not _IsCPlusPlusFile(input_api, f.LocalPath()):
       continue
 
     relative_includes = [line for _, line in f.ChangedContents()
@@ -2794,6 +3893,41 @@ def _CheckForRelativeIncludes(input_api, output_api):
   return results
 
 
+def _CheckForCcIncludes(input_api, output_api):
+  """Check that nobody tries to include a cc file. It's a relatively
+  common error which results in duplicate symbols in object
+  files. This may not always break the build until someone later gets
+  very confusing linking errors."""
+  results = []
+  for f in input_api.AffectedFiles(include_deletes=False):
+    # We let third_party code do whatever it wants
+    if (f.LocalPath().startswith('third_party') and
+      not f.LocalPath().startswith('third_party/blink') and
+      not f.LocalPath().startswith('third_party\\blink')):
+      continue
+
+    if not _IsCPlusPlusFile(input_api, f.LocalPath()):
+      continue
+
+    for _, line in f.ChangedContents():
+      if line.startswith('#include "'):
+        included_file = line.split('"')[1]
+        if _IsCPlusPlusFile(input_api, included_file):
+          # The most common naming for external files with C++ code,
+          # apart from standard headers, is to call them foo.inc, but
+          # Chromium sometimes uses foo-inc.cc so allow that as well.
+          if not included_file.endswith(('.h', '-inc.cc')):
+            results.append(output_api.PresubmitError(
+              'Only header files or .inc files should be included in other\n'
+              'C++ files. Compiling the contents of a cc file more than once\n'
+              'will cause duplicate information in the build which may later\n'
+              'result in strange link errors.\n' +
+              f.LocalPath() + ':\n    ' +
+              line))
+
+  return results
+
+
 def _CheckWatchlistDefinitionsEntrySyntax(key, value, ast):
   if not isinstance(key, ast.Str):
     return 'Key at line %d must be a string literal' % key.lineno
@@ -3012,10 +4146,164 @@ def _CheckCorrectProductNameInMessages(input_api, output_api):
   return all_problems
 
 
+def _CheckBuildtoolsRevisionsAreInSync(input_api, output_api):
+  # TODO(crbug.com/941824): We need to make sure the entries in
+  # //buildtools/DEPS are kept in sync with the entries in //DEPS
+  # so that users of //buildtools in other projects get the same tooling
+  # Chromium gets. If we ever fix the referenced bug and add 'includedeps'
+  # support to gclient, we can eliminate the duplication and delete
+  # this presubmit check.
+
+  # Update this regexp if new revisions are added to the files.
+  rev_regexp = input_api.re.compile(
+      "'((clang_format|libcxx|libcxxabi|libunwind)_revision|gn_version)':")
+
+  # If a user is changing one revision, they need to change the same
+  # line in both files. This means that any given change should contain
+  # exactly the same list of changed lines that match the regexps. The
+  # replace(' ', '') call allows us to ignore whitespace changes to the
+  # lines. The 'long_text' parameter to the error will contain the
+  # list of changed lines in both files, which should make it easy enough
+  # to spot the error without going overboard in this implementation.
+  revs_changes = {
+      'DEPS': {},
+      'buildtools/DEPS': {},
+  }
+  long_text = ''
+
+  for f in input_api.AffectedFiles(
+      file_filter=lambda f: f.LocalPath() in ('DEPS', 'buildtools/DEPS')):
+    for line_num, line in f.ChangedContents():
+      if rev_regexp.search(line):
+        revs_changes[f.LocalPath()][line.replace(' ', '')] = line
+        long_text += '%s:%d: %s\n' % (f.LocalPath(), line_num, line)
+
+  if set(revs_changes['DEPS']) != set(revs_changes['buildtools/DEPS']):
+    return [output_api.PresubmitError(
+        'Change buildtools revisions in sync in both //DEPS and '
+        '//buildtools/DEPS.', long_text=long_text + '\n')]
+  else:
+    return []
+
+
+def _CheckForTooLargeFiles(input_api, output_api):
+  """Avoid large files, especially binary files, in the repository since
+  git doesn't scale well for those. They will be in everyone's repo
+  clones forever, forever making Chromium slower to clone and work
+  with."""
+
+  # Uploading files to cloud storage is not trivial so we don't want
+  # to set the limit too low, but the upper limit for "normal" large
+  # files seems to be 1-2 MB, with a handful around 5-8 MB, so
+  # anything over 20 MB is exceptional.
+  TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024  # 20 MB
+
+  too_large_files = []
+  for f in input_api.AffectedFiles():
+    # Check both added and modified files (but not deleted files).
+    if f.Action() in ('A', 'M'):
+      size = input_api.os_path.getsize(f.AbsoluteLocalPath())
+      if size > TOO_LARGE_FILE_SIZE_LIMIT:
+        too_large_files.append("%s: %d bytes" % (f.LocalPath(), size))
+
+  if too_large_files:
+    message = (
+      'Do not commit large files to git since git scales badly for those.\n' +
+      'Instead put the large files in cloud storage and use DEPS to\n' +
+      'fetch them.\n' + '\n'.join(too_large_files)
+    )
+    return [output_api.PresubmitError(
+        'Too large files found in commit', long_text=message + '\n')]
+  else:
+    return []
+
+
+def _CheckFuzzTargets(input_api, output_api):
+  """Checks specific for fuzz target sources."""
+  EXPORTED_SYMBOLS = [
+      'LLVMFuzzerInitialize',
+      'LLVMFuzzerCustomMutator',
+      'LLVMFuzzerCustomCrossOver',
+      'LLVMFuzzerMutate',
+  ]
+
+  REQUIRED_HEADER = '#include "testing/libfuzzer/libfuzzer_exports.h"'
+
+  def FilterFile(affected_file):
+    """Ignore libFuzzer source code."""
+    white_list = r'.*fuzz.*\.(h|hpp|hcc|cc|cpp|cxx)$'
+    black_list = r"^third_party[\\/]libFuzzer"
+
+    return input_api.FilterSourceFile(
+        affected_file,
+        white_list=[white_list],
+        black_list=[black_list])
+
+  files_with_missing_header = []
+  for f in input_api.AffectedSourceFiles(FilterFile):
+    contents = input_api.ReadFile(f, 'r')
+    if REQUIRED_HEADER in contents:
+      continue
+
+    if any(symbol in contents for symbol in EXPORTED_SYMBOLS):
+      files_with_missing_header.append(f.LocalPath())
+
+  if not files_with_missing_header:
+    return []
+
+  long_text = (
+      'If you define any of the libFuzzer optional functions (%s), it is '
+      'recommended to add \'%s\' directive. Otherwise, the fuzz target may '
+      'work incorrectly on Mac (crbug.com/687076).\nNote that '
+      'LLVMFuzzerInitialize should not be used, unless your fuzz target needs '
+      'to access command line arguments passed to the fuzzer. Instead, prefer '
+      'static initialization and shared resources as documented in '
+      'https://chromium.googlesource.com/chromium/src/+/master/testing/'
+      'libfuzzer/efficient_fuzzing.md#simplifying-initialization_cleanup.\n' % (
+          ', '.join(EXPORTED_SYMBOLS), REQUIRED_HEADER)
+    )
+
+  return [output_api.PresubmitPromptWarning(
+        message="Missing '%s' in:" % REQUIRED_HEADER,
+        items=files_with_missing_header,
+        long_text=long_text)]
+
+
+def _CheckNewImagesWarning(input_api, output_api):
+  """
+  Warns authors who add images into the repo to make sure their images are
+  optimized before committing.
+  """
+  images_added = False
+  image_paths = []
+  errors = []
+  filter_lambda = lambda x: input_api.FilterSourceFile(
+    x,
+    black_list=(('(?i).*test', r'.*\/junit\/')
+                + input_api.DEFAULT_BLACK_LIST),
+    white_list=[r'.*\/(drawable|mipmap)' ]
+  )
+  for f in input_api.AffectedFiles(
+      include_deletes=False, file_filter=filter_lambda):
+    local_path = f.LocalPath().lower()
+    if any(local_path.endswith(extension) for extension in _IMAGE_EXTENSIONS):
+      images_added = True
+      image_paths.append(f)
+  if images_added:
+    errors.append(output_api.PresubmitPromptWarning(
+        'It looks like you are trying to commit some images. If these are '
+        'non-test-only images, please make sure to read and apply the tips in '
+        'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/speed/'
+        'binary_size/optimization_advice.md#optimizing-images\nThis check is '
+        'FYI only and will not block your CL on the CQ.', image_paths))
+  return errors
+
+
 def _AndroidSpecificOnUploadChecks(input_api, output_api):
   """Groups upload checks that target android code."""
   results = []
   results.extend(_CheckAndroidCrLogUsage(input_api, output_api))
+  results.extend(_CheckAndroidDebuggableBuild(input_api, output_api))
   results.extend(_CheckAndroidNewMdpiAssetLocation(input_api, output_api))
   results.extend(_CheckAndroidToastUsage(input_api, output_api))
   results.extend(_CheckAndroidTestJUnitInheritance(input_api, output_api))
@@ -3023,6 +4311,8 @@ def _AndroidSpecificOnUploadChecks(input_api, output_api):
   results.extend(_CheckAndroidTestAnnotationUsage(input_api, output_api))
   results.extend(_CheckAndroidWebkitImports(input_api, output_api))
   results.extend(_CheckAndroidXmlStyle(input_api, output_api, True))
+  results.extend(_CheckNewImagesWarning(input_api, output_api))
+  results.extend(_CheckAndroidNoBannedImports(input_api, output_api))
   return results
 
 def _AndroidSpecificOnCommitChecks(input_api, output_api):
@@ -3031,6 +4321,54 @@ def _AndroidSpecificOnCommitChecks(input_api, output_api):
   results.extend(_CheckAndroidXmlStyle(input_api, output_api, False))
   return results
 
+# TODO(chrishall): could we additionally match on any path owned by
+#                  ui/accessibility/OWNERS ?
+_ACCESSIBILITY_PATHS = (
+    r"^chrome[\\/]browser.*[\\/]accessibility[\\/]",
+    r"^chrome[\\/]browser[\\/]extensions[\\/]api[\\/]automation.*[\\/]",
+    r"^chrome[\\/]renderer[\\/]extensions[\\/]accessibility_.*",
+    r"^chrome[\\/]tests[\\/]data[\\/]accessibility[\\/]",
+    r"^content[\\/]browser[\\/]accessibility[\\/]",
+    r"^content[\\/]renderer[\\/]accessibility[\\/]",
+    r"^content[\\/]tests[\\/]data[\\/]accessibility[\\/]",
+    r"^extensions[\\/]renderer[\\/]api[\\/]automation[\\/]",
+    r"^ui[\\/]accessibility[\\/]",
+    r"^ui[\\/]views[\\/]accessibility[\\/]",
+)
+
+def _CheckAccessibilityRelnotesField(input_api, output_api):
+  """Checks that commits to accessibility code contain an AX-Relnotes field in
+  their commit message."""
+  def FileFilter(affected_file):
+    paths = _ACCESSIBILITY_PATHS
+    return input_api.FilterSourceFile(affected_file, white_list=paths)
+
+  # Only consider changes affecting accessibility paths.
+  if not any(input_api.AffectedFiles(file_filter=FileFilter)):
+    return []
+
+  # AX-Relnotes can appear in either the description or the footer.
+  # When searching the description, require 'AX-Relnotes:' to appear at the
+  # beginning of a line.
+  ax_regex = input_api.re.compile('ax-relnotes[:=]')
+  description_has_relnotes = any(ax_regex.match(line)
+    for line in input_api.change.DescriptionText().lower().splitlines())
+
+  footer_relnotes = input_api.change.GitFootersFromDescription().get(
+    'AX-Relnotes', [])
+  if description_has_relnotes or footer_relnotes:
+    return []
+
+  # TODO(chrishall): link to Relnotes documentation in message.
+  message = ("Missing 'AX-Relnotes:' field required for accessibility changes"
+             "\n  please add 'AX-Relnotes: [release notes].' to describe any "
+             "user-facing changes"
+             "\n  otherwise add 'AX-Relnotes: n/a.' if this change has no "
+             "user-facing effects"
+             "\n  if this is confusing or annoying then please contact members "
+             "of ui/accessibility/OWNERS.")
+
+  return [output_api.PresubmitNotifyResult(message)]
 
 def _CommonChecks(input_api, output_api):
   """Checks common to both upload and commit."""
@@ -3044,6 +4382,7 @@ def _CommonChecks(input_api, output_api):
     results.extend(
         input_api.canned_checks.CheckAuthorizedAuthor(input_api, output_api))
 
+  results.extend(_CheckAccessibilityRelnotesField(input_api, output_api))
   results.extend(
       _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
   results.extend(
@@ -3055,6 +4394,7 @@ def _CommonChecks(input_api, output_api):
   results.extend(_CheckNoNewWStrings(input_api, output_api))
   results.extend(_CheckNoDEPSGIT(input_api, output_api))
   results.extend(_CheckNoBannedFunctions(input_api, output_api))
+  results.extend(_CheckNoDeprecatedMojoTypes(input_api, output_api))
   results.extend(_CheckNoPragmaOnce(input_api, output_api))
   results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
   results.extend(_CheckUnwantedDependencies(input_api, output_api))
@@ -3064,6 +4404,7 @@ def _CommonChecks(input_api, output_api):
   results.extend(_CheckForVersionControlConflicts(input_api, output_api))
   results.extend(_CheckPatchFiles(input_api, output_api))
   results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
+  results.extend(_CheckChromeOsSyncedPrefRegistration(input_api, output_api))
   results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
   results.extend(_CheckBuildConfigMacrosWithoutInclude(input_api, output_api))
   results.extend(_CheckForInvalidOSMacros(input_api, output_api))
@@ -3079,7 +4420,6 @@ def _CommonChecks(input_api, output_api):
   results.extend(_CheckForAnonymousVariables(input_api, output_api))
   results.extend(_CheckUserActionUpdate(input_api, output_api))
   results.extend(_CheckNoDeprecatedCss(input_api, output_api))
-  results.extend(_CheckNoDeprecatedJs(input_api, output_api))
   results.extend(_CheckParseErrors(input_api, output_api))
   results.extend(_CheckForIPCRules(input_api, output_api))
   results.extend(_CheckForLongPathnames(input_api, output_api))
@@ -3088,14 +4428,22 @@ def _CommonChecks(input_api, output_api):
   results.extend(_CheckSingletonInHeaders(input_api, output_api))
   results.extend(_CheckPydepsNeedsUpdating(input_api, output_api))
   results.extend(_CheckJavaStyle(input_api, output_api))
-  results.extend(_CheckIpcOwners(input_api, output_api))
+  results.extend(_CheckSecurityOwners(input_api, output_api))
+  results.extend(_CheckSecurityChanges(input_api, output_api))
+  results.extend(_CheckSetNoParent(input_api, output_api))
   results.extend(_CheckUselessForwardDeclarations(input_api, output_api))
   results.extend(_CheckForRelativeIncludes(input_api, output_api))
+  results.extend(_CheckForCcIncludes(input_api, output_api))
   results.extend(_CheckWATCHLISTS(input_api, output_api))
   results.extend(input_api.RunTests(
     input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
-  results.extend(_CheckTranslationScreenshots(input_api, output_api))
+  results.extend(_CheckStrings(input_api, output_api))
+  results.extend(_CheckTranslationExpectations(input_api, output_api))
   results.extend(_CheckCorrectProductNameInMessages(input_api, output_api))
+  results.extend(_CheckBuildtoolsRevisionsAreInSync(input_api, output_api))
+  results.extend(_CheckForTooLargeFiles(input_api, output_api))
+  results.extend(_CheckPythonDevilInit(input_api, output_api))
+  results.extend(_CheckStableMojomChanges(input_api, output_api))
 
   for f in input_api.AffectedFiles():
     path, name = input_api.os_path.split(f.LocalPath())
@@ -3253,8 +4601,9 @@ def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
   """Check all affected files for invalid "if defined" macros."""
   bad_macros = []
+  skipped_paths = ['third_party/sqlite/', 'third_party/abseil-cpp/']
   for f in input_api.AffectedFiles():
-    if f.LocalPath().startswith('third_party/sqlite/'):
+    if any([f.LocalPath().startswith(path) for path in skipped_paths]):
       continue
     if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
       bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
@@ -3298,7 +4647,7 @@ def _CheckForLongPathnames(input_api, output_api):
   This causes issues on Windows.
   """
   problems = []
-  for f in input_api.AffectedSourceFiles(None):
+  for f in input_api.AffectedTestableFiles():
     local_path = f.LocalPath()
     # Windows has a path limit of 260 characters. Limit path length to 200 so
     # that we have some extra for the prefix on dev machines and the bots.
@@ -3320,8 +4669,11 @@ def _CheckForIncludeGuards(input_api, output_api):
     # We only check header files under the control of the Chromium
     # project. That is, those outside third_party apart from
     # third_party/blink.
+    # We also exclude *_message_generator.h headers as they use
+    # include guards in a special, non-typical way.
     file_with_path = input_api.os_path.normpath(f.LocalPath())
     return (file_with_path.endswith('.h') and
+            not file_with_path.endswith('_message_generator.h') and
             (not file_with_path.startswith('third_party') or
              file_with_path.startswith(
                input_api.os_path.join('third_party', 'blink'))))
@@ -3387,7 +4739,7 @@ def _CheckForIncludeGuards(input_api, output_api):
               errors.append(output_api.PresubmitPromptWarning(
                 'Header using the wrong include guard name %s' % guard_name,
                 ['%s:%d' % (f.LocalPath(), line_number + 1)],
-                'Expected: %r\nFound: %r' % (expected_guard, guard_name)))
+                'Expected: %r\nFound:    %r' % (expected_guard, guard_name)))
       else:
         # The line after #ifndef should have a #define of the same name.
         if line_number == guard_line_number + 1:
@@ -3477,6 +4829,7 @@ def CheckChangeOnUpload(input_api, output_api):
   results.extend(_CheckGoogleSupportAnswerUrl(input_api, output_api))
   results.extend(_CheckUniquePtr(input_api, output_api))
   results.extend(_CheckNewHeaderWithoutGnChange(input_api, output_api))
+  results.extend(_CheckFuzzTargets(input_api, output_api))
   return results
 
 
@@ -3518,61 +4871,28 @@ def CheckChangeOnCommit(input_api, output_api):
       input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
   results.extend(input_api.canned_checks.CheckChangeHasBugField(
       input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoUnwantedTags(
+      input_api, output_api))
   results.extend(input_api.canned_checks.CheckChangeHasDescription(
       input_api, output_api))
   return results
 
 
-def _CheckTranslationScreenshots(input_api, output_api):
-  PART_FILE_TAG = "part"
+def _CheckStrings(input_api, output_api):
+  """Check string ICU syntax validity and if translation screenshots exist."""
+  # Skip translation screenshots check if a SkipTranslationScreenshotsCheck
+  # footer is set to true.
+  git_footers = input_api.change.GitFootersFromDescription()
+  skip_screenshot_check_footer = [
+      footer.lower()
+      for footer in git_footers.get(u'Skip-Translation-Screenshots-Check', [])]
+  run_screenshot_check = u'true' not in skip_screenshot_check_footer
+
   import os
+  import re
   import sys
   from io import StringIO
 
-  try:
-    old_sys_path = sys.path
-    sys.path = sys.path + [input_api.os_path.join(
-          input_api.PresubmitLocalPath(), 'tools', 'grit')]
-    import grit.grd_reader
-    import grit.node.message
-    import grit.util
-  finally:
-    sys.path = old_sys_path
-
-  def _GetGrdMessages(grd_path_or_string, dir_path='.'):
-    """Load the grd file and return a dict of message ids to messages.
-
-    Ignores any nested grdp files pointed by <part> tag.
-    """
-    doc = grit.grd_reader.Parse(grd_path_or_string, dir_path,
-        stop_after=None, first_ids_file=None,
-        debug=False, defines=None,
-        tags_to_ignore=set([PART_FILE_TAG]))
-    return {
-      msg.attrs['name']:msg for msg in doc.GetChildrenOfType(
-        grit.node.message.MessageNode)
-    }
-
-  def _GetGrdpMessagesFromString(grdp_string):
-    """Parses the contents of a grdp file given in grdp_string.
-
-    grd_reader can't parse grdp files directly. Instead, this creates a
-    temporary directory with a grd file pointing to the grdp file, and loads the
-    grd from there. Any nested grdp files (pointed by <part> tag) are ignored.
-    """
-    WRAPPER = """<?xml version="1.0" encoding="utf-8"?>
-    <grit latest_public_release="1" current_release="1">
-      <release seq="1">
-        <messages>
-          <part file="sub.grdp" />
-        </messages>
-      </release>
-    </grit>
-    """
-    with grit.util.TempDir({'main.grd': WRAPPER,
-                            'sub.grdp': grdp_string}) as temp_dir:
-      return _GetGrdMessages(temp_dir.GetPath('main.grd'), temp_dir.GetPath())
-
   new_or_added_paths = set(f.LocalPath()
       for f in input_api.AffectedFiles()
       if (f.Action() == 'A' or f.Action() == 'M'))
@@ -3581,8 +4901,10 @@ def _CheckTranslationScreenshots(input_api, output_api):
       if f.Action() == 'D')
 
   affected_grds = [f for f in input_api.AffectedFiles()
-      if (f.LocalPath().endswith('.grd') or
-          f.LocalPath().endswith('.grdp'))]
+      if (f.LocalPath().endswith(('.grd', '.grdp')))]
+  if not affected_grds:
+    return []
+
   affected_png_paths = [f.AbsoluteLocalPath()
       for f in input_api.AffectedFiles()
       if (f.LocalPath().endswith('.png'))]
@@ -3612,6 +4934,13 @@ def _CheckTranslationScreenshots(input_api, output_api):
   missing_sha1 = []
   unnecessary_sha1_files = []
 
+  # This check verifies that the ICU syntax of messages this CL touched is
+  # valid, and reports any found syntax errors.
+  # Without this presubmit check, ICU syntax errors in Chromium strings can land
+  # without developers being aware of them. Later on, such ICU syntax errors
+  # break message extraction for translation, hence would block Chromium
+  # translations until they are fixed.
+  icu_syntax_errors = []
 
   def _CheckScreenshotAdded(screenshots_dir, message_id):
     sha1_path = input_api.os_path.join(
@@ -3623,28 +4952,173 @@ def _CheckTranslationScreenshots(input_api, output_api):
   def _CheckScreenshotRemoved(screenshots_dir, message_id):
     sha1_path = input_api.os_path.join(
         screenshots_dir, message_id + '.png.sha1')
-    if sha1_path not in removed_paths:
+    if input_api.os_path.exists(sha1_path) and sha1_path not in removed_paths:
       unnecessary_sha1_files.append(sha1_path)
 
 
+  def _ValidateIcuSyntax(text, level, signatures):
+      """Validates ICU syntax of a text string.
+
+      Check if text looks similar to ICU and checks for ICU syntax correctness
+      in this case. Reports various issues with ICU syntax and values of
+      variants. Supports checking of nested messages. Accumulates information
+      about each ICU message found in the text for further checking.
+
+      Args:
+        text: a string to check.
+        level: a number of current nesting level.
+        signatures: an accumulator, a list of tuple of (level, variable,
+          kind, variants).
+
+      Returns:
+        None if a string is not ICU or no issue detected.
+        A tuple of (message, start index, end index) if an issue detected.
+      """
+      valid_types = {
+          'plural': (frozenset(
+              ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
+                     frozenset(['=1', 'other'])),
+          'selectordinal': (frozenset(
+              ['=0', '=1', 'zero', 'one', 'two', 'few', 'many', 'other']),
+                            frozenset(['one', 'other'])),
+          'select': (frozenset(), frozenset(['other'])),
+      }
+
+      # Check if the message looks like an attempt to use ICU
+      # plural. If yes - check if its syntax strictly matches ICU format.
+      like = re.match(r'^[^{]*\{[^{]*\b(plural|selectordinal|select)\b', text)
+      if not like:
+        signatures.append((level, None, None, None))
+        return
+
+      # Check for valid prefix and suffix
+      m = re.match(
+          r'^([^{]*\{)([a-zA-Z0-9_]+),\s*'
+          r'(plural|selectordinal|select),\s*'
+          r'(?:offset:\d+)?\s*(.*)', text, re.DOTALL)
+      if not m:
+        return (('This message looks like an ICU plural, '
+                 'but does not follow ICU syntax.'), like.start(), like.end())
+      starting, variable, kind, variant_pairs = m.groups()
+      variants, depth, last_pos = _ParseIcuVariants(variant_pairs, m.start(4))
+      if depth:
+        return ('Invalid ICU format. Unbalanced opening bracket', last_pos,
+                len(text))
+      first = text[0]
+      ending = text[last_pos:]
+      if not starting:
+        return ('Invalid ICU format. No initial opening bracket', last_pos - 1,
+                last_pos)
+      if not ending or '}' not in ending:
+        return ('Invalid ICU format. No final closing bracket', last_pos - 1,
+                last_pos)
+      elif first != '{':
+        return (
+            ('Invalid ICU format. Extra characters at the start of a complex '
+             'message (go/icu-message-migration): "%s"') %
+            starting, 0, len(starting))
+      elif ending != '}':
+        return (('Invalid ICU format. Extra characters at the end of a complex '
+                 'message (go/icu-message-migration): "%s"')
+                % ending, last_pos - 1, len(text) - 1)
+      if kind not in valid_types:
+        return (('Unknown ICU message type %s. '
+                 'Valid types are: plural, select, selectordinal') % kind, 0, 0)
+      known, required = valid_types[kind]
+      defined_variants = set()
+      for variant, variant_range, value, value_range in variants:
+        start, end = variant_range
+        if variant in defined_variants:
+          return ('Variant "%s" is defined more than once' % variant,
+                  start, end)
+        elif known and variant not in known:
+          return ('Variant "%s" is not valid for %s message' % (variant, kind),
+                  start, end)
+        defined_variants.add(variant)
+        # Check for nested structure
+        res = _ValidateIcuSyntax(value[1:-1], level + 1, signatures)
+        if res:
+          return (res[0], res[1] + value_range[0] + 1,
+                  res[2] + value_range[0] + 1)
+      missing = required - defined_variants
+      if missing:
+        return ('Required variants missing: %s' % ', '.join(missing), 0,
+                len(text))
+      signatures.append((level, variable, kind, defined_variants))
+
+
+  def _ParseIcuVariants(text, offset=0):
+    """Parse variants part of ICU complex message.
+
+    Builds a tuple of variant names and values, as well as
+    their offsets in the input string.
+
+    Args:
+      text: a string to parse
+      offset: additional offset to add to positions in the text to get correct
+        position in the complete ICU string.
+
+    Returns:
+      List of tuples; each tuple consists of four fields: variant name,
+      variant name span (tuple of two integers), variant value, value
+      span (tuple of two integers).
+    """
+    depth, start, end = 0, -1, -1
+    variants = []
+    key = None
+    for idx, char in enumerate(text):
+      if char == '{':
+        if not depth:
+          start = idx
+          chunk = text[end + 1:start]
+          key = chunk.strip()
+          pos = offset + end + 1 + chunk.find(key)
+          span = (pos, pos + len(key))
+        depth += 1
+      elif char == '}':
+        if not depth:
+          return variants, depth, offset + idx
+        depth -= 1
+        if not depth:
+          end = idx
+          variants.append((key, span, text[start:end + 1], (offset + start,
+                                                            offset + end + 1)))
+    return variants, depth, offset + end + 1
+
+  try:
+    old_sys_path = sys.path
+    sys.path = sys.path + [input_api.os_path.join(
+          input_api.PresubmitLocalPath(), 'tools', 'translation')]
+    from helper import grd_helper
+  finally:
+    sys.path = old_sys_path
+
   for f in affected_grds:
     file_path = f.LocalPath()
     old_id_to_msg_map = {}
     new_id_to_msg_map = {}
+    # Note that this code doesn't check if the file has been deleted. This is
+    # OK because it only uses the old and new file contents and doesn't load
+    # the file via its path.
+    # It's also possible that a file's content refers to a renamed or deleted
+    # file via a <part> tag, such as <part file="now-deleted-file.grdp">. This
+    # is OK as well, because grd_helper ignores <part> tags when loading .grd or
+    # .grdp files.
     if file_path.endswith('.grdp'):
       if f.OldContents():
-        old_id_to_msg_map = _GetGrdpMessagesFromString(
+        old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
           unicode('\n'.join(f.OldContents())))
       if f.NewContents():
-        new_id_to_msg_map = _GetGrdpMessagesFromString(
+        new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString(
           unicode('\n'.join(f.NewContents())))
     else:
+      file_dir = input_api.os_path.dirname(file_path) or '.'
       if f.OldContents():
-        old_id_to_msg_map = _GetGrdMessages(
-          StringIO(unicode('\n'.join(f.OldContents()))))
+        old_id_to_msg_map = grd_helper.GetGrdMessages(
+          StringIO(unicode('\n'.join(f.OldContents()))), file_dir)
       if f.NewContents():
-        new_id_to_msg_map = _GetGrdMessages(
-          StringIO(unicode('\n'.join(f.NewContents()))))
+        new_id_to_msg_map = grd_helper.GetGrdMessages(
+          StringIO(unicode('\n'.join(f.NewContents()))), file_dir)
 
     # Compute added, removed and modified message IDs.
     old_ids = set(old_id_to_msg_map)
@@ -3662,37 +5136,133 @@ def _CheckTranslationScreenshots(input_api, output_api):
     screenshots_dir = input_api.os_path.join(
         input_api.os_path.dirname(file_path), grd_name + ext.replace('.', '_'))
 
-    # Check the screenshot directory for .png files. Warn if there is any.
-    for png_path in affected_png_paths:
-      if png_path.startswith(screenshots_dir):
-        unnecessary_screenshots.append(png_path)
+    if run_screenshot_check:
+      # Check the screenshot directory for .png files. Warn if there is any.
+      for png_path in affected_png_paths:
+        if png_path.startswith(screenshots_dir):
+          unnecessary_screenshots.append(png_path)
 
-    for added_id in added_ids:
-      _CheckScreenshotAdded(screenshots_dir, added_id)
+      for added_id in added_ids:
+        _CheckScreenshotAdded(screenshots_dir, added_id)
 
-    for modified_id in modified_ids:
-      _CheckScreenshotAdded(screenshots_dir, modified_id)
+      for modified_id in modified_ids:
+        _CheckScreenshotAdded(screenshots_dir, modified_id)
 
-    for removed_id in removed_ids:
-      _CheckScreenshotRemoved(screenshots_dir, removed_id)
+      for removed_id in removed_ids:
+        _CheckScreenshotRemoved(screenshots_dir, removed_id)
+
+    # Check new and changed strings for ICU syntax errors.
+    for key in added_ids.union(modified_ids):
+      msg = new_id_to_msg_map[key].ContentsAsXml('', True)
+      err = _ValidateIcuSyntax(msg, 0, [])
+      if err is not None:
+        icu_syntax_errors.append(str(key) + ': ' + str(err[0]))
 
   results = []
-  if unnecessary_screenshots:
-    results.append(output_api.PresubmitNotifyResult(
-      'Do not include actual screenshots in the changelist. Run '
-      'tools/translate/upload_screenshots.py to upload them instead:',
-      sorted(unnecessary_screenshots)))
-
-  if missing_sha1:
-    results.append(output_api.PresubmitNotifyResult(
-      'You are adding or modifying UI strings.\n'
-      'To ensure the best translations, take screenshots of the relevant UI '
-      '(https://g.co/chrome/translation) and add these files to your '
-      'changelist:', sorted(missing_sha1)))
-
-  if unnecessary_sha1_files:
-    results.append(output_api.PresubmitNotifyResult(
-      'You removed strings associated with these files. Remove:',
-      sorted(unnecessary_sha1_files)))
+  if run_screenshot_check:
+    if unnecessary_screenshots:
+      results.append(output_api.PresubmitNotifyResult(
+        'Do not include actual screenshots in the changelist. Run '
+        'tools/translate/upload_screenshots.py to upload them instead:',
+        sorted(unnecessary_screenshots)))
+
+    if missing_sha1:
+      results.append(output_api.PresubmitNotifyResult(
+        'You are adding or modifying UI strings.\n'
+        'To ensure the best translations, take screenshots of the relevant UI '
+        '(https://g.co/chrome/translation) and add these files to your '
+        'changelist:', sorted(missing_sha1)))
+
+    if unnecessary_sha1_files:
+      results.append(output_api.PresubmitNotifyResult(
+        'You removed strings associated with these files. Remove:',
+        sorted(unnecessary_sha1_files)))
+  else:
+    results.append(output_api.PresubmitPromptOrNotify('Skipping translation '
+      'screenshots check.'))
+
+  if icu_syntax_errors:
+    results.append(output_api.PresubmitError(
+      'ICU syntax errors were found in the following strings (problems or '
+      'feedback? Contact rainhard@chromium.org):', items=icu_syntax_errors))
 
   return results
+
+
+def _CheckTranslationExpectations(input_api, output_api,
+                                  repo_root=None,
+                                  translation_expectations_path=None,
+                                  grd_files=None):
+  import sys
+  affected_grds = [f for f in input_api.AffectedFiles()
+      if (f.LocalPath().endswith('.grd') or
+          f.LocalPath().endswith('.grdp'))]
+  if not affected_grds:
+    return []
+
+  try:
+    old_sys_path = sys.path
+    sys.path = sys.path + [
+        input_api.os_path.join(
+            input_api.PresubmitLocalPath(), 'tools', 'translation')]
+    from helper import git_helper
+    from helper import translation_helper
+  finally:
+    sys.path = old_sys_path
+
+  # Check that translation expectations can be parsed and we can get a list of
+  # translatable grd files. |repo_root| and |translation_expectations_path| are
+  # only passed by tests.
+  if not repo_root:
+    repo_root = input_api.PresubmitLocalPath()
+  if not translation_expectations_path:
+    translation_expectations_path =  input_api.os_path.join(
+        repo_root, 'tools', 'gritsettings',
+        'translation_expectations.pyl')
+  if not grd_files:
+    grd_files = git_helper.list_grds_in_repository(repo_root)
+
+  try:
+    translation_helper.get_translatable_grds(repo_root, grd_files,
+                                             translation_expectations_path)
+  except Exception as e:
+    return [output_api.PresubmitNotifyResult(
+      'Failed to get a list of translatable grd files. This happens when:\n'
+      ' - One of the modified grd or grdp files cannot be parsed or\n'
+      ' - %s is not updated.\n'
+      'Stack:\n%s' % (translation_expectations_path, str(e)))]
+  return []
+
+
+def _CheckStableMojomChanges(input_api, output_api):
+  """Changes to [Stable] mojom types must preserve backward-compatibility."""
+  changed_mojoms = input_api.AffectedFiles(
+      include_deletes=True,
+      file_filter=lambda f: f.LocalPath().endswith(('.mojom')))
+  delta = []
+  for mojom in changed_mojoms:
+    old_contents = ''.join(mojom.OldContents()) or None
+    new_contents = ''.join(mojom.NewContents()) or None
+    delta.append({
+      'filename': mojom.LocalPath(),
+      'old': '\n'.join(mojom.OldContents()) or None,
+      'new': '\n'.join(mojom.NewContents()) or None,
+      })
+
+  process = input_api.subprocess.Popen(
+      [input_api.python_executable,
+       input_api.os_path.join(input_api.PresubmitLocalPath(), 'mojo',
+                              'public', 'tools', 'mojom',
+                              'check_stable_mojom_compatibility.py'),
+       '--src-root', input_api.PresubmitLocalPath()],
+       stdin=input_api.subprocess.PIPE,
+       stdout=input_api.subprocess.PIPE,
+       stderr=input_api.subprocess.PIPE,
+       universal_newlines=True)
+  (x, error) = process.communicate(input=input_api.json.dumps(delta))
+  if process.returncode:
+    return [output_api.PresubmitError(
+        'One or more [Stable] mojom definitions appears to have been changed '
+        'in a way that is not backward-compatible.',
+        long_text=error)]
+  return []