Import Cobalt 21.master.0.301702
diff --git a/src/.pre-commit-config.yaml b/src/.pre-commit-config.yaml
index 2b7e100..6240f63 100644
--- a/src/.pre-commit-config.yaml
+++ b/src/.pre-commit-config.yaml
@@ -5,7 +5,7 @@
 default_language_version:
     python: python3
 
-exclude: '^(third_party|v8)/'
+exclude: '^(build|third_party|v8)/'
 
 repos:
 -   repo: https://cobalt.googlesource.com/pre-commit-hooks
@@ -123,6 +123,7 @@
         entry: python precommit_hooks/run_python2_unittests.py
         language: python
         language_version: python2
+        additional_dependencies: ['mock']
         types: [python]
     -   id: osslint
         name: osslint
diff --git a/src/base/test/test_child_process.cc b/src/base/test/test_child_process.cc
index 3beac5b..ce15856 100644
--- a/src/base/test/test_child_process.cc
+++ b/src/base/test/test_child_process.cc
@@ -6,9 +6,6 @@
 #include <stdlib.h>
 #include <string.h>
 
-#include "starboard/common/string.h"
-#include "starboard/types.h"
-
 // Simple testing command, used to exercise child process launcher calls.
 //
 // Usage:
@@ -33,7 +30,7 @@
     int start_idx = 1;
 
     if (strcmp(argv[1], "-x") == 0) {
-      return_code = SbStringAToI(argv[2]);
+      return_code = atoi(argv[2]);
       start_idx = 3;
     }
 
diff --git a/src/build/.style.yapf b/src/build/.style.yapf
new file mode 100644
index 0000000..b4ebbe2
--- /dev/null
+++ b/src/build/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+
+# New directories should use a .style.yapf that does not include the following:
+column_limit = 80
+indent_width = 2
diff --git a/src/build/BUILD.gn b/src/build/BUILD.gn
new file mode 100644
index 0000000..51ef9b0
--- /dev/null
+++ b/src/build/BUILD.gn
@@ -0,0 +1,46 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+
+source_set("buildflag_header_h") {
+  sources = [ "buildflag.h" ]
+}
+
+buildflag_header("branding_buildflags") {
+  header = "branding_buildflags.h"
+
+  if (is_chrome_branded) {
+    flags = [
+      "CHROMIUM_BRANDING=0",
+      "GOOGLE_CHROME_BRANDING=1",
+    ]
+  } else {
+    flags = [
+      "CHROMIUM_BRANDING=1",
+      "GOOGLE_CHROME_BRANDING=0",
+    ]
+  }
+}
+
+buildflag_header("chromecast_buildflags") {
+  header = "chromecast_buildflags.h"
+
+  flags = [ "IS_CHROMECAST=$is_chromecast" ]
+}
+
+buildflag_header("chromeos_buildflags") {
+  header = "chromeos_buildflags.h"
+
+  flags = [
+    "IS_CHROMEOS_DEVICE=$is_chromeos_device",
+
+    "IS_CHROMEOS_LACROS=$is_chromeos_lacros",
+    "IS_CHROMEOS_ASH=$is_chromeos_ash",
+  ]
+}
diff --git a/src/build/DIR_METADATA b/src/build/DIR_METADATA
new file mode 100644
index 0000000..c914ddc
--- /dev/null
+++ b/src/build/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Build"
+}
+
+team_email: "build@chromium.org"
diff --git a/src/build/METADATA b/src/build/METADATA
new file mode 100644
index 0000000..91f4e6b
--- /dev/null
+++ b/src/build/METADATA
@@ -0,0 +1,20 @@
+name: "build"
+description:
+  "Subtree at build."
+
+third_party {
+  url {
+    type: LOCAL_SOURCE
+    value: "/build_mirror"
+  }
+  url {
+    type: GIT
+    value: "https://chromium.googlesource.com/chromium/src/build"
+  }
+  version: "4cb2bd7db6575df5a62f65ea60fb7ca2f2ff9f05"
+  last_upgrade_date {
+    year: 2021
+    month: 5
+    day: 4
+  }
+}
diff --git a/src/build/OWNERS.setnoparent b/src/build/OWNERS.setnoparent
new file mode 100644
index 0000000..5797d4d
--- /dev/null
+++ b/src/build/OWNERS.setnoparent
@@ -0,0 +1,62 @@
+# List of OWNERS files that can be used together with "set noparent". See
+# docs/code_reviews.md#owners-file-details for more details.
+
+# Overall project governance.
+file://ENG_REVIEW_OWNERS
+
+# Third-party dependency review, see //docs/adding_to_third_party.md
+file://third_party/OWNERS
+
+# Security reviews
+file://chromeos/SECURITY_OWNERS
+file://content/browser/SITE_ISOLATION_OWNERS
+file://fuchsia/SECURITY_OWNERS
+file://ipc/SECURITY_OWNERS
+file://net/base/SECURITY_OWNERS
+file://sandbox/linux/OWNERS
+file://sandbox/mac/OWNERS
+file://sandbox/OWNERS
+file://sandbox/win/OWNERS
+file://third_party/blink/SECURITY_OWNERS
+
+# Privacy reviews
+file://tools/traffic_annotation/summary/TRAFFIC_ANNOTATION_OWNERS
+file://tools/metrics/ukm/PRIVACY_OWNERS
+file://base/metrics/OWNERS
+
+# Blink API owners are responsible for decisions about what APIs Blink should
+# expose to the open web.
+file://third_party/blink/API_OWNERS
+
+# Extension related files.
+file://chrome/browser/extensions/component_extensions_allowlist/EXTENSION_ALLOWLIST_OWNERS
+file://extensions/common/api/API_OWNERS
+
+# This restriction is in place to avoid accidental addition to our top level
+# layout files, such as add duplicated assets, or introducing new colors when
+# we don't want them.
+file://ui/android/java/res/LAYOUT_OWNERS
+
+# Updating policy_templates.json can have drastic effects for systems depending
+# on policy definitions (for example, Google's cloud management tools for
+# Chrome and Chrome OS).
+# The rules are documented at:
+# https://sites.google.com/a/chromium.org/dev/developers/how-tos/enterprise/adding-new-policies
+file://components/policy/resources/ENTERPRISE_POLICY_OWNERS
+
+# This restriction is in place due to the complicated compliance regulations
+# around this code.
+file://chrome/android/java/src/org/chromium/chrome/browser/searchwidget/COMPLIANCE_OWNERS
+
+# Notification channels appear in system UI and are persisted forever by
+# Android, so should not be added or removed lightly, and the proper
+# deprecation and versioning steps must be taken when doing so.
+file://chrome/android/java/src/org/chromium/chrome/browser/notifications/channels/NOTIFICATION_CHANNEL_OWNERS
+
+# The Weblayer API is supposed to be stable and will be used outside of the
+# chromium repository.
+file://weblayer/API_OWNERS
+
+# New features for lock/login UI on Chrome OS need to work stably in all corner
+# cases.
+file://ash/login/LOGIN_LOCK_OWNERS
diff --git a/src/build/OWNERS.status b/src/build/OWNERS.status
new file mode 100644
index 0000000..f5cc1fc
--- /dev/null
+++ b/src/build/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short term, or frequently changing updates.
+#
+# The format of the file is
+#
+#  you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA based reviewer.
diff --git a/src/build/README.chromium b/src/build/README.chromium
deleted file mode 100644
index 012df35..0000000
--- a/src/build/README.chromium
+++ /dev/null
@@ -1,15 +0,0 @@
-List of property sheets to be included by projects:
-  common.vsprops
-    Not used anymore. No-op. Kept for compatibility with current projects.
-
-  debug.vsprops
-    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
-
-  external_code.vsprops
-    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
-
-  output_dll_copy.rules
-    Run to enable automatic copy of DLL when they are as an input file in a vcproj project.
-
-  release.vsprops
-    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependant on the CHROME_BUILD_TYPE environment variable.
diff --git a/src/build/README.md b/src/build/README.md
new file mode 100644
index 0000000..f9dde97
--- /dev/null
+++ b/src/build/README.md
@@ -0,0 +1,36 @@
+# About
+`//build` contains:
+ * Core GN templates and configuration
+ * Core Python build scripts
+
+Since this directory is DEPS'ed in by some other repositories (webrtc, pdfium,
+v8, etc), it should be kept as self-contained as possible by not referring
+to files outside of it. Some exceptions exist (`//testing`, select
+`//third_party` subdirectories), but new dependencies tend to break these other
+projects, and so should be avoided.
+
+Changes to `//build` should be landed in the Chromium repo. They will then be
+replicated to the stand-alone [build repo](https://chromium.googlesource.com/chromium/src/build)
+by the [gsubtreed tool.](https://chromium.googlesource.com/infra/infra/+/master/infra/services/gsubtreed)
+Note: You can find all directories already available through gsubtreed in the
+[list of all chromium repos](https://chromium.googlesource.com/).
+
+## Contents
+ * `//build/config` - Common templates via `.gni` files.
+ * `//build/toolchain` - GN toolchain definitions.
+ * `Other .py files` - Some are used by GN/Ninja. Some by gclient hooks, some
+   are just random utilities.
+
+Files referenced by `//.gn`:
+ * `//build/BUILDCONFIG.gn` - Included by all `BUILD.gn` files.
+ * `//build/secondary` - An overlay for `BUILD.gn` files. Enables adding
+   `BUILD.gn` to directories that live in sub-repositories.
+ * `//build_overrides` -
+   Refer to [//build_overrides/README.md](../build_overrides/README.md).
+
+## Docs
+
+* [Writing GN Templates](docs/writing_gn_templates.md)
+* [Debugging Slow Builds](docs/debugging_slow_builds.md)
+* [Mac Hermetic Toolchains](docs/mac_hermetic_toolchain.md)
+* [Android Build Documentation](android/docs/README.md)
diff --git a/src/build/add_rts_filters.py b/src/build/add_rts_filters.py
new file mode 100755
index 0000000..4186c39
--- /dev/null
+++ b/src/build/add_rts_filters.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a dummy RTS filter file if a real one doesn't exist yet.
+  Real filter files are generated by the RTS binary for suites with any
+  skippable tests. The rest of the suites need to have dummy files because gn
+  will expect the file to be present.
+
+  Implementation uses try / except because the filter files are written
+  relatively close to when this code creates the dummy files.
+
+  The following type of implementation would have a race condition:
+  if not os.path.isfile(filter_file):
+    open(filter_file, 'w') as fp:
+      fp.write('*')
+"""
+import errno
+import os
+import sys
+
+
+def main():
+  filter_file = sys.argv[1]
+  directory = os.path.dirname(filter_file)
+  try:
+    os.makedirs(directory)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+
+  try:
+    fp = os.open(filter_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+  else:
+    with os.fdopen(fp, 'w') as file_obj:
+      file_obj.write('*')  # '*' is a dummy that means run everything
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/AndroidManifest.xml b/src/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..3c4ed29
--- /dev/null
+++ b/src/build/android/AndroidManifest.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy"
+    android:versionCode="1"
+    android:versionName="1.0">
+
+</manifest>
diff --git a/src/build/android/BUILD.gn b/src/build/android/BUILD.gn
new file mode 100644
index 0000000..1be9f47
--- /dev/null
+++ b/src/build/android/BUILD.gn
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/build_vars.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+import("//build_overrides/build.gni")
+
+if (enable_java_templates) {
+  # Create or update the API versions cache if necessary by running a
+  # functionally empty lint task. This prevents racy creation of the
+  # cache while linting java targets in android_lint.
+  android_lint("prepare_android_lint_cache") {
+    create_cache = true
+  }
+
+  if (enable_jdk_library_desugaring) {
+    dex_jdk_libs("all_jdk_libs") {
+      output = "$target_out_dir/$target_name.l8.dex"
+      min_sdk_version = default_min_sdk_version
+    }
+  }
+
+  generate_build_config_srcjar("build_config_gen") {
+    use_final_fields = false
+  }
+
+  java_library("build_config_java") {
+    supports_android = true
+    srcjar_deps = [ ":build_config_gen" ]
+    jar_excluded_patterns = [ "*/build/BuildConfig.class" ]
+  }
+
+  write_native_libraries_java("native_libraries_gen") {
+    use_final_fields = false
+  }
+
+  android_library("native_libraries_java") {
+    srcjar_deps = [ ":native_libraries_gen" ]
+
+    # New version of NativeLibraries.java (with the actual correct values) will
+    # be created when creating an apk.
+    jar_excluded_patterns = [ "*/NativeLibraries.class" ]
+  }
+}
+
+python_library("devil_chromium_py") {
+  pydeps_file = "devil_chromium.pydeps"
+  data = [
+    "devil_chromium.py",
+    "devil_chromium.json",
+    "//third_party/catapult/third_party/gsutil/",
+    "//third_party/catapult/devil/devil/devil_dependencies.json",
+
+    # Read by gn_helpers.BuildWithChromium()
+    "//build/config/gclient_args.gni",
+  ]
+}
+
+# Contains runtime deps for installing apks.
+# E.g. from test_runner.py or from apk_operations.py.
+group("apk_installer_data") {
+  # Other //build users let devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps = [
+      "//build/android/pylib/device/commands",
+      "//tools/android/md5sum",
+    ]
+    data = [
+      "//third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar",
+    ]
+  }
+}
+
+python_library("apk_operations_py") {
+  pydeps_file = "apk_operations.pydeps"
+  deps = [ ":apk_installer_data" ]
+}
+
+python_library("test_runner_py") {
+  testonly = true
+  pydeps_file = "test_runner.pydeps"
+  data = [
+    "pylib/gtest/filter/",
+    "pylib/instrumentation/render_test.html.jinja",
+    "test_wrapper/logdog_wrapper.py",
+    "${android_sdk_build_tools}/aapt",
+    "${android_sdk_build_tools}/dexdump",
+    "${android_sdk_build_tools}/lib64/libc++.so",
+    "${android_sdk_build_tools}/split-select",
+    "${android_sdk_root}/platform-tools/adb",
+    "//third_party/requests/",
+  ]
+  data_deps = [
+    ":apk_installer_data",
+    ":devil_chromium_py",
+    ":logdog_wrapper_py",
+    ":stack_tools",
+  ]
+
+  # Other //build users let devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps += [ "//tools/android/forwarder2" ]
+    data += [ "//tools/android/avd/proto/" ]
+    if (is_asan) {
+      data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+    }
+  }
+
+  # Proguard is needed only when using apks (rather than native executables).
+  if (enable_java_templates) {
+    deps = [ "//build/android/stacktrace:java_deobfuscate" ]
+  }
+}
+
+python_library("logdog_wrapper_py") {
+  pydeps_file = "test_wrapper/logdog_wrapper.pydeps"
+}
+
+python_library("resource_sizes_py") {
+  pydeps_file = "resource_sizes.pydeps"
+  data_deps = [
+    ":devil_chromium_py",
+    "//third_party/catapult/tracing:convert_chart_json",
+  ]
+  data = [
+    build_vars_file,
+    android_readelf,
+  ]
+}
+
+# Tools necessary for symbolizing tombstones or stack traces that are output to
+# logcat.
+# Hidden behind build_with_chromium because some third party repos that use
+# //build don't pull in //third_party/android_platform.
+# TODO(crbug.com/1120190): Move stack script into //build/third_party
+#     and enable unconditionally.
+group("stack_tools") {
+  if (build_with_chromium) {
+    data = [
+      "tombstones.py",
+      "pylib/symbols/",
+      "stacktrace/",
+    ]
+
+    data_deps =
+        [ "//third_party/android_platform/development/scripts:stack_py" ]
+  }
+}
+
+# GN evaluates each .gn file once per toolchain, so restricting to default
+# toolchain will ensure write_file() is called only once.
+assert(current_toolchain == default_toolchain)
+
+# NOTE: If other platforms would benefit from exporting variables, we should
+# move this to a more top-level place.
+# It is currently here (instead of //BUILD.gn) to ensure that the file is
+# written even for non-chromium embedders of //build.
+_build_vars_json = {
+  # Underscore prefix so that it appears at the top.
+  _HEADER = "Generated during 'gn gen' by //build/android/BUILD.gn."
+  forward_variables_from(android_build_vars_json, "*")
+}
+
+write_file(build_vars_file, _build_vars_json, "json")
diff --git a/src/build/android/CheckInstallApk-debug.apk b/src/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc3191
--- /dev/null
+++ b/src/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/src/build/android/DIR_METADATA b/src/build/android/DIR_METADATA
new file mode 100644
index 0000000..7a2580a
--- /dev/null
+++ b/src/build/android/DIR_METADATA
@@ -0,0 +1 @@
+os: ANDROID
diff --git a/src/build/android/adb_chrome_public_command_line b/src/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000..86ece8c
--- /dev/null
+++ b/src/build/android/adb_chrome_public_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+#   adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+#   adb_chrome_public_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name chrome-command-line "$@"
diff --git a/src/build/android/adb_command_line.py b/src/build/android/adb_command_line.py
new file mode 100755
index 0000000..c3ec8d4
--- /dev/null
+++ b/src/build/android/adb_command_line.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for reading / writing command-line flag files on device(s)."""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.tools import script_common
+from devil.utils import cmd_helper
+from devil.utils import logging_common
+
+
+def CheckBuildTypeSupportsFlags(device, command_line_flags_file):
+  is_webview = command_line_flags_file == 'webview-command-line'
+  if device.IsUserBuild() and is_webview:
+    raise device_errors.CommandFailedError(
+        'WebView only respects flags on a userdebug or eng device, yours '
+        'is a user build.', device)
+  elif device.IsUserBuild():
+    logging.warning(
+        'Your device (%s) is a user build; Chrome may or may not pick up '
+        'your commandline flags. Check your '
+        '"command_line_on_non_rooted_enabled" preference, or switch '
+        'devices.', device)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...]
+
+No flags: Prints existing command-line file.
+Empty string: Deletes command-line file.
+Otherwise: Writes command-line file.
+
+'''
+  parser.add_argument('--name', required=True,
+                      help='Name of file where to store flags on the device.')
+  parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+                      help='(deprecated) No longer used.')
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args, remote_args = parser.parse_known_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+                                                    default_retries=0)
+  all_devices = device_utils.DeviceUtils.parallel(devices)
+
+  if not remote_args:
+    # No args == do not update, just print flags.
+    remote_args = None
+    action = ''
+  elif len(remote_args) == 1 and not remote_args[0]:
+    # Single empty string arg == delete flags
+    remote_args = []
+    action = 'Deleted command line file. '
+  else:
+    action = 'Wrote command line file. '
+
+  def update_flags(device):
+    CheckBuildTypeSupportsFlags(device, args.name)
+    changer = flag_changer.FlagChanger(device, args.name)
+    if remote_args is not None:
+      flags = changer.ReplaceFlags(remote_args)
+    else:
+      flags = changer.GetCurrentFlags()
+    return (device, device.build_description, flags)
+
+  updated_values = all_devices.pMap(update_flags).pGet(None)
+
+  print('%sCurrent flags (in %s):' % (action, args.name))
+  for d, desc, flags in updated_values:
+    if flags:
+      # Shell-quote flags for easy copy/paste as new args on the terminal.
+      quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+    else:
+      quoted_flags = '( empty )'
+    print('  %s (%s): %s' % (d, desc, quoted_flags))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/adb_gdb b/src/build/android/adb_gdb
new file mode 100755
index 0000000..6de4273
--- /dev/null
+++ b/src/build/android/adb_gdb
@@ -0,0 +1,1000 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+      rm -f "$GDBSERVER_PIDFILE"
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+          "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER"
+IDE_DIR="$DEFAULT_PULL_LIBS_DIR"
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display a proper program name in usage and log messages.
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ADB=
+ANNOTATE=
+CGDB=
+GDBINIT=
+GDBSERVER=
+HELP=
+IDE=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+ATTACH_DELAY=1
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --device=*)
+      export ANDROID_SERIAL=$optarg
+      ;;
+    --annotate=3)
+      ANNOTATE=$optarg
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ide)
+      IDE=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --port=*)
+      PORT=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --attach-delay=*)
+      ATTACH_DELAY=$optarg
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --output-directory=*)
+      CHROMIUM_OUTPUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --cgdb)
+      CGDB=cgdb
+      ;;
+    --cgdb=*)
+      CGDB=$optarg
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    -*)
+      panic "Unknown option $opt, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/lib/                (used by GYP builds)
+  \$CHROMIUM_SRC/<out>/lib.unstripped/     (used by GN builds)
+
+Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory.
+
+You can set the path manually via --symbol-dir.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-lib options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toollchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and a device is not specified with either --device or ANDROID_SERIAL).
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to strongly improve the debugging experience, like allowing
+readable thread stacks and more. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --cgdb[=<file>]       Use cgdb (an interface for gdb that shows the code).
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --output-directory=<path> Specify the output directory (e.g. "out/Debug").
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --attach-delay=<num>  Seconds to wait for gdbserver to attach to the
+                        remote process before starting gdb. Default 1.
+                        <num> may be a float if your sleep(1) supports it.
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+  --device=<file>       ADB device serial to use (-s flag).
+  --port=<port>         Specify the tcp port to use.
+  --ide                 Forward gdb port, but do not enter gdb console.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --libs-dir=<path>     Specify system libraries extraction directory.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then
+  if [[ -e "build.ninja" ]]; then
+    CHROMIUM_OUTPUT_DIR=$PWD
+  else
+    panic "Please specify an output directory by using one of:
+       --output-directory=out/Debug
+       CHROMIUM_OUTPUT_DIR=out/Debug
+       Setting working directory to an output directory.
+       See --help."
+   fi
+fi
+
+if ls *.so >/dev/null 2>&1; then
+  panic ".so files found in your working directory. These will conflict with" \
+      "library lookup logic. Change your working directory and try again."
+fi
+
# Locate the directory that holds unstripped debug shared libraries under
# $CHROMIUM_OUTPUT_DIR and record it in the global SYMBOL_DIR.
#
# GN places unstripped libraries under <out>/lib.unstripped, while older
# GYP builds used <out>/lib; probe both, in that order.
#
# In: CHROMIUM_OUTPUT_DIR, CHROMIUM_SRC
# Out: nothing, but this sets SYMBOL_DIR
#
detect_symbol_dir () {
  local OUT_DIR="$CHROMIUM_OUTPUT_DIR"
  # Accept an output directory given relative to the Chromium checkout.
  [[ -e "$OUT_DIR" ]] || OUT_DIR="$CHROMIUM_SRC/$OUT_DIR"
  local CANDIDATE
  for CANDIDATE in "$OUT_DIR/lib.unstripped" "$OUT_DIR/lib"; do
    SYMBOL_DIR="$CANDIDATE"
    if [[ -n "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
      log "Auto-config: --symbol-dir=$SYMBOL_DIR"
      return
    fi
  done
  panic "Could not find any symbols under \
$OUT_DIR/lib{.unstripped}. Please build the program first!"
}
+
# Either auto-detect the symbol directory or validate the one given with
# --symbol-dir.
if [ -z "$SYMBOL_DIR" ]; then
  detect_symbol_dir
elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
  panic "Could not find any symbols under $SYMBOL_DIR"
fi

# Resolve the NDK root: ask pylib.constants when --ndk-dir was not given,
# otherwise sanity-check the user-provided directory.
if [ -z "$NDK_DIR" ]; then
  ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
else
  if [ ! -d "$NDK_DIR" ]; then
    panic "Invalid directory: $NDK_DIR"
  fi
  # ndk-build is present at the top of every NDK installation.
  if [ ! -f "$NDK_DIR/ndk-build" ]; then
    panic "Not a valid NDK directory: $NDK_DIR"
  fi
  ANDROID_NDK_ROOT=$NDK_DIR
fi

# Validate the optional --script file early, before any device work.
if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
  panic "Unknown --script file: $GDBINIT"
fi

# Check that ADB is in our path
if [ -z "$ADB" ]; then
  ADB=$(which adb 2>/dev/null)
  if [ -z "$ADB" ]; then
    panic "Can't find 'adb' tool in your path. Install it or use \
--adb=<file>"
  fi
  log "Auto-config: --adb=$ADB"
fi

# Check that it works minimally
ADB_VERSION=$($ADB version 2>/dev/null)
echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
if [ $? != 0 ]; then
  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
different one: $ADB"
fi

# If there are more than one device connected, and ANDROID_SERIAL is not
# defined, print an error message.
# 'adb devices' prints a header line and a trailing blank line, hence the
# "plus 2" in the name and the comparison against 3 for >1 device.
NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
  echo "ERROR: There is more than one Android device connected to ADB."
  echo "Please define ANDROID_SERIAL to specify which one to use."
  exit 1
fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
+
# Find the target architecture from a local shared library.
# This returns an NDK-compatible architecture name.
# In: SYMBOL_DIR
# Out: NDK Architecture name, or empty string.
get_gyp_target_arch () {
  # ls prints a broken pipe error when there are a lot of libs.
  local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1)
  local SO_DESC=$(file $RANDOM_LIB)
  # Match the ELF description emitted by file(1), e.g.
  # "ELF 32-bit LSB shared object, ARM, ...".
  # FIX: this previously matched against $ARCH, which is never set at this
  # point in the script, so $SO_DESC was dead and auto-detection always
  # returned the empty string (forcing the 'arm' fallback).
  case $SO_DESC in
    *32-bit*ARM,*) echo "arm";;
    *64-bit*ARM,*) echo "arm64";;
    *32-bit*Intel,*) echo "x86";;
    *x86-64,*) echo "x86_64";;
    *32-bit*MIPS,*) echo "mips";;
    *) echo "";
  esac
}
+
# Auto-detect the target architecture from the symbol libraries when the
# user did not pass --target-arch; fall back to 'arm' if detection fails.
if [ -z "$TARGET_ARCH" ]; then
  TARGET_ARCH=$(get_gyp_target_arch)
  if [ -z "$TARGET_ARCH" ]; then
    TARGET_ARCH=arm
  fi
else
  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
  # script prefers the NDK 'x86' name instead because it uses it to find
  # NDK-specific files (host gdb) with it.
  if [ "$TARGET_ARCH" = "ia32" ]; then
    TARGET_ARCH=x86
    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
  fi
fi
+
# Detect the NDK system name, i.e. the name used to identify the host.
# The result is cached in the global NDK_HOST_SYSTEM on first use.
# out: NDK system name (e.g. 'linux' or 'darwin')
get_ndk_host_system () {
  if [ -z "$NDK_HOST_SYSTEM" ]; then
    local UNAME_S
    UNAME_S=$(uname -s)
    case $UNAME_S in
      Linux) NDK_HOST_SYSTEM=linux;;
      Darwin) NDK_HOST_SYSTEM=darwin;;
      *) panic "You can't run this script on this system: $UNAME_S";;
    esac
  fi
  echo "$NDK_HOST_SYSTEM"
}
+
# Detect the NDK host architecture name.
# The result is cached in the global NDK_HOST_ARCH on first use.
# out: NDK arch name (e.g. 'x86' or 'x86_64')
get_ndk_host_arch () {
  local HOST_ARCH HOST_OS
  if [ -z "$NDK_HOST_ARCH" ]; then
    HOST_OS=$(get_ndk_host_system)
    HOST_ARCH=$(uname -p)
    if [ "$HOST_ARCH" = "unknown" ]; then
      # In case where "-p" returns "unknown" just use "-m" (machine hardware
      # name). According to this patch from Fedora "-p" is equivalent to "-m"
      # anyway: https://goo.gl/Pd47x3
      HOST_ARCH=$(uname -m)
    fi
    case $HOST_ARCH in
      i?86) NDK_HOST_ARCH=x86;;
      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
    esac
    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
      # implementations of the tool. See http://b.android.com/53769
      # Inspect the user's shell binary as a proxy for the OS bitness.
      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
      if [ "$HOST_64BITS" ]; then
        NDK_HOST_ARCH=x86_64
      fi
    fi
  fi
  echo "$NDK_HOST_ARCH"
}
+
# Convert an NDK architecture name into a GNU configure triplet.
# $1: NDK architecture name (e.g. 'arm')
# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
get_arch_gnu_config () {
  case $1 in
    arm)
      echo "arm-linux-androideabi"
      ;;
    arm64)
      echo "aarch64-linux-android"
      ;;
    x86)
      echo "i686-linux-android"
      ;;
    x86_64)
      echo "x86_64-linux-android"
      ;;
    mips)
      echo "mipsel-linux-android"
      ;;
    *)
      # Fallback for unknown architecture names.
      # FIX: use the function's own argument ($1) instead of the caller's
      # $ARCH global, which only happened to work through bash dynamic
      # scoping and is empty when no caller defines ARCH.
      echo "$1-linux-android"
      ;;
  esac
}
+
# Convert an NDK architecture name into a toolchain name prefix
# $1: NDK architecture name (e.g. 'arm')
# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
get_arch_toolchain_prefix () {
  case $1 in
    # The x86 and x86_64 toolchain directories are named after the bare
    # architecture, unlike all other arches which use the GNU triplet.
    x86|x86_64)
      echo "$1"
      ;;
    *)
      get_arch_gnu_config $1
      ;;
  esac
}
+
# Find a NDK toolchain prebuilt file or sub-directory.
# Probes the arch-specific GCC 4.9 toolchain directory first, then falls
# back to the 4.8 one.
# $1: NDK install path
# $2: NDK architecture name
# $3: prebuilt sub-path to look for.
# Out: file path, or empty if none is found.
get_ndk_toolchain_prebuilt () {
  local ROOT="${1%/}"
  local PREFIX="$(get_arch_toolchain_prefix "$2")"
  local VERSION CANDIDATE
  for VERSION in 4.9 4.8; do
    CANDIDATE=$ROOT/toolchains/$PREFIX-$VERSION/prebuilt/$3
    if [ -f "$CANDIDATE" ]; then
      echo "$CANDIDATE"
      return
    fi
  done
  # Nothing found: emit an empty line, matching the original contract.
  echo ""
}
+
# Find the path to an NDK's toolchain full prefix for a given architecture
# $1: NDK install path
# $2: NDK target architecture name
# Out: install path + binary prefix (e.g.
#      ".../path/to/bin/arm-linux-androideabi-")
get_ndk_toolchain_fullprefix () {
  local NDK_DIR="$1"
  local ARCH="$2"
  local TARGET NAME HOST_OS HOST_ARCH LD CONFIG

  # NOTE: This will need to be updated if the NDK changes the names or moves
  #        the location of its prebuilt toolchains.
  #
  # Strategy: locate the toolchain's 'ld' binary, then strip the trailing
  # 'ld' to obtain the common binary prefix. Three probes, in order:
  # native host dir, 32-bit host dir on x86_64 hosts, and the legacy
  # misnamed x86 toolchain.
  LD=
  HOST_OS=$(get_ndk_host_system)
  HOST_ARCH=$(get_ndk_host_arch)
  CONFIG=$(get_arch_gnu_config $ARCH)
  LD=$(get_ndk_toolchain_prebuilt \
        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld")
  if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then
    LD=$(get_ndk_toolchain_prebuilt \
         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld")
  fi
  if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then
    # Special case, the x86 toolchain used to be incorrectly
    # named i686-android-linux-gcc!
    LD=$(get_ndk_toolchain_prebuilt \
         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld")
  fi
  if [ -z "$LD" ]; then
    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
Please verify your NDK installation!"
  fi
  # Strip the trailing 'ld' so callers can append any tool name.
  echo "${LD%%ld}"
}
+
# Return the path of the host gdb client bundled with the NDK.
# $1: NDK install path
get_ndk_host_gdb_client() {
  local OS ARCH
  OS=$(get_ndk_host_system)
  ARCH=$(get_ndk_host_arch)
  echo "$1/prebuilt/$OS-$ARCH/bin/gdb"
}
+
# Return the path of the NDK gdbserver binary for a target architecture.
# $1: NDK install path
# $2: target architecture.
get_ndk_gdbserver () {
  # NDK releases after r8 keep gdbserver under prebuilt/android-<arch>/;
  # older releases shipped it inside the toolchain directory.
  local CANDIDATE="$1/prebuilt/android-$2/gdbserver/gdbserver"
  if [ -f "$CANDIDATE" ]; then
    echo "$CANDIDATE"
  else
    get_ndk_toolchain_prebuilt "$1" "$2" gdbserver
  fi
}
+
# Check/probe the path to the Android toolchain installation. Always
# use the NDK versions of gdb and gdbserver. They must match to avoid
# issues when both binaries do not speak the same wire protocol.
#
if [ -z "$TOOLCHAIN" ]; then
  # Derive the bin/ directory from the toolchain's full binary prefix.
  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
else
  # Be flexible, allow one to specify either the install path or the bin
  # sub-directory in --toolchain:
  #
  if [ -d "$TOOLCHAIN/bin" ]; then
    TOOLCHAIN=$TOOLCHAIN/bin
  fi
  ANDROID_TOOLCHAIN=$TOOLCHAIN
fi

# Cosmetic: Remove trailing directory separator.
ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
# Find host GDB client binary
if [ -z "$GDB" ]; then
  GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT")
  if [ -z "$GDB" ]; then
    panic "Can't find Android gdb client in your path, check your \
--toolchain or --gdb path."
  fi
  log "Host gdb client: $GDB"
fi

# Find gdbserver binary, we will later push it to /data/local/tmp
# This ensures that both gdbserver and $GDB talk the same binary protocol,
# otherwise weird problems will appear.
#
if [ -z "$GDBSERVER" ]; then
  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
  if [ -z "$GDBSERVER" ]; then
    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
valid one!"
  fi
  log "Auto-config: --gdbserver=$GDBSERVER"
fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
# Return the timestamp of a given file, as number of seconds since epoch.
# $1: file path
# Out: file timestamp (empty if the file does not exist)
get_file_timestamp () {
  # 'stat -c' is GNU coreutils syntax; fall back to the BSD/macOS form
  # ('-f %m') since this script also supports Darwin hosts. On GNU, the
  # fallback is only reached when the file is missing and fails the same
  # way, so the output contract is unchanged.
  stat -c %Y "$1" 2>/dev/null || stat -f %m "$1" 2>/dev/null
}
+
# Allow several concurrent debugging sessions
# Resolve the app's data directory via run-as; this doubles as a check
# that the package exists and is debuggable.
APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd)
fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?"
TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID"
TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID

# Select correct app_process for architecture.
case $TARGET_ARCH in
      arm|x86|mips) GDBEXEC=app_process32;;
      arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;;
      *) panic "Unknown app_process for architecture!";;
esac

# Default to app_process if bit-width specific process isn't found.
adb_shell ls /system/bin/$GDBEXEC > /dev/null
if [ $? != 0 ]; then
    GDBEXEC=app_process
fi

# Detect AddressSanitizer setup on the device. In that case app_process is a
# script, and the real executable is app_process.real.
GDBEXEC_ASAN=app_process.real
adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null
if [ $? == 0 ]; then
    GDBEXEC=$GDBEXEC_ASAN
fi
+
# Decide where pulled system libraries are cached, keyed per device
# serial and bitness so multiple devices don't clobber each other.
ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
if [[ -n "$ANDROID_SERIAL" ]]; then
  DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT"
fi
PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}

# Use the device's build fingerprint to decide whether the cached
# libraries are still valid for the connected device.
HOST_FINGERPRINT=
DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint"
log "Device build fingerprint: $DEVICE_FINGERPRINT"

if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then
  log "Auto-config: --pull-libs  (no cached libraries)"
  PULL_LIBS=true
else
  HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint")
  log "Host build fingerprint:   $HOST_FINGERPRINT"
  if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
    log "Auto-config: --no-pull-libs (fingerprint match)"
    NO_PULL_LIBS=true
  else
    log "Auto-config: --pull-libs  (fingerprint mismatch)"
    PULL_LIBS=true
  fi
fi

# If requested, work for M-x gdb.  The gdb indirections make it
# difficult to pass --annotate=3 to the gdb binary itself.
if [ "$ANNOTATE" ]; then
  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
fi
+
# Get the PID from the first argument or else find the PID of the
# browser process.
if [ -z "$PID" ]; then
  PROCESSNAME=$PACKAGE_NAME
  if [ -z "$PID" ]; then
    # 'ps' column 9 is the process name, column 2 the PID; take the first
    # match if the app has several processes.
    PID=$(adb_shell ps | \
          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
  fi
  if [ -z "$PID" ]; then
    panic "Can't find application process PID."
  fi
  log "Found process PID: $PID"
fi
+
# Determine if 'adb shell' runs as root or not.
# If so, we can launch gdbserver directly, otherwise, we have to
# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
#
if [ "$SU_PREFIX" ]; then
  # Need to check that this works properly.
  # Probe by echoing a known token through the su prefix and verifying
  # it round-trips.
  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
    echo "$ adb shell $SU_PREFIX \"echo foo\""
    cat $SU_PREFIX_TEST_LOG
    exit 1
  fi
  COMMAND_PREFIX="$SU_PREFIX \""
  COMMAND_SUFFIX="\""
else
  # Read the shell's own UID on the device; 0 means adbd runs as root.
  SHELL_UID=$("$ADB" shell cat /proc/self/status | \
              awk '$1 == "Uid:" { print $2; }')
  log "Shell UID: $SHELL_UID"
  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
    COMMAND_PREFIX="run-as $PACKAGE_NAME"
    COMMAND_SUFFIX=
  else
    COMMAND_PREFIX=
    COMMAND_SUFFIX=
  fi
fi
log "Command prefix: '$COMMAND_PREFIX'"
log "Command suffix: '$COMMAND_SUFFIX'"
+
mkdir -p "$PULL_LIBS_DIR"
fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"

# Pull device's system libraries that are mapped by our process.
# Pulling all system libraries is too long, so determine which ones
# we need by looking at /proc/$PID/maps instead
if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
  echo "Extracting system libraries into: $PULL_LIBS_DIR"
  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
  if [ $? != 0 ]; then
    echo "ERROR: Could not list process's memory mappings."
    if [ "$SU_PREFIX" ]; then
      panic "Are you sure your --su-prefix is correct?"
    else
      panic "Use --su-prefix if the application is not debuggable."
    fi
  fi
  # Remove the fingerprint file in case pulling one of the libs fails.
  rm -f "$PULL_LIBS_DIR/build.fingerprint"
  # Column 6 of /proc/<pid>/maps is the mapped file path; keep only
  # system/apex/vendor shared objects, deduplicated.
  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
      awk '$6 ~ /\/(system|apex|vendor)\/.*\.so$/ { print $6; }' | sort -u)
  # Also pull the dynamic linker matching the target bitness.
  for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do
    echo "Pulling from device: $SYSLIB"
    DST_FILE=$PULL_LIBS_DIR$SYSLIB
    DST_DIR=$(dirname "$DST_FILE")
    mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null
    fail_panic "Could not pull $SYSLIB from device !?"
  done
  # Only write the fingerprint once every pull succeeded, so a partial
  # cache is re-pulled next run.
  echo "Writing the device fingerprint"
  echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint"
fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+SOLIB_DIRS=${SOLIB_DIRS%:}  # Strip trailing :
+
+# Applications with minSdkVersion >= 24 will have their data directories
+# created with rwx------ permissions, preventing adbd from forwarding to
+# the gdbserver socket.
+adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \
+    adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \
+    adb_shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
if [ -z "$PORT" ]; then
  # Random port to allow multiple concurrent sessions.
  PORT=$(( $RANDOM % 1000 + 5039 ))
fi
HOST_PORT=$PORT
# gdbserver listens on a Unix domain socket inside the app's data dir;
# adb forwards the host TCP port to it.
TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT

# Setup network redirection
log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)"
"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET
fail_panic "Could not setup network redirection from \
host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET"
+
# Start gdbserver in the background
# Note that using run-as requires the package to be debuggable.
#
# If not, this will fail horribly. The alternative is to run the
# program as root, which requires of course root privileges.
# Maybe we should add a --root option to enable this?
#
# Two attempts are made: when the first attach fails because a stale
# gdbserver is still tracing the target process, that server is killed
# and the attach retried once.

for i in 1 2; do
  log "Starting gdbserver in the background:"
  GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
  log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \
    --once +$TARGET_DOMAIN_SOCKET \
    --attach $PID $COMMAND_SUFFIX"
  "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \
    --once +$TARGET_DOMAIN_SOCKET \
    --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 &
  GDBSERVER_PID=$!
  echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
  log "background job pid: $GDBSERVER_PID"

  # Sleep to allow gdbserver to attach to the remote process and be
  # ready to connect to.
  log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive"
  sleep "$ATTACH_DELAY"
  log "Job control: $(jobs -l)"
  STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
  if [ "$STATE" != "Running" ]; then
    pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null)
    if [[ -n "$pid_msg" ]]; then
      old_pid=${pid_msg##* }
      old_pid=${old_pid//[$'\r\n']}  # Trim trailing \r.
      echo "Killing previous gdb server process (pid=$old_pid)"
      adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX
      continue
    fi
    echo "ERROR: GDBServer either failed to run or attach to PID $PID!"
    echo "Here is the output from gdbserver (also try --verbose for more):"
    echo "===== gdbserver.log start ====="
    cat $GDBSERVER_LOG
    # FIX: a stray '=' was prepended to this echo argument, corrupting
    # the log footer line.
    echo "===== gdbserver.log end ======"
    exit 1
  fi
  break
done
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+cat > "$COMMANDS" <<EOF
+set osabi GNU/Linux  # Copied from ndk-gdb.py.
+set print pretty 1
+python
+import sys
+sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')
+try:
+  import gdb_chrome
+finally:
+  sys.path.pop(0)
+end
+file $TMPDIR/$GDBEXEC
+directory $CHROMIUM_OUTPUT_DIR
+set solib-absolute-prefix $PULL_LIBS_DIR
+set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR
+
+python
+# Copied from ndk-gdb.py:
+def target_remote_with_retry(target, timeout_seconds):
+  import time
+  end_time = time.time() + timeout_seconds
+  while True:
+    try:
+      gdb.execute('target remote ' + target)
+      return True
+    except gdb.error as e:
+      time_left = end_time - time.time()
+      if time_left < 0 or time_left > timeout_seconds:
+        print("Error: unable to connect to device.")
+        print(e)
+        return False
+      time.sleep(min(0.25, time_left))
+
+print("Connecting to :$HOST_PORT...")
+if target_remote_with_retry(':$HOST_PORT', 5):
+  print("Attached! Reading symbols (takes ~30 seconds).")
+end
+EOF
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> "$COMMANDS"
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat "$COMMANDS"
+  echo "### END $COMMANDS"
+fi
+
# --ide mode: keep the forwarded port open and expose stable symlinks
# (gdb wrapper, app executable, gdbinit) for an IDE to attach to,
# instead of entering the interactive gdb console.
if [ "$IDE" ]; then
  mkdir -p "$IDE_DIR"
  SYM_GDB="$IDE_DIR/gdb"
  SYM_EXE="$IDE_DIR/app_process"
  SYM_INIT="$IDE_DIR/gdbinit"
  ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE"
  ln -sf "$COMMANDS" "$SYM_INIT"
  # gdb doesn't work when symlinked, so create a wrapper.
  echo
  cat > $SYM_GDB <<EOF
#!/bin/sh
exec $GDB "\$@"
EOF
  chmod u+x $SYM_GDB

  echo "GDB server listening on: localhost:$PORT"
  echo "GDB wrapper script: $SYM_GDB"
  echo "App executable: $SYM_EXE"
  echo "gdbinit: $SYM_INIT"
  echo "Connect with vscode: https://chromium.googlesource.com/chromium/src/+/master/docs/vscode.md#Launch-Commands"
  echo "Showing gdbserver logs. Press Ctrl-C to disconnect."
  tail -f "$GDBSERVER_LOG"
else
  # Interactive mode: hand control to gdb (or cgdb when requested),
  # feeding it the generated init commands.
  log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
  echo "Server log: $GDBSERVER_LOG"
  if [ "$CGDB" ]; then
    $CGDB -d $GDB -- $GDB_ARGS -x "$COMMANDS"
  else
    $GDB $GDB_ARGS -x "$COMMANDS"
  fi
fi
diff --git a/src/build/android/adb_install_apk.py b/src/build/android/adb_install_apk.py
new file mode 100755
index 0000000..6ec98e2
--- /dev/null
+++ b/src/build/android/adb_install_apk.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+
+
def main():
  """Parse arguments, locate the APK, and install it on target devices.

  Returns None (the process exits 0 on success); parser.error() exits 2
  for a missing APK, and per-device install failures are logged and
  optionally recorded in the denylist rather than raised.
  """
  parser = argparse.ArgumentParser()

  # Exactly one of the deprecated --apk flag or the positional path.
  apk_group = parser.add_mutually_exclusive_group(required=True)
  apk_group.add_argument('--apk', dest='apk_name',
                         help='DEPRECATED The name of the apk containing the'
                              ' application (with the .apk extension).')
  apk_group.add_argument('apk_path', nargs='?',
                         help='The path to the APK to install.')

  # TODO(jbudorick): Remove once no clients pass --apk_package
  parser.add_argument('--apk_package', help='DEPRECATED unused')
  parser.add_argument('--split',
                      action='append',
                      dest='splits',
                      help='A glob matching the apk splits. '
                           'Can be specified multiple times.')
  parser.add_argument('--keep_data',
                      action='store_true',
                      default=False,
                      help='Keep the package data when installing '
                           'the application.')
  parser.add_argument('--debug', action='store_const', const='Debug',
                      dest='build_type',
                      default=os.environ.get('BUILDTYPE', 'Debug'),
                      help='If set, run test suites under out/Debug. '
                           'Default is env var BUILDTYPE or Debug')
  parser.add_argument('--release', action='store_const', const='Release',
                      dest='build_type',
                      help='If set, run test suites under out/Release. '
                           'Default is env var BUILDTYPE or Debug.')
  parser.add_argument('-d', '--device', dest='devices', action='append',
                      default=[],
                      help='Target device for apk to install on. Enter multiple'
                           ' times for multiple devices.')
  parser.add_argument('--adb-path', type=os.path.abspath,
                      help='Absolute path to the adb binary to use.')
  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Enable verbose logging.')
  parser.add_argument('--downgrade', action='store_true',
                      help='If set, allows downgrading of apk.')
  parser.add_argument('--timeout', type=int,
                      default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT,
                      help='Seconds to wait for APK installation. '
                           '(default: %(default)s)')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose)
  constants.SetBuildType(args.build_type)

  devil_chromium.Initialize(
      output_directory=constants.GetOutDirectory(),
      adb_path=args.adb_path)

  # Resolve the APK: try the given name/path first, then look under
  # <out>/apks/ for bare names.
  apk = args.apk_path or args.apk_name
  if not apk.endswith('.apk'):
    apk += '.apk'
  if not os.path.exists(apk):
    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
    if not os.path.exists(apk):
      parser.error('%s not found.' % apk)

  # Collect split APKs that belong to the same package as the base APK.
  # Note: 'splits' is only defined when --split was passed; the closure
  # below only reads it behind the same args.splits check.
  if args.splits:
    splits = []
    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
    for split_glob in args.splits:
      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
      if not apks:
        logging.warning('No apks matched for %s.', split_glob)
      for f in apks:
        helper = apk_helper.ApkHelper(f)
        if (helper.GetPackageName() == base_apk_package
            and helper.GetSplitName()):
          splits.append(f)

  denylist = (device_denylist.Denylist(args.denylist_file)
              if args.denylist_file else None)
  devices = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
                                                    device_arg=args.devices)

  def denylisting_install(device):
    # Install on one device; on failure or timeout, log and (when a
    # denylist file was given) denylist the device instead of raising.
    try:
      if args.splits:
        device.InstallSplitApk(apk, splits, reinstall=args.keep_data,
                               allow_downgrade=args.downgrade)
      else:
        device.Install(apk, reinstall=args.keep_data,
                       allow_downgrade=args.downgrade,
                       timeout=args.timeout)
    except (device_errors.CommandFailedError,
            device_errors.DeviceUnreachableError):
      logging.exception('Failed to install %s', apk)
      if denylist:
        denylist.Extend([str(device)], reason='install_failure')
        logging.warning('Denylisting %s', str(device))
    except device_errors.CommandTimeoutError:
      logging.exception('Timed out while installing %s', apk)
      if denylist:
        denylist.Extend([str(device)], reason='install_timeout')
        logging.warning('Denylisting %s', str(device))

  # Install on all healthy devices in parallel.
  device_utils.DeviceUtils.parallel(devices).pMap(denylisting_install)
+
+
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == '__main__':
  sys.exit(main())
diff --git a/src/build/android/adb_logcat_monitor.py b/src/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..a919722
--- /dev/null
+++ b/src/build/android/adb_logcat_monitor.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+from __future__ import print_function
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
# Map from device_id -> (process, logcat_num)
# Module-level registry of logcat children: the running Popen object (or
# None before the first spawn) and the sequence number for the next
# logcat output file.
devices = {}
+
+
class TimeoutException(Exception):
  """Raised by the SIGALRM handler when 'adb devices' takes too long."""
+
+
class SigtermError(Exception):
  """Raised by the SIGTERM handler to trigger a clean shutdown."""
+
+
def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
  """Spawns a adb logcat process if one is not currently running.

  Reads and updates the module-level 'devices' map; device_id must
  already have an entry. If the previous logcat process died, its stderr
  is drained into the event log (minus the benign 'waiting for device'
  lines) before a replacement is started.
  """
  process, logcat_num = devices[device_id]
  if process:
    if process.poll() is None:
      # Logcat process is still happily running
      return
    else:
      logging.info('Logcat for device %s has died', device_id)
      error_filter = re.compile('- waiting for device -')
      for line in process.stderr:
        if not error_filter.match(line):
          logging.error(device_id + ':   ' + line)

  logging.info('Starting logcat %d for device %s', logcat_num,
               device_id)
  # Sequence number in the filename keeps successive logcats from the
  # same device in separate files.
  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
  # NOTE(review): the file object is intentionally left open — the child
  # process writes to it for its lifetime; it is never closed here.
  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
  process = subprocess.Popen([adb_cmd, '-s', device_id,
                              'logcat', '-v', 'threadtime'],
                             stdout=logcat_file,
                             stderr=subprocess.PIPE)
  devices[device_id] = (process, logcat_num + 1)
+
+
def GetAttachedDevices(adb_cmd):
  """Gets the device list from adb.

  We use an alarm in this function to avoid deadlocking from an external
  dependency.

  Args:
    adb_cmd: binary to run adb

  Returns:
    list of devices or an empty list on timeout
  """
  # Arm a 2-second SIGALRM; the handler installed in main() raises
  # TimeoutException, which is caught below.
  signal.alarm(2)
  try:
    out, err = subprocess.Popen([adb_cmd, 'devices'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE).communicate()
    if err:
      logging.warning('adb device error %s', err.strip())
    # 'adb devices' lists one "<serial>\tdevice" line per ready device.
    return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
  except TimeoutException:
    logging.warning('"adb devices" command timed out')
    return []
  except (IOError, OSError):
    logging.exception('Exception from "adb devices"')
    return []
  finally:
    # Always disarm the alarm so it cannot fire later in the main loop.
    signal.alarm(0)
+
+
def main(base_dir, adb_cmd='adb'):
  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill.

  Args:
    base_dir: directory where logcats, the event log and the PID file are
      written. Any previous contents are removed.
    adb_cmd: adb binary to invoke.
  """
  # We create the directory to ensure 'run once' semantics
  if os.path.exists(base_dir):
    print('adb_logcat_monitor: %s already exists? Cleaning' % base_dir)
    shutil.rmtree(base_dir, ignore_errors=True)

  os.makedirs(base_dir)
  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
                      level=logging.INFO,
                      format='%(asctime)-2s %(levelname)-8s %(message)s')

  # Set up the alarm for calling 'adb devices'. This is to ensure
  # our script doesn't get stuck waiting for a process response
  def TimeoutHandler(_signum, _unused_frame):
    raise TimeoutException()
  signal.signal(signal.SIGALRM, TimeoutHandler)

  # Handle SIGTERMs to ensure clean shutdown
  def SigtermHandler(_signum, _unused_frame):
    raise SigtermError()
  signal.signal(signal.SIGTERM, SigtermHandler)

  logging.info('Started with pid %d', os.getpid())
  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')

  try:
    with open(pid_file_path, 'w') as f:
      f.write(str(os.getpid()))
    while True:
      for device_id in GetAttachedDevices(adb_cmd):
        if not device_id in devices:
          # Clear the device-side buffer so the first logcat does not
          # replay output that predates this monitor run.
          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
          devices[device_id] = (None, 0)

      for device in devices:
        # This will spawn logcat watchers for any device ever detected
        StartLogcatIfNecessary(device, adb_cmd, base_dir)

      time.sleep(5)
  except SigtermError:
    logging.info('Received SIGTERM, shutting down')
  except: # pylint: disable=bare-except
    logging.exception('Unexpected exception in main.')
  finally:
    # FIX: use values() instead of the Python 2-only itervalues() so the
    # cleanup also works under Python 3.
    for process, _ in devices.values():
      if process:
        try:
          process.terminate()
        except OSError:
          pass
    # FIX: the PID file may not exist if we failed before writing it;
    # don't let the cleanup raise and mask the original exception.
    try:
      os.remove(pid_file_path)
    except OSError:
      pass
+
+
if __name__ == '__main__':
  # Usage: <base_dir> [<adb_binary_path>] — one or two arguments.
  if 2 <= len(sys.argv) <= 3:
    print('adb_logcat_monitor: Initializing')
    sys.exit(main(*sys.argv[1:3]))

  print('Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0])
diff --git a/src/build/android/adb_logcat_printer.py b/src/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..a715170
--- /dev/null
+++ b/src/build/android/adb_logcat_printer.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to stdout, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  if options.output_path:
+    if not os.path.exists(os.path.dirname(options.output_path)):
+      logger.warning('Output dir %s doesn\'t exist. Creating it.',
+                      os.path.dirname(options.output_path))
+      os.makedirs(os.path.dirname(options.output_path))
+    output_file = open(options.output_path, 'w')
+    logger.info('Dumping logcat to local file %s. If running in a build, '
+                'this file will likely be uploaded to google storage '
+                'in a later step. It can be downloaded from there.',
+                options.output_path)
+  else:
+    output_file = sys.stdout
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since
+    # adb_logcat_monitor.py command will have spawned more than 5 seconds
+    # prior to calling this shell script.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_profile_chrome b/src/build/android/adb_profile_chrome
new file mode 100755
index 0000000..d3244ff
--- /dev/null
+++ b/src/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/src/build/android/adb_profile_chrome_startup b/src/build/android/adb_profile_chrome_startup
new file mode 100755
index 0000000..d5836cd
--- /dev/null
+++ b/src/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/src/build/android/adb_reverse_forwarder.py b/src/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..90d3139
--- /dev/null
+++ b/src/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import argparse
+import sys
+import time
+
+import devil_chromium
+
+from devil.android import device_denylist
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.utils import run_tests_helper
+
+from pylib import constants
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      usage='Usage: %(prog)s [options] device_port '
+            'host_port [device_port_2 host_port_2] ...',
+      description=__doc__)
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count',
+      default=0,
+      action='count',
+      help='Verbose level (multiple times for more)')
+  parser.add_argument(
+      '--device',
+      help='Serial number of device we should use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument(
+      '--debug',
+      action='store_const',
+      const='Debug',
+      dest='build_type',
+      default='Release',
+      help='DEPRECATED: use --output-directory instead.')
+  parser.add_argument(
+      '--output-directory',
+      help='Path to the root build directory.')
+  parser.add_argument(
+      'ports',
+      nargs='+',
+      type=int,
+      help='Port pair to reverse forward.')
+
+  args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(args.verbose_count)
+
+  if len(args.ports) < 2 or len(args.ports) % 2:
+    parser.error('Need even number of port pairs')
+
+  port_pairs = zip(args.ports[::2], args.ports[1::2])
+
+  if args.build_type:
+    constants.SetBuildType(args.build_type)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  device = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
+                                                   device_arg=args.device)[0]
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_system_webview_command_line b/src/build/android/adb_system_webview_command_line
new file mode 100755
index 0000000..a0d2705
--- /dev/null
+++ b/src/build/android/adb_system_webview_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_system_webview_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_system_webview_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name webview-command-line "$@"
diff --git a/src/build/android/android_only_explicit_jni_exports.lst b/src/build/android/android_only_explicit_jni_exports.lst
new file mode 100644
index 0000000..f989691
--- /dev/null
+++ b/src/build/android/android_only_explicit_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only JNI_OnLoad.
+# Should be used for libraries that do explicit JNI registration.
+
+{
+  global:
+    JNI_OnLoad;
+  local:
+    *;
+};
diff --git a/src/build/android/android_only_jni_exports.lst b/src/build/android/android_only_jni_exports.lst
new file mode 100644
index 0000000..1336fee
--- /dev/null
+++ b/src/build/android/android_only_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only symbols required for JNI to work.
+
+{
+  global:
+    JNI_OnLoad;
+    Java_*;
+  local:
+    *;
+};
diff --git a/src/build/android/apk_operations.py b/src/build/android/apk_operations.py
new file mode 100755
index 0000000..d6cd583
--- /dev/null
+++ b/src/build/android/apk_operations.py
@@ -0,0 +1,1970 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Using colorama.Fore/Back/Style members
+# pylint: disable=no-member
+
+from __future__ import print_function
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import posixpath
+import random
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+
+import adb_command_line
+import devil_chromium
+from devil import devil_env
+from devil.android import apk_helper
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.sdk import adb_wrapper
+from devil.android.sdk import build_tools
+from devil.android.sdk import intent
+from devil.android.sdk import version_codes
+from devil.utils import run_tests_helper
+
+_DIR_SOURCE_ROOT = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '..', '..'))
+_JAVA_HOME = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')):
+  import colorama
+
+from incremental_install import installer
+from pylib import constants
+from pylib.symbols import deobfuscator
+from pylib.utils import simpleperf
+from pylib.utils import app_bundle_utils
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')):
+  import bundletool
+
+BASE_MODULE = 'base'
+
+
+def _Colorize(text, style=''):
+  return (style
+      + text
+      + colorama.Style.RESET_ALL)
+
+
+def _InstallApk(devices, apk, install_dict):
+  def install(device):
+    if install_dict:
+      installer.Install(device, install_dict, apk=apk, permissions=[])
+    else:
+      device.Install(apk, permissions=[], allow_downgrade=True, reinstall=True)
+
+  logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
+  device_utils.DeviceUtils.parallel(devices).pMap(install)
+
+
+# A named tuple containing the information needed to convert a bundle into
+# an installable .apks archive.
+# Fields:
+#   bundle_path: Path to input bundle file.
+#   bundle_apks_path: Path to output bundle .apks archive file.
+#   aapt2_path: Path to aapt2 tool.
+#   keystore_path: Path to keystore file.
+#   keystore_password: Password for the keystore file.
+#   keystore_alias: Signing key name alias within the keystore file.
+#   system_image_locales: List of Chromium locales to include in system .apks.
+BundleGenerationInfo = collections.namedtuple(
+    'BundleGenerationInfo',
+    'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,'
+    'keystore_alias,system_image_locales')
+
+
+def _GenerateBundleApks(info,
+                        output_path=None,
+                        minimal=False,
+                        minimal_sdk_version=None,
+                        mode=None,
+                        optimize_for=None):
+  """Generate an .apks archive from a bundle on demand.
+
+  Args:
+    info: A BundleGenerationInfo instance.
+    output_path: Path of output .apks archive.
+    minimal: Create the minimal set of apks possible (english-only).
+    minimal_sdk_version: When minimal=True, use this sdkVersion.
+    mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+    optimize_for: Override split config, either None, or one of
+      app_bundle_utils.OPTIMIZE_FOR_OPTIONS.
+  """
+  logging.info('Generating .apks file')
+  app_bundle_utils.GenerateBundleApks(
+      info.bundle_path,
+      # Store .apks file beside the .aab file by default so that it gets cached.
+      output_path or info.bundle_apks_path,
+      info.aapt2_path,
+      info.keystore_path,
+      info.keystore_password,
+      info.keystore_alias,
+      system_image_locales=info.system_image_locales,
+      mode=mode,
+      minimal=minimal,
+      minimal_sdk_version=minimal_sdk_version,
+      optimize_for=optimize_for)
+
+
+def _InstallBundle(devices, apk_helper_instance, package_name,
+                   command_line_flags_file, modules, fake_modules):
+  # Path Chrome creates after validating fake modules. This needs to be cleared
+  # for pushed fake modules to be picked up.
+  SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
+  # Chrome command line flag needed for fake modules to work.
+  FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
+
+  def ShouldWarnFakeFeatureModuleInstallFlag(device):
+    if command_line_flags_file:
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
+    return False
+
+  def ClearFakeModules(device):
+    if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
+      device.RemovePath(
+          SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
+      logging.info('Removed %s', SPLITCOMPAT_PATH)
+    else:
+      logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+
+  def Install(device):
+    ClearFakeModules(device)
+    if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device):
+      # Print warning if command line is not set up for fake modules.
+      msg = ('Command line has no %s: Fake modules will be ignored.' %
+             FAKE_FEATURE_MODULE_INSTALL)
+      print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
+
+    device.Install(
+        apk_helper_instance,
+        permissions=[],
+        modules=modules,
+        fake_modules=fake_modules,
+        allow_downgrade=True)
+
+  # Basic checks for |modules| and |fake_modules|.
+  # * |fake_modules| cannot include 'base'.
+  # * If |fake_modules| is given, ensure |modules| includes 'base'.
+  # * They must be disjoint (checked by device.Install).
+  modules_set = set(modules) if modules else set()
+  fake_modules_set = set(fake_modules) if fake_modules else set()
+  if BASE_MODULE in fake_modules_set:
+    raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE))
+  if fake_modules_set and BASE_MODULE not in modules_set:
+    raise Exception(
+        '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
+
+  logging.info('Installing bundle.')
+  device_utils.DeviceUtils.parallel(devices).pMap(Install)
+
+
+def _UninstallApk(devices, install_dict, package_name):
+  def uninstall(device):
+    if install_dict:
+      installer.Uninstall(device, package_name)
+    else:
+      device.Uninstall(package_name)
+  device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
+
+
+def _IsWebViewProvider(apk_helper_instance):
+  meta_data = apk_helper_instance.GetAllMetadata()
+  meta_data_keys = [pair[0] for pair in meta_data]
+  return 'com.android.webview.WebViewLibrary' in meta_data_keys
+
+
+def _SetWebViewProvider(devices, package_name):
+
+  def switch_provider(device):
+    if device.build_version_sdk < version_codes.NOUGAT:
+      logging.error('No need to switch provider on pre-Nougat devices (%s)',
+                    device.serial)
+    else:
+      device.SetWebViewImplementation(package_name)
+
+  device_utils.DeviceUtils.parallel(devices).pMap(switch_provider)
+
+
+def _NormalizeProcessName(debug_process_name, package_name):
+  if not debug_process_name:
+    debug_process_name = package_name
+  elif debug_process_name.startswith(':'):
+    debug_process_name = package_name + debug_process_name
+  elif '.' not in debug_process_name:
+    debug_process_name = package_name + ':' + debug_process_name
+  return debug_process_name
+
+
+def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
+               url=None, apk=None, wait_for_java_debugger=False,
+               debug_process_name=None, nokill=None):
+  if argv and command_line_flags_file is None:
+    raise Exception('This apk does not support any flags.')
+  if url:
+    # TODO(agrieve): Launch could be changed to require only package name by
+    #     parsing "dumpsys package" rather than relying on the apk.
+    if not apk:
+      raise Exception('Launching with URL is not supported when using '
+                      '--package-name. Use --apk-path instead.')
+    view_activity = apk.GetViewActivityName()
+    if not view_activity:
+      raise Exception('APK does not support launching with URLs.')
+
+  debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+
+  def launch(device):
+    # --persistent is required to have Settings.Global.DEBUG_APP be set, which
+    # we currently use to allow reading of flags. https://crbug.com/784947
+    if not nokill:
+      cmd = ['am', 'set-debug-app', '--persistent', debug_process_name]
+      if wait_for_java_debugger:
+        cmd[-1:-1] = ['-w']
+      # Ignore error since it will fail if apk is not debuggable.
+      device.RunShellCommand(cmd, check_return=False)
+
+      # The flags are first updated with input args.
+      if command_line_flags_file:
+        changer = flag_changer.FlagChanger(device, command_line_flags_file)
+        flags = []
+        if argv:
+          adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                       command_line_flags_file)
+          flags = shlex.split(argv)
+        try:
+          changer.ReplaceFlags(flags)
+        except device_errors.AdbShellCommandFailedError:
+          logging.exception('Failed to set flags')
+
+    if url is None:
+      # Simulate app icon click if no url is present.
+      cmd = [
+          'am', 'start', '-p', package_name, '-c',
+          'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
+      ]
+      device.RunShellCommand(cmd, check_return=True)
+    else:
+      launch_intent = intent.Intent(action='android.intent.action.VIEW',
+                                    activity=view_activity, data=url,
+                                    package=package_name)
+      device.StartActivity(launch_intent)
+  device_utils.DeviceUtils.parallel(devices).pMap(launch)
+  if wait_for_java_debugger:
+    print('Waiting for debugger to attach to process: ' +
+          _Colorize(debug_process_name, colorama.Fore.YELLOW))
+
+
+def _ChangeFlags(devices, argv, command_line_flags_file):
+  if argv is None:
+    _DisplayArgs(devices, command_line_flags_file)
+  else:
+    flags = shlex.split(argv)
+    def update(device):
+      adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                   command_line_flags_file)
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      changer.ReplaceFlags(flags)
+    device_utils.DeviceUtils.parallel(devices).pMap(update)
+
+
+def _TargetCpuToTargetArch(target_cpu):
+  if target_cpu == 'x64':
+    return 'x86_64'
+  if target_cpu == 'mipsel':
+    return 'mips'
+  return target_cpu
+
+
+def _RunGdb(device, package_name, debug_process_name, pid, output_directory,
+            target_cpu, port, ide, verbose):
+  if not pid:
+    debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+    pid = device.GetApplicationPids(debug_process_name, at_most_one=True)
+  if not pid:
+    # Attaching gdb makes the app run so slow that it takes *minutes* to start
+    # up (as of 2018). Better to just fail than to start & attach.
+    raise Exception('App not running.')
+
+  gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
+  cmd = [
+      gdb_script_path,
+      '--package-name=%s' % package_name,
+      '--output-directory=%s' % output_directory,
+      '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
+      '--device=%s' % device.serial,
+      '--pid=%s' % pid,
+      '--port=%d' % port,
+  ]
+  if ide:
+    cmd.append('--ide')
+  # Enable verbose output of adb_gdb if it's set for this script.
+  if verbose:
+    cmd.append('--verbose')
+  if target_cpu:
+    cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
+  logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
+  print(_Colorize('All subsequent output is from adb_gdb script.',
+                  colorama.Fore.YELLOW))
+  os.execv(gdb_script_path, cmd)
+
+
+def _PrintPerDeviceOutput(devices, results, single_line=False):
+  for d, result in zip(devices, results):
+    if not single_line and d is not devices[0]:
+      sys.stdout.write('\n')
+    sys.stdout.write(
+          _Colorize('{} ({}):'.format(d, d.build_description),
+                    colorama.Fore.YELLOW))
+    sys.stdout.write(' ' if single_line else '\n')
+    yield result
+
+
+def _RunMemUsage(devices, package_name, query_app=False):
+  cmd_args = ['dumpsys', 'meminfo']
+  if not query_app:
+    cmd_args.append('--local')
+
+  def mem_usage_helper(d):
+    ret = []
+    for process in sorted(_GetPackageProcesses(d, package_name)):
+      meminfo = d.RunShellCommand(cmd_args + [str(process.pid)])
+      ret.append((process.name, '\n'.join(meminfo)))
+    return ret
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
+  for result in _PrintPerDeviceOutput(devices, all_results):
+    if not result:
+      print('No processes found.')
+    else:
+      for name, usage in sorted(result):
+        print(_Colorize('==== Output of "dumpsys meminfo %s" ====' % name,
+                        colorama.Fore.GREEN))
+        print(usage)
+
+
+def _DuHelper(device, path_spec, run_as=None):
+  """Runs "du -s -k |path_spec|" on |device| and returns parsed result.
+
+  Args:
+    device: A DeviceUtils instance.
+    path_spec: The list of paths to run du on. May contain shell expansions
+        (will not be escaped).
+    run_as: Package name to run as, or None to run as shell user. If not None
+        and app is not android:debuggable (run-as fails), then command will be
+        run as root.
+
+  Returns:
+    A dict of path->size in KiB containing all paths in |path_spec| that exist
+    on device. Paths that do not exist are silently ignored.
+  """
+  # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
+  # 144     /data/data/org.chromium.chrome/cache
+  # 8       /data/data/org.chromium.chrome/files
+  # <snip>
+  # du: .*: No such file or directory
+
+  # The -d flag works differently across android version, so use -s instead.
+  # Without the explicit 2>&1, stderr and stdout get combined at random :(.
+  cmd_str = 'du -s -k ' + path_spec + ' 2>&1'
+  lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
+                                 check_return=False)
+  output = '\n'.join(lines)
+  # run-as: Package 'com.android.chrome' is not debuggable
+  if output.startswith('run-as:'):
+    # check_return=False needed for when some paths in path_spec do not exist.
+    lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
+                                   check_return=False)
+  ret = {}
+  try:
+    for line in lines:
+      # du: .*: No such file or directory
+      if line.startswith('du:'):
+        continue
+      size, subpath = line.split(None, 1)
+      ret[subpath] = int(size)
+    return ret
+  except ValueError:
+    logging.error('du command was: %s', cmd_str)
+    logging.error('Failed to parse du output:\n%s', output)
+    raise
+
+
+def _RunDiskUsage(devices, package_name):
+  # Measuring dex size is a bit complicated:
+  # https://source.android.com/devices/tech/dalvik/jit-compiler
+  #
+  # For KitKat and below:
+  #   dumpsys package contains:
+  #     dataDir=/data/data/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-1.apk
+  #     resourcePath=/data/app/org.chromium.chrome-1.apk
+  #     nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
+  #   To measure odex:
+  #     ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
+  #
+  # For Android L and M (and maybe for N+ system apps):
+  #   dumpsys package contains:
+  #     codePath=/data/app/org.chromium.chrome-1
+  #     resourcePath=/data/app/org.chromium.chrome-1
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
+  #   To measure odex:
+  #     # Option 1:
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
+  #     ls -l /data/dalvik-cache/profiles/org.chromium.chrome
+  #         (these profiles all appear to be 0 bytes)
+  #     # Option 2:
+  #     ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
+  #
+  # For Android N+:
+  #   dumpsys package contains:
+  #     dataDir=/data/user/0/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
+  #     Instruction Set: arm
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
+  #       ilter=quicken]
+  #     Instruction Set: arm64
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
+  #       uicken]
+  #   To measure odex:
+  #     ls -l /data/app/.../oat/arm/base.odex
+  #     ls -l /data/app/.../oat/arm/base.vdex (optional)
+  #   To measure the correct odex size:
+  #     cmd package compile -m speed org.chromium.chrome  # For webview
+  #     cmd package compile -m speed-profile org.chromium.chrome  # For others
+  def disk_usage_helper(d):
+    package_output = '\n'.join(d.RunShellCommand(
+        ['dumpsys', 'package', package_name], check_return=True))
+    # Does not return error when apk is not installed.
+    if not package_output or 'Unable to find package:' in package_output:
+      return None
+
+    # Ignore system apks that have updates installed.
+    package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+                            package_output, flags=re.S | re.M)
+
+    try:
+      data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+      code_path = re.search(r'codePath=(.*)', package_output).group(1)
+      lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+                           package_output).group(1)
+    except AttributeError:
+      raise Exception('Error parsing dumpsys output: ' + package_output)
+
+    if code_path.startswith('/system'):
+      logging.warning('Measurement of system image apks can be innacurate')
+
+    compilation_filters = set()
+    # Match "compilation_filter=value", where a line break can occur at any spot
+    # (refer to examples above).
+    awful_wrapping = r'\s*'.join('compilation_filter=')
+    for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+      compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+    # Starting Android Q, output looks like:
+    #  arm: [status=speed-profile] [reason=install]
+    for m in re.finditer(r'\[status=(.+?)\]', package_output):
+      compilation_filters.add(m.group(1))
+    compilation_filter = ','.join(sorted(compilation_filters))
+
+    data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+    # Measure code_cache separately since it can be large.
+    code_cache_sizes = {}
+    code_cache_dir = next(
+        (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+    if code_cache_dir:
+      data_dir_sizes.pop(code_cache_dir)
+      code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+                                   run_as=package_name)
+
+    apk_path_spec = code_path
+    if not apk_path_spec.endswith('.apk'):
+      apk_path_spec += '/*.apk'
+    apk_sizes = _DuHelper(d, apk_path_spec)
+    if lib_path.endswith('/lib'):
+      # Shows architecture subdirectory.
+      lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+    else:
+      lib_sizes = _DuHelper(d, lib_path)
+
+    # Look at all possible locations for odex files.
+    odex_paths = []
+    for apk_path in apk_sizes:
+      mangled_apk_path = apk_path[1:].replace('/', '@')
+      apk_basename = posixpath.basename(apk_path)[:-4]
+      for ext in ('dex', 'odex', 'vdex', 'art'):
+        # Easier to check all architectures than to determine active ones.
+        for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+          odex_paths.append(
+              '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+          # No app could possibly have more than 6 dex files.
+          for suffix in ('', '2', '3', '4', '5'):
+            odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+                arch, mangled_apk_path, suffix, ext))
+            # This path does not have |arch|, so don't repeat it for every arch.
+            if arch == 'arm':
+              odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
+                  mangled_apk_path, suffix))
+
+    odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
+
+    return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+            compilation_filter)
+
+  def print_sizes(desc, sizes):
+    print('%s: %d KiB' % (desc, sum(sizes.itervalues())))
+    for path, size in sorted(sizes.iteritems()):
+      print('    %s: %s KiB' % (path, size))
+
  # Measure every attached device in parallel, then print one report each.
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
  for result in _PrintPerDeviceOutput(devices, all_results):
    if not result:
      print('APK is not installed.')
      continue

    (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
     compilation_filter) = result
    # Grand total over all size dicts; result[:-1] excludes the trailing
    # compilation_filter string.
    total = sum(sum(sizes.itervalues()) for sizes in result[:-1])

    print_sizes('Apk', apk_sizes)
    print_sizes('App Data (non-code cache)', data_dir_sizes)
    print_sizes('App Data (code cache)', code_cache_sizes)
    print_sizes('Native Libs', lib_sizes)
    # When the app was not compiled with a "speed" filter, the measured odex
    # size is smaller than what a fully-compiled install would show.
    show_warning = compilation_filter and 'speed' not in compilation_filter
    compilation_filter = compilation_filter or 'n/a'
    print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
    if show_warning:
      logging.warning('For a more realistic odex size, run:')
      logging.warning('    %s compile-dex [speed|speed-profile]', sys.argv[0])
    print('Total: %s KiB (%.1f MiB)' % (total, total / 1024.0))
+
+
+class _LogcatProcessor(object):
+  ParsedLine = collections.namedtuple(
+      'ParsedLine',
+      ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message'])
+
+  class NativeStackSymbolizer(object):
+    """Buffers lines from native stacks and symbolizes them when done."""
+    # E.g.: #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so
+    # E.g.: #01 pc 00180c8d  /data/data/.../lib/libbase.cr.so
+    _STACK_PATTERN = re.compile(r'\s*#\d+\s+(?:pc )?(0x)?[0-9a-f]{8,16}\s')
+
+    def __init__(self, stack_script_context, print_func):
+      # To symbolize native stacks, we need to pass all lines at once.
+      self._stack_script_context = stack_script_context
+      self._print_func = print_func
+      self._crash_lines_buffer = None
+
+    def _FlushLines(self):
+      """Prints queued lines after sending them through stack.py."""
+      crash_lines = self._crash_lines_buffer
+      self._crash_lines_buffer = None
+      with tempfile.NamedTemporaryFile() as f:
+        f.writelines(x[0].message + '\n' for x in crash_lines)
+        f.flush()
+        proc = self._stack_script_context.Popen(
+            input_file=f.name, stdout=subprocess.PIPE)
+        lines = proc.communicate()[0].splitlines()
+
+      for i, line in enumerate(lines):
+        parsed_line, dim = crash_lines[min(i, len(crash_lines) - 1)]
+        d = parsed_line._asdict()
+        d['message'] = line
+        parsed_line = _LogcatProcessor.ParsedLine(**d)
+        self._print_func(parsed_line, dim)
+
+    def AddLine(self, parsed_line, dim):
+      # Assume all lines from DEBUG are stacks.
+      # Also look for "stack-looking" lines to catch manual stack prints.
+      # It's important to not buffer non-stack lines because stack.py does not
+      # pass them through.
+      is_crash_line = parsed_line.tag == 'DEBUG' or (self._STACK_PATTERN.match(
+          parsed_line.message))
+
+      if is_crash_line:
+        if self._crash_lines_buffer is None:
+          self._crash_lines_buffer = []
+        self._crash_lines_buffer.append((parsed_line, dim))
+        return
+
+      if self._crash_lines_buffer is not None:
+        self._FlushLines()
+
+      self._print_func(parsed_line, dim)
+
+
+  # Logcat tags for messages that are generally relevant but are not from PIDs
+  # associated with the apk.
+  _ALLOWLISTED_TAGS = {
+      'ActivityManager',  # Shows activity lifecycle messages.
+      'ActivityTaskManager',  # More activity lifecycle messages.
+      'AndroidRuntime',  # Java crash dumps
+      'DEBUG',  # Native crash dump.
+  }
+
+  # Matches messages only on pre-L (Dalvik) that are spammy and unimportant.
+  _DALVIK_IGNORE_PATTERN = re.compile('|'.join([
+      r'^Added shared lib',
+      r'^Could not find ',
+      r'^DexOpt:',
+      r'^GC_',
+      r'^Late-enabling CheckJNI',
+      r'^Link of class',
+      r'^No JNI_OnLoad found in',
+      r'^Trying to load lib',
+      r'^Unable to resolve superclass',
+      r'^VFY:',
+      r'^WAIT_',
+  ]))
+
+  def __init__(self,
+               device,
+               package_name,
+               stack_script_context,
+               deobfuscate=None,
+               verbose=False):
+    self._device = device
+    self._package_name = package_name
+    self._verbose = verbose
+    self._deobfuscator = deobfuscate
+    self._native_stack_symbolizer = _LogcatProcessor.NativeStackSymbolizer(
+        stack_script_context, self._PrintParsedLine)
+    # Process ID for the app's main process (with no :name suffix).
+    self._primary_pid = None
+    # Set of all Process IDs that belong to the app.
+    self._my_pids = set()
+    # Set of all Process IDs that we've parsed at some point.
+    self._seen_pids = set()
+    # Start proc 22953:com.google.chromeremotedesktop/
+    self._pid_pattern = re.compile(r'Start proc (\d+):{}/'.format(package_name))
+    # START u0 {act=android.intent.action.MAIN \
+    # cat=[android.intent.category.LAUNCHER] \
+    # flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000
+    self._start_pattern = re.compile(r'START .*pkg=' + package_name)
+
+    self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random())
+    # Holds lines buffered on start-up, before we find our nonce message.
+    self._initial_buffered_lines = []
+    self._UpdateMyPids()
+    # Give preference to PID reported by "ps" over those found from
+    # _start_pattern. There can be multiple "Start proc" messages from prior
+    # runs of the app.
+    self._found_initial_pid = self._primary_pid != None
+    # Retrieve any additional patterns that are relevant for the User.
+    self._user_defined_highlight = None
+    user_regex = os.environ.get('CHROMIUM_LOGCAT_HIGHLIGHT')
+    if user_regex:
+      self._user_defined_highlight = re.compile(user_regex)
+      if not self._user_defined_highlight:
+        print(_Colorize(
+            'Rejecting invalid regular expression: {}'.format(user_regex),
+            colorama.Fore.RED + colorama.Style.BRIGHT))
+
+  def _UpdateMyPids(self):
+    # We intentionally do not clear self._my_pids to make sure that the
+    # ProcessLine method below also includes lines from processes which may
+    # have already exited.
+    self._primary_pid = None
+    for process in _GetPackageProcesses(self._device, self._package_name):
+      # We take only the first "main" process found in order to account for
+      # possibly forked() processes.
+      if ':' not in process.name and self._primary_pid is None:
+        self._primary_pid = process.pid
+      self._my_pids.add(process.pid)
+
+  def _GetPidStyle(self, pid, dim=False):
+    if pid == self._primary_pid:
+      return colorama.Fore.WHITE
+    elif pid in self._my_pids:
+      # TODO(wnwen): Use one separate persistent color per process, pop LRU
+      return colorama.Fore.YELLOW
+    elif dim:
+      return colorama.Style.DIM
+    return ''
+
+  def _GetPriorityStyle(self, priority, dim=False):
+    # pylint:disable=no-self-use
+    if dim:
+      return ''
+    style = colorama.Fore.BLACK
+    if priority == 'E' or priority == 'F':
+      style += colorama.Back.RED
+    elif priority == 'W':
+      style += colorama.Back.YELLOW
+    elif priority == 'I':
+      style += colorama.Back.GREEN
+    elif priority == 'D':
+      style += colorama.Back.BLUE
+    return style
+
+  def _ParseLine(self, line):
+    tokens = line.split(None, 6)
+
+    def consume_token_or_default(default):
+      return tokens.pop(0) if len(tokens) > 0 else default
+
+    def consume_integer_token_or_default(default):
+      if len(tokens) == 0:
+        return default
+
+      try:
+        return int(tokens.pop(0))
+      except ValueError:
+        return default
+
+    date = consume_token_or_default('')
+    invokation_time = consume_token_or_default('')
+    pid = consume_integer_token_or_default(-1)
+    tid = consume_integer_token_or_default(-1)
+    priority = consume_token_or_default('')
+    tag = consume_token_or_default('')
+    original_message = consume_token_or_default('')
+
+    # Example:
+    #   09-19 06:35:51.113  9060  9154 W GCoreFlp: No location...
+    #   09-19 06:01:26.174  9060 10617 I Auth    : [ReflectiveChannelBinder]...
+    # Parsing "GCoreFlp:" vs "Auth    :", we only want tag to contain the word,
+    # and we don't want to keep the colon for the message.
+    if tag and tag[-1] == ':':
+      tag = tag[:-1]
+    elif len(original_message) > 2:
+      original_message = original_message[2:]
+    return self.ParsedLine(
+        date, invokation_time, pid, tid, priority, tag, original_message)
+
+  def _PrintParsedLine(self, parsed_line, dim=False):
+    tid_style = colorama.Style.NORMAL
+    user_match = self._user_defined_highlight and (
+        re.search(self._user_defined_highlight, parsed_line.tag)
+        or re.search(self._user_defined_highlight, parsed_line.message))
+
+    # Make the main thread bright.
+    if not dim and parsed_line.pid == parsed_line.tid:
+      tid_style = colorama.Style.BRIGHT
+    pid_style = self._GetPidStyle(parsed_line.pid, dim)
+    msg_style = pid_style if not user_match else (colorama.Fore.GREEN +
+                                                  colorama.Style.BRIGHT)
+    # We have to pad before adding color as that changes the width of the tag.
+    pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style)
+    tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style)
+    tag = _Colorize('{:8}'.format(parsed_line.tag),
+                    pid_style + ('' if dim else colorama.Style.BRIGHT))
+    priority = _Colorize(parsed_line.priority,
+                         self._GetPriorityStyle(parsed_line.priority))
+    messages = [parsed_line.message]
+    if self._deobfuscator:
+      messages = self._deobfuscator.TransformLines(messages)
+    for message in messages:
+      message = _Colorize(message, msg_style)
+      sys.stdout.write('{} {} {} {} {} {}: {}\n'.format(
+          parsed_line.date, parsed_line.invokation_time, pid_str, tid_str,
+          priority, tag, message))
+
+  def _TriggerNonceFound(self):
+    # Once the nonce is hit, we have confidence that we know which lines
+    # belong to the current run of the app. Process all of the buffered lines.
+    if self._primary_pid:
+      for args in self._initial_buffered_lines:
+        self._native_stack_symbolizer.AddLine(*args)
+    self._initial_buffered_lines = None
+    self.nonce = None
+
+  def ProcessLine(self, line):
+    if not line or line.startswith('------'):
+      return
+
+    if self.nonce and self.nonce in line:
+      self._TriggerNonceFound()
+
+    nonce_found = self.nonce is None
+
+    log = self._ParseLine(line)
+    if log.pid not in self._seen_pids:
+      self._seen_pids.add(log.pid)
+      if nonce_found:
+        # Update list of owned PIDs each time a new PID is encountered.
+        self._UpdateMyPids()
+
+    # Search for "Start proc $pid:$package_name/" message.
+    if not nonce_found:
+      # Capture logs before the nonce. Start with the most recent "am start".
+      if self._start_pattern.match(log.message):
+        self._initial_buffered_lines = []
+
+      # If we didn't find the PID via "ps", then extract it from log messages.
+      # This will happen if the app crashes too quickly.
+      if not self._found_initial_pid:
+        m = self._pid_pattern.match(log.message)
+        if m:
+          # Find the most recent "Start proc" line before the nonce.
+          # Track only the primary pid in this mode.
+          # The main use-case is to find app logs when no current PIDs exist.
+          # E.g.: When the app crashes on launch.
+          self._primary_pid = m.group(1)
+          self._my_pids.clear()
+          self._my_pids.add(m.group(1))
+
+    owned_pid = log.pid in self._my_pids
+    if owned_pid and not self._verbose and log.tag == 'dalvikvm':
+      if self._DALVIK_IGNORE_PATTERN.match(log.message):
+        return
+
+    if owned_pid or self._verbose or (log.priority == 'F' or  # Java crash dump
+                                      log.tag in self._ALLOWLISTED_TAGS):
+      if nonce_found:
+        self._native_stack_symbolizer.AddLine(log, not owned_pid)
+      else:
+        self._initial_buffered_lines.append((log, not owned_pid))
+
+
def _RunLogcat(device, package_name, stack_script_context, deobfuscate,
               verbose):
  """Streams the device's logcat through a _LogcatProcessor until killed.

  Args:
    device: Device to stream logcat from.
    package_name: Package whose processes should be followed.
    stack_script_context: _StackScriptContext used to symbolize native stacks.
    deobfuscate: Optional deobfuscator for obfuscated Java frames.
    verbose: Whether to also show lines from non-app processes.
  """
  logcat_processor = _LogcatProcessor(
      device, package_name, stack_script_context, deobfuscate, verbose)
  # Emit the nonce into the log so the processor can tell where the current
  # run of the app begins.
  device.RunShellCommand(['log', logcat_processor.nonce])
  for line in device.adb.Logcat(logcat_format='threadtime'):
    try:
      logcat_processor.ProcessLine(line)
    except Exception:  # Narrowed from a bare except: let Ctrl-C through.
      sys.stderr.write('Failed to process line: ' + line + '\n')
      # Skip stack trace for the common case of the adb server being
      # restarted.
      if 'unexpected EOF' in line:
        sys.exit(1)
      raise
+
+
+def _GetPackageProcesses(device, package_name):
+  return [
+      p for p in device.ListProcesses(package_name)
+      if p.name == package_name or p.name.startswith(package_name + ':')]
+
+
def _RunPs(devices, package_name):
  """Prints, per device, the PIDs of processes owned by |package_name|."""
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_processes = parallel_devices.pMap(
      lambda d: _GetPackageProcesses(d, package_name)).pGet(None)
  for processes in _PrintPerDeviceOutput(devices, all_processes):
    if not processes:
      print('No processes found.')
      continue
    # Group PID strings by process name, then print sorted by name.
    pids_by_name = {}
    for process in processes:
      pids_by_name.setdefault(process.name, []).append(str(process.pid))
    for name in sorted(pids_by_name):
      print(name, ','.join(pids_by_name[name]))
+
+
def _RunShell(devices, package_name, cmd):
  """Runs |cmd| as the app's user, or opens an interactive run-as shell.

  With a non-empty |cmd|, executes it on every device and prints the output.
  Otherwise replaces this process with an interactive "adb shell" on the
  first device.
  """
  if not cmd:
    adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
    shell_cmd = [adb_path, '-s', devices[0].serial, 'shell']
    # Pre-N devices do not support -t flag.
    if devices[0].build_version_sdk >= version_codes.NOUGAT:
      shell_cmd += ['-t', 'run-as', package_name]
    else:
      print('Upon entering the shell, run:')
      print('run-as', package_name)
      print()
    # Never returns: the current process is replaced by adb.
    os.execv(adb_path, shell_cmd)

  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  outputs = parallel_devices.RunShellCommand(
      cmd, run_as=package_name).pGet(None)
  for output in _PrintPerDeviceOutput(devices, outputs):
    for line in output:
      print(line)
+
+
def _RunCompileDex(devices, package_name, compilation_filter):
  """Forces dex (re)compilation of |package_name| with |compilation_filter|."""
  compile_cmd = [
      'cmd', 'package', 'compile', '-f', '-m', compilation_filter,
      package_name
  ]
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  outputs = parallel_devices.RunShellCommand(
      compile_cmd, timeout=120).pGet(None)
  for output in _PrintPerDeviceOutput(devices, outputs):
    for line in output:
      print(line)
+
+
def _RunProfile(device, package_name, host_build_directory, pprof_out_path,
                process_specifier, thread_specifier, extra_args):
  """Profiles the running app with simpleperf and writes a pprof profile.

  Installs simpleperf on |device|, records until the user presses Enter,
  then converts the capture to pprof format at |pprof_out_path| and prints
  usage hints.
  """
  simpleperf.PrepareDevice(device)
  device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name)
  with tempfile.NamedTemporaryFile() as fh:
    host_simpleperf_out_path = fh.name

    # Recording stops when the RunSimpleperf context manager exits.
    with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name,
                                  process_specifier, thread_specifier,
                                  extra_args, host_simpleperf_out_path):
      sys.stdout.write('Profiler is running; press Enter to stop...')
      sys.stdin.read(1)
      sys.stdout.write('Post-processing data...')
      sys.stdout.flush()

    simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path,
                                        host_build_directory, pprof_out_path)
    print(textwrap.dedent("""
        Profile data written to %(s)s.

        To view profile as a call graph in browser:
          pprof -web %(s)s

        To print the hottest methods:
          pprof -top %(s)s

        pprof has many useful customization options; `pprof --help` for details.
        """ % {'s': pprof_out_path}))
+
+
+class _StackScriptContext(object):
+  """Maintains temporary files needed by stack.py."""
+
+  def __init__(self,
+               output_directory,
+               apk_path,
+               bundle_generation_info,
+               quiet=False):
+    self._output_directory = output_directory
+    self._apk_path = apk_path
+    self._bundle_generation_info = bundle_generation_info
+    self._staging_dir = None
+    self._quiet = quiet
+
+  def _CreateStaging(self):
+    # In many cases, stack decoding requires APKs to map trace lines to native
+    # libraries. Create a temporary directory, and either unpack a bundle's
+    # APKS into it, or simply symlink the standalone APK into it. This
+    # provides an unambiguous set of APK files for the stack decoding process
+    # to inspect.
+    logging.debug('Creating stack staging directory')
+    self._staging_dir = tempfile.mkdtemp()
+    bundle_generation_info = self._bundle_generation_info
+
+    if bundle_generation_info:
+      # TODO(wnwen): Use apk_helper instead.
+      _GenerateBundleApks(bundle_generation_info)
+      logging.debug('Extracting .apks file')
+      with zipfile.ZipFile(bundle_generation_info.bundle_apks_path, 'r') as z:
+        files_to_extract = [
+            f for f in z.namelist() if f.endswith('-master.apk')
+        ]
+        z.extractall(self._staging_dir, files_to_extract)
+    elif self._apk_path:
+      # Otherwise an incremental APK and an empty apks directory is correct.
+      output = os.path.join(self._staging_dir, os.path.basename(self._apk_path))
+      os.symlink(self._apk_path, output)
+
+  def Close(self):
+    if self._staging_dir:
+      logging.debug('Clearing stack staging directory')
+      shutil.rmtree(self._staging_dir)
+      self._staging_dir = None
+
+  def Popen(self, input_file=None, **kwargs):
+    if self._staging_dir is None:
+      self._CreateStaging()
+    stack_script = os.path.join(
+        constants.host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+        'stack.py')
+    cmd = [
+        stack_script, '--output-directory', self._output_directory,
+        '--apks-directory', self._staging_dir
+    ]
+    if self._quiet:
+      cmd.append('--quiet')
+    if input_file:
+      cmd.append(input_file)
+    logging.info('Running stack.py')
+    return subprocess.Popen(cmd, **kwargs)
+
+
def _GenerateAvailableDevicesMessage(devices):
  """Returns a human-readable listing of |devices| and their build info."""
  devices_obj = device_utils.DeviceUtils.parallel(devices)
  descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
  lines = ['Available devices:']
  for device, description in zip(devices, descriptions):
    lines.append('  %s (%s)' % (device, description))
  # Trailing newline matches the historical message format.
  return '\n'.join(lines) + '\n'
+
+
# TODO(agrieve):add "--all" in the MultipleDevicesError message and use it here.
def _GenerateMissingAllFlagMessage(devices):
  """Returns the error text shown when several devices need --all/--device."""
  header = ('More than one device available. Use --all to select all devices, '
            'or use --device to select a device by serial.\n\n')
  return header + _GenerateAvailableDevicesMessage(devices)
+
+
def _DisplayArgs(devices, command_line_flags_file):
  """Prints the command-line flags currently set on each device."""
  def read_flags(d):
    # Flags live under /data/local/tmp/<command_line_flags_file>.
    changer = flag_changer.FlagChanger(d, command_line_flags_file)
    return changer.GetCurrentFlags()

  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  per_device_flags = parallel_devices.pMap(read_flags).pGet(None)
  print('Existing flags per-device (via /data/local/tmp/{}):'.format(
      command_line_flags_file))
  for flags in _PrintPerDeviceOutput(devices, per_device_flags,
                                     single_line=True):
    quoted = ' '.join(pipes.quote(f) for f in flags)
    print(quoted or 'No flags set.')
+
+
+def _DeviceCachePath(device, output_directory):
+  file_name = 'device_cache_%s.json' % device.serial
+  return os.path.join(output_directory, file_name)
+
+
def _LoadDeviceCaches(devices, output_directory):
  """Primes each device's cache from its on-disk JSON file, if present."""
  if not output_directory:
    return
  for device in devices:
    cache_path = _DeviceCachePath(device, output_directory)
    if not os.path.exists(cache_path):
      logging.debug('No cache present for device: %s', device)
      continue
    logging.debug('Using device cache: %s', cache_path)
    with open(cache_path) as f:
      device.LoadCacheData(f.read())
    # Delete the cached file so that any exceptions cause it to be cleared.
    os.unlink(cache_path)
+
+
def _SaveDeviceCaches(devices, output_directory):
  """Writes each device's cache data to its per-device JSON file."""
  if not output_directory:
    return
  for device in devices:
    cache_path = _DeviceCachePath(device, output_directory)
    cache_data = device.DumpCacheData()
    with open(cache_path, 'w') as f:
      f.write(cache_data)
    logging.info('Wrote device cache: %s', cache_path)
+
+
class _Command(object):
  """Base class for apk_operations sub-commands.

  Subclasses declare what they need via the class attributes below and
  implement Run(). Lifecycle: RegisterArgs() builds the argparse sub-parser,
  ProcessArgs() resolves APK helpers and target devices, then Run() executes.
  """
  # argparse metadata for the sub-command.
  name = None
  description = None
  long_description = None
  # Requirement/capability flags consulted by RegisterArgs()/ProcessArgs().
  needs_package_name = False
  needs_output_directory = False
  needs_apk_helper = False
  supports_incremental = False
  accepts_command_line_flags = False
  accepts_args = False
  need_device_args = True
  all_devices_by_default = False
  # Commands that exec() never return, so device caches are saved beforehand.
  calls_exec = False
  supports_multiple_devices = True

  def __init__(self, from_wrapper_script, is_bundle):
    self._parser = None
    self._from_wrapper_script = from_wrapper_script
    self.args = None
    self.apk_helper = None
    self.additional_apk_helpers = None
    self.install_dict = None
    self.devices = None
    self.is_bundle = is_bundle
    self.bundle_generation_info = None
    # Only support incremental install from APK wrapper scripts.
    if is_bundle or not from_wrapper_script:
      self.supports_incremental = False

  def RegisterBundleGenerationInfo(self, bundle_generation_info):
    """Stores the info needed to (re)generate a bundle's .apks file."""
    self.bundle_generation_info = bundle_generation_info

  def _RegisterExtraArgs(self, subp):
    """Hook for subclasses to register command-specific arguments."""
    pass

  def RegisterArgs(self, parser):
    """Creates this command's sub-parser and registers its arguments."""
    subp = parser.add_parser(
        self.name, help=self.description,
        description=self.long_description or self.description,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    self._parser = subp
    subp.set_defaults(command=self)
    if self.need_device_args:
      subp.add_argument('--all',
                        action='store_true',
                        default=self.all_devices_by_default,
                        help='Operate on all connected devices.',)
      subp.add_argument('-d',
                        '--device',
                        action='append',
                        default=[],
                        dest='devices',
                        help='Target device for script to work on. Enter '
                            'multiple times for multiple devices.')
    subp.add_argument('-v',
                      '--verbose',
                      action='count',
                      default=0,
                      dest='verbose_count',
                      help='Verbose level (multiple times for more)')
    group = subp.add_argument_group('%s arguments' % self.name)

    if self.needs_package_name:
      # Three cases to consider here, since later code assumes
      #  self.args.package_name always exists, even if None:
      #
      # - Called from a bundle wrapper script, the package_name is already
      #   set through parser.set_defaults(), so don't call add_argument()
      #   to avoid overriding its value.
      #
      # - Called from an apk wrapper script. The --package-name argument
      #   should not appear, but self.args.package_name will be gleaned from
      #   the --apk-path file later.
      #
      # - Called directly, then --package-name is required on the command-line.
      #
      if not self.is_bundle:
        group.add_argument(
            '--package-name',
            help=argparse.SUPPRESS if self._from_wrapper_script else (
                "App's package name."))

    if self.needs_apk_helper or self.needs_package_name:
      # Adding this argument to the subparser would override the set_defaults()
      # value set on the parent parser (even if None).
      if not self._from_wrapper_script and not self.is_bundle:
        group.add_argument(
            '--apk-path', required=self.needs_apk_helper, help='Path to .apk')

    if self.supports_incremental:
      group.add_argument('--incremental',
                          action='store_true',
                          default=False,
                          help='Always install an incremental apk.')
      group.add_argument('--non-incremental',
                          action='store_true',
                          default=False,
                          help='Always install a non-incremental apk.')

    # accepts_command_line_flags and accepts_args are mutually exclusive.
    # argparse will throw if they are both set.
    if self.accepts_command_line_flags:
      group.add_argument(
          '--args', help='Command-line flags. Use = to assign args.')

    if self.accepts_args:
      group.add_argument(
          '--args', help='Extra arguments. Use = to assign args')

    if not self._from_wrapper_script and self.accepts_command_line_flags:
      # Provided by wrapper scripts.
      group.add_argument(
          '--command-line-flags-file',
          help='Name of the command-line flags file')

    self._RegisterExtraArgs(group)

  def _CreateApkHelpers(self, args, incremental_apk_path, install_dict):
    """Returns true iff self.apk_helper was created and assigned."""
    if self.apk_helper is None:
      if args.apk_path:
        self.apk_helper = apk_helper.ToHelper(args.apk_path)
      elif incremental_apk_path:
        self.install_dict = install_dict
        self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
      elif self.is_bundle:
        _GenerateBundleApks(self.bundle_generation_info)
        self.apk_helper = apk_helper.ToHelper(
            self.bundle_generation_info.bundle_apks_path)
    if args.additional_apk_paths and self.additional_apk_helpers is None:
      self.additional_apk_helpers = [
          apk_helper.ToHelper(apk_path)
          for apk_path in args.additional_apk_paths
      ]
    return self.apk_helper is not None

  def ProcessArgs(self, args):
    """Validates |args|, resolves APK helpers, and selects target devices."""
    self.args = args
    # Ensure these keys always exist. They are set by wrapper scripts, but not
    # always added when not using wrapper scripts.
    args.__dict__.setdefault('apk_path', None)
    args.__dict__.setdefault('incremental_json', None)

    incremental_apk_path = None
    install_dict = None
    if args.incremental_json and not (self.supports_incremental and
                                      args.non_incremental):
      with open(args.incremental_json) as f:
        install_dict = json.load(f)
        incremental_apk_path = os.path.join(args.output_directory,
                                            install_dict['apk_path'])
        if not os.path.exists(incremental_apk_path):
          incremental_apk_path = None

    if self.supports_incremental:
      if args.incremental and args.non_incremental:
        self._parser.error('Must use only one of --incremental and '
                           '--non-incremental')
      elif args.non_incremental:
        if not args.apk_path:
          self._parser.error('Apk has not been built.')
      elif args.incremental:
        if not incremental_apk_path:
          self._parser.error('Incremental apk has not been built.')
        args.apk_path = None

      if args.apk_path and incremental_apk_path:
        self._parser.error('Both incremental and non-incremental apks exist. '
                           'Select using --incremental or --non-incremental')


    # Gate apk_helper creation with _CreateApkHelpers since for bundles it takes
    # a while to unpack the apks file from the aab file, so avoid this slowdown
    # for simple commands that don't need apk_helper.
    if self.needs_apk_helper:
      if not self._CreateApkHelpers(args, incremental_apk_path, install_dict):
        self._parser.error('App is not built.')

    if self.needs_package_name and not args.package_name:
      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
        args.package_name = self.apk_helper.GetPackageName()
      elif self._from_wrapper_script:
        self._parser.error('App is not built.')
      else:
        self._parser.error('One of --package-name or --apk-path is required.')

    self.devices = []
    if self.need_device_args:
      abis = None
      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
        abis = self.apk_helper.GetAbis()
      self.devices = device_utils.DeviceUtils.HealthyDevices(
          device_arg=args.devices,
          enable_device_files_cache=bool(args.output_directory),
          default_retries=0,
          abis=abis)
      # TODO(agrieve): Device cache should not depend on output directory.
      #     Maybe put into /tmp?
      _LoadDeviceCaches(self.devices, args.output_directory)

      try:
        if len(self.devices) > 1:
          if not self.supports_multiple_devices:
            self._parser.error(device_errors.MultipleDevicesError(self.devices))
          if not args.all and not args.devices:
            self._parser.error(_GenerateMissingAllFlagMessage(self.devices))
        # Save cache now if command will not get a chance to afterwards.
        if self.calls_exec:
          _SaveDeviceCaches(self.devices, args.output_directory)
      except:
        # Bare except is deliberately broad so the cache is still written on
        # any exit (including SystemExit from parser.error); always re-raises.
        _SaveDeviceCaches(self.devices, args.output_directory)
        raise
+
+
class _DevicesCommand(_Command):
  """Lists the devices currently attached to the host."""
  name = 'devices'
  description = 'Describe attached devices.'
  all_devices_by_default = True

  def Run(self):
    # self.devices is populated by the base class during argument processing.
    message = _GenerateAvailableDevicesMessage(self.devices)
    print(message)
+
+
class _PackageInfoCommand(_Command):
  """Prints manifest attributes of the app; needs no attached device."""
  name = 'package-info'
  description = 'Show various attributes of this app.'
  need_device_args = False
  needs_package_name = True
  needs_apk_helper = True

  def Run(self):
    # Format all (even ints) as strings, to handle cases where APIs return None
    helper = self.apk_helper
    print('Package name: "%s"' % self.args.package_name)
    print('versionCode: %s' % helper.GetVersionCode())
    print('versionName: "%s"' % helper.GetVersionName())
    print('minSdkVersion: %s' % helper.GetMinSdkVersion())
    print('targetSdkVersion: %s' % helper.GetTargetSdkVersion())
    print('Supported ABIs: %r' % helper.GetAbis())
+
+
class _InstallCommand(_Command):
  """Installs the APK, or the selected bundle modules, on chosen devices."""
  name = 'install'
  description = 'Installs the APK or bundle to one or more devices.'
  needs_apk_helper = True
  supports_incremental = True
  # Overwritten by RunForBundle() with the bundle's default module list.
  default_modules = []

  def _RegisterExtraArgs(self, group):
    if self.is_bundle:
      # Pass a copy of |default_modules|: with action='append', older
      # argparse versions append directly onto the default list object,
      # which would mutate the shared class-level attribute.
      group.add_argument(
          '-m',
          '--module',
          action='append',
          default=list(self.default_modules),
          help='Module to install. Can be specified multiple times.')
      group.add_argument(
          '-f',
          '--fake',
          action='append',
          default=[],
          help='Fake bundle module install. Can be specified multiple times. '
          'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format(
              BASE_MODULE))
      # Add even if |self.default_modules| is empty, for consistency.
      group.add_argument('--no-module',
                         action='append',
                         choices=self.default_modules,
                         default=[],
                         help='Module to exclude from default install.')

  def Run(self):
    # Any extra APKs are installed first, with no incremental install dict.
    if self.additional_apk_helpers:
      for additional_apk_helper in self.additional_apk_helpers:
        _InstallApk(self.devices, additional_apk_helper, None)
    if self.is_bundle:
      # Excluded (--no-module) and faked (-f) modules must not be installed
      # for real, so strip them from the requested set.
      modules = list(
          set(self.args.module) - set(self.args.no_module) -
          set(self.args.fake))
      _InstallBundle(self.devices, self.apk_helper, self.args.package_name,
                     self.args.command_line_flags_file, modules, self.args.fake)
    else:
      _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
class _UninstallCommand(_Command):
  """Removes the app from the selected devices."""
  name = 'uninstall'
  description = 'Removes the APK or bundle from one or more devices.'
  needs_package_name = True

  def Run(self):
    # install_dict is forwarded so incremental-install state is removed too
    # — presumably; confirm against _UninstallApk.
    _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
class _SetWebViewProviderCommand(_Command):
  """Makes this package the device's active WebView implementation."""
  name = 'set-webview-provider'
  description = ("Sets the device's WebView provider to this APK's "
                 "package name.")
  needs_package_name = True
  needs_apk_helper = True

  def Run(self):
    # Fail fast when the APK lacks the WebViewLibrary meta-data tag that
    # marks a package as a WebView implementation.
    if not _IsWebViewProvider(self.apk_helper):
      raise Exception('This package does not have a WebViewLibrary meta-data '
                      'tag. Are you sure it contains a WebView implementation?')
    _SetWebViewProvider(self.devices, self.args.package_name)
+
+
class _LaunchCommand(_Command):
  """Writes the command-line flags file, then sends a launch intent."""
  name = 'launch'
  description = ('Sends a launch intent for the APK or bundle after first '
                 'writing the command-line flags file.')
  needs_package_name = True
  accepts_command_line_flags = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
                       help='Pause execution until debugger attaches. Applies '
                            'only to the main process. To have renderers wait, '
                            'use --args="--renderer-wait-for-java-debugger"')
    group.add_argument('--debug-process-name',
                       help='Name of the process to debug. '
                            'E.g. "privileged_process0", or "foo.bar:baz"')
    group.add_argument('--nokill', action='store_true',
                       help='Do not set the debug-app, nor set command-line '
                            'flags. Useful to load a URL without having the '
                            'app restart.')
    group.add_argument('url', nargs='?', help='A URL to launch with.')

  def Run(self):
    if self.args.url and self.is_bundle:
      # TODO(digit): Support this, maybe by using 'dumpsys' as described
      # in the _LaunchUrl() comment.
      raise Exception('Launching with URL not supported for bundles yet!')
    _LaunchUrl(self.devices,
               self.args.package_name,
               argv=self.args.args,
               command_line_flags_file=self.args.command_line_flags_file,
               url=self.args.url,
               apk=self.apk_helper,
               wait_for_java_debugger=self.args.wait_for_java_debugger,
               debug_process_name=self.args.debug_process_name,
               nokill=self.args.nokill)
+
+
class _StopCommand(_Command):
  """Force-stops the app on every selected device, in parallel."""
  name = 'stop'
  description = 'Force-stops the app.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    parallel_devices = device_utils.DeviceUtils.parallel(self.devices)
    parallel_devices.ForceStop(self.args.package_name)
+
+
class _ClearDataCommand(_Command):
  """Clears all of the app's data on the selected devices."""
  name = 'clear-data'
  # Fixed: this attribute was previously misspelled 'descriptions', so the
  # intended help text never overrode the 'description' attribute that every
  # other command class defines.
  description = 'Clears all app data.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
        self.args.package_name)
+
+
class _ArgvCommand(_Command):
  """Shows or rewrites the on-device command-line flags file."""
  name = 'argv'
  description = 'Display and optionally update command-line flags file.'
  needs_package_name = True
  accepts_command_line_flags = True
  all_devices_by_default = True

  def Run(self):
    flags_file = self.args.command_line_flags_file
    _ChangeFlags(self.devices, self.args.args, flags_file)
+
+
class _GdbCommand(_Command):
  """Attaches adb_gdb to an app process on a single device."""
  name = 'gdb'
  description = 'Runs //build/android/adb_gdb with apk-specific args.'
  long_description = description + """

To attach to a process other than the APK's main process, use --pid=1234.
To list all PIDs, use the "ps" command.

If no apk process is currently running, sends a launch intent.
"""
  needs_package_name = True
  needs_output_directory = True
  calls_exec = True
  supports_multiple_devices = False

  def _RegisterExtraArgs(self, group):
    # --pid and --debug-process-name select the target process in two
    # different ways, so only one of them may be given.
    pid_group = group.add_mutually_exclusive_group()
    pid_group.add_argument('--debug-process-name',
                           help='Name of the process to attach to. '
                                'E.g. "privileged_process0", or "foo.bar:baz"')
    pid_group.add_argument('--pid',
                           help='The process ID to attach to. Defaults to '
                                'the main process for the package.')
    group.add_argument('--ide', action='store_true',
                       help='Rather than enter a gdb prompt, set up the '
                            'gdb connection and wait for an IDE to '
                            'connect.')
    # Same default port that ndk-gdb.py uses.
    group.add_argument('--port', type=int, default=5039,
                       help='Use the given port for the GDB connection')

  def Run(self):
    _RunGdb(self.devices[0], self.args.package_name,
            self.args.debug_process_name, self.args.pid,
            self.args.output_directory, self.args.target_cpu, self.args.port,
            self.args.ide, bool(self.args.verbose_count))
+
+
class _LogcatCommand(_Command):
  """Streams a filtered (and optionally deobfuscated) logcat for the app."""
  name = 'logcat'
  description = 'Runs "adb logcat" with filters relevant the current APK.'
  long_description = description + """

"Relevant filters" means:
  * Log messages from processes belonging to the apk,
  * Plus log messages from log tags: ActivityManager|DEBUG,
  * Plus fatal logs from any process,
  * Minus spamy dalvikvm logs (for pre-L devices).

Colors:
  * Primary process is white
  * Other processes (gpu, renderer) are yellow
  * Non-apk processes are grey
  * UI thread has a bolded Thread-ID

Java stack traces are detected and deobfuscated (for release builds).

To disable filtering, (but keep coloring), use --verbose.
"""
  needs_package_name = True
  supports_multiple_devices = False

  def Run(self):
    # Deobfuscation requires a ProGuard mapping and must not be disabled.
    deobfuscate = None
    if self.args.proguard_mapping_path and not self.args.no_deobfuscate:
      deobfuscate = deobfuscator.Deobfuscator(self.args.proguard_mapping_path)

    stack_script_context = _StackScriptContext(
        self.args.output_directory,
        self.args.apk_path,
        self.bundle_generation_info,
        quiet=True)
    try:
      _RunLogcat(self.devices[0], self.args.package_name, stack_script_context,
                 deobfuscate, bool(self.args.verbose_count))
    except KeyboardInterrupt:
      pass  # Don't show stack trace upon Ctrl-C
    finally:
      # Always tear down helpers, even after Ctrl-C or an error.
      stack_script_context.Close()
      if deobfuscate:
        deobfuscate.Close()

  def _RegisterExtraArgs(self, group):
    # Wrapper scripts already know the mapping path, so they expose only an
    # opt-out switch; the bare script takes the path explicitly.
    if self._from_wrapper_script:
      group.add_argument('--no-deobfuscate', action='store_true',
          help='Disables ProGuard deobfuscation of logcat.')
    else:
      group.set_defaults(no_deobfuscate=False)
      group.add_argument('--proguard-mapping-path',
          help='Path to ProGuard map (enables deobfuscation)')
+
+
class _PsCommand(_Command):
  """Prints the PIDs of any running processes belonging to the app."""
  name = 'ps'
  description = 'Show PIDs of any APK processes currently running.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    # Delegates entirely to the shared ps helper.
    _RunPs(self.devices, self.args.package_name)
+
+
class _DiskUsageCommand(_Command):
  """Reports the app's on-device storage footprint."""
  name = 'disk-usage'
  description = 'Show how much device storage is being consumed by the app.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    # Delegates entirely to the shared disk-usage helper.
    _RunDiskUsage(self.devices, self.args.package_name)
+
+
class _MemUsageCommand(_Command):
  """Reports memory usage of the app's running processes."""
  name = 'mem-usage'
  description = 'Show memory usage of currently running APK processes.'
  needs_package_name = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    group.add_argument('--query-app', action='store_true',
        help='Do not add --local to "dumpsys meminfo". This will output '
             'additional metrics (e.g. Context count), but also cause memory '
             'to be used in order to gather the metrics.')

  def Run(self):
    _RunMemUsage(self.devices,
                 self.args.package_name,
                 query_app=self.args.query_app)
+
+
class _ShellCommand(_Command):
  """Runs a command (or an interactive session) as the app's uid."""
  name = 'shell'
  description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
                 '(via run-as). Useful for inspecting the app\'s data '
                 'directory.')
  needs_package_name = True

  # Both properties depend on whether an explicit command was supplied:
  # with no command the invocation execs (presumably an interactive shell)
  # and is therefore limited to a single device.
  @property
  def calls_exec(self):
    return not self.args.cmd

  @property
  def supports_multiple_devices(self):
    return not self.args.cmd

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'cmd', nargs=argparse.REMAINDER, help='Command to run.')

  def Run(self):
    _RunShell(self.devices, self.args.package_name, self.args.cmd)
+
+
class _CompileDexCommand(_Command):
  """Forces recompilation of the app's .odex with a chosen filter."""
  name = 'compile-dex'
  description = ('Applicable only for Android N+. Forces .odex files to be '
                 'compiled with the given compilation filter. To see existing '
                 'filter, use "disk-usage" command.')
  needs_package_name = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    # The choices mirror ART's compilation filter names.
    group.add_argument(
        'compilation_filter',
        choices=['verify', 'quicken', 'space-profile', 'space',
                 'speed-profile', 'speed'],
        help='For WebView/Monochrome, use "speed". For other apks, use '
             '"speed-profile".')

  def Run(self):
    chosen_filter = self.args.compilation_filter
    _RunCompileDex(self.devices, self.args.package_name, chosen_filter)
+
+
class _PrintCertsCommand(_Command):
  """Prints signing-certificate details for the APK or bundle keystore."""
  name = 'print-certs'
  description = 'Print info about certificates used to sign this APK.'
  need_device_args = False
  needs_apk_helper = True

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        '--full-cert',
        action='store_true',
        help=("Print the certificate's full signature, Base64-encoded. "
              "Useful when configuring an Android image's "
              "config_webview_packages.xml."))

  def Run(self):
    keytool = os.path.join(_JAVA_HOME, 'bin', 'keytool')
    if self.is_bundle:
      # Bundles are not signed until converted to .apks. The wrapper scripts
      # record which key will be used to sign though.
      with tempfile.NamedTemporaryFile() as f:
        logging.warning('Bundles are not signed until turned into .apk files.')
        logging.warning('Showing signing info based on associated keystore.')
        # Export the certificate from the keystore into the temp file, then
        # print it with keytool.
        cmd = [
            keytool, '-exportcert', '-keystore',
            self.bundle_generation_info.keystore_path, '-storepass',
            self.bundle_generation_info.keystore_password, '-alias',
            self.bundle_generation_info.keystore_alias, '-file', f.name
        ]
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        cmd = [keytool, '-printcert', '-file', f.name]
        logging.warning('Running: %s', ' '.join(cmd))
        subprocess.check_call(cmd)
        if self.args.full_cert:
          # Redirect stderr to hide a keytool warning about using non-standard
          # keystore format.
          full_output = subprocess.check_output(
              cmd + ['-rfc'], stderr=subprocess.STDOUT)
    else:
      # Signed APK: let apksigner report the certificates directly.
      cmd = [
          build_tools.GetPath('apksigner'), 'verify', '--print-certs',
          '--verbose', self.apk_helper.path
      ]
      logging.warning('Running: %s', ' '.join(cmd))
      # Put the JDK's bin dir first on PATH so apksigner finds java.
      # NOTE(review): env.get('PATH') could be None on a pathological
      # environment, which would make pathsep.join raise — confirm.
      env = os.environ.copy()
      env['PATH'] = os.path.pathsep.join(
          [os.path.join(_JAVA_HOME, 'bin'),
           env.get('PATH')])
      stdout = subprocess.check_output(cmd, env=env)
      print(stdout)
      if self.args.full_cert:
        # NOTE(review): under Python 3, check_output returns bytes, so this
        # substring test (and the regex below) would need decoding — this
        # code presumably runs under Python 2 / vpython; confirm.
        if 'v1 scheme (JAR signing): true' not in stdout:
          raise Exception(
              'Cannot print full certificate because apk is not V1 signed.')

        cmd = [keytool, '-printcert', '-jarfile', self.apk_helper.path, '-rfc']
        # Redirect stderr to hide a keytool warning about using non-standard
        # keystore format.
        full_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)

    if self.args.full_cert:
      # Extract the Base64 body between the PEM BEGIN/END markers and strip
      # line breaks to produce a single-line signature.
      m = re.search(
          r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+',
          full_output, re.MULTILINE)
      if not m:
        raise Exception('Unable to parse certificate:\n{}'.format(full_output))
      signature = re.sub(r'[\r\n]+', '', m.group(1))
      print()
      print('Full Signature:')
      print(signature)
+
+
class _ProfileCommand(_Command):
  """Samples the running app with simpleperf and writes a profile file."""
  name = 'profile'
  description = ('Run the simpleperf sampling CPU profiler on the currently-'
                 'running APK. If --args is used, the extra arguments will be '
                 'passed on to simpleperf; otherwise, the following default '
                 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data')
  needs_package_name = True
  needs_output_directory = True
  supports_multiple_devices = False
  accepts_args = True

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        '--profile-process', default='browser',
        help=('Which process to profile. This may be a process name or pid '
              'such as you would get from running `%s ps`; or '
              'it can be one of (browser, renderer, gpu).' % sys.argv[0]))
    group.add_argument(
        '--profile-thread', default=None,
        help=('(Optional) Profile only a single thread. This may be either a '
              'thread ID such as you would get by running `adb shell ps -t` '
              '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may '
              'be one of (io, compositor, main, render), in which case '
              '--profile-process is also required. (Note that "render" thread '
              'refers to a thread in the browser process that manages a '
              'renderer; to profile the main thread of the renderer process, '
              'use --profile-thread=main).'))
    group.add_argument('--profile-output', default='profile.pb',
                       help='Output file for profiling data')

  def Run(self):
    # --args is forwarded verbatim to simpleperf.
    simpleperf_args = shlex.split(self.args.args or '')
    _RunProfile(self.devices[0], self.args.package_name,
                self.args.output_directory, self.args.profile_output,
                self.args.profile_process, self.args.profile_thread,
                simpleperf_args)
+
+
class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand):
  """Convenience command: install, then launch, then (optionally) logcat."""
  name = 'run'
  description = 'Install, launch, and show logcat (when targeting one device).'
  all_devices_by_default = False
  supports_multiple_devices = True

  def _RegisterExtraArgs(self, group):
    # Combine the flags of all three constituent commands.
    _InstallCommand._RegisterExtraArgs(self, group)
    _LaunchCommand._RegisterExtraArgs(self, group)
    _LogcatCommand._RegisterExtraArgs(self, group)
    group.add_argument('--no-logcat', action='store_true',
                       help='Install and launch, but do not enter logcat.')

  def Run(self):
    logging.warning('Installing...')
    _InstallCommand.Run(self)
    logging.warning('Sending launch intent...')
    _LaunchCommand.Run(self)
    # Logcat streaming only happens when exactly one device is targeted.
    if len(self.devices) == 1 and not self.args.no_logcat:
      logging.warning('Entering logcat...')
      _LogcatCommand.Run(self)
+
+
class _BuildBundleApks(_Command):
  """Builds the bundle's .apks archive and writes it to --output-apks."""
  name = 'build-bundle-apks'
  description = ('Build the .apks archive from an Android app bundle, and '
                 'optionally copy it to a specific destination.')
  need_device_args = False

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        '--output-apks', required=True, help='Destination path for .apks file.')
    group.add_argument(
        '--minimal',
        action='store_true',
        help='Build .apks archive that targets the bundle\'s minSdkVersion and '
        'contains only english splits. It still contains optional splits.')
    group.add_argument(
        '--sdk-version', help='The sdkVersion to build the .apks for.')
    group.add_argument(
        '--build-mode',
        choices=app_bundle_utils.BUILD_APKS_MODES,
        help='Specify which type of APKs archive to build. "default" '
        'generates regular splits, "universal" generates an archive with a '
        'single universal APK, "system" generates an archive with a system '
        'image APK, while "system_compressed" generates a compressed system '
        'APK, with an additional stub APK for the system image.')
    group.add_argument(
        '--optimize-for',
        choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS,
        help='Override split configuration.')

  def Run(self):
    args = self.args
    _GenerateBundleApks(self.bundle_generation_info,
                        output_path=args.output_apks,
                        minimal=args.minimal,
                        minimal_sdk_version=args.sdk_version,
                        mode=args.build_mode,
                        optimize_for=args.optimize_for)
+
+
class _ManifestCommand(_Command):
  """Dumps the bundle's AndroidManifest.xml to stdout via bundletool."""
  name = 'dump-manifest'
  description = 'Dump the android manifest from this bundle, as XML, to stdout.'
  need_device_args = False

  def Run(self):
    bundletool_args = [
        'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
    ]
    bundletool.RunBundleTool(bundletool_args)
+
+
class _StackCommand(_Command):
  """Symbolizes an Android crash stack using the stack script helper."""
  name = 'stack'
  description = 'Decodes an Android stack.'
  need_device_args = False

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'file',
        nargs='?',
        help='File to decode. If not specified, stdin is processed.')

  def Run(self):
    stack_context = _StackScriptContext(self.args.output_directory,
                                        self.args.apk_path,
                                        self.bundle_generation_info)
    try:
      proc = stack_context.Popen(input_file=self.args.file)
      # A nonzero exit status from the stack script is an error.
      if proc.wait():
        raise Exception('stack script returned {}'.format(proc.returncode))
    finally:
      stack_context.Close()
+
+
# Shared commands for regular APKs and app bundles.
# NOTE(review): registration order presumably determines the order commands
# appear in --help output — confirm before reordering.
_COMMANDS = [
    _DevicesCommand,
    _PackageInfoCommand,
    _InstallCommand,
    _UninstallCommand,
    _SetWebViewProviderCommand,
    _LaunchCommand,
    _StopCommand,
    _ClearDataCommand,
    _ArgvCommand,
    _GdbCommand,
    _LogcatCommand,
    _PsCommand,
    _DiskUsageCommand,
    _MemUsageCommand,
    _ShellCommand,
    _CompileDexCommand,
    _PrintCertsCommand,
    _ProfileCommand,
    _RunCommand,
    _StackCommand,
]

# Commands specific to app bundles.
_BUNDLE_COMMANDS = [
    _BuildBundleApks,
    _ManifestCommand,
]
+
+
def _ParseArgs(parser, from_wrapper_script, is_bundle):
  """Registers all applicable subcommands on |parser| and parses sys.argv.

  Args:
    parser: argparse.ArgumentParser to populate with subcommands.
    from_wrapper_script: True when invoked via a generated wrapper script.
    is_bundle: True when targeting an app bundle rather than a plain APK.

  Returns:
    The parsed argparse namespace.
  """
  subparsers = parser.add_subparsers()
  command_classes = list(_COMMANDS)
  if is_bundle:
    command_classes += _BUNDLE_COMMANDS
  commands = [clazz(from_wrapper_script, is_bundle)
              for clazz in command_classes]

  for command in commands:
    # Commands that require an output directory are only available via
    # wrapper scripts (which know where that directory is).
    if from_wrapper_script or not command.needs_output_directory:
      command.RegisterArgs(subparsers)

  # Show extended help when no command is passed.
  argv = sys.argv[1:] or ['--help']

  return parser.parse_args(argv)
+
+
def _RunInternal(parser,
                 output_directory=None,
                 additional_apk_paths=None,
                 bundle_generation_info=None):
  """Parses arguments, runs the chosen command, and saves device caches.

  Args:
    parser: argparse.ArgumentParser to use.
    output_directory: Chromium output directory; set only when called from a
      generated wrapper script.
    additional_apk_paths: Optional list of extra APK paths to install.
    bundle_generation_info: Optional bundle info; its presence marks the
      target as an app bundle.
  """
  colorama.init()
  parser.set_defaults(
      additional_apk_paths=additional_apk_paths,
      output_directory=output_directory)
  # Only generated wrapper scripts supply an output directory.
  from_wrapper_script = bool(output_directory)
  args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info))
  run_tests_helper.SetLogLevel(args.verbose_count)
  if bundle_generation_info:
    args.command.RegisterBundleGenerationInfo(bundle_generation_info)
  # Validate eagerly so a bad path fails before any device work starts.
  # (Consistency fix: read the paths from |args| in both the guard and the
  # loop — previously the loop read the parameter directly.)
  if args.additional_apk_paths:
    for path in args.additional_apk_paths:
      if not path or not os.path.exists(path):
        raise Exception('Invalid additional APK path "{}"'.format(path))
  args.command.ProcessArgs(args)
  args.command.Run()
  # Incremental install depends on the cache being cleared when uninstalling.
  if args.command.name != 'uninstall':
    _SaveDeviceCaches(args.command.devices, output_directory)
+
+
def Run(output_directory, apk_path, additional_apk_paths, incremental_json,
        command_line_flags_file, target_cpu, proguard_mapping_path):
  """Entry point for generated wrapper scripts."""
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  parser = argparse.ArgumentParser()

  def _ExistsOrNone(path):
    # Treat a missing build artifact the same as "not built".
    return path if path and os.path.exists(path) else None

  parser.set_defaults(
      command_line_flags_file=command_line_flags_file,
      target_cpu=target_cpu,
      apk_path=_ExistsOrNone(apk_path),
      incremental_json=_ExistsOrNone(incremental_json),
      proguard_mapping_path=proguard_mapping_path)
  _RunInternal(
      parser,
      output_directory=output_directory,
      additional_apk_paths=additional_apk_paths)
+
+
def RunForBundle(output_directory, bundle_path, bundle_apks_path,
                 additional_apk_paths, aapt2_path, keystore_path,
                 keystore_password, keystore_alias, package_name,
                 command_line_flags_file, proguard_mapping_path, target_cpu,
                 system_image_locales, default_modules):
  """Entry point for generated app bundle wrapper scripts.

  Args:
    output_directory: Chromium output directory path.
    bundle_path: Input bundle path.
    bundle_apks_path: Output bundle .apks archive path.
    additional_apk_paths: Additional APKs to install prior to bundle install.
    aapt2_path: Aapt2 tool path.
    keystore_path: Keystore file path.
    keystore_password: Keystore password.
    keystore_alias: Signing key name alias in keystore file.
    package_name: Application's package name.
    command_line_flags_file: Optional. Name of an on-device file that will be
      used to store command-line flags for this bundle.
    proguard_mapping_path: Input path to the Proguard mapping file, used to
      deobfuscate Java stack traces.
    target_cpu: Chromium target CPU name, used by the 'gdb' command.
    system_image_locales: List of Chromium locales that should be included in
      system image APKs.
    default_modules: List of modules that are installed in addition to those
      given by the '-m' switch.
  """
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  bundle_generation_info = BundleGenerationInfo(
      bundle_path=bundle_path,
      bundle_apks_path=bundle_apks_path,
      aapt2_path=aapt2_path,
      keystore_path=keystore_path,
      keystore_password=keystore_password,
      keystore_alias=keystore_alias,
      system_image_locales=system_image_locales)
  # Make the bundle's default module list available to the 'install' command.
  _InstallCommand.default_modules = default_modules

  parser = argparse.ArgumentParser()
  parser.set_defaults(
      package_name=package_name,
      command_line_flags_file=command_line_flags_file,
      proguard_mapping_path=proguard_mapping_path,
      target_cpu=target_cpu)
  _RunInternal(
      parser,
      output_directory=output_directory,
      additional_apk_paths=additional_apk_paths,
      bundle_generation_info=bundle_generation_info)
+
+
def main():
  """Entry point when run directly rather than via a wrapper script."""
  devil_chromium.Initialize()
  _RunInternal(argparse.ArgumentParser())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/apk_operations.pydeps b/src/build/android/apk_operations.pydeps
new file mode 100644
index 0000000..60b1289
--- /dev/null
+++ b/src/build/android/apk_operations.pydeps
@@ -0,0 +1,110 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/apk_operations.pydeps build/android/apk_operations.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../gn_helpers.py
+../print_python_deps.py
+adb_command_line.py
+apk_operations.py
+convert_dex_profile.py
+devil_chromium.py
+gyp/bundletool.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+gyp/util/resource_utils.py
+gyp/util/zipalign.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/utils/__init__.py
+pylib/utils/app_bundle_utils.py
+pylib/utils/simpleperf.py
+pylib/utils/time_profile.py
diff --git a/src/build/android/apply_shared_preference_file.py b/src/build/android/apply_shared_preference_file.py
new file mode 100755
index 0000000..187bf18
--- /dev/null
+++ b/src/build/android/apply_shared_preference_file.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manually applies a shared preference JSON file.
+
+If needed during automation, use the --shared-prefs-file in test_runner.py
+instead.
+"""
+
+import argparse
+import sys
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+
+from devil.android import device_utils
+from devil.android.sdk import shared_prefs
+from pylib.utils import shared_preference_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Manually apply shared preference JSON files.')
+  parser.add_argument('filepaths', nargs='*',
+                      help='Any number of paths to shared preference JSON '
+                           'files to apply.')
+  args = parser.parse_args()
+
+  all_devices = device_utils.DeviceUtils.HealthyDevices()
+  if not all_devices:
+    raise RuntimeError('No healthy devices attached')
+
+  for filepath in args.filepaths:
+    all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath)
+    for setting in all_settings:
+      for device in all_devices:
+        shared_pref = shared_prefs.SharedPrefs(
+            device, setting['package'], setting['filename'],
+            use_encrypted_path=setting.get('supports_encrypted_path', False))
+        shared_preference_utils.ApplySharedPreferenceSetting(
+            shared_pref, setting)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/asan_symbolize.py b/src/build/android/asan_symbolize.py
new file mode 100755
index 0000000..6585089
--- /dev/null
+++ b/src/build/android/asan_symbolize.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+# pylint: disable=wrong-import-order
+# Uses symbol.py from third_party/android_platform, not python's.
+with host_paths.SysPath(
+    host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+    position=0):
+  import symbol
+
+
+_RE_ASAN = re.compile(
+    r"""
+    (?P<prefix>.*?)
+    (?P<pos>\#\S*?)          # position of the call in stack.
+                             # escape the char "#" due to the VERBOSE flag.
+    \s+(\S*?)\s+
+    \(                       # match the char "(".
+        (?P<lib>.*?)         # library path.
+        \+0[xX](?P<addr>.*?) # address of the symbol in hex.
+                             # the prefix "0x" is skipped.
+    \)                       # match the char ")".
+    """, re.VERBOSE)
+
+# This named tuple models a parsed Asan log line.
+AsanParsedLine = collections.namedtuple('AsanParsedLine',
+                                        'prefix,library,pos,rel_address')
+
+# This named tuple models an Asan log line. 'raw' is the raw content
+# while 'parsed' is None or an AsanParsedLine instance.
+AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
+def _ParseAsanLogLine(line):
+  """Parse line into corresponding AsanParsedLine value, if any, or None."""
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return AsanParsedLine(prefix=m.group('prefix'),
+                        library=m.group('lib'),
+                        pos=m.group('pos'),
+                        rel_address='%08x' % int(m.group('addr'), 16))
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  # pylint: disable=no-member
+  return symbol.TranslateLibPath(library)
+
+
+def _PrintSymbolized(asan_input, arch):
+  """Print symbolized logcat output for Asan symbols.
+
+  Args:
+    asan_input: list of input lines.
+    arch: Target CPU architecture.
+  """
+  asan_libs = _FindASanLibraries()
+
+  # Maps library -> [ AsanParsedLine... ]
+  libraries = collections.defaultdict(list)
+
+  asan_log_lines = []
+  for line in asan_input:
+    line = line.rstrip()
+    parsed = _ParseAsanLogLine(line)
+    if parsed:
+      libraries[parsed.library].append(parsed)
+    asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))
+
+  # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
+  all_symbols = collections.defaultdict(dict)
+
+  for library, items in libraries.iteritems():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i.rel_address for i in items])
+    # pylint: disable=no-member
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True,
+                                               cpu_arch=arch)
+    if info_dict:
+      all_symbols[library] = info_dict
+
+  for log_line in asan_log_lines:
+    m = log_line.parsed
+    if (m and m.library in all_symbols and
+        m.rel_address in all_symbols[m.library]):
+      # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
+      # NOTE: The documentation for SymbolInformationForSet() indicates
+      # that usually one wants to display the last list item, not the first.
+      # The code below takes the first, is this the best choice here?
+      s = all_symbols[m.library][m.rel_address][0]
+      print('%s%s %s %s' % (m.prefix, m.pos, s[0], s[1]))
+    else:
+      print(log_line.raw)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  parser.add_option('--output-directory',
+                    help='Path to the root build directory.')
+  parser.add_option('--arch', default='arm',
+                    help='CPU architecture name')
+  options, _ = parser.parse_args()
+
+  if options.output_directory:
+    constants.SetOutputDirectory(options.output_directory)
+  # Do an up-front test that the output directory is known.
+  constants.CheckOutputDirectory()
+
+  if options.logcat:
+    asan_input = file(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+
+  _PrintSymbolized(asan_input.readlines(), options.arch)
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/android/bytecode/BUILD.gn b/src/build/android/bytecode/BUILD.gn
new file mode 100644
index 0000000..36b5432
--- /dev/null
+++ b/src/build/android/bytecode/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_binary("bytecode_processor") {
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeProcessor.java",
+    "java/org/chromium/bytecode/ClassPathValidator.java",
+    "java/org/chromium/bytecode/TypeUtils.java",
+  ]
+  main_class = "org.chromium.bytecode.ByteCodeProcessor"
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+  wrapper_script_name = "helper/bytecode_processor"
+  enable_bytecode_checks = false
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()`.
+java_binary("fragment_activity_replacer") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer"
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()`  followed by a cast to FragmentActivity.
+# Prefer :fragment_activity_replacer. This rewriter should only be used for
+# libraries that rely on getActivity() returning a FragmentActivity *and* are
+# not going to be used in an app that contains multiple copies of the AndroidX
+# Fragment library (i.e. WebLayer).
+java_binary("fragment_activity_replacer_single_androidx") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer_single_androidx"
+  wrapper_script_args = [ "--single-androidx" ]
+}
+
+java_library("fragment_activity_replacer_java") {
+  visibility = [ ":*" ]
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeRewriter.java",
+    "java/org/chromium/bytecode/FragmentActivityReplacer.java",
+  ]
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_commons_java",
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
new file mode 100644
index 0000000..b767f4f
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -0,0 +1,167 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Java application that takes in an input jar, performs a series of bytecode
+ * transformations, and generates an output jar.
+ */
+class ByteCodeProcessor {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+    private static final int BUFFER_SIZE = 16384;
+    private static boolean sVerbose;
+    private static boolean sIsPrebuilt;
+    private static ClassLoader sDirectClassPathClassLoader;
+    private static ClassLoader sFullClassPathClassLoader;
+    private static Set<String> sFullClassPathJarPaths;
+    private static Set<String> sMissingClassesAllowlist;
+    private static Map<String, String> sJarToGnTarget;
+    private static ClassPathValidator sValidator;
+
+    private static Void processEntry(ZipEntry entry, byte[] data) {
+        ClassReader reader = new ClassReader(data);
+        if (sIsPrebuilt) {
+            sValidator.validateFullClassPath(
+                    reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+        } else {
+            sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+                    sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+                    sVerbose);
+        }
+        return null;
+    }
+
+    private static void process(String gnTarget, String inputJarPath)
+            throws ExecutionException, InterruptedException {
+        ExecutorService executorService =
+                Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+        try (ZipInputStream inputStream = new ZipInputStream(
+                     new BufferedInputStream(new FileInputStream(inputJarPath)))) {
+            while (true) {
+                ZipEntry entry = inputStream.getNextEntry();
+                if (entry == null) {
+                    break;
+                }
+                byte[] data = readAllBytes(inputStream);
+                executorService.submit(() -> processEntry(entry, data));
+            }
+            executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
+            executorService.awaitTermination(1, TimeUnit.HOURS);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        if (sValidator.hasErrors()) {
+            sValidator.printAll(gnTarget, sJarToGnTarget);
+            System.exit(1);
+        }
+    }
+
+    private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        int numRead = 0;
+        byte[] data = new byte[BUFFER_SIZE];
+        while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+            buffer.write(data, 0, numRead);
+        }
+        return buffer.toByteArray();
+    }
+
+    /**
+     * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+     * given jars.
+     */
+    static ClassLoader loadJars(Collection<String> paths) {
+        URL[] jarUrls = new URL[paths.size()];
+        int i = 0;
+        for (String path : paths) {
+            try {
+                jarUrls[i++] = new File(path).toURI().toURL();
+            } catch (MalformedURLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return new URLClassLoader(jarUrls);
+    }
+
+    /**
+     * Extracts a length-encoded list of strings from the arguments, and adds them to |out|. Returns
+     * the new "next index" to be processed.
+     */
+    private static int parseListArgument(String[] args, int index, Collection<String> out) {
+        int argLength = Integer.parseInt(args[index++]);
+        out.addAll(Arrays.asList(Arrays.copyOfRange(args, index, index + argLength)));
+        return index + argLength;
+    }
+
+    public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+                                                  ExecutionException, InterruptedException {
+        // Invoke this script using //build/android/gyp/bytecode_processor.py
+        int currIndex = 0;
+        String gnTarget = args[currIndex++];
+        String inputJarPath = args[currIndex++];
+        sVerbose = args[currIndex++].equals("--verbose");
+        sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+
+        sMissingClassesAllowlist = new HashSet<>();
+        currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
+
+        ArrayList<String> sdkJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, sdkJarPaths);
+
+        ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+        directClassPathJarPaths.add(inputJarPath);
+        directClassPathJarPaths.addAll(sdkJarPaths);
+        currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
+        sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+        ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+        ArrayList<String> gnTargets = new ArrayList<>();
+        parseListArgument(args, currIndex, gnTargets);
+        sJarToGnTarget = new HashMap<>();
+        assert fullClassPathJarPaths.size() == gnTargets.size();
+        for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+            sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+        }
+
+        // Load all jars that are on the classpath for the input jar for analyzing class
+        // hierarchy.
+        sFullClassPathJarPaths = new HashSet<>();
+        sFullClassPathJarPaths.add(inputJarPath);
+        sFullClassPathJarPaths.addAll(sdkJarPaths);
+        sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
+        sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+        sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+        sValidator = new ClassPathValidator();
+        process(gnTarget, inputJarPath);
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
new file mode 100644
index 0000000..3d0d9cd
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
@@ -0,0 +1,91 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Base class for scripts that perform bytecode modifications on a jar file.
+ */
+public abstract class ByteCodeRewriter {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+
+    public void rewrite(File inputJar, File outputJar) throws IOException {
+        if (!inputJar.exists()) {
+            throw new FileNotFoundException("Input jar not found: " + inputJar.getPath());
+        }
+        try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar))) {
+            try (OutputStream outputStream = new FileOutputStream(outputJar)) {
+                processZip(inputStream, outputStream);
+            }
+        }
+    }
+
+    /** Returns true if the class at the given path in the archive should be rewritten. */
+    protected abstract boolean shouldRewriteClass(String classPath);
+
+    /**
+     * Returns the ClassVisitor that should be used to modify the bytecode of class at the given
+     * path in the archive.
+     */
+    protected abstract ClassVisitor getClassVisitorForClass(
+            String classPath, ClassVisitor delegate);
+
+    private void processZip(InputStream inputStream, OutputStream outputStream) {
+        try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
+            ZipInputStream zipInputStream = new ZipInputStream(inputStream);
+            ZipEntry entry;
+            while ((entry = zipInputStream.getNextEntry()) != null) {
+                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+                boolean handled = processClassEntry(entry, zipInputStream, buffer);
+                if (handled) {
+                    ZipEntry newEntry = new ZipEntry(entry.getName());
+                    zipOutputStream.putNextEntry(newEntry);
+                    zipOutputStream.write(buffer.toByteArray(), 0, buffer.size());
+                } else {
+                    zipOutputStream.putNextEntry(entry);
+                    zipInputStream.transferTo(zipOutputStream);
+                }
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private boolean processClassEntry(
+            ZipEntry entry, InputStream inputStream, OutputStream outputStream) {
+        if (!entry.getName().endsWith(CLASS_FILE_SUFFIX) || !shouldRewriteClass(entry.getName())) {
+            return false;
+        }
+        try {
+            ClassReader reader = new ClassReader(inputStream);
+            ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
+            ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer);
+            reader.accept(classVisitor, ClassReader.EXPAND_FRAMES);
+
+            writer.visitEnd();
+            byte[] classData = writer.toByteArray();
+            outputStream.write(classData, 0, classData.length);
+            return true;
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 0000000..9f45df5
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,233 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.function.Consumer;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. If there are some that are unable to be found, it
+ * stores a helpful error message if it knows where it might find them, and exits the program if it
+ * can't find the class with any given classpath.
+ */
+public class ClassPathValidator {
+    // Number of warnings to print.
+    private static final int MAX_MISSING_CLASS_WARNINGS = 10;
+    // Number of missing classes to show per missing jar.
+    private static final int MAX_ERRORS_PER_JAR = 2;
+    // Map of missing .jar -> Missing class -> Classes that failed.
+    // TreeMap so that error messages have sorted list of jars.
+    private final Map<String, Map<String, Set<String>>> mDirectErrors =
+            Collections.synchronizedMap(new TreeMap<>());
+    // Missing classes we only track the first one for each jar.
+    // Map of missingClass -> srcClass.
+    private final Map<String, String> mMissingClasses =
+            Collections.synchronizedMap(new TreeMap<>());
+
+    static class ClassNotLoadedException extends ClassNotFoundException {
+        private final String mClassName;
+
+        ClassNotLoadedException(String className, Throwable ex) {
+            super("Couldn't load " + className, ex);
+            mClassName = className;
+        }
+
+        public String getClassName() {
+            return mClassName;
+        }
+    }
+
+    private static void validateClass(ClassLoader classLoader, String className)
+            throws ClassNotLoadedException {
+        if (className.startsWith("[")) {
+            // Dealing with an array type which isn't encoded nicely in the constant pool.
+            // For example, [[Lorg/chromium/Class$1;
+            className = className.substring(className.lastIndexOf('[') + 1);
+            if (className.charAt(0) == 'L' && className.endsWith(";")) {
+                className = className.substring(1, className.length() - 1);
+            } else {
+                // Bailing out if we have a non-class array type.
+                // This could be something like [B
+                return;
+            }
+        }
+        if (className.matches(".*\\bR(\\$\\w+)?$")) {
+            // Resources in R.java files are not expected to be valid at this stage in the build.
+            return;
+        }
+        if (className.matches("^libcore\\b.*")) {
+            // libcore exists on devices, but is not included in the Android sdk as it is a private
+            // API.
+            return;
+        }
+        try {
+            classLoader.loadClass(className.replace('/', '.'));
+        } catch (ClassNotFoundException e) {
+            throw new ClassNotLoadedException(className, e);
+        } catch (NoClassDefFoundError e) {
+            // We assume that this is caused by another class that is not going to be able to be
+            // loaded, so we will skip this and let that class fail with ClassNotFoundException.
+        }
+    }
+
+    /**
+     * Given a .class file, see if every class referenced in the main class' constant pool can be
+     * loaded by the given ClassLoader.
+     *
+     * @param classReader .class file interface for reading the constant pool.
+     * @param classLoader classpath you wish to validate.
+     * @param errorConsumer Called for each missing class.
+     */
+    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+            Consumer<ClassNotLoadedException> errorConsumer) {
+        char[] charBuffer = new char[classReader.getMaxStringLength()];
+        // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+        // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+        for (int i = 1; i < classReader.getItemCount(); i++) {
+            int offset = classReader.getItem(i);
+            // Class entries correspond to 7 in the constant pool
+            // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+            if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+                try {
+                    validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                } catch (ClassNotLoadedException e) {
+                    errorConsumer.accept(e);
+                }
+            }
+        }
+    }
+
+    public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+            Set<String> missingClassAllowlist) {
+        // Prebuilts only need transitive dependencies checked, not direct dependencies.
+        validateClassPath(classReader, fullClassLoader, (e) -> {
+            if (!missingClassAllowlist.contains(e.getClassName())) {
+                addMissingError(classReader.getClassName(), e.getClassName());
+            }
+        });
+    }
+
+    public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+            ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+            Set<String> missingClassAllowlist, boolean verbose) {
+        validateClassPath(classReader, directClassLoader, (e) -> {
+            try {
+                validateClass(fullClassLoader, e.getClassName());
+            } catch (ClassNotLoadedException d) {
+                if (!missingClassAllowlist.contains(e.getClassName())) {
+                    addMissingError(classReader.getClassName(), e.getClassName());
+                }
+                return;
+            }
+            if (verbose) {
+                System.err.println("Class \"" + e.getClassName()
+                        + "\" not found in direct dependencies,"
+                        + " but found in indirect dependiences.");
+            }
+            // Iterating through all jars that are in the full classpath but not the direct
+            // classpath to find which one provides the class we are looking for.
+            for (String jarPath : jarsOnlyInFullClassPath) {
+                try {
+                    ClassLoader smallLoader =
+                            ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+                    validateClass(smallLoader, e.getClassName());
+                    addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+                    break;
+                } catch (ClassNotLoadedException f) {
+                }
+            }
+        });
+    }
+
+    private void addMissingError(String srcClass, String missingClass) {
+        mMissingClasses.put(missingClass, srcClass);
+    }
+
+    private void addDirectError(String jarPath, String srcClass, String missingClass) {
+        synchronized (mDirectErrors) {
+            Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+            if (failedClassesByMissingClass == null) {
+                // TreeMap so that error messages have sorted list of classes.
+                failedClassesByMissingClass = new TreeMap<>();
+                mDirectErrors.put(jarPath, failedClassesByMissingClass);
+            }
+            Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+            if (failedClasses == null) {
+                failedClasses = new TreeSet<>();
+                failedClassesByMissingClass.put(missingClass, failedClasses);
+            }
+            failedClasses.add(srcClass);
+        }
+    }
+
+    public boolean hasErrors() {
+        return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+    }
+
+    private static void printValidationError(
+            PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+        out.print(" * ");
+        out.println(gnTarget);
+        int i = 0;
+        // The list of missing classes is non-exhaustive because each class that fails to validate
+        // reports only the first missing class.
+        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+            String missingClass = entry.getKey();
+            Set<String> filesThatNeededIt = entry.getValue();
+            out.print("     * ");
+            if (i == MAX_ERRORS_PER_JAR) {
+                out.print(String.format(
+                        "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+                break;
+            }
+            out.print(missingClass.replace('/', '.'));
+            out.print(" (needed by ");
+            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+            if (filesThatNeededIt.size() > 1) {
+                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+            }
+            out.println(")");
+            i++;
+        }
+    }
+
+    public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+        String streamer = "=============================";
+        System.err.println();
+        System.err.println(streamer + " Dependency Checks Failed " + streamer);
+        System.err.println("Target: " + gnTarget);
+        if (!mMissingClasses.isEmpty()) {
+            int i = 0;
+            for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+                if (++i > MAX_MISSING_CLASS_WARNINGS) {
+                    System.err.println(String.format("... and %d more.",
+                            mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+                    break;
+                }
+                System.err.println(String.format(
+                        "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+                        entry.getKey(), entry.getValue()));
+            }
+            System.err.println();
+        }
+        if (!mDirectErrors.isEmpty()) {
+            System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+            for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+                printValidationError(
+                        System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+            }
+            System.err.println();
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
new file mode 100644
index 0000000..a40f39c
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
@@ -0,0 +1,238 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.commons.MethodRemapper;
+import org.objectweb.asm.commons.Remapper;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Java application that modifies Fragment.getActivity() to return an Activity instead of a
+ * FragmentActivity, and updates any existing getActivity() calls to reference the updated method.
+ *
+ * See crbug.com/1144345 for more context.
+ */
+public class FragmentActivityReplacer extends ByteCodeRewriter {
+    private static final String GET_ACTIVITY_METHOD_NAME = "getActivity";
+    private static final String GET_LIFECYCLE_ACTIVITY_METHOD_NAME = "getLifecycleActivity";
+    private static final String NEW_METHOD_DESCRIPTOR = "()Landroid/app/Activity;";
+    private static final String OLD_METHOD_DESCRIPTOR =
+            "()Landroidx/fragment/app/FragmentActivity;";
+    private static final String REQUIRE_ACTIVITY_METHOD_NAME = "requireActivity";
+    private static final String SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME =
+            "com.google.android.gms.common.api.internal.SupportLifecycleFragmentImpl";
+
+    /**
+     * Entry point. Usage: {@code [--single-androidx] <input.jar> <output.jar>}.
+     * Exits with status 1 on malformed arguments.
+     */
+    public static void main(String[] args) throws IOException {
+        // Invoke this script using //build/android/gyp/bytecode_rewriter.py
+        if (!(args.length == 2 || args.length == 3 && args[0].equals("--single-androidx"))) {
+            System.err.println("Expected arguments: [--single-androidx] <input.jar> <output.jar>");
+            System.exit(1);
+        }
+
+        if (args.length == 2) {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(false);
+            rewriter.rewrite(new File(args[0]), new File(args[1]));
+        } else {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(true);
+            rewriter.rewrite(new File(args[1]), new File(args[2]));
+        }
+    }
+
+    // When true, a CHECKCAST back to FragmentActivity is inserted after every
+    // rewritten call site (see InvocationReplacer#visitMethodInsn below).
+    private final boolean mSingleAndroidX;
+
+    public FragmentActivityReplacer(boolean singleAndroidX) {
+        mSingleAndroidX = singleAndroidX;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(String classPath) {
+        // Call sites of getActivity()/requireActivity() may appear anywhere, so
+        // every class in the jar is a rewrite candidate.
+        return true;
+    }
+
+    @Override
+    protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+        // All classes get call-site rewriting; the two classes that *define* the
+        // affected methods additionally get their method signatures rewritten.
+        ClassVisitor invocationVisitor = new InvocationReplacer(delegate, mSingleAndroidX);
+        switch (classPath) {
+            case "androidx/fragment/app/Fragment.class":
+                return new FragmentClassVisitor(invocationVisitor);
+            case "com/google/android/gms/common/api/internal/SupportLifecycleFragmentImpl.class":
+                return new SupportLifecycleFragmentImplClassVisitor(invocationVisitor);
+            default:
+                return invocationVisitor;
+        }
+    }
+
+    /**
+     * Updates any Fragment.getActivity/requireActivity() or getLifecycleActivity() calls to call
+     * the replaced method.
+     */
+    private static class InvocationReplacer extends ClassVisitor {
+        private final boolean mSingleAndroidX;
+
+        private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) {
+            super(Opcodes.ASM7, baseVisitor);
+            mSingleAndroidX = singleAndroidX;
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            MethodVisitor base = super.visitMethod(access, name, descriptor, signature, exceptions);
+            return new MethodVisitor(Opcodes.ASM7, base) {
+                @Override
+                public void visitMethodInsn(int opcode, String owner, String name,
+                        String descriptor, boolean isInterface) {
+                    boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    boolean isFragmentRequireActivity = name.equals(REQUIRE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    // NOTE(review): ASM passes 'owner' as an internal name
+                    // (slash-separated), while the constant above is dot-separated,
+                    // so this equals() looks like it can never match — confirm
+                    // against upstream before relying on this rewrite path.
+                    boolean isSupportLifecycleFragmentImplGetLifecycleActivity =
+                            name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME);
+                    if ((opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
+                            && (isFragmentGetActivity || isFragmentRequireActivity
+                                    || isSupportLifecycleFragmentImplGetLifecycleActivity)) {
+                        // Redirect the call to the rewritten Activity-returning method.
+                        super.visitMethodInsn(
+                                opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface);
+                        if (mSingleAndroidX) {
+                            super.visitTypeInsn(
+                                    Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity");
+                        }
+                    } else {
+                        super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
+                    }
+                }
+
+                private boolean isFragmentSubclass(String internalType) {
+                    // Look up classes with a ClassLoader that will resolve any R classes to Object.
+                    // This is fine in this case as resource classes shouldn't be in the class
+                    // hierarchy of any Fragments.
+                    ClassLoader resourceStubbingClassLoader = new ClassLoader() {
+                        @Override
+                        protected Class<?> findClass(String name) throws ClassNotFoundException {
+                            if (name.matches(".*\\.R(\\$.+)?")) {
+                                return Object.class;
+                            }
+                            return super.findClass(name);
+                        }
+                    };
+
+                    // This doesn't use Class#isAssignableFrom to avoid us needing to load
+                    // AndroidX's Fragment class, which may not be on the classpath.
+                    try {
+                        String binaryName = Type.getObjectType(internalType).getClassName();
+                        Class<?> clazz = resourceStubbingClassLoader.loadClass(binaryName);
+                        while (clazz != null) {
+                            if (clazz.getName().equals("androidx.fragment.app.Fragment")) {
+                                return true;
+                            }
+                            clazz = clazz.getSuperclass();
+                        }
+                        return false;
+                    } catch (ClassNotFoundException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            };
+        }
+    }
+
+    /**
+     * Updates the implementation of Fragment.getActivity() and Fragment.requireActivity().
+     */
+    private static class FragmentClassVisitor extends ClassVisitor {
+        private FragmentClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // Update the descriptor of getActivity() and requireActivity().
+            MethodVisitor baseVisitor;
+            if (descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                    && (name.equals(GET_ACTIVITY_METHOD_NAME)
+                            || name.equals(REQUIRE_ACTIVITY_METHOD_NAME))) {
+                // Some Fragments in a Clank library implement an interface that defines an
+                // `Activity getActivity()` method. Fragment.getActivity() is considered its
+                // implementation from a typechecking perspective, but javac still generates a
+                // getActivity() method in these Fragments that call Fragment.getActivity(). This
+                // isn't an issue when the methods return different types, but after changing
+                // Fragment.getActivity() to return an Activity, this generated implementation is
+                // now overriding Fragment's, which it can't do because Fragment.getActivity() is
+                // final. We make it non-final here to avoid this issue.
+                // The generic signature is passed as null since the return type changed.
+                baseVisitor = super.visitMethod(
+                        access & ~Opcodes.ACC_FINAL, name, NEW_METHOD_DESCRIPTOR, null, exceptions);
+            } else {
+                baseVisitor = super.visitMethod(access, name, descriptor, signature, exceptions);
+            }
+
+            // Replace getActivity() with `return ContextUtils.activityFromContext(getContext());`
+            if (name.equals(GET_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                baseVisitor.visitVarInsn(Opcodes.ALOAD, 0);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "androidx/fragment/app/Fragment",
+                        "getContext", "()Landroid/content/Context;", false);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "org/chromium/utils/ContextUtils",
+                        "activityFromContext", "(Landroid/content/Context;)Landroid/app/Activity;",
+                        false);
+                baseVisitor.visitInsn(Opcodes.ARETURN);
+                // Since we set COMPUTE_FRAMES, the arguments of visitMaxs are ignored, but calling
+                // it forces ClassWriter to actually recompute the correct stack/local values.
+                // Without this call ClassWriter keeps the original stack=0,locals=1 which is wrong.
+                baseVisitor.visitMaxs(0, 0);
+                // Returning null discards the original method body entirely.
+                return null;
+            }
+
+            // For all other methods, rewrite any remaining FragmentActivity type
+            // references in the body to android.app.Activity.
+            return new MethodRemapper(baseVisitor, new Remapper() {
+                @Override
+                public String mapType(String internalName) {
+                    if (internalName.equals("androidx/fragment/app/FragmentActivity")) {
+                        return "android/app/Activity";
+                    }
+                    return internalName;
+                }
+            });
+        }
+    }
+
+    /**
+     * Update SupportLifecycleFragmentImpl.getLifecycleActivity().
+     */
+    private static class SupportLifecycleFragmentImplClassVisitor extends ClassVisitor {
+        private SupportLifecycleFragmentImplClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // SupportLifecycleFragmentImpl has two getActivity methods:
+            //   1. public FragmentActivity getLifecycleActivity():
+            //      This is what you'll see in the source. This delegates to Fragment.getActivity().
+            //   2. public Activity getLifecycleActivity():
+            //      This is generated because the class implements LifecycleFragment, which
+            //      declares this method, and delegates to #1.
+            //
+            // Here we change the return type of #1 and delete #2.
+            if (name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)) {
+                if (descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                    return super.visitMethod(
+                            access, name, NEW_METHOD_DESCRIPTOR, signature, exceptions);
+                }
+                // Returning null drops the synthetic bridge method (#2).
+                return null;
+            }
+            return super.visitMethod(access, name, descriptor, signature, exceptions);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 0000000..ed2dc2d
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for working with {@link Type} strings.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
+ */
+class TypeUtils {
+    // Internal names of classes referenced by the bytecode rewriters.
+    static final String ASSERTION_ERROR = "java/lang/AssertionError";
+    static final String ASSET_MANAGER = "android/content/res/AssetManager";
+    static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+    static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+    static final String CONFIGURATION = "android/content/res/Configuration";
+    static final String CONTEXT = "android/content/Context";
+    static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+    static final String RESOURCES = "android/content/res/Resources";
+    static final String STRING = "java/lang/String";
+    static final String THEME = "android/content/res/Resources$Theme";
+
+    // Single-letter descriptors for the primitive types used by the rewriters.
+    static final String BOOLEAN = "Z";
+    static final String INT = "I";
+    static final String VOID = "V";
+    // Maps a primitive type descriptor string (e.g. "Z") to its asm Type constant.
+    private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+    static {
+        PRIMITIVE_DESCRIPTORS = new HashMap<>();
+        PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+    }
+
+    /**
+     * Returns the full method signature with internal names.
+     *
+     * @param methodName Name of the method (ex. "getResources").
+     * @param returnType Internal name for the return type.
+     * @param argumentTypes List of internal names for argument types.
+     * @return String representation of the method signature.
+     */
+    static String getMethodSignature(
+            String methodName, String returnType, String... argumentTypes) {
+        return methodName + getMethodDescriptor(returnType, argumentTypes);
+    }
+
+    /**
+     * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}.
+     *
+     * @param returnType Internal name for the return type of the method (primitive or class).
+     * @param argumentTypes Internal names for the argument types (primitive or class).
+     * @return The generated method descriptor.
+     */
+    static String getMethodDescriptor(String returnType, String... argumentTypes) {
+        Type[] typedArguments = new Type[argumentTypes.length];
+        for (int i = 0; i < argumentTypes.length; ++i) {
+            // Argument list should be empty in this case, not V (void).
+            // (Only checked when the JVM runs with assertions enabled.)
+            assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]);
+            typedArguments[i] = convert(argumentTypes[i]);
+        }
+        return Type.getMethodDescriptor(convert(returnType), typedArguments);
+    }
+
+    /**
+     * Converts an internal name for a type to a {@link Type}.
+     *
+     * @param type Internal name for a type (primitive or class).
+     * @return The resulting Type.
+     */
+    private static Type convert(String type) {
+        if (PRIMITIVE_DESCRIPTORS.containsKey(type)) {
+            return PRIMITIVE_DESCRIPTORS.get(type);
+        }
+        return Type.getObjectType(type);
+    }
+}
diff --git a/src/build/android/chromium-debug.keystore b/src/build/android/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/src/build/android/chromium-debug.keystore
Binary files differ
diff --git a/src/build/android/convert_dex_profile.py b/src/build/android/convert_dex_profile.py
new file mode 100755
index 0000000..f9fdeb6
--- /dev/null
+++ b/src/build/android/convert_dex_profile.py
@@ -0,0 +1,557 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import re
+import subprocess
+import sys
+
+# Regexes for pulling class/method information out of dexdump output.
+DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
+DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
+DEX_METHOD_TYPE_RE = re.compile( # type descriptor method signature re
+    r'\''
+    r'\('
+    r'(?P<method_params>[^)]*)'
+    r'\)'
+    r'(?P<method_return_type>[^\']+)'
+    r'\'')
+DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')
+
+# Matches one method line of a profile, e.g. 'HSPLfoo/Bar;->baz(II)V'.
+PROFILE_METHOD_RE = re.compile(
+    r'(?P<tags>[HSP]+)' # tags such as H/S/P
+    r'(?P<class_name>L[^;]+;)' # class name in type descriptor format
+    r'->(?P<method_name>[^(]+)'
+    r'\((?P<method_params>[^)]*)\)'
+    r'(?P<method_return_type>.+)')
+
+# Proguard mapping file lines: class mappings ("a.b.C -> x.y.Z:") and the
+# indented method mappings that follow them.
+PROGUARD_CLASS_MAPPING_RE = re.compile(
+    r'(?P<original_name>[^ ]+)'
+    r' -> '
+    r'(?P<obfuscated_name>[^:]+):')
+PROGUARD_METHOD_MAPPING_RE = re.compile(
+    # line_start:line_end: (optional)
+    r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
+    r'(?P<return_type>[^ ]+)' # original method return type
+    # original method class name (if exists)
+    r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
+    r'(?P<original_method_name>[^.\(]+)'
+    r'\((?P<params>[^\)]*)\)' # original method params
+    r'(?:[^ ]*)' # original method line numbers (ignored)
+    r' -> '
+    r'(?P<obfuscated_name>.+)') # obfuscated method name
+
+# A single JVM type descriptor: optional array brackets, then a class
+# descriptor or one primitive letter.
+TYPE_DESCRIPTOR_RE = re.compile(
+    r'(?P<brackets>\[*)'
+    r'(?:'
+    r'(?P<class_name>L[^;]+;)'
+    r'|'
+    r'[VZBSCIJFD]'
+    r')')
+
+# Maps dot-notation primitive names to type descriptors. The '' entry lets an
+# empty parameter list map cleanly to an empty descriptor list.
+DOT_NOTATION_MAP = {
+    '': '',
+    'boolean': 'Z',
+    'byte': 'B',
+    'void': 'V',
+    'short': 'S',
+    'char': 'C',
+    'int': 'I',
+    'long': 'J',
+    'float': 'F',
+    'double': 'D'
+}
+
+class Method(object):
+  """A method keyed by class and name, with optional parameter/return types.
+
+  Types, when set, are in JVM type descriptor format.
+  """
+
+  def __init__(self, name, class_name, param_types=None, return_type=None):
+    self.name = name
+    self.class_name = class_name
+    # param_types/return_type may be None while the method is only partially
+    # parsed (see __hash__ below).
+    self.param_types = param_types
+    self.return_type = return_type
+
+  def __str__(self):
+    return '{}->{}({}){}'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  def __repr__(self):
+    return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  # NOTE: __cmp__ and the cmp() builtin are Python 2 only (this script runs
+  # under vpython/Python 2); ordering is by (class, name, params, return type).
+  def __cmp__(self, other):
+    return cmp((self.class_name, self.name, self.param_types, self.return_type),
+        (other.class_name, other.name, other.param_types, other.return_type))
+
+  def __hash__(self):
+    # only hash name and class_name since other fields may not be set yet.
+    return hash((self.name, self.class_name))
+
+
+class Class(object):
+  """A class parsed from dexdump output, holding its methods and their lines."""
+
+  def __init__(self, name):
+    self.name = name
+    # List of (Method, set(line numbers)) pairs.
+    self._methods = []
+
+  def AddMethod(self, method, line_numbers):
+    self._methods.append((method, set(line_numbers)))
+
+  def FindMethodsAtLine(self, method_name, line_start, line_end=None):
+    """Searches through dex class for a method given a name and line numbers
+
+    The dex maps methods to line numbers, this method, given a method name
+    in this class as well as a start line and an optional end line (which act as
+    hints as to which function in the class is being looked for), returns a list
+    of possible matches (or none if none are found).
+
+    Args:
+      method_name: name of method being searched for
+      line_start: start of hint range for lines in this method
+      line_end: end of hint range for lines in this method (optional)
+
+    Returns:
+      A list of Method objects that could match the hints given, or None if no
+      method is found.
+    """
+    found_methods = []
+    if line_end is None:
+      hint_lines = set([line_start])
+    else:
+      hint_lines = set(range(line_start, line_end+1))
+
+    named_methods = [(method, l) for method, l in self._methods
+                     if method.name == method_name]
+
+    # A unique name match needs no line-number disambiguation.
+    if len(named_methods) == 1:
+      return [method for method, l in named_methods]
+    if len(named_methods) == 0:
+      return None
+
+    # Pass 1: methods whose recorded line numbers intersect the hinted lines.
+    for method, line_numbers in named_methods:
+      if not hint_lines.isdisjoint(line_numbers):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambigous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+
+    # Pass 2: fall back to overlap between the hint range and the method's
+    # [min, max] line span.
+    for method, line_numbers in named_methods:
+      if (max(hint_lines) >= min(line_numbers)
+          and min(hint_lines) <= max(line_numbers)):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambigous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+    else:
+      logging.warning('No method named "%s" in class "%s" is '
+                      'mapped to lines %s', method_name, self.name, hint_lines)
+      return None
+
+
+class Profile(object):
+  """An output profile: class-name strings plus methods with their tag sets."""
+
+  def __init__(self):
+    # {Method: set(char)}
+    self._methods = collections.defaultdict(set)
+    # Class-name strings in type descriptor format.
+    self._classes = []
+
+  def AddMethod(self, method, tags):
+    # Tags accumulate across calls for the same method.
+    for tag in tags:
+      self._methods[method].add(tag)
+
+  def AddClass(self, cls):
+    self._classes.append(cls)
+
+  def WriteToFile(self, path):
+    # Classes first, then methods, each sorted for deterministic output.
+    with open(path, 'w') as output_profile:
+      for cls in sorted(self._classes):
+        output_profile.write(cls + '\n')
+      for method in sorted(self._methods):
+        tags = sorted(self._methods[method])
+        line = '{}{}\n'.format(''.join(tags), str(method))
+        output_profile.write(line)
+
+
+class ProguardMapping(object):
+  """One-directional mapping of classes and methods between naming schemes."""
+
+  def __init__(self):
+    # {Method: set(Method)}
+    self._method_mapping = collections.defaultdict(set)
+    # {String: String} String is class name in type descriptor format
+    self._class_mapping = dict()
+
+  def AddMethodMapping(self, from_method, to_method):
+    self._method_mapping[from_method].add(to_method)
+
+  def AddClassMapping(self, from_class, to_class):
+    self._class_mapping[from_class] = to_class
+
+  def GetMethodMapping(self, from_method):
+    # Returns None when the method has no mapping.
+    return self._method_mapping.get(from_method)
+
+  def GetClassMapping(self, from_class):
+    # Unmapped classes map to themselves.
+    return self._class_mapping.get(from_class, from_class)
+
+  def MapTypeDescriptor(self, type_descriptor):
+    """Maps a single type descriptor, preserving any array brackets."""
+    match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
+    assert match is not None
+    class_name = match.group('class_name')
+    if class_name is not None:
+      return match.group('brackets') + self.GetClassMapping(class_name)
+    # just a native type, return as is
+    return match.group()
+
+  def MapTypeDescriptorList(self, type_descriptor_list):
+    """Maps every type descriptor in a concatenated descriptor list."""
+    return TYPE_DESCRIPTOR_RE.sub(
+        lambda match: self.MapTypeDescriptor(match.group()),
+        type_descriptor_list)
+
+
+class MalformedLineException(Exception):
+  """Base class for parse errors that record the offending line index."""
+
+  def __init__(self, message, line_number):
+    super(MalformedLineException, self).__init__(message)
+    self.line_number = line_number
+
+  def __str__(self):
+    # NOTE(review): relies on the Python 2-only `Exception.message` attribute;
+    # under Python 3 this would raise AttributeError — revisit on migration.
+    return self.message + ' at line {}'.format(self.line_number)
+
+
+class MalformedProguardMappingException(MalformedLineException):
+  """Raised for an unparsable line in the proguard mapping file."""
+  pass
+
+
+class MalformedProfileException(MalformedLineException):
+  """Raised for an unparsable line in the input profile."""
+  pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+  """Runs dexdump on the dex file and returns its stdout split into lines."""
+  return subprocess.check_output([dexdump_path, dex_file_path]).splitlines()
+
+
+def _ReadFile(file_path):
+  """Returns the lines of the file at file_path (trailing newlines kept)."""
+  with open(file_path, 'r') as f:
+    return f.readlines()
+
+
+def _ToTypeDescriptor(dot_notation):
+  """Parses a dot notation type and returns it in type descriptor format
+
+  eg:
+  org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity;
+  boolean -> Z
+  int[] -> [I
+
+  Args:
+    dot_notation: trimmed string with a single type in dot notation format
+
+  Returns:
+    A string with the type in type descriptor format
+  """
+  dot_notation = dot_notation.strip()
+  prefix = ''
+  # Each trailing '[]' becomes one leading '[' in descriptor format.
+  while dot_notation.endswith('[]'):
+    prefix += '['
+    dot_notation = dot_notation[:-2]
+  if dot_notation in DOT_NOTATION_MAP:
+    return prefix + DOT_NOTATION_MAP[dot_notation]
+  # Class type: dots become slashes, wrapped in 'L...;'.
+  return prefix + 'L' + dot_notation.replace('.', '/') + ';'
+
+
+def _DotNotationListToTypeDescriptorList(dot_notation_list_string):
+  """Parses a param list of dot notation format and returns it in type
+  descriptor format
+
+  eg:
+  org.chromium.browser.ChromeActivity,boolean,int[] ->
+      Lorg/chromium/browser/ChromeActivity;Z[I
+
+  Args:
+    dot_notation_list_string: single string with multiple comma separated types
+                              in dot notation format
+
+  Returns:
+    A string with the param list in type descriptor format
+  """
+  # An empty input splits to [''], which DOT_NOTATION_MAP maps to ''.
+  return ''.join(_ToTypeDescriptor(param) for param in
+      dot_notation_list_string.split(','))
+
+
+def ProcessDex(dex_dump):
+  """Parses dexdump output returning a dict of class names to Class objects
+
+  Parses output of the dexdump command on a dex file and extracts information
+  about classes and their respective methods and which line numbers a method is
+  mapped to.
+
+  Methods that are not mapped to any line number are ignored and not listed
+  inside their respective Class objects.
+
+  Args:
+    dex_dump: An array of lines of dexdump output
+
+  Returns:
+    A dict that maps from class names in type descriptor format (but without the
+    surrounding 'L' and ';') to Class objects.
+  """
+  # Simple line-by-line state machine over the dexdump output.
+  # class_name: Class
+  classes_by_name = {}
+  current_class = None
+  current_method = None
+  reading_positions = False
+  reading_methods = False
+  method_line_numbers = []
+  for line in dex_dump:
+    line = line.strip()
+    if line.startswith('Class descriptor'):
+      # New class started, no longer reading methods.
+      reading_methods = False
+      current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
+      classes_by_name[current_class.name] = current_class
+    elif (line.startswith('Direct methods')
+          or line.startswith('Virtual methods')):
+      reading_methods = True
+    elif reading_methods and line.startswith('name'):
+      # A 'name' line starts a new method entry; its 'type' line follows.
+      assert current_class is not None
+      current_method = Method(
+          DEX_METHOD_NAME_RE.search(line).group('method_name'),
+          "L" + current_class.name + ";")
+    elif reading_methods and line.startswith('type'):
+      assert current_method is not None
+      match = DEX_METHOD_TYPE_RE.search(line)
+      current_method.param_types = match.group('method_params')
+      current_method.return_type = match.group('method_return_type')
+    elif line.startswith('positions'):
+      # 'positions' opens the line-number table for the current method.
+      assert reading_methods
+      reading_positions = True
+      method_line_numbers = []
+    elif reading_positions and line.startswith('0x'):
+      line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
+      method_line_numbers.append(int(line_number))
+    elif reading_positions and line.startswith('locals'):
+      # Methods with an empty line-number table are dropped here.
+      if len(method_line_numbers) > 0:
+        current_class.AddMethod(current_method, method_line_numbers)
+      # finished reading method line numbers
+      reading_positions = False
+  return classes_by_name
+
+
+def ProcessProguardMapping(proguard_mapping_lines, dex):
+  """Parses a proguard mapping file
+
+  This takes proguard mapping file lines and then uses the obfuscated dex to
+  create a mapping of unobfuscated methods to obfuscated ones and vice versa.
+
+  The dex is used because the proguard mapping file only has the name of the
+  obfuscated methods but not their signature, thus the dex is read to look up
+  which method with a specific name was mapped to the lines mentioned in the
+  proguard mapping file.
+
+  Args:
+    proguard_mapping_lines: Array of strings, each is a line from the proguard
+                            mapping file (in order).
+    dex: a dict of class name (in type descriptor format but without the
+         enclosing 'L' and ';') to a Class object.
+  Returns:
+    Two dicts the first maps from obfuscated methods to a set of non-obfuscated
+    ones. It also maps the obfuscated class names to original class names, both
+    in type descriptor format (with the enclosing 'L' and ';')
+  """
+  mapping = ProguardMapping()
+  reverse_mapping = ProguardMapping()
+  # Method mappings without line info, resolved in a second pass below once
+  # every class mapping is known.
+  to_be_obfuscated = []
+  current_class_orig = None
+  current_class_obfs = None
+  for index, line in enumerate(proguard_mapping_lines):
+    if line.strip() == '':
+      continue
+    # Unindented lines declare a class mapping; indented lines are its members.
+    if not line.startswith(' '):
+      match = PROGUARD_CLASS_MAPPING_RE.search(line)
+      if match is None:
+        raise MalformedProguardMappingException(
+            'Malformed class mapping', index)
+      current_class_orig = match.group('original_name')
+      current_class_obfs = match.group('obfuscated_name')
+      mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
+                              _ToTypeDescriptor(current_class_orig))
+      reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
+                                      _ToTypeDescriptor(current_class_obfs))
+      continue
+
+    assert current_class_orig is not None
+    assert current_class_obfs is not None
+    line = line.strip()
+    match = PROGUARD_METHOD_MAPPING_RE.search(line)
+    # check if is a method mapping (we ignore field mappings)
+    if match is not None:
+      # check if this line is an inlining by reading ahead 1 line.
+      if index + 1 < len(proguard_mapping_lines):
+        next_match = PROGUARD_METHOD_MAPPING_RE.search(
+            proguard_mapping_lines[index+1].strip())
+        if (next_match and match.group('line_start') is not None
+            and next_match.group('line_start') == match.group('line_start')
+            and next_match.group('line_end') == match.group('line_end')):
+          continue # This is an inlining, skip
+
+      original_method = Method(
+          match.group('original_method_name'),
+          _ToTypeDescriptor(
+              match.group('original_method_class') or current_class_orig),
+          _DotNotationListToTypeDescriptorList(match.group('params')),
+          _ToTypeDescriptor(match.group('return_type')))
+
+      if match.group('line_start') is not None:
+        # Use the dex line-number table to recover the obfuscated signature.
+        obfs_methods = (dex[current_class_obfs.replace('.', '/')]
+            .FindMethodsAtLine(
+                match.group('obfuscated_name'),
+                int(match.group('line_start')),
+                int(match.group('line_end'))))
+
+        if obfs_methods is None:
+          continue
+
+        for obfs_method in obfs_methods:
+          mapping.AddMethodMapping(obfs_method, original_method)
+          reverse_mapping.AddMethodMapping(original_method, obfs_method)
+      else:
+        to_be_obfuscated.append(
+            (original_method, match.group('obfuscated_name')))
+
+  # Second pass: build the obfuscated signature from the (now complete)
+  # reverse class mapping instead of from the dex.
+  for original_method, obfuscated_name in to_be_obfuscated:
+    obfuscated_method = Method(
+        obfuscated_name,
+        reverse_mapping.GetClassMapping(original_method.class_name),
+        reverse_mapping.MapTypeDescriptorList(original_method.param_types),
+        reverse_mapping.MapTypeDescriptor(original_method.return_type))
+    mapping.AddMethodMapping(obfuscated_method, original_method)
+    reverse_mapping.AddMethodMapping(original_method, obfuscated_method)
+  return mapping, reverse_mapping
+
+
+def ProcessProfile(input_profile, proguard_mapping):
+  """Parses an Android profile, using the proguard mapping to (de)obfuscate it.
+
+  This takes the android profile lines and for each method or class for the
+  profile, it uses the mapping to either obfuscate or deobfuscate (based on the
+  provided mapping) and returns a Profile object that stores this information.
+
+  Args:
+    input_profile: array of lines of the input profile
+    proguard_mapping: a proguard mapping that would map from the classes and
+                      methods in the input profile to the classes and methods
+                      that should be in the output profile.
+
+  Returns:
+    A Profile object that stores the information (ie list of mapped classes and
+    methods + tags)
+  """
+  profile = Profile()
+  for index, line in enumerate(input_profile):
+    line = line.strip()
+    if line.startswith('L'):
+      profile.AddClass(proguard_mapping.GetClassMapping(line))
+      continue
+    match = PROFILE_METHOD_RE.search(line)
+    if not match:
+      raise MalformedProfileException("Malformed line", index)
+
+    method = Method(
+        match.group('method_name'),
+        match.group('class_name'),
+        match.group('method_params'),
+        match.group('method_return_type'))
+
+    mapped_methods = proguard_mapping.GetMethodMapping(method)
+    if mapped_methods is None:
+      logging.warning('No method matching "%s" has been found in the proguard '
+                      'mapping file', method)
+      continue
+
+    for original_method in mapped_methods:
+      profile.AddMethod(original_method, match.group('tags'))
+
+  return profile
+
+
+def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping,
+                     dexdump_path, output_filename):
+  """Helper method for obfuscating a profile.
+
+  Args:
+    nonobfuscated_profile: a profile with nonobfuscated symbols.
+    dex_file: path to the dex file matching the mapping.
+    proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used
+      in the dex file.
+    dexdump_path: path to the dexdump utility.
+    output_filename: output filename in which to write the obfuscated profile.
+  """
+  dexinfo = ProcessDex(_RunDexDump(dexdump_path, dex_file))
+  _, reverse_mapping = ProcessProguardMapping(
+      _ReadFile(proguard_mapping), dexinfo)
+  obfuscated_profile = ProcessProfile(
+      _ReadFile(nonobfuscated_profile), reverse_mapping)
+  obfuscated_profile.WriteToFile(output_filename)
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--dexdump-path',
+      required=True,
+      help='Path to dexdump binary.')
+  parser.add_argument(
+      '--dex-path',
+      required=True,
+      help='Path to dex file corresponding to the proguard mapping file.')
+  parser.add_argument(
+      '--proguard-mapping-path',
+      required=True,
+      help='Path to input proguard mapping file corresponding to the dex file.')
+  parser.add_argument(
+      '--output-profile-path',
+      required=True,
+      help='Path to output profile.')
+  parser.add_argument(
+      '--input-profile-path',
+      required=True,
+      help='Path to input profile.')
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      default=False,
+      help='Print verbose output.')
+  obfuscation = parser.add_mutually_exclusive_group(required=True)
+  obfuscation.add_argument('--obfuscate', action='store_true',
+      help='Indicates to output an obfuscated profile given a deobfuscated '
+     'one.')
+  obfuscation.add_argument('--deobfuscate', dest='obfuscate',
+      action='store_false', help='Indicates to output a deobfuscated profile '
+      'given an obfuscated one.')
+  options = parser.parse_args(args)
+
+  if options.verbose:
+    log_level = logging.WARNING
+  else:
+    log_level = logging.ERROR
+  logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+  dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
+  proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
+      _ReadFile(options.proguard_mapping_path), dex)
+  if options.obfuscate:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        reverse_proguard_mapping)
+  else:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        proguard_mapping)
+  profile.WriteToFile(options.output_profile_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/convert_dex_profile_tests.py b/src/build/android/convert_dex_profile_tests.py
new file mode 100644
index 0000000..0ddc5ce
--- /dev/null
+++ b/src/build/android/convert_dex_profile_tests.py
@@ -0,0 +1,276 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+  $ cd build/android
+  $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
+
+DEX_DUMP = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    4:4:void inlined():237:237 -> a
+    4:4:org.chromium.Original getInstance():203 -> a
+    5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+    5:5:void initialize():237 -> a
+    5:5:org.chromium.Original getInstance():203 -> a
+    6:6:void initialize():237:237 -> a
+    9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+    9:9:android.content.Context getContext():219 -> a
+    9:9:void initialize():245 -> a
+    9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'c'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    void initialize() -> c
+    org.chromium.Original getInstance():203 -> a
+    4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
+class GenerateProfileTests(unittest.TestCase):
+  def testProcessDex(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    self.assertIsNotNone(dex['a'])
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
+    clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
+    self.assertEquals(clinit.name, '<clinit>')
+    self.assertEquals(clinit.return_type, 'V')
+    self.assertEquals(clinit.param_types, 'Ljava/lang/String;')
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
+    self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))
+
+# pylint: disable=protected-access
+  def testProcessProguardMapping(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, reverse = cp.ProcessProguardMapping(
+        PROGUARD_MAPPING.splitlines(), dex)
+
+    self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    subclassInit = cp.Method(
+        '<init>', 'Lorg/chromium/Original$Subclass;',
+        'Lorg/chromium/Original;B', 'V')
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
+    self.assertEquals(len(mapped), 2)
+    self.assertIn(getInstance, mapped)
+    self.assertNotIn(subclassInit, mapped)
+    self.assertNotIn(
+        cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
+    self.assertIn(initialize, mapped)
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(getInstance, mapped)
+
+    mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(another, mapped)
+
+    for from_method, to_methods in mapping._method_mapping.iteritems():
+      for to_method in to_methods:
+        self.assertIn(from_method, reverse.GetMethodMapping(to_method))
+    for from_class, to_class in mapping._class_mapping.iteritems():
+      self.assertEquals(from_class, reverse.GetClassMapping(to_class))
+
+  def testProcessProfile(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+
+    self.assertIn('Lorg/chromium/Original;', profile._classes)
+    self.assertIn(getInstance, profile._methods)
+    self.assertIn(initialize, profile._methods)
+    self.assertIn(another, profile._methods)
+
+    self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P']))
+    self.assertEquals(profile._methods[initialize], set(['H', 'P']))
+    self.assertEquals(profile._methods[another], set(['P']))
+
+  def testEndToEnd(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+    with tempfile.NamedTemporaryFile() as temp:
+      profile.WriteToFile(temp.name)
+      with open(temp.name, 'r') as f:
+        for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
+          self.assertEquals(a.strip(), b.strip())
+
+  def testObfuscateProfile(self):
+    with build_utils.TempDir() as temp_dir:
+      # The dex dump text stands in for the dex file itself: /bin/cat is
+      # passed as the dexdump binary, so "dumping" it echoes it unchanged.
+      dex_path = os.path.join(temp_dir, 'dexdump')
+      with open(dex_path, 'w') as dex_file:
+        dex_file.write(DEX_DUMP_2)
+      mapping_path = os.path.join(temp_dir, 'mapping')
+      with open(mapping_path, 'w') as mapping_file:
+        mapping_file.write(PROGUARD_MAPPING_2)
+      unobfuscated_path = os.path.join(temp_dir, 'unobfuscated')
+      with open(unobfuscated_path, 'w') as unobfuscated_file:
+        unobfuscated_file.write(UNOBFUSCATED_PROFILE)
+      obfuscated_path = os.path.join(temp_dir, 'obfuscated')
+      cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat',
+                          obfuscated_path)
+      with open(obfuscated_path) as obfuscated_file:
+        obfuscated_profile = sorted(obfuscated_file.readlines())
+      for a, b in zip(
+          sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile):
+        self.assertEquals(a.strip(), b.strip())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/dcheck_is_off.flags b/src/build/android/dcheck_is_off.flags
new file mode 100644
index 0000000..78b9cc2
--- /dev/null
+++ b/src/build/android/dcheck_is_off.flags
@@ -0,0 +1,17 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Contains flags that are applied only when ENABLE_DCHECK=false.
+
+-checkdiscard @org.chromium.base.annotations.CheckDiscard class ** {
+  *;
+}
+-checkdiscard class ** {
+  @org.chromium.base.annotations.CheckDiscard *;
+}
+
+# Ensure @RemovableInRelease actually works.
+-checkdiscard class ** {
+  @org.chromium.base.annotations.RemovableInRelease *;
+}
diff --git a/src/build/android/devil_chromium.json b/src/build/android/devil_chromium.json
new file mode 100644
index 0000000..0bfcfd8
--- /dev/null
+++ b/src/build/android/devil_chromium.json
@@ -0,0 +1,120 @@
+{
+  "config_type": "BaseConfig",
+  "dependencies": {
+    "aapt": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/aapt"
+          ]
+        }
+      }
+    },
+    "adb": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/platform-tools/adb"
+          ]
+        }
+      }
+    },
+    "android_build_tools_libc++": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so"
+          ]
+        }
+      }
+    },
+    "android_sdk": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public"
+          ]
+        }
+      }
+    },
+    "dexdump": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump"
+          ]
+        }
+      }
+    },
+    "split-select": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/split-select"
+          ]
+        }
+      }
+    },
+    "simpleperf": {
+      "file_info": {
+        "android_armeabi-v7a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf"
+          ]
+        },
+        "android_arm64-v8a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf"
+          ]
+        },
+        "android_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf"
+          ]
+        },
+        "android_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf"
+          ]
+        },
+        "linux_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf"
+          ]
+        },
+        "linux_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf"
+          ]
+        }
+      }
+    },
+    "simpleperf_scripts": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf"
+          ]
+        }
+      }
+    },
+    "llvm-symbolizer": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer"
+          ]
+        }
+      }
+    },
+    "bundletool": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar"
+          ]
+        }
+      }
+    }
+  }
+}
diff --git a/src/build/android/devil_chromium.py b/src/build/android/devil_chromium.py
new file mode 100644
index 0000000..20ae1e3
--- /dev/null
+++ b/src/build/android/devil_chromium.py
@@ -0,0 +1,200 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configures devil for use in chromium."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.insert(1, host_paths.DEVIL_PATH)
+
+from devil import devil_env
+from devil.android.ndk import abis
+
+_BUILD_DIR = os.path.join(constants.DIR_SOURCE_ROOT, 'build')
+if _BUILD_DIR not in sys.path:
+  sys.path.insert(1, _BUILD_DIR)
+
+import gn_helpers
+
+_DEVIL_CONFIG = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), 'devil_chromium.json'))
+
+_DEVIL_BUILD_PRODUCT_DEPS = {
+  'chromium_commands': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['lib.java', 'chromium_commands.dex.jar'],
+    }
+  ],
+  'forwarder_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['forwarder_dist'],
+    },
+  ],
+  'forwarder_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['host_forwarder'],
+    },
+  ],
+  'md5sum_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['md5sum_dist'],
+    },
+  ],
+  'md5sum_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['md5sum_bin_host'],
+    },
+  ],
+}
+
+
+def _UseLocalBuildProducts(output_directory, devil_dynamic_config):
+  output_directory = os.path.abspath(output_directory)
+  devil_dynamic_config['dependencies'] = {
+      dep_name: {
+          'file_info': {
+              '%s_%s' % (dep_config['platform'], dep_config['arch']): {
+                  'local_paths': [
+                      os.path.join(output_directory,
+                                   *dep_config['path_components']),
+                  ],
+              }
+              for dep_config in dep_configs
+          }
+      }
+      for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems()
+  }
+
+
+def _BuildWithChromium():
+  """Returns value of gclient's |build_with_chromium|."""
+  gni_path = os.path.join(_BUILD_DIR, 'config', 'gclient_args.gni')
+  if not os.path.exists(gni_path):
+    return False
+  with open(gni_path) as f:
+    data = f.read()
+  args = gn_helpers.FromGNArgs(data)
+  return args.get('build_with_chromium', False)
+
+
+def Initialize(output_directory=None, custom_deps=None, adb_path=None):
+  """Initializes devil with chromium's binaries and third-party libraries.
+
+  This includes:
+    - Libraries:
+      - the android SDK ("android_sdk")
+    - Build products:
+      - host & device forwarder binaries
+          ("forwarder_device" and "forwarder_host")
+      - host & device md5sum binaries ("md5sum_device" and "md5sum_host")
+
+  Args:
+    output_directory: An optional path to the output directory. If not set,
+      no built dependencies are configured.
+    custom_deps: An optional dictionary specifying custom dependencies.
+      This should be of the form:
+
+        {
+          'dependency_name': {
+            'platform': 'path',
+            ...
+          },
+          ...
+        }
+    adb_path: An optional path to use for the adb binary. If not set, this uses
+      the adb binary provided by the Android SDK.
+  """
+  build_with_chromium = _BuildWithChromium()
+
+  devil_dynamic_config = {
+    'config_type': 'BaseConfig',
+    'dependencies': {},
+  }
+  if build_with_chromium and output_directory:
+    # Non-chromium users of chromium's //build directory fetch build products
+    # from google storage rather than use locally built copies. Chromium uses
+    # locally-built copies so that changes to the tools can be easily tested.
+    _UseLocalBuildProducts(output_directory, devil_dynamic_config)
+
+  if custom_deps:
+    devil_dynamic_config['dependencies'].update(custom_deps)
+  if adb_path:
+    devil_dynamic_config['dependencies'].update({
+      'adb': {
+        'file_info': {
+          devil_env.GetPlatform(): {
+            'local_paths': [adb_path]
+          }
+        }
+      }
+    })
+
+  config_files = [_DEVIL_CONFIG] if build_with_chromium else None
+  devil_env.config.Initialize(configs=[devil_dynamic_config],
+                              config_files=config_files)
diff --git a/src/build/android/devil_chromium.pydeps b/src/build/android/devil_chromium.pydeps
new file mode 100644
index 0000000..4143805
--- /dev/null
+++ b/src/build/android/devil_chromium.pydeps
@@ -0,0 +1,39 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/six/six.py
+../gn_helpers.py
+devil_chromium.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
diff --git a/src/build/android/diff_resource_sizes.py b/src/build/android/diff_resource_sizes.py
new file mode 100755
index 0000000..eefb6cd
--- /dev/null
+++ b/src/build/android/diff_resource_sizes.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs resource_sizes.py on two apks and outputs the diff."""
+
+from __future__ import print_function
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from pylib.constants import host_paths
+from pylib.utils import shared_preference_utils
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+  from tracing.value import convert_chart_json # pylint: disable=import-error
+
+_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
+with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')):
+  from util import build_utils  # pylint: disable=import-error
+
+
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes_diff',
+    'benchmark_description': 'APK resource size diff information',
+    'trace_rerun_options': [],
+    'charts': {},
+}
+
+_CHARTJSON_FILENAME = 'results-chart.json'
+_HISTOGRAMS_FILENAME = 'perf_results.json'
+
+
+def DiffResults(chartjson, base_results, diff_results):
+  """Reports the diff between the two given results.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in, or None
+        to only print results.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title, trace_title,
+          diff_results['charts'][graph_title][trace_title]['value']
+              - trace['value'],
+          trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def AddIntermediateResults(chartjson, base_results, diff_results):
+  """Copies the intermediate size results into the output chartjson.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_base_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+  # Both base_results and diff_results should have the same charts/traces, but
+  # loop over them separately in case they don't
+  for graph_title, graph in diff_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_diff_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def _CreateArgparser():
+  def chromium_path(arg):
+    if arg.startswith('//'):
+      return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:])
+    return arg
+
+  argparser = argparse.ArgumentParser(
+      description='Diff resource sizes of two APKs. Arguments not listed here '
+                  'will be passed on to both invocations of resource_sizes.py.')
+  argparser.add_argument('--chromium-output-directory-base',
+                         dest='out_dir_base',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the base '
+                              'APK, i.e. what the size increase/decrease will '
+                              'be measured from.')
+  argparser.add_argument('--chromium-output-directory-diff',
+                         dest='out_dir_diff',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the diff '
+                              'APK.')
+  argparser.add_argument('--chartjson',
+                         action='store_true',
+                         help='DEPRECATED. Use --output-format=chartjson '
+                              'instead.')
+  argparser.add_argument('--output-format',
+                         choices=['chartjson', 'histograms'],
+                         help='Output the results to a file in the given '
+                              'format instead of printing the results.')
+  argparser.add_argument('--include-intermediate-results',
+                         action='store_true',
+                         help='Include the results from the resource_sizes.py '
+                              'runs in the chartjson output.')
+  argparser.add_argument('--output-dir',
+                         default='.',
+                         type=chromium_path,
+                         help='Directory to save chartjson to.')
+  argparser.add_argument('--base-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the base APK, i.e. what the size '
+                              'increase/decrease will be measured from.')
+  argparser.add_argument('--diff-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the diff APK, i.e. the APK whose size '
+                              'increase/decrease will be measured against the '
+                              'base APK.')
+  return argparser
+
+
+def main():
+  args, unknown_args = _CreateArgparser().parse_known_args()
+  # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+  if args.chartjson:
+    args.output_format = 'chartjson'
+
+  chartjson = _BASE_CHART.copy() if args.output_format else None
+
+  with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
+    # Run resource_sizes.py on the two APKs
+    resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
+    shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+                   + unknown_args)
+
+    base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
+    if args.out_dir_base:
+      base_args += ['--chromium-output-directory', args.out_dir_base]
+    try:
+      subprocess.check_output(base_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
+    if args.out_dir_diff:
+      diff_args += ['--chromium-output-directory', args.out_dir_diff]
+    try:
+      subprocess.check_output(diff_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    # Combine the separate results
+    base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+    diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
+    base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
+    diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
+    DiffResults(chartjson, base_results, diff_results)
+    if args.include_intermediate_results:
+      AddIntermediateResults(chartjson, base_results, diff_results)
+
+    if args.output_format:
+      chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+                                    _CHARTJSON_FILENAME)
+      logging.critical('Dumping diff chartjson to %s', chartjson_path)
+      with open(chartjson_path, 'w') as outfile:
+        json.dump(chartjson, outfile)
+
+      if args.output_format == 'histograms':
+        histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+        if histogram_result.returncode != 0:
+          logging.error('chartjson conversion failed with error: %s',
+              histogram_result.stdout)
+          return 1
+
+        histogram_path = os.path.join(os.path.abspath(args.output_dir),
+            'perf_results.json')
+        logging.critical('Dumping diff histograms to %s', histogram_path)
+        with open(histogram_path, 'w') as json_file:
+          json_file.write(histogram_result.stdout)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/docs/README.md b/src/build/android/docs/README.md
new file mode 100644
index 0000000..6392f7d
--- /dev/null
+++ b/src/build/android/docs/README.md
@@ -0,0 +1,13 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [java_toolchain.md](java_toolchain.md)
+* [java_optimization.md](java_optimization.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/src/build/android/docs/android_app_bundles.md b/src/build/android/docs/android_app_bundles.md
new file mode 100644
index 0000000..e71fe27
--- /dev/null
+++ b/src/build/android/docs/android_app_bundles.md
@@ -0,0 +1,205 @@
+# Introduction
+
+This document describes how the Chromium build system supports Android app
+bundles.
+
+[TOC]
+
+# Overview of app bundles
+
+An Android app bundle is an alternative application distribution format for
+Android applications on the Google Play Store, that allows reducing the size
+of binaries sent for installation to individual devices that run on Android L
+and beyond. For more information about them, see the official Android
+[documentation](https://developer.android.com/guide/app-bundle/).
+
+For the context of this document, the most important points are:
+
+  - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
+    be installed directly on a device.
+
+  - Instead, it must be processed into a set of installable split APKs, which
+    are stored inside a special zip archive (e.g. `foo.apks`).
+
+  - The splitting can be based on various criteria: e.g. language or screen
+    density for resources, or cpu ABI for native code.
+
+  - The bundle also uses the notion of dynamic features modules (DFMs) to
+    separate several application features. Each module has its own code, assets
+    and resources, and can be installed separately from the rest of the
+    application if needed.
+
+  - The main application itself is stored in the '`base`' module (this name
+    cannot be changed).
+
+
+# Declaring app bundles with GN templates
+
+Here's an example that shows how to declare a simple bundle that contains a
+single base module, which enables language-based splits:
+
+```gn
+
+  # First declare the first bundle module. The base module is the one
+  # that contains the main application's code, resources and assets.
+  android_app_bundle_module("foo_base_module") {
+    # Declaration are similar to android_apk here.
+    ...
+  }
+
+  # Second, declare the bundle itself.
+  android_app_bundle("foo_bundle") {
+    # Indicate the base module to use for this bundle
+    base_module_target = ":foo_base_module"
+
+    # The name of our bundle file (without any suffix). Default would
+    # be 'foo_bundle' otherwise.
+    bundle_name = "FooBundle"
+
+    # Enable language-based splits for this bundle. Which means that
+    # resources and assets specific to a given language will be placed
+    # into their own split APK in the final .apks archive.
+    enable_language_splits = true
+
+    # Proguard settings must be passed at the bundle, not module, target.
+    proguard_enabled = !is_java_debug
+  }
+```
+
+When generating the `foo_bundle` target with Ninja, you will end up with
+the following:
+
+  - The bundle file under `out/Release/apks/FooBundle.aab`
+
+  - A helper script called `out/Release/bin/foo_bundle`, which can be used
+    to install / launch / uninstall the bundle on local devices.
+
+    This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
+    to see all possible commands supported by the script.
+
+
+# Declaring dynamic feature modules with GN templates
+
+Please see
+[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
+more details. In short, if you need more modules besides the base one, you
+will need to list all the extra ones using the extra_modules variable which
+takes a list of GN scopes, as in:
+
+```gn
+
+  android_app_bundle_module("foo_base_module") {
+    ...
+  }
+
+  android_app_bundle_module("foo_extra_module") {
+    ...
+  }
+
+  android_app_bundle("foo_bundle") {
+    base_module_target = ":foo_base_module"
+
+    extra_modules = [
+      { # NOTE: Scopes require one field per line, and no comma separators.
+        name = "my_module"
+        module_target = ":foo_extra_module"
+      }
+    ]
+
+    ...
+  }
+```
+
+Note that each extra module is identified by a unique name, which cannot
+be '`base`'.
+
+
+# Bundle signature issues
+
+Signing an app bundle is not necessary, unless you want to upload it to the
+Play Store. Since this process is very slow (it uses `jarsigner` instead of
+the much faster `apkbuilder`), you can control it with the `sign_bundle`
+variable, as described in the example above.
+
+The `.apks` archive however always contains signed split APKs. The keystore
+path/password/alias being used are the default ones, unless you use custom
+values when declaring the bundle itself, as in:
+
+```gn
+  android_app_bundle("foo_bundle") {
+    ...
+    keystore_path = "//path/to/keystore"
+    keystore_password = "K3y$t0Re-Pa$$w0rd"
+    keystore_name = "my-signing-key-name"
+  }
+```
+
+These values are not stored in the bundle itself, but in the wrapper script,
+which will use them to generate the `.apks` archive for you. This allows you
+to properly install updates on top of existing applications on any device.
+
+
+# Proguard and bundles
+
+When using an app bundle that is made of several modules, it is crucial to
+ensure that proguard, if enabled:
+
+- Keeps the obfuscated class names used by each module consistent.
+- Does not remove classes that are not used in one module, but referenced
+  by others.
+
+To achieve this, a special scheme called *synchronized proguarding* is
+performed, which consists of the following steps:
+
+- The list of unoptimized .jar files from all modules are sent to a single
+  proguard command. This generates a new temporary optimized *group* .jar file.
+
+- Each module extracts the optimized class files from the optimized *group*
+  .jar file, to generate its own, module-specific, optimized .jar.
+
+- Each module-specific optimized .jar is then sent to dex generation.
+
+This synchronized proguarding step is added by the `android_app_bundle()` GN
+template. In practice this means the following:
+
+  - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
+    to `android_app_bundle_module` ones.
+
+  - `proguard_configs` can be still passed to individual modules, just
+    like regular APKs. All proguard configs will be merged during the
+    synchronized proguard step.
+
+
+# Manual generation and installation of .apks archives
+
+Note that the `foo_bundle` script knows how to generate the .apks archive
+from the bundle file, and install it to local devices for you. For example,
+to install and launch a bundle, use:
+
+```sh
+  out/Release/bin/foo_bundle run
+```
+
+If you want to manually look or use the `.apks` archive, use the following
+command to generate it:
+
+```sh
+  out/Release/bin/foo_bundle build-bundle-apks \
+      --output-apks=/tmp/BundleFoo.apks
+```
+
+All split APKs within the archive will be properly signed. And you will be
+able to look at its content (with `unzip -l`), or install it manually with:
+
+```sh
+  build/android/gyp/bundletool.py install-apks \
+      --apks=/tmp/BundleFoo.apks \
+      --adb=$(which adb)
+```
+
+The task of examining the manifest is simplified by running the following,
+which dumps the application manifest as XML to stdout:
+
+```sh
+  build/android/gyp/bundletool.py dump-manifest
+```
diff --git a/src/build/android/docs/build_config.md b/src/build/android/docs/build_config.md
new file mode 100644
index 0000000..8a301c8
--- /dev/null
+++ b/src/build/android/docs/build_config.md
@@ -0,0 +1,168 @@
+# Introduction
+
+This document describes the `.build_config` files that are used by the
+Chromium build system for Android-specific targets like APK, resources,
+and more.
+
+[TOC]
+
+# I. Overview of .build_config files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config` files are written during the build to store the needed
+per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept parameter arguments using
+`@FileArg references`, which look like:
+
+    --some-param=@FileArg(<filename>:<key1>:<key2>:..<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[key1][key2]...[keyN]` for the `--some-param` option.
+
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+  custom_package = "org.chromium.ui"
+  resource_dirs = [ "java/res" ]
+  deps = [
+    ":ui_strings_grd",
+  ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`:
+
+```json
+{
+  "deps_info": {
+    "deps_configs": [
+      "gen/ui/android/ui_strings_grd.build_config"
+    ],
+    "name": "ui_java_resources.build_config",
+    "package_name": "org.chromium.ui",
+    "path": "gen/ui/android/ui_java_resources.build_config",
+    "r_text": "gen/ui/android/ui_java_resources_R.txt",
+    "resources_dirs": [
+      "../../ui/android/java/res"
+    ],
+    "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+    "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+    "type": "android_resources"
+  },
+  "gradle": {},
+  "resources": {
+    "dependency_zips": [
+      "resource_zips/ui/android/ui_strings_grd.resources.zip"
+    ],
+    "extra_package_names": []
+  }
+}
+```
+
+NOTE: All path values in `.build_config` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config files:
+
+They are generated by the GN [`write_build_config()`][gn_write_build_config]
+internal template, which ends up invoking
+[`write_build_config.py`][write_build_config_py]. For our example above, this
+is with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+    --type=android_resources \
+    --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+    --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \
+    --build-config gen/ui/android/ui_java_resources.build_config \
+    --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+    --package-name org.chromium.ui \
+    --r-text gen/ui/android/ui_java_resources_R.txt \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_strings_grd.build_config`), and collecting their
+`deps_info['resources_zip']` values.
+
+Because a target's `.build_config` file will always be generated after
+that of all of its dependencies,
+[`write_build_config.py`](write_build_config_py) can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
+
+
+# III. Usage of .build_config files:
+
+In addition to being parsed by `write_build_config.py`, when they are listed
+in the `--deps-configs` of a given target, the `.build_config` files are used
+by other scripts under [build/android/gyp/] to build stuff.
+
+For example, the GN `android_resources` template uses it to invoke the
+[`process_resources.py`] script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+    --depfile gen/ui/android/ui_java_resources_1.d \
+    --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-29/android.jar \
+    --aapt-path ../../third_party/android_sdk/public/build-tools/29.0.2/aapt \
+    --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \
+    --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --debuggable \
+    --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+    --r-text-out gen/ui/android/ui_java_resources_R.txt \
+    --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+    --non-constant-id \
+    --custom-package org.chromium.ui \
+    --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config` files.
+
+This format is decided between internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since this changes rather
+often, the format documentation is kept inside the Python script itself, but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config format documentation
+build/android/gyp/write_build_config.py \
+  --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/write_build_config.py
diff --git a/src/build/android/docs/class_verification_failures.md b/src/build/android/docs/class_verification_failures.md
new file mode 100644
index 0000000..e3e4745
--- /dev/null
+++ b/src/build/android/docs/class_verification_failures.md
@@ -0,0 +1,286 @@
+# Class Verification Failures
+
+[TOC]
+
+## What's this all about?
+
+This document aims to explain class verification on Android, how this can affect
+app performance, how to identify problems, and chromium-specific solutions. For
+simplicity, this document focuses on how class verification is implemented by
+ART, the virtual machine which replaced Dalvik starting in Android Lollipop.
+
+## What is class verification?
+
+The Java language requires any virtual machine to _verify_ the class files it
+loads and executes. Generally, verification is extra work the virtual machine is
+responsible for doing, on top of the work of loading the class and performing
+[class initialization][1].
+
+A class may fail verification for a wide variety of reasons, but in practice
+it's usually because the class's code refers to unknown classes or methods. An
+example case might look like:
+
+```java
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return mWindow.isWideColorGamut();
+        }
+        return false;
+    }
+}
+```
+
+### Why does that fail?
+
+In this example, `WindowHelper` is a helper class intended to help callers
+figure out wide color gamut support, even on pre-OMR1 devices. However, this
+class will fail class verification on pre-OMR1 devices, because it refers to
+[`Window#isWideColorGamut()`][2] (new-in-OMR1), which appears to be an undefined
+method.
+
+### Huh? But we have an SDK check!
+
+SDK checks are completely irrelevant for class verification. Although readers
+can see we'll never call the new-in-OMR1 API unless we're on >= OMR1 devices,
+the Oreo version of ART doesn't know `isWideColorGamut()` was added in next
+year's release. From ART's perspective, we may as well be calling
+`methodWhichDoesNotExist()`, which would clearly be unsafe.
+
+All the SDK check does is protect us from crashing at runtime if we call this
+method on Oreo or below.
+
+### Class verification on ART
+
+While the above is a mostly general description of class verification, it's
+important to understand how the Android runtime handles this.
+
+Since class verification is extra work, ART has an optimization called **AOT
+("ahead-of-time") verification**¹. Immediately after installing an app, ART will
+scan the dex files and verify as many classes as it can. If a class fails
+verification, this is usually a "soft failure" (hard failures are uncommon), and
+ART marks the class with the status `RetryVerificationAtRuntime`.
+
+`RetryVerificationAtRuntime`, as the name suggests, means ART must try again to
+verify the class at runtime. ART does so the first time you access the class
+(right before class initialization/`<clinit>()` method). However, depending on
+the class, this verification step can be very expensive (we've observed cases
+which take [several milliseconds][3]). Since apps tend to initialize most of
+their classes during startup, verification significantly increases startup time.
+
+Another minor cost to failing class verification is that ART cannot optimize
+classes which fail verification, so **all** methods in the class will perform
+slower at runtime, even after the verification step.
+
+*** aside
+¹ AOT _verification_ should not be confused with AOT _compilation_ (another ART
+feature). Unlike compilation, AOT verification happens during install time for
+every application, whereas recent versions of ART aim to apply AOT compilation
+selectively to optimize space.
+***
+
+## Chromium's solution
+
+In Chromium, we try to avoid doing class verification at runtime by
+manually out-of-lining all Android API usage like so:
+
+```java
+public class ApiHelperForOMR1 {
+    public static boolean isWideColorGamut(Window window) {
+        return window.isWideColorGamut();
+    }
+}
+
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return ApiHelperForOMR1.isWideColorGamut(mWindow);
+        }
+        return false;
+    }
+}
+```
+
+This pushes the class verification failure out of `WindowHelper` and into the
+new `ApiHelperForOMR1` class. There's no magic here: `ApiHelperForOMR1` will
+fail class verification on Oreo and below, for the same reason `WindowHelper`
+did previously.
+
+The key is that, while `WindowHelper` is used on all API levels, it only calls
+into `ApiHelperForOMR1` on OMR1 and above. Because we never use
+`ApiHelperForOMR1` on Oreo and below, we never load and initialize the class,
+and thanks to ART's lazy runtime class verification, we never actually retry
+verification. **Note:** `list_class_verification_failures.py` will still list
+`ApiHelperFor*` classes in its output, although these don't cause performance
+issues.
+
+### Creating ApiHelperFor\* classes
+
+There are several examples throughout the code base, but such classes should
+look as follows:
+
+```java
+/**
+ * Utility class to use new APIs that were added in O_MR1 (API level 27).
+ * These need to exist in a separate class so that Android framework can successfully verify
+ * classes without encountering the new APIs.
+ */
+@VerifiesOnOMR1
+@TargetApi(Build.VERSION_CODES.O_MR1)
+public class ApiHelperForOMR1 {
+    private ApiHelperForOMR1() {}
+
+    // ...
+}
+```
+
+* `@VerifiesOnOMR1`: this is a chromium-defined annotation to tell proguard
+  (and similar tools) not to inline this class or its methods (since that would
+  defeat the point of out-of-lining!)
+* `@TargetApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
+  use OMR1 APIs since this class is only used on OMR1 and above. Substitute
+  `O_MR1` for the [appropriate constant][4], depending when the APIs were
+  introduced.
+* Don't put any `SDK_INT` checks inside this class, because it must only be
+  called on >= OMR1.
+
+### Out-of-lining if your method has a new type in its signature
+
+Sometimes you'll run into a situation where a class **needs** to have a method
+which either accepts a parameter which is a new type or returns a new type
+(e.g., externally-facing code, such as WebView's glue layer). Even though it's
+impossible to write such a class without referring to the new type, it's still
+possible to avoid failing class verification. ART has a useful optimization: if
+your class only moves a value between registers (i.e., it doesn't call any
+methods or fields on the value), then ART will not check for the existence of
+that value's type. This means you can write your class like so:
+
+```java
+public class FooBar {
+    // FooBar needs to have the getNewTypeInAndroidP method, but it would be
+    // expensive to fail verification. This method will only be called on >= P
+    // but other methods on the class will be used on lower OS versions (and
+    // also can't be factored into another class).
+    public NewTypeInAndroidP getNewTypeInAndroidP() {
+        assert Build.VERSION.SDK_INT >= Build.VERSION_CODES.P;
+        // Stores a NewTypeInAndroidP in the return register, but doesn't do
+        // anything else with it
+        return ApiHelperForP.getNewTypeInAndroidP();
+    }
+
+    // ...
+}
+
+@VerifiesOnP
+@TargetApi(Build.VERSION_CODES.P)
+public class ApiHelperForP {
+    public static NewTypeInAndroidP getNewTypeInAndroidP() {
+        return new NewTypeInAndroidP();
+    }
+
+    // ...
+}
+```
+
+**Note:** this only works in ART (L+), not Dalvik (KitKat and earlier).
+
+## Investigating class verification failures
+
+Class verification is generally surprising and nonintuitive. Fortunately, the
+ART team have provided tools to investigate errors (and the chromium team has
+built helpful wrappers).
+
+### Listing failing classes
+
+The main starting point is to figure out which classes fail verification (those
+which ART marks as `RetryVerificationAtRuntime`). This can be done for **any
+Android app** (it doesn't have to be from the chromium project) like so:
+
+```shell
+# Install the app first. Using Chrome as an example.
+autoninja -C out/Default chrome_public_apk
+out/Default/bin/chrome_public_apk install
+
+# List all classes marked as 'RetryVerificationAtRuntime'
+build/android/list_class_verification_failures.py --package="org.chromium.chrome"
+W    0.000s Main  Skipping deobfuscation because no map file was provided.
+first.failing.Class
+second.failing.Class
+...
+```
+
+"Skipping deobfuscation because no map file was provided" is a warning, since
+many Android applications (including Chrome's release builds) are built with
+proguard (or similar tools) to obfuscate Java classes and shrink code. Although
+it's safe to ignore this warning if you don't obfuscate Java code, the script
+knows how to deobfuscate classes for you (useful for `is_debug = true` or
+`is_java_debug = true`):
+
+```shell
+build/android/list_class_verification_failures.py --package="org.chromium.chrome" \
+  --mapping=<path/to/file.mapping> # ex. out/Release/apks/ChromePublic.apk.mapping
+android.support.design.widget.AppBarLayout
+android.support.design.widget.TextInputLayout
+...
+```
+
+Googlers can also download mappings for [official
+builds](http://go/webview-official-builds).
+
+### Understanding the reason for the failure
+
+ART team also provide tooling for this. You can configure ART on a rooted device
+to log all class verification failures (during installation), at which point the
+cause is much clearer:
+
+```shell
+# Enable ART logging (requires root). Note the 2 pairs of quotes!
+adb root
+adb shell setprop dalvik.vm.dex2oat-flags '"--runtime-arg -verbose:verifier"'
+
+# Restart Android services to pick up the settings
+adb shell stop && adb shell start
+
+# Optional: clear logs which aren't relevant
+adb logcat -c
+
+# Install the app and check for ART logs
+adb install -d -r out/Default/apks/ChromePublic.apk
+adb logcat | grep 'dex2oat'
+...
+... I dex2oat : Soft verification failures in boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu)
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xF0] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xFA] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+...
+```
+
+*** note
+**Note:** you may want to avoid `adb` wrapper scripts (ex.
+`out/Default/bin/chrome_public_apk install`). These scripts cache the package
+manager state to optimize away idempotent installs. However in this case, we
+**do** want to trigger idempotent installs, because we want to re-trigger AOT
+verification.
+***
+
+In the above example, `SelectionPopupControllerImpl` fails verification on Oreo
+(API 26) because it refers to [`TextClassification.getActions()`][5], which was
+added in Pie (API 28). If `SelectionPopupControllerImpl` is used on pre-Pie
+devices, then `TextClassification.getActions()` must be out-of-lined.
+
+## See also
+
+* Bugs or questions? Contact ntfschr@chromium.org
+* ART team's Google I/O talks: [2014](https://youtu.be/EBlTzQsUoOw) and later
+  years
+* Analysis of class verification in Chrome and WebView (Google-only
+  [doc](http://go/class-verification-chromium-analysis))
+* Presentation on class verification in Chrome and WebView (Google-only
+  [slide deck](http://go/class-verification-chromium-slides))
+
+[1]: https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-5.html#jvms-5.5
+[2]: https://developer.android.com/reference/android/view/Window.html#isWideColorGamut()
+[3]: https://bugs.chromium.org/p/chromium/issues/detail?id=838702
+[4]: https://developer.android.com/reference/android/os/Build.VERSION_CODES
+[5]: https://developer.android.com/reference/android/view/textclassifier/TextClassification.html#getActions()
diff --git a/src/build/android/docs/coverage.md b/src/build/android/docs/coverage.md
new file mode 100644
index 0000000..17c83c6
--- /dev/null
+++ b/src/build/android/docs/coverage.md
@@ -0,0 +1,73 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for android
+instrumentation and JUnit tests.
+
+[TOC]
+
+## How JaCoCo coverage works
+
+In order to use JaCoCo code coverage, we need to create build time pre-instrumented
+class files and runtime **.exec** files. Then we need to process them using the
+**build/android/generate_jacoco_report.py** script.
+
+## How to collect coverage data
+
+1. Use the following GN build arguments:
+
+  ```gn
+  target_os = "android"
+  use_jacoco_coverage = true
+  ```
+
+   Now when building, pre-instrumented files will be created in the build directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to save
+   the .exec file. For example, you can run chrome JUnit tests:
+   `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. The coverage results of JUnit and instrumentation tests will be merged
+   automatically if they are in the same directory.
+
+## How to generate coverage report
+
+1. Now we have generated .exec files already. We can create a JaCoCo HTML/XML/CSV
+   report using `generate_jacoco_report.py`, for example:
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+     --format html \
+     --output-dir /tmp/coverage_report/ \
+     --coverage-dir /tmp/coverage/ \
+     --sources-json-dir out/Debug/ \
+  ```
+   Then an index.html containing coverage info will be created in output directory:
+
+  ```
+  [INFO] Loading execution data file /tmp/coverage/testTitle.exec.
+  [INFO] Loading execution data file /tmp/coverage/testSelected.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToSelect.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToClose.exec.
+  [INFO] Loading execution data file /tmp/coverage/testThumbnail.exec.
+  [INFO] Analyzing 58 classes.
+  ```
+
+2. For XML and CSV reports, we need to specify `--output-file` instead of `--output-dir` since
+   only one file will be generated as XML or CSV report.
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format xml \
+    --output-file /tmp/coverage_report/report.xml \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/ \
+  ```
+
+   or
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format csv \
+    --output-file /tmp/coverage_report/report.csv \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/ \
+  ```
diff --git a/src/build/android/docs/java_optimization.md b/src/build/android/docs/java_optimization.md
new file mode 100644
index 0000000..0ba0d50
--- /dev/null
+++ b/src/build/android/docs/java_optimization.md
@@ -0,0 +1,149 @@
+# Optimizing Java Code
+
+This doc describes how Java code is optimized in Chrome on Android and how to
+deal with issues caused by the optimizer. For tips on how to write optimized
+code, see [//docs/speed/binary_size/optimization_advice.md#optimizing-java-code](/docs/speed/binary_size/optimization_advice.md#optimizing-java-code).
+
+[TOC]
+
+## ProGuard vs R8
+
+ProGuard is the original open-source tool used by many Android applications to
+perform whole-program bytecode optimization. [R8](https://r8.googlesource.com/r8),
+is a re-implementation that is used by Chrome (and the default for Android Studio).
+The terms "ProGuard" and "R8" are used interchangeably within Chromium but
+generally they're meant to refer to the tool providing Java code optimizations.
+
+## What does ProGuard do?
+
+1. Shrinking: ProGuard will remove unused code. This is especially useful
+   when depending on third party libraries where only a few functions are used.
+
+2. Obfuscation: ProGuard will rename classes/fields/methods to use shorter
+   names. Obfuscation is used for minification purposes only (not security).
+
+3. Optimization: ProGuard performs a series of optimizations to shrink code
+   further through various approaches (ex. inlining, outlining, class merging,
+   etc).
+
+## Build Process
+
+ProGuard is enabled only for release builds of Chrome because it is a slow build
+step and breaks Java debugging. It can also be enabled manually via the GN arg:
+```is_java_debug = false```
+
+### ProGuard configuration files
+
+Most GN Java targets can specify ProGuard configuration files by setting the
+`proguard_configs` variable. [//base/android/proguard](/base/android/proguard)
+contains common flags shared by most Chrome applications.
+
+### GN build rules
+
+When `is_java_debug = false` and a target has enabled ProGuard, the `proguard`
+step generates the `.dex` files for the application. The `proguard` step takes
+as input a list of `.jar` files, runs R8/ProGuard on those `.jar` files, and
+produces the final `.dex` file(s) that will be packaged into your `.apk`
+
+## Deobfuscation
+
+Obfuscation can be turned off for local builds while leaving ProGuard enabled
+by setting `enable_proguard_obfuscation = false` in GN args.
+
+There are two main methods for deobfuscating Java stack traces locally:
+1. Using APK wrapper scripts (stacks are automatically deobfuscated)
+  * `$OUT/bin/chrome_public_apk logcat`  # Run adb logcat
+  * `$OUT/bin/chrome_public_apk run`  # Launch chrome and run adb logcat
+
+2. Using `java_deobfuscate`
+  * `build/android/stacktrace/java_deobfuscate.py $OUT/apks/ChromePublic.apk.mapping < logcat.txt`
+    * ProGuard mapping files are located beside APKs (ex.
+      `$OUT/apks/ChromePublic.apk` and `$OUT/apks/ChromePublic.apk.mapping`)
+
+Helpful links for deobfuscation:
+
+* [Internal bits about how mapping files are archived][proguard-site]
+* [More detailed deobfuscation instructions][proguard-doc]
+* [Script for deobfuscating official builds][deob-official]
+
+[proguard-site]: http://goto.google.com/chrome-android-proguard
+[proguard-doc]: http://goto.google.com/chromejavadeobfuscation
+[deob-official]: http://goto.google.com/chrome-android-official-deobfuscation
+
+## Debugging common failures
+
+ProGuard failures are often hard to debug. This section aims to outline some of
+the more common errors.
+
+### Classes expected to be discarded
+
+The `-checkdiscard` directive can be used to ensure that certain items are
+removed by ProGuard. A common use of `-checkdiscard` is to ensure that ProGuard
+optimizations do not regress in their ability to remove code, such as code
+intended only for debug builds, or generated JNI classes that are meant to be
+zero-overhead abstractions. Annotating a class with
+[@CheckDiscard][checkdiscard] will add a `-checkdiscard` rule automatically.
+
+[checkdiscard]: /base/android/java/src/org/chromium/base/annotations/CheckDiscard.java
+
+```
+Item void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>() was not discarded.
+void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>()
+|- is invoked from:
+|  void org.chromium.base.library_loader.LibraryPrefetcher.asyncPrefetchLibrariesToMemory()
+... more code path lines
+|- is referenced in keep rule:
+|  obj/chrome/android/chrome_public_apk/chrome_public_apk.resources.proguard.txt:104:1
+
+Error: Discard checks failed.
+```
+
+Things to check
+  * Did you add code that is referenced by code path in the error message?
+  * If so, check the original class for why the `CheckDiscard` was added
+    originally and verify that the reason is still valid with your change (may
+    need git blame to do this).
+  * Try the extra debugging steps listed in the JNI section below.
+
+### JNI wrapper classes not discarded
+
+Proxy native methods (`@NativeMethods`) use generated wrapper classes to provide
+access to native methods. We rely on ProGuard to fully optimize the generated
+code so that native methods aren't a source of binary size bloat. The above
+error message is an example when a JNI wrapper class wasn't discarded (notice
+the name of the offending class).
+  * The ProGuard rule pointed to in the error message isn't helpful (just tells
+    us a code path that reaches the not-inlined class).
+  * Common causes:
+    * Caching the result of `ClassNameJni.get()` in a member variable.
+    * Passing a native wrapper method reference instead of using a lambda (i.e.
+      `Jni.get()::methodName` vs. `() -> Jni.get.methodName()`).
+  * For more debugging info, add to `base/android/proguard/chromium_code.flags`:
+      ```
+      -whyareyounotinlining class org.chromium.base.library_loader.LibraryPrefetcherJni {
+          <init>();
+      }
+      ```
+
+### Duplicate classes
+
+```
+Type YourClassName is defined multiple times: obj/jar1.jar:YourClassName.class, obj/jar2.jar:YourClassName.class
+```
+
+Common causes:
+  * Multiple targets with overlapping `srcjar_deps`:
+    * Each `.srcjar` can only be depended on by a single Java target in any
+      given APK target. `srcjar_deps` are just a convenient way to depend on
+      generated files and should be treated like source files rather than
+      `deps`.
+    * Solution: Wrap the `srcjar` in an `android_library` target or have only a
+      single Java target depend on the `srcjar` and have other targets depend on
+      the containing Java target instead.
+  * Accidentally enabling APK level generated files for multiple targets that
+    share generated code (ex. Trichrome or App Bundles):
+    * Solution: Make sure the generated file is only added once.
+
+Debugging ProGuard failures isn't easy, so please message java@chromium.org
+or [file a bug](crbug.com/new) with `component=Build os=Android` for any
+issues related to Java code optimization.
diff --git a/src/build/android/docs/java_toolchain.md b/src/build/android/docs/java_toolchain.md
new file mode 100644
index 0000000..ef11548
--- /dev/null
+++ b/src/build/android/docs/java_toolchain.md
@@ -0,0 +1,284 @@
+# Chromium's Java Toolchain
+
+This doc aims to describe the Chrome build process that takes a set of `.java`
+files and turns them into a `classes.dex` file.
+
+[TOC]
+
+## Core GN Target Types
+
+The following have `supports_android` and `requires_android` set to false by
+default:
+* `java_library()`: Compiles `.java` -> `.jar`
+* `java_prebuilt()`:  Imports a prebuilt `.jar` file.
+
+The following have `supports_android` and `requires_android` set to true. They
+also have a default `jar_excluded_patterns` set (more on that later):
+* `android_library()`
+* `android_java_prebuilt()`
+
+All target names must end with "_java" so that the build system can distinguish
+them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+
+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+  * Host `.jar` files live in `lib.java/` so that they are archived in
+    builder/tester bots (which do not archive `obj/`).
+
+## From Source to Final Dex
+
+### Step 1: Create interface .jar with turbine or ijar
+
+For prebuilt `.jar` files, use [//third_party/ijar] to create interface `.jar`
+from prebuilt `.jar`.
+
+For non-prebuilt targets, use [//third_party/turbine] to create interface `.jar`
+from `.java` source files. Turbine is much faster than javac, and so enables
+full compilation to happen more concurrently.
+
+What are interface jars?:
+
+* They contain `.class` files with all non-public symbols and function bodies
+  removed.
+* Dependent targets use interface `.jar` files to skip having to be rebuilt
+  when only private implementation details change.
+
+[//third_party/ijar]: /third_party/ijar/README.chromium
+[//third_party/turbine]: /third_party/turbine/README.chromium
+
+### Step 2a: Compile with javac
+
+This step is the only step that does not apply to prebuilt targets.
+
+* All `.java` files in a target are compiled by `javac` into `.class` files.
+  * This includes `.java` files that live within `.srcjar` files, referenced
+    through `srcjar_deps`.
+* The `classpath` used when compiling a target is comprised of `.jar` files of
+  its deps.
+  * When deps are library targets, the Step 1 `.jar` file is used.
+  * When deps are prebuilt targets, the original `.jar` file is used.
+  * All `.jar` processing done in subsequent steps does not impact compilation
+    classpath.
+* `.class` files are zipped into an output `.jar` file.
+* There is **no support** for incremental compilation at this level.
+  * If one source file changes within a library, then the entire library is
+    recompiled.
+  * Prefer smaller targets to avoid slow compiles.
+
+### Step 2b: Compile with ErrorProne
+
+This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
+
+* Concurrently with step 2a: [ErrorProne] compiles java files and checks for bug
+  patterns, including some [custom to Chromium][ep_plugins].
+* ErrorProne used to replace step 2a, but was changed to a concurrent step after
+  being identified as being slower.
+
+[ErrorProne]: https://errorprone.info/
+[ep_plugins]: /tools/android/errorprone_plugin/
+
+### Step 3: Desugaring (Device .jar Only)
+
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.
+
+* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
+  lambdas and default interface methods, into constructs that are compatible
+  with Java 7.
+
+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
+### Step 5: Filtering
+
+This step happens only for targets that have `jar_excluded_patterns` or
+`jar_included_patterns` set (e.g. all `android_` targets).
+
+* Remove `.class` files that match the filters from the `.jar`. These `.class`
+  files are generally those that are re-created with different implementations
+  further on in the build process.
+  * E.g.: `R.class` files - a part of [Android Resources].
+  * E.g.: `GEN_JNI.class` - a part of our [JNI] glue.
+  * E.g.: `AppHooksImpl.class` - how `chrome_java` wires up different
+    implementations for [non-public builds][apphooks].
+
+[JNI]: /base/android/jni_generator/README.md
+[Android Resources]: life_of_a_resource.md
+[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
+
+### Step 6: Per-Library Dexing
+
+This step happens only when targets have `supports_android = true`.
+
+* [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
+  containing `classes.dex` files.
+* Dexing is incremental - it will reuse dex'ed classes from a previous build if
+  the corresponding `.class` file is unchanged.
+* These per-library `.dex.jar` files are used directly by [incremental install],
+  and are inputs to the Apk step when `enable_proguard = false`.
+  * Even when `is_java_debug = false`, many apk targets do not enable ProGuard
+    (e.g. unit tests).
+
+[d8]: https://developer.android.com/studio/command-line/d8
+[incremental install]: /build/android/incremental_install/README.md
+
+### Step 7: Apk / Bundle Module Compile
+
+* Each `android_apk` and `android_bundle_module` template has a nested
+  `java_library` target. The nested library includes final copies of files
+  stripped out by prior filtering steps. These files include:
+  * Final `R.java` files, created by `compile_resources.py`.
+  * Final `GEN_JNI.java` for [JNI glue].
+  * `BuildConfig.java` and `NativeLibraries.java` (//base dependencies).
+
+[JNI glue]: /base/android/jni_generator/README.md
+
+### Step 8: Final Dexing
+
+This step is skipped when building using [Incremental Install].
+
+When `is_java_debug = true`:
+* [d8] merges all library `.dex.jar` files into a final `.mergeddex.jar`.
+
+When `is_java_debug = false`:
+* [R8] performs whole-program optimization on all library `lib.java` `.jar`
+  files and outputs a final `.r8dex.jar`.
+  * For App Bundles, R8 creates a `.r8dex.jar` for each module.
+
+[Incremental Install]: /build/android/incremental_install/README.md
+[R8]: https://r8.googlesource.com/r8
+
+## Test APKs with apk_under_test
+
+Test APKs are normal APKs that contain an `<instrumentation>` tag within their
+`AndroidManifest.xml`. If this tag specifies an `android:targetPackage`
+different from itself, then Android will add that package's `classes.dex` to the
+test APK's Java classpath when run. In GN, you can enable this behavior using
+the `apk_under_test` parameter on `instrumentation_test_apk` targets. Using it
+is discouraged if APKs have `proguard_enabled=true`.
+
+### Difference in Final Dex
+
+When `enable_proguard=false`:
+* Any library depended on by the test APK that is also depended on by the
+  apk-under-test is excluded from the test APK's final dex step.
+
+When `enable_proguard=true`:
+* Test APKs cannot make use of the apk-under-test's dex because only symbols
+  explicitly kept by `-keep` directives are guaranteed to exist after
+  ProGuarding. As a work-around, test APKs include all of the apk-under-test's
+  libraries directly in its own final dex such that the under-test apk's Java
+  code is never used (because it is entirely shadowed by the test apk's dex).
+  * We've found this configuration to be fragile, and are trying to [move away
+    from it](https://bugs.chromium.org/p/chromium/issues/detail?id=890452).
+
+### Difference in GEN_JNI.java
+* Calling native methods using [JNI glue] requires that a `GEN_JNI.java` class
+  be generated that contains all native methods for an APK. There cannot be
+  conflicting `GEN_JNI` classes in both the test apk and the apk-under-test, so
+  only the apk-under-test has one generated for it. As a result,
+  instrumentation test APKs that use apk-under-test cannot use native methods
+  that aren't already part of the apk-under-test.
+
+## How to Generate Java Source Code
+There are two ways to go about generating source files: Annotation Processors
+and custom build steps.
+
+### Annotation Processors
+* These are run by `javac` as part of the compile step.
+* They **cannot** modify the source files that they apply to. They can only
+  generate new sources.
+* Use these when:
+  * an existing Annotation Processor does what you want
+    (E.g. Dagger, AutoService, etc.), or
+  * you need to understand Java types to do generation.
+
+### Custom Build Steps
+* These use discrete build actions to generate source files.
+  * Some generate `.java` directly, but most generate a zip file of sources
+    (called a `.srcjar`) to simplify the number of inputs / outputs.
+* Examples of existing templates:
+  * `jinja_template`: Generates source files using [Jinja].
+  * `java_cpp_template`: Generates source files using the C preprocessor.
+  * `java_cpp_enum`: Generates `@IntDef`s based on enums within `.h` files.
+  * `java_cpp_strings`: Generates String constants based on strings defined in
+    `.cc` files.
+* Custom build steps are preferred over Annotation Processors because they are
+  generally easier to understand, and can run in parallel with other steps
+  (rather than being tied to compiles).
+
+[Jinja]: https://palletsprojects.com/p/jinja/
+
+## Static Analysis & Code Checks
+
+We use several tools for static analysis.
+
+### [ErrorProne](https://errorprone.info/)
+* Runs as part of normal compilation. Controlled by GN arg: `use_errorprone_java_compiler`.
+* Most useful check:
+  * Enforcement of `@GuardedBy` annotations.
+* List of enabled / disabled checks exists [within javac.py](https://cs.chromium.org/chromium/src/build/android/gyp/javac.py?l=30)
+  * Many checks are currently disabled because there is work involved in fixing
+    violations they introduce. Please help!
+* Custom checks for Chrome:
+  * [//tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/](/tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/)
+* Use ErrorProne checks when you need something more sophisticated than pattern
+  matching.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+### [Android Lint](https://developer.android.com/studio/write/lint)
+* Runs as part of normal compilation. Controlled by GN arg: `disable_android_lint` 
+* Most useful check:
+  * Enforcing `@TargetApi` annotations (ensure you don't call a function that
+    does not exist on all versions of Android unless guarded by a version
+    check).
+* List of disabled checks:
+  * [//build/android/lint/suppressions.xml](/build/android/lint/suppressions.xml)
+* Custom lint checks [are possible][lint_plugins], but we don't have any.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+[lint_plugins]: http://tools.android.com/tips/lint-custom-rules
+
+### [Bytecode Processor](/build/android/bytecode/)
+* Performs a single check:
+  * That target `deps` are not missing any entries.
+  * In other words: Enforces that targets do not rely on indirect dependencies
+    to populate their classpath.
+* Checks run on the entire codebase, not only on changed lines.
+
+### [PRESUBMIT.py](/PRESUBMIT.py):
+* Checks for banned patterns via `_BANNED_JAVA_FUNCTIONS`.
+  * (These should likely be moved to checkstyle).
+* Checks for a random set of things in `ChecksAndroidSpecificOnUpload()`.
+  * Including running Checkstyle.
+  * (Some of these other checks should likely also be moved to checkstyle).
+* Checks run only on changed lines.
+
+### [Checkstyle](https://checkstyle.sourceforge.io/)
+* Checks Java style rules that are not covered by clang-format.
+  * E.g.: Unused imports and naming conventions.
+* Allows custom checks to be added via XML. Here [is ours].
+* Preferred over adding checks directly in PRESUBMIT.py because the tool
+  understands `@SuppressWarnings` annotations.
+* Checks run only on changed lines.
+
+[is ours]:  /tools/android/checkstyle/chromium-style-5.0.xml
+
+### [clang-format](https://clang.llvm.org/docs/ClangFormat.html)
+* Formats `.java` files via `git cl format`.
+* Can be toggled on/off with code comments.
+  ```java
+  // clang-format off
+  ... non-formatted code here ...
+  // clang-format on
+  ```
+* Does not work great for multiple annotations or on some lambda expressions,
+  but is generally agreed it is better than not having it at all.
diff --git a/src/build/android/docs/life_of_a_resource.md b/src/build/android/docs/life_of_a_resource.md
new file mode 100644
index 0000000..3aacd5e
--- /dev/null
+++ b/src/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,260 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+  * Including AndroidManifest.xml files from libraries, which get merged
+    together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+  target-specific resource dirs, no dependant resources here).
+* Target-specific R.txt
+  * Contains a list of resources and their ids (including of dependencies).
+* Target-specific R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+  * AndroidManifest.xml (as binary xml)
+  * resources.arsc
+  * res/**
+* Final R.txt
+  * Contains a list of resources and their ids (including of dependencies).
+* Final R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp for binary size (optional).
+* Move drawables in mdpi to non-mdpi directory ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+  other resources will now use the id rather than the name for faster lookup at
+  runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+  the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+  dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the order of dependencies supplied so that some resources clobber each
+  other.
+* Compile the AndroidManifest.xml to binary xml (references to resources are now
+  using ids rather than the string names)
+* Create a resources.arsc file that has the name and values of string
+  resources as well as the name and path of non-string resources (ie. layouts
+  and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+  with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resources names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [allowlisted](#adding-resources-to-the-allowlist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (ie: it goes through the
+entire process for each module). The modules are then combined to form a
+bundle. Moreover, during "Finalizing the apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help with
+debugging, the `R.txt` file is archived. The `R.txt` file contains a mapping
+from resource ids to resource names and can be used to get the original resource
+name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
+
+### Adding resources to the allowlist
+
+If a resource is accessed via `getIdentifier()` it needs to be allowed by an
+aapt2 resources config file. The config file looks like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+eg:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_paths` variable. To add a resource to the allowlist, check
+where the config is for your target and add a new line for your resource. If
+none exist, create a new config file and pass its path in your target.
+
+### Webview resource ids
+
+The first two bytes of a resource id is the package id. For regular apks, this
+is `0x7f`. However, Webview is a shared library which gets loaded into other
+apks. The package id for webview resources is assigned dynamically at runtime.
+When webview is loaded it calls this [R file's][Base Module R.java File]
+onResourcesLoaded function to have the correct package id. When deobfuscating
+webview resource ids, disregard the first two bytes in the id when looking it up
+in the `R.txt` file.
+
+Monochrome, when loaded as webview, rewrites the package ids of resources used
+by the webview portion to the correct value at runtime, otherwise, its resources
+have package id `0x7f` when run as a regular apk.
+
+[Base Module R.java File]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/system_webview_apk/generated_java/gen/base_module/R.java
+
+## How R.java files are generated
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in java code to reference resources in the apk.
+
+There are three types of R.java files in Chrome.
+1. Base Module Root R.java Files
+2. DFM Root R.java Files
+3. Source R.java Files
+
+Example Base Module Root R.java File
+```java
+package gen.base_module;
+
+public final class R {
+    public static class anim  {
+        public static final int abc_fade_in = 0x7f010000;
+        public static final int abc_fade_out = 0x7f010001;
+        public static final int abc_slide_in_top = 0x7f010007;
+    }
+    public static class animator  {
+        public static final int design_appbar_state_list_animator = 0x7f020000;
+    }
+}
+```
+Base module root R.java files contain base android resources. All R.java files
+can access base module resources through inheritance.
+
+Example DFM Root R.java File
+```java
+package gen.vr_module;
+
+public final class R {
+    public static class anim extends gen.base_module.R.anim {
+    }
+    public static class animator extends gen.base_module.R.animator  {
+        public static final int design_appbar_state_list_animator = 0x7f030000;
+    }
+}
+```
+DFM root R.java files extend base module root R.java files. This allows DFMs to
+access their own resources as well as the base module's resources.
+
+Example Source R.java File
+```java
+package org.chromium.chrome.vr;
+
+public final class R {
+    public static final class anim extends
+            gen.base_module.R.anim {}
+    public static final class animator extends
+            gen.base_module.R.animator {}
+}
+```
+Source R.java files extend root R.java files and have no resources of their own.
+Developers can import these R.java files to access resources in the apk.
+
+The R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/src/build/android/docs/lint.md b/src/build/android/docs/lint.md
new file mode 100644
index 0000000..4ba13d7
--- /dev/null
+++ b/src/build/android/docs/lint.md
@@ -0,0 +1,140 @@
+# Lint
+
+Android's [**lint**](https://developer.android.com/tools/help/lint.html) is a
+static analysis tool that Chromium uses to catch possible issues in Java code.
+
+This is a list of [**checks**](http://tools.android.com/tips/lint-checks) that
+you might encounter.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium only runs lint on apk or bundle targets that explicitly set
+`enable_lint = true`. Some example targets that have this set are:
+
+ - `//chrome/android:monochrome_public_bundle`
+ - `//android_webview/support_library/boundary_interfaces:boundary_interface_example_apk`
+ - `//remoting/android:remoting_apk`
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it locally
+
+Java provides an annotation,
+[`@SuppressWarnings`](https://developer.android.com/reference/java/lang/SuppressWarnings),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage in
+Chromium is typically limited to the first three. You do not need to import it
+since it is in the `java.lang` package.
+
+Like many suppression annotations, `@SuppressWarnings` takes a value that tells
+**lint** what to ignore. It can be a single `String`:
+
+```java
+@SuppressWarnings("NewApi")
+public void foo() {
+    a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressWarnings({
+        "NewApi",
+        "UseSparseArrays"
+        })
+public Map<Integer, FakeObject> bar() {
+    Map<Integer, FakeObject> shouldBeASparseArray = new HashMap<Integer, FakeObject>();
+    another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+    return shouldBeASparseArray;
+}
+```
+
+For resource xml files you can use `tools:ignore`:
+
+```xml
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:tools="http://schemas.android.com/tools">
+    <!-- TODO(crbug/###): remove tools:ignore once these colors are used -->
+    <color name="hi" tools:ignore="NewApi,UnusedResources">@color/unused</color>
+</resources>
+```
+
+The examples above are the recommended ways of suppressing lint warnings.
+
+### Suppress it in a `lint-suppressions.xml` file
+
+**lint** can be given a per-target XML configuration file containing warnings or
+errors that should be ignored. Each target defines its own configuration file
+via the `lint_suppressions_file` gn variable. It is usually defined near its
+`enable_lint` gn variable.
+
+These suppressions files should only be used for temporarily ignoring warnings
+that are too hard (or not possible) to suppress locally, and permanently
+ignoring warnings only for this target. To permanently ignore a warning for all
+targets, add the warning to the `_DISABLED_ALWAYS` list in
+[build/android/gyp/lint.py](https://source.chromium.org/chromium/chromium/src/+/master:build/android/gyp/lint.py).
+Disabling globally makes lint a bit faster.
+
+The exception to the above rule is for warnings that affect multiple languages.
+Feel free to suppress those in lint-suppressions.xml files since it is not
+practical to suppress them in each language file and it is a lot of extra bloat
+to list out every language for every violation in lint-baseline.xml files.
+
+Here is an example of how to structure a suppressions XML file:
+
+```xml
+<?xml version="1.0" encoding="utf-8" ?>
+<lint>
+  <!-- Chrome is a system app. -->
+  <issue id="ProtectedPermissions" severity="ignore"/>
+  <issue id="UnusedResources">
+    <!-- 1 raw resources are accessed by URL in various places. -->
+    <ignore regexp="gen/remoting/android/.*/res/raw/credits.*"/>
+    <!-- TODO(crbug.com/###): Remove the following line.  -->
+    <ignore regexp="The resource `R.string.soon_to_be_used` appears to be unused"/>
+  </issue>
+</lint>
+```
+
+## What are `lint-baseline.xml` files for?
+
+Baseline files are to help us introduce new lint warnings and errors without
+blocking on fixing all our existing code that violates these new errors. Since
+they are generated files, they should **not** be used to suppress lint warnings.
+One of the approaches above should be used instead. Eventually all the errors in
+baseline files should be either fixed or ignored permanently.
+
+The following are some common scenarios where you may need to update baseline
+files.
+
+### I updated `cmdline-tools` and now there are tons of new errors!
+
+This happens every time lint is updated, since lint is provided by
+`cmdline-tools`.
+
+Baseline files are defined via the `lint_baseline_file` gn variable. It is
+usually defined near a target's `enable_lint` gn variable. To regenerate the
+baseline file, delete it and re-run the lint target. The command will fail, but
+the baseline file will have been generated.
+
+This may need to be repeated for all targets that have set `enable_lint = true`,
+including downstream targets. Downstream baseline files should be updated
+first to avoid build breakages. Each target has its own `lint_baseline_file`
+defined and so all these files can be removed and regenerated as needed.
+
+### I updated `library X` and now there are tons of new errors!
+
+This is usually because `library X`'s aar contains custom lint checks and/or
+custom annotation definition. Follow the same procedure as updates to
+`cmdline-tools`.
diff --git a/src/build/android/download_doclava.py b/src/build/android/download_doclava.py
new file mode 100755
index 0000000..1982fdb
--- /dev/null
+++ b/src/build/android/download_doclava.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+  """Fetch and extract the doclava tarball via download_from_google_storage.
+
+  Returns:
+    0 on success. On Windows this is a no-op returning 0 (see comment
+    below); elsewhere subprocess.check_call raises CalledProcessError if
+    the download fails.
+  """
+  # Some Windows bots inadvertently have third_party/android_sdk installed,
+  # but are unable to run download_from_google_storage because depot_tools
+  # is not in their path, so avoid failure and bail.
+  if sys.platform == 'win32':
+    return 0
+  subprocess.check_call([
+      'download_from_google_storage',
+      '--no_resume',
+      '--no_auth',
+      '--bucket', 'chromium-doclava',
+      '--extract',
+      '-s',
+      # The .sha1 stamp lives in buildtools/android, two directory levels up
+      # from build/android where this script resides.
+      os.path.join(os.path.dirname(__file__), '..', '..', 'buildtools',
+                   'android', 'doclava.tar.gz.sha1')])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/dump_apk_resource_strings.py b/src/build/android/dump_apk_resource_strings.py
new file mode 100755
index 0000000..8417e29
--- /dev/null
+++ b/src/build/android/dump_apk_resource_strings.py
@@ -0,0 +1,664 @@
+#!/usr/bin/env vpython
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A script to parse and dump localized strings in resource.arsc files."""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import cProfile
+import os
+import re
+import subprocess
+import sys
+import zipfile
+
+# pylint: disable=bare-except
+
+# Assuming this script is located under build/android, try to import
+# build/android/gyp/bundletool.py to get the default path to the bundletool
+# jar file. If this fails, using --bundletool-path will be required to parse
+# bundles, allowing this script to be relocated or reused somewhere else.
+try:
+  sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp'))
+  import bundletool
+
+  _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH
+except:
+  _DEFAULT_BUNDLETOOL_PATH = None
+
+# Try to get the path of the aapt build tool from catapult/devil.
+try:
+  import devil_chromium  # pylint: disable=unused-import
+  from devil.android.sdk import build_tools
+  _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt')
+except:
+  _AAPT_DEFAULT_PATH = None
+
+
+def AutoIndentStringList(lines, indentation=2):
+  """Auto-indents an input list of text lines, based on open/closed braces.
+
+  For example, the following input text:
+
+    'Foo {',
+    'Bar {',
+    'Zoo',
+    '}',
+    '}',
+
+  Will return the following:
+
+    'Foo {',
+    '  Bar {',
+    '    Zoo',
+    '  }',
+    '}',
+
+  The rules are pretty simple:
+    - A line that ends with an open brace ({) increments indentation.
+    - A line that starts with a closing brace (}) decrements it.
+
+  The main idea is to make outputting structured text data trivial,
+  since it can be assumed that the final output will be passed through
+  this function to make it human-readable.
+
+  Args:
+    lines: an iterator over input text lines. They should not contain
+      line terminator (e.g. '\n').
+    indentation: number of spaces added per nesting level (default 2).
+  Returns:
+    A new list of text lines, properly auto-indented.
+  """
+  margin = ''
+  result = []
+  # NOTE: Intentional but significant speed optimizations in this function:
+  #   - |line and line[0] == <char>| instead of |line.startswith(<char>)|.
+  #   - |line and line[-1] == <char>| instead of |line.endswith(<char>)|.
+  for line in lines:
+    # Dedent *before* emitting a closing-brace line so it aligns with its
+    # opening line; indent *after* emitting an opening-brace line.
+    if line and line[0] == '}':
+      margin = margin[:-indentation]
+    result.append(margin + line)
+    if line and line[-1] == '{':
+      margin += ' ' * indentation
+
+  return result
+
+
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+  """Unquote a given string from aapt dump.
+
+  Args:
+    s: An UTF-8 encoded string that contains backslashes for quotes, as found
+      in the output of 'aapt dump resources --values'.
+  Returns:
+    The unquoted version of the input string.
+  """
+  # Fast path: nothing escaped, return the input unchanged.
+  if not '\\' in s:
+    return s
+
+  result = ''
+  start = 0
+  size = len(s)
+  while start < size:
+    pos = s.find('\\', start)
+    if pos < 0:
+      break
+
+    # Copy the literal text before the escape, then measure the run of
+    # consecutive backslashes starting at |pos|.
+    result += s[start:pos]
+    count = 1
+    while pos + count < size and s[pos + count] == '\\':
+      count += 1
+
+    # Each *pair* of backslashes decodes to a single one. NOTE: this script
+    # runs under Python 2 (see vpython shebang / iteritems elsewhere), so
+    # |count / 2| is integer division here.
+    result += '\\' * (count / 2)
+    start = pos + count
+    # An odd count means the final backslash escapes the next character.
+    if count & 1:
+      if start < size:
+        ch = s[start]
+        if ch == 'n':  # \n is the only non-printable character supported.
+          ch = '\n'
+        result += ch
+        start += 1
+      else:
+        # Trailing lone backslash at end of string: keep it literally.
+        result += '\\'
+
+  result += s[start:]
+  return result
+
+
+# Inline self-tests, executed once at import time.
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+  """Quote a given string for external output.
+
+  Args:
+    s: An input UTF-8 encoded string.
+  Returns:
+    A quoted version of the string, using the same rules as 'aapt dump'.
+  """
+  # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+  # is undesirable.
+  # Backslash must be escaped first so that the escapes added for '"' and
+  # '\n' are not themselves re-escaped.
+  return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+# Inline self-tests, executed once at import time.
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+  """Read all string resource IDs and names from an R.txt file.
+
+  Args:
+    r_txt_path: Input file path.
+  Returns:
+    A {res_id -> res_name} dictionary corresponding to the string resources
+    from the input R.txt file. Non-string resource types are ignored.
+  """
+  # NOTE: Typical line of interest looks like:
+  # int string AllowedDomainsForAppsTitle 0x7f130001
+  result = {}
+  prefix = 'int string '
+  with open(r_txt_path) as f:
+    for line in f:
+      line = line.rstrip()
+      if line.startswith(prefix):
+        res_name, res_id = line[len(prefix):].split(' ')
+        # Base 0 lets int() honor the '0x' prefix in the R.txt value.
+        result[int(res_id, 0)] = res_name
+  return result
+
+
+class ResourceStringValues(object):
+  """Models all possible values for a named string.
+
+  Holds one resource name plus a {config -> value} map of all localized
+  values recorded for it.
+  """
+
+  def __init__(self):
+    # Resource name (set lazily on the first AddValue() call).
+    self.res_name = None
+    # Maps a config (any str()-able key) to its UTF-8 string value.
+    self.res_values = {}
+
+  def AddValue(self, res_name, res_config, res_value):
+    """Add a new value to this entry.
+
+    Args:
+      res_name: Resource name. If this is not the first time this method
+        is called with the same resource name, then |res_name| should match
+        previous parameters for sanity checking.
+      res_config: Config associated with this value. This can actually be
+        anything that can be converted to a string.
+      res_value: UTF-8 encoded string value.
+    """
+    # |is not| is a cheap identity fast-path; fall back to != for the real
+    # equality comparison when the objects differ.
+    if res_name is not self.res_name and res_name != self.res_name:
+      if self.res_name is None:
+        self.res_name = res_name
+      else:
+        # Sanity check: the resource name should be the same for all chunks.
+        # Resource ID is redefined with a different name!!
+        print('WARNING: Resource key ignored (%s, should be %s)' %
+              (res_name, self.res_name))
+
+    # setdefault() returns the already-stored value when the key exists, so
+    # an identity mismatch means a duplicate definition for this config.
+    if self.res_values.setdefault(res_config, res_value) is not res_value:
+      print('WARNING: Duplicate value definition for [config %s]: %s ' \
+            '(already has %s)' % (
+                res_config, res_value, self.res_values[res_config]))
+
+  def ToStringList(self, res_id):
+    """Convert entry to string list for human-friendly output.
+
+    Args:
+      res_id: Numeric resource ID to print, or None to omit IDs.
+    Returns:
+      A list of text lines (suitable for AutoIndentStringList()).
+    """
+    # NOTE: iteritems() — this file is Python 2 (vpython shebang).
+    values = sorted(
+        [(str(config), value) for config, value in self.res_values.iteritems()])
+    if res_id is None:
+      # res_id will be None when the resource ID should not be part
+      # of the output.
+      result = ['name=%s count=%d {' % (self.res_name, len(values))]
+    else:
+      result = [
+          'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+                                                len(values))
+      ]
+    for config, value in values:
+      result.append('%-16s "%s"' % (config, QuoteString(value)))
+    result.append('}')
+    return result
+
+
+class ResourceStringMap(object):
+  """Convenience class to hold the set of all localized strings in a table.
+
+  Usage is the following:
+     1) Create new (empty) instance.
+     2) Call AddValue() repeatedly to add new values.
+     3) Eventually call RemapResourceNames() to remap resource names.
+     4) Call ToStringList() to convert the instance to a human-readable
+        list of strings that can later be used with AutoIndentStringList()
+        for example.
+  """
+
+  def __init__(self):
+    # Maps res_id -> ResourceStringValues; entries auto-create on access.
+    self._res_map = collections.defaultdict(ResourceStringValues)
+
+  def AddValue(self, res_id, res_name, res_config, res_value):
+    """Record one (config, value) pair for a given resource ID/name."""
+    self._res_map[res_id].AddValue(res_name, res_config, res_value)
+
+  def RemapResourceNames(self, id_name_map):
+    """Rename all entries according to a given {res_id -> res_name} map."""
+    # Only rename IDs already present; unknown IDs are ignored on purpose.
+    for res_id, res_name in id_name_map.iteritems():
+      if res_id in self._res_map:
+        self._res_map[res_id].res_name = res_name
+
+  def ToStringList(self, omit_ids=False):
+    """Dump content to a human-readable string list.
+
+    Note that the strings are ordered by their resource name first, and
+    resource id second.
+
+    Args:
+      omit_ids: If True, do not put resource IDs in the result. This might
+        be useful when comparing the outputs of two different builds of the
+        same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk)
+        where the resource IDs might be slightly different, but not the
+        string contents.
+    Return:
+      A list of strings that can later be sent to AutoIndentStringList().
+    """
+    result = ['Resource strings (count=%d) {' % len(self._res_map)]
+    res_map = self._res_map
+
+    # A small function to compare two (res_id, values) tuples
+    # by resource name first, then resource ID.
+    # NOTE: cmp() and sorted(cmp=...) are Python 2 only.
+    def cmp_id_name(a, b):
+      result = cmp(a[1].res_name, b[1].res_name)
+      if result == 0:
+        result = cmp(a[0], b[0])
+      return result
+
+    for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name):
+      result += res_map[res_id].ToStringList(None if omit_ids else res_id)
+    result.append('}  # Resource strings')
+    return result
+
+
+@contextlib.contextmanager
+def ManagedOutput(output_file):
+  """Create an output File object that will be closed on exit if necessary.
+
+  Args:
+    output_file: Optional output file path.
+  Yields:
+    If |output_file| is empty, this simply yields sys.stdout. Otherwise, this
+    opens the file path for writing text, and yields its File object. The
+    context will ensure that the object is always closed on scope exit.
+  """
+  # Only close streams we opened ourselves — never close sys.stdout.
+  close_output = False
+  if output_file:
+    output = open(output_file, 'wt')
+    close_output = True
+  else:
+    output = sys.stdout
+  try:
+    yield output
+  finally:
+    if close_output:
+      output.close()
+
+
+@contextlib.contextmanager
+def ManagedPythonProfiling(enable_profiling, sort_key='tottime'):
+  """Enable Python profiling if needed.
+
+  Args:
+    enable_profiling: Boolean flag. True to enable python profiling.
+    sort_key: Sorting key for the final stats dump.
+  Yields:
+    If |enable_profiling| is False, this yields False. Otherwise, this
+    yields a new Profile instance just after enabling it. The manager
+    ensures that profiling stops and prints statistics on scope exit.
+  """
+  pr = None
+  if enable_profiling:
+    pr = cProfile.Profile()
+    pr.enable()
+  try:
+    yield pr
+  finally:
+    # Stats go to stdout on scope exit, sorted by |sort_key|.
+    if pr:
+      pr.disable()
+      pr.print_stats(sort=sort_key)
+
+
+def IsFilePathABundle(input_file):
+  """Return True iff |input_file| holds an Android app bundle."""
+  # A bundle is a zip archive that always contains a BundleConfig.pb entry.
+  # Any failure (not a zip, missing entry) simply means "not a bundle".
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo('BundleConfig.pb')
+      return True
+  except:
+    return False
+
+
+# Example output from 'bundletool dump resources --values' corresponding
+# to strings:
+#
+# 0x7F1200A0 - string/abc_action_menu_overflow_description
+#         (default) - [STR] "More options"
+#         locale: "ca" - [STR] "Més opcions"
+#         locale: "da" - [STR] "Flere muligheder"
+#         locale: "fa" - [STR] " گزینه<U+200C>های بیشتر"
+#         locale: "ja" - [STR] "その他のオプション"
+#         locale: "ta" - [STR] "மேலும் விருப்பங்கள்"
+#         locale: "nb" - [STR] "Flere alternativer"
+#         ...
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+#              for locales!
+#
+# Fun fact #2: The <U+200C> is terminal output for \u200c, the output is
+#              really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+# Matches a resource header line; group(1) = hex resource ID (no 0x prefix),
+# group(2) = resource name. Asserts below are import-time self-tests.
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+    r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+    '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+# Matches the default (non-localized) value line; group(1) = quoted value.
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+    r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"').group(1) == "More options"
+
+# Matches a localized value line; group(1) = locale, group(2) = quoted value.
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+    r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+    u'        locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8'))
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+  """Use bundletool to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [
+      'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+      bundle_path, '--values'
+  ]
+  # bufsize=1 -> line-buffered pipe, matching the line-oriented parse below.
+  # NOTE(review): p.wait() is never called after the loop, so the child is
+  # not reaped explicitly; presumably acceptable for a one-shot tool, but
+  # worth confirming.
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+  res_map = ResourceStringMap()
+  current_resource_id = None
+  current_resource_name = None
+  keep_parsing = True
+  need_value = False
+  # |keep_parsing| is never reset; the loop only terminates on EOF below.
+  while keep_parsing:
+    line = p.stdout.readline()
+    if not line:
+      break
+    # Do not use rstrip(), since this should only remove trailing newlines
+    # but not trailing whitespace that happen to be embedded in the string
+    # value for some reason.
+    line = line.rstrip('\n\r')
+    m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+    if m:
+      # New "0x<id> - string/<name>" header: remember it and expect values.
+      current_resource_id = int(m.group(1), 16)
+      current_resource_name = m.group(2)
+      need_value = True
+      continue
+
+    if not need_value:
+      continue
+
+    # Try the default-value form first, then the localized form.
+    resource_config = None
+    m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+    if m:
+      resource_config = 'config (default)'
+      resource_value = m.group(1)
+    else:
+      m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+      if m:
+        resource_config = 'config %s' % m.group(1)
+        resource_value = m.group(2)
+
+    # A non-value line ends the current resource's value list.
+    if resource_config is None:
+      need_value = False
+      continue
+
+    res_map.AddValue(current_resource_id, current_resource_name,
+                     resource_config, UnquoteString(resource_value))
+  return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+  """Returns True iff a ZipFile instance is for a regular APK."""
+  # Any failure (not a zip, no resources.arsc entry) means "not an APK".
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo(RESOURCES_FILENAME)
+      return True
+  except:
+    return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+#      config zh-rHK
+#        resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+#          (string8) "瀏覽首頁"
+#        resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+#          (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each
+# kind of line and extract relevant information. The asserts are
+# import-time self-tests.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match('   config (default):')
+assert _RE_AAPT_CONFIG.match('   config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+# Match aapt's BCP-47 form, e.g. 'b+sr+Latn' (first subtag must be a language).
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+# group(1) = hex resource ID, group(2) = resource name.
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+    r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+    r'  resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+# group(1) = quoted string value on a '(string8)' line.
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r'       (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+  """Convert a locale name from 'aapt dump' to its BCP-47 form.
+
+  Args:
+    locale: aapt-style locale, e.g. 'en', 'en-rUS' or 'b+sr+Latn'.
+      '(default)' and already-BCP-47 names pass through unchanged.
+  Returns:
+    The BCP-47 locale name (e.g. 'en-US', 'sr-Latn').
+  """
+  if locale.startswith('b+'):
+    return '-'.join(locale[2:].split('+'))
+  # Strip aapt's '-r' region marker ('en-rUS' -> 'en-US'); a plain language
+  # tag (no region) is returned as-is.
+  lang, _, region = locale.partition('-r')
+  if region:
+    return '%s-%s' % (lang, region)
+  return lang
+
+
+# Inline self-tests, executed once at import time.
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+  """Use aapt to extract the localized strings of a given APK.
+
+  Args:
+    aapt_path: Path to the aapt build-tool executable.
+    apk_path: Path to the input APK.
+  Returns:
+    A new ResourceStringMap instance populated with the APK's content.
+  """
+  cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+  res_map = ResourceStringMap()
+  current_locale = None
+  current_resource_id = None
+  current_resource_name = None
+  need_value = False
+  while True:
+    # NOTE(review): rstrip() is applied before the EOF check, so an entirely
+    # blank output line would also terminate the loop — presumably aapt never
+    # emits one mid-dump, but confirm before relying on this. (Contrast with
+    # ParseBundleResources, which checks EOF before stripping.)
+    line = p.stdout.readline().rstrip()
+    if not line:
+      break
+    m = _RE_AAPT_CONFIG.match(line)
+    if m:
+      # New 'config <name>:' section; only keep locales we recognize so
+      # that non-locale configs (e.g. 'land') are skipped entirely.
+      locale = None
+      aapt_locale = m.group(1)
+      if aapt_locale == '(default)':
+        locale = aapt_locale
+      elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      if locale is not None:
+        current_locale = _ConvertAaptLocaleToBcp47(locale)
+      continue
+
+    if current_locale is None:
+      continue
+
+    if need_value:
+      m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+      if not m:
+        # Should not happen
+        sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"' %
+                         (current_resource_id, current_resource_name))
+        resource_value = '<MISSING_STRING_%08x>' % current_resource_id
+      else:
+        resource_value = UnquoteString(m.group(1))
+
+      res_map.AddValue(current_resource_id, current_resource_name,
+                       'config %s' % current_locale, resource_value)
+      need_value = False
+    else:
+      m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+      if m:
+        current_resource_id = int(m.group(1), 16)
+        current_resource_name = m.group(2)
+        need_value = True
+
+  return res_map
+
+
+def main(args):
+  """Command-line entry point: parse args, dump localized strings.
+
+  Args:
+    args: Command-line argument list (without the program name).
+  """
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      'input_file',
+      help='Input file path. This can be either an APK, or an app bundle.')
+  parser.add_argument('--output', help='Optional output file path.')
+  parser.add_argument(
+      '--omit-ids',
+      action='store_true',
+      help='Omit resource IDs in the output. This is useful '
+      'to compare the contents of two distinct builds of the '
+      'same APK.')
+  parser.add_argument(
+      '--aapt-path',
+      default=_AAPT_DEFAULT_PATH,
+      help='Path to aapt executable. Optional for APKs.')
+  parser.add_argument(
+      '--r-txt-path',
+      help='Path to an optional input R.txt file used to translate resource '
+      'IDs to string names. Useful when resources names in the input files '
+      'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, if will be used '
+      'automatically by this script.')
+  parser.add_argument(
+      '--bundletool-path',
+      default=_DEFAULT_BUNDLETOOL_PATH,
+      help='Path to alternate bundletool .jar file. Only used for bundles.')
+  parser.add_argument(
+      '--profile', action='store_true', help='Enable Python profiling.')
+
+  options = parser.parse_args(args)
+
+  # Create a {res_id -> res_name} map for unobfuscation, if needed.
+  # Fall back to an <input_file>.R.txt sitting next to the input.
+  res_id_name_map = {}
+  r_txt_path = options.r_txt_path
+  if not r_txt_path:
+    candidate_r_txt_path = options.input_file + '.R.txt'
+    if os.path.exists(candidate_r_txt_path):
+      r_txt_path = candidate_r_txt_path
+
+  if r_txt_path:
+    res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+  # Create a helper lambda that creates a new ResourceStringMap instance
+  # based on the input file's type.
+  if IsFilePathABundle(options.input_file):
+    if not options.bundletool_path:
+      parser.error(
+          '--bundletool-path <BUNDLETOOL_JAR> is required to parse bundles.')
+
+    # use bundletool to parse the bundle resources.
+    def create_string_map():
+      return ParseBundleResources(options.bundletool_path, options.input_file)
+
+  elif IsFilePathAnApk(options.input_file):
+    if not options.aapt_path:
+      parser.error('--aapt-path <AAPT> is required to parse APKs.')
+
+    # Use aapt dump to parse the APK resources.
+    def create_string_map():
+      return ParseApkResources(options.aapt_path, options.input_file)
+
+  else:
+    parser.error('Unknown file format: %s' % options.input_file)
+
+  # Print everything now.
+  with ManagedOutput(options.output) as output:
+    with ManagedPythonProfiling(options.profile):
+      res_map = create_string_map()
+      res_map.RemapResourceNames(res_id_name_map)
+      lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+      for line in lines:
+        output.write(line)
+        output.write('\n')
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/src/build/android/emma_coverage_stats.py b/src/build/android/emma_coverage_stats.py
new file mode 100755
index 0000000..f45f4d4
--- /dev/null
+++ b/src/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+  build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
+    <EMMA file directory> --lines-for-coverage-file
+    <path to file containing lines for coverage>
+
+  Creates a JSON representation of the overall and file coverage stats and saves
+  this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
+# Line-coverage status constants; values other than NOT_EXECUTABLE are keyed
+# off EMMA's CSS classes (see _EmmaHtmlParser._CSS_TO_STATUS).
+NOT_EXECUTABLE = -1
+NOT_COVERED = 0
+COVERED = 1
+PARTIALLY_COVERED = 2
+
+# Coverage information about a single line of code.
+# lineno: integer line number; source: the Java source text for the line;
+# covered_status: one of the constants above; fractional_line_coverage:
+# fraction of the line covered, only meaningful for PARTIALLY_COVERED lines.
+LineCoverage = collections.namedtuple(
+    'LineCoverage',
+    ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
+
+
+class _EmmaHtmlParser(object):
+  """Encapsulates HTML file parsing operations.
+
+  This class contains all operations related to parsing HTML files that were
+  produced using the EMMA code coverage tool.
+
+  Example HTML:
+
+  Package links:
+    <a href="_files/1.html">org.chromium.chrome</a>
+    This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
+
+  Class links:
+    <a href="1e.html">DoActivity.java</a>
+    This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
+
+  Line coverage data:
+    <tr class="p">
+       <td class="l" title="78% line coverage (7 out of 9)">108</td>
+       <td title="78% line coverage (7 out of 9 instructions)">
+         if (index < 0 || index = mSelectors.size()) index = 0;</td>
+    </tr>
+    <tr>
+       <td class="l">109</td>
+       <td> </td>
+    </tr>
+    <tr class="c">
+       <td class="l">110</td>
+       <td>        if (mSelectors.get(index) != null) {</td>
+    </tr>
+    <tr class="z">
+       <td class="l">111</td>
+       <td>            for (int i = 0; i < mSelectors.size(); i++) {</td>
+    </tr>
+    Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.
+
+    We can parse this to get:
+      1. Line number
+      2. Line of source code
+      3. Coverage status (c, z, or p)
+      4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
+  """
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different packages.
+  _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
+
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different classes within a package.
+  _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
+
+  # Selector to match all <tr> elements within the table containing Java source
+  # code in an EMMA HTML file.
+  _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
+
+  # Children of HTML elements are represented as a list in ElementTree. These
+  # constants represent list indices corresponding to relevant child elements.
+
+  # Child 1 contains percentage covered for a line.
+  _ELEMENT_PERCENT_COVERED = 1
+
+  # Child 1 contains the original line of source code.
+  _ELEMENT_CONTAINING_SOURCE_CODE = 1
+
+  # Child 0 contains the line number.
+  _ELEMENT_CONTAINING_LINENO = 0
+
+  # Maps CSS class names to corresponding coverage constants.
+  _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}
+
+  # UTF-8 no break space.
+  _NO_BREAK_SPACE = '\xc2\xa0'
+
+  def __init__(self, emma_file_base_dir):
+    """Initializes _EmmaHtmlParser.
+
+    Args:
+      emma_file_base_dir: Path to the location where EMMA report files are
+        stored. Should be where index.html is stored.
+    """
+    self._base_dir = emma_file_base_dir
+    self._emma_files_path = os.path.join(self._base_dir, '_files')
+    self._index_path = os.path.join(self._base_dir, 'index.html')
+
+  def GetLineCoverage(self, emma_file_path):
+    """Returns a list of LineCoverage objects for the given EMMA HTML file.
+
+    Args:
+      emma_file_path: String representing the path to the EMMA HTML file.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    line_tr_elements = self._FindElements(
+        emma_file_path, self._XPATH_SELECT_LOC)
+    line_coverage = []
+    for tr in line_tr_elements:
+      # Get the coverage status. Rows without a recognized CLASS attribute
+      # (e.g. blank lines) are treated as NOT_EXECUTABLE.
+      coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
+      # Get the fractional coverage value.
+      if coverage_status == PARTIALLY_COVERED:
+        title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
+        # Parse string that contains percent covered: "83% line coverage ...".
+        percent_covered = title_attribute.split('%')[0]
+        fractional_coverage = int(percent_covered) / 100.0
+      else:
+        # Default value; the fraction is only meaningful for PARTIALLY_COVERED
+        # lines (see GetSummaryStatsForLines).
+        fractional_coverage = 1.0
+
+      # Get the line number.
+      lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
+      # Handles oddly formatted HTML (where there is an extra <a> tag).
+      lineno = int(lineno_element.text or
+                   lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
+      # Get the original line of Java source code.
+      raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
+      utf8_source = raw_source.encode('UTF-8')
+      # EMMA indents source with UTF-8 no-break spaces; normalize to ' '.
+      source = utf8_source.replace(self._NO_BREAK_SPACE, ' ')
+
+      line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
+      line_coverage.append(line)
+
+    return line_coverage
+
+  def GetPackageNameToEmmaFileDict(self):
+    """Returns a dict mapping Java packages to EMMA HTML coverage files.
+
+    Parses the EMMA index.html file to get a list of packages, then parses each
+    package HTML file to get a list of classes for that package, and creates
+    a dict with this info.
+
+    Returns:
+      A dict mapping string representation of Java packages (with class
+        names appended) to the corresponding file paths of EMMA HTML files.
+    """
+    # These <a> elements contain each package name and the path of the file
+    # where all classes within said package are listed.
+    package_link_elements = self._FindElements(
+        self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
+    # Maps file path of package directory (EMMA generated) to package name.
+    # Example: emma_dir/f.html: org.chromium.chrome.
+    package_links = {
+      os.path.join(self._base_dir, link.attrib['HREF']): link.text
+      for link in package_link_elements if 'HREF' in link.attrib
+    }
+
+    package_to_emma = {}
+    for package_emma_file_path, package_name in package_links.iteritems():
+      # These <a> elements contain each class name in the current package and
+      # the path of the file where the coverage info is stored for each class.
+      coverage_file_link_elements = self._FindElements(
+          package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)
+
+      for class_name_element in coverage_file_link_elements:
+        emma_coverage_file_path = os.path.join(
+            self._emma_files_path, class_name_element.attrib['HREF'])
+        full_package_name = '%s.%s' % (package_name, class_name_element.text)
+        package_to_emma[full_package_name] = emma_coverage_file_path
+
+    return package_to_emma
+
+  # pylint: disable=no-self-use
+  def _FindElements(self, file_path, xpath_selector):
+    """Reads a HTML file and performs an XPath match.
+
+    Args:
+      file_path: String representing the path to the HTML file.
+      xpath_selector: String representing xpath search pattern.
+
+    Returns:
+      A list of ElementTree.Elements matching the given XPath selector.
+        Returns an empty list if there is no match.
+    """
+    with open(file_path) as f:
+      # EMMA emits ISO-8859-1 HTML; transcode to UTF-8 so ElementTree can
+      # parse it without encoding errors.
+      file_contents = f.read().decode('ISO-8859-1').encode('UTF-8')
+      root = ElementTree.fromstring(file_contents)
+      return root.findall(xpath_selector)
+
+
+class _EmmaCoverageStats(object):
+  """Computes code coverage stats for Java code using the coverage tool EMMA.
+
+  This class provides an API that allows users to capture absolute code coverage
+  and code coverage on a subset of lines for each Java source file. Coverage
+  reports are generated in JSON format.
+  """
+  # Regular expression to get package name from Java package statement.
+  RE_PACKAGE_MATCH_GROUP = 'package'
+  RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)
+
+  def __init__(self, emma_file_base_dir, files_for_coverage):
+    """Initialize _EmmaCoverageStats.
+
+    Args:
+      emma_file_base_dir: String representing the path to the base directory
+        where EMMA HTML coverage files are stored, i.e. parent of index.html.
+      files_for_coverage: A list of Java source code file paths to get EMMA
+        coverage for.
+    """
+    self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
+    self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)
+
+  def GetCoverageDict(self, lines_for_coverage):
+    """Returns a dict containing detailed coverage information.
+
+    Gets detailed coverage stats for each file specified in the
+    |lines_for_coverage| dict and the total incremental number of lines covered
+    and executable for all files in |lines_for_coverage|.
+
+    Args:
+      lines_for_coverage: A dict mapping Java source file paths to lists of line
+        numbers.
+
+    Returns:
+      A dict containing coverage stats for the given dict of files and lines.
+        Contains absolute coverage stats for each file, coverage stats for each
+        file's lines specified in |lines_for_coverage|, line by line coverage
+        for each file, and overall coverage stats for the lines specified in
+        |lines_for_coverage|.
+    """
+    file_coverage = {}
+    for file_path, line_numbers in lines_for_coverage.iteritems():
+      file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
+      if file_coverage_dict:
+        file_coverage[file_path] = file_coverage_dict
+      else:
+        logging.warning(
+            'No code coverage data for %s, skipping.', file_path)
+
+    # Aggregate per-file incremental stats into overall patch totals.
+    covered_statuses = [s['incremental'] for s in file_coverage.itervalues()]
+    num_covered_lines = sum(s['covered'] for s in covered_statuses)
+    num_total_lines = sum(s['total'] for s in covered_statuses)
+    return {
+      'files': file_coverage,
+      'patch': {
+        'incremental': {
+          'covered': num_covered_lines,
+          'total': num_total_lines
+        }
+      }
+    }
+
+  def GetCoverageDictForFile(self, file_path, line_numbers):
+    """Returns a dict containing detailed coverage info for the given file.
+
+    Args:
+      file_path: The path to the Java source file that we want to create the
+        coverage dict for.
+      line_numbers: A list of integer line numbers to retrieve additional stats
+        for.
+
+    Returns:
+      A dict containing absolute, incremental, and line by line coverage for
+        a file.
+    """
+    if file_path not in self._source_to_emma:
+      return None
+    emma_file = self._source_to_emma[file_path]
+    total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
+    incremental_line_coverage = [line for line in total_line_coverage
+                                 if line.lineno in line_numbers]
+    line_by_line_coverage = [
+      {
+        'line': line.source,
+        'coverage': line.covered_status,
+        'changed': line.lineno in line_numbers,
+        'fractional_coverage': line.fractional_line_coverage,
+      }
+      for line in total_line_coverage
+    ]
+    total_covered_lines, total_lines = (
+        self.GetSummaryStatsForLines(total_line_coverage))
+    incremental_covered_lines, incremental_total_lines = (
+        self.GetSummaryStatsForLines(incremental_line_coverage))
+
+    file_coverage_stats = {
+      'absolute': {
+        'covered': total_covered_lines,
+        'total': total_lines
+      },
+      'incremental': {
+        'covered': incremental_covered_lines,
+        'total': incremental_total_lines
+      },
+      'source': line_by_line_coverage,
+    }
+    return file_coverage_stats
+
+  # pylint: disable=no-self-use
+  def GetSummaryStatsForLines(self, line_coverage):
+    """Gets summary stats for a given list of LineCoverage objects.
+
+    Args:
+      line_coverage: A list of LineCoverage objects.
+
+    Returns:
+      A tuple containing the number of lines that are covered and the total
+        number of lines that are executable, respectively
+    """
+    partially_covered_sum = 0
+    covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
+    for line in line_coverage:
+      status = line.covered_status
+      if status == NOT_EXECUTABLE:
+        continue
+      covered_status_totals[status] += 1
+      if status == PARTIALLY_COVERED:
+        partially_covered_sum += line.fractional_line_coverage
+
+    # Partially covered lines contribute their fractional amount, so
+    # |total_covered| may be a non-integer float.
+    total_covered = covered_status_totals[COVERED] + partially_covered_sum
+    total_lines = sum(covered_status_totals.values())
+    return total_covered, total_lines
+
+  def _GetSourceFileToEmmaFileDict(self, files):
+    """Gets a dict used to correlate Java source files with EMMA HTML files.
+
+    This method gathers the information needed to correlate EMMA HTML
+    files with Java source files. EMMA XML and plain text reports do not provide
+    line by line coverage data, so HTML reports must be used instead.
+    Unfortunately, the HTML files that are created are given garbage names
+    (i.e 1.html) so we need to manually correlate EMMA HTML files
+    with the original Java source files.
+
+    Args:
+      files: A list of file names for which coverage information is desired.
+
+    Returns:
+      A dict mapping Java source file paths to EMMA HTML file paths.
+    """
+    # Maps Java source file paths to package names.
+    # Example: /usr/code/file.java -> org.chromium.file.java.
+    source_to_package = {}
+    for file_path in files:
+      package = self.GetPackageNameFromFile(file_path)
+      if package:
+        source_to_package[file_path] = package
+      else:
+        logging.warning("Skipping %s because it doesn\'t have a package "
+                        "statement.", file_path)
+
+    # Maps package names to EMMA report HTML files.
+    # Example: org.chromium.file.java -> out/coverage/1a.html.
+    package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
+    # Finally, we have a dict mapping Java file paths to EMMA report files.
+    # Example: /usr/code/file.java -> out/coverage/1a.html.
+    # Files whose package is absent from the EMMA report are dropped here.
+    source_to_emma = {source: package_to_emma[package]
+                      for source, package in source_to_package.iteritems()
+                      if package in package_to_emma}
+    return source_to_emma
+
+  @staticmethod
+  def NeedsCoverage(file_path):
+    """Checks to see if the file needs to be analyzed for code coverage.
+
+    Args:
+      file_path: A string representing path to the file.
+
+    Returns:
+      True for Java files that exist, False for all others.
+    """
+    if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
+      return True
+    else:
+      logging.info('Skipping file %s, cannot compute code coverage.', file_path)
+      return False
+
+  @staticmethod
+  def GetPackageNameFromFile(file_path):
+    """Gets the full package name including the file name for a given file path.
+
+    Args:
+      file_path: String representing the path to the Java source file.
+
+    Returns:
+      A string representing the full package name with file name appended or
+        None if there is no package statement in the file.
+    """
+    with open(file_path) as f:
+      file_content = f.read()
+      package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
+      if package_match:
+        package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
+        file_name = os.path.basename(file_path)
+        return '%s.%s' % (package, file_name)
+      else:
+        return None
+
+
+def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
+  """Generates a coverage report for a given set of lines.
+
+  Writes the results of the coverage analysis to the file specified by
+  |out_file_path|.
+
+  Args:
+    line_coverage_file: The path to a file which contains a dict mapping file
+      names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
+      that we should compute coverage information on lines 1 - 3 for file1.
+    out_file_path: A string representing the location to write the JSON report.
+    coverage_dir: A string representing the file path where the EMMA
+      HTML coverage files are located (i.e. folder where index.html is located).
+  """
+  with open(line_coverage_file) as f:
+    potential_files_for_coverage = json.load(f)
+
+  # Keep only Java files that exist on disk; NeedsCoverage logs each skip.
+  files_for_coverage = {f: lines
+                        for f, lines in potential_files_for_coverage.iteritems()
+                        if _EmmaCoverageStats.NeedsCoverage(f)}
+
+  coverage_results = {}
+  if files_for_coverage:
+    code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys())
+    coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
+  else:
+    logging.info('No Java files requiring coverage were included in %s.',
+                 line_coverage_file)
+
+  # An empty dict is still written so the output file always exists.
+  with open(out_file_path, 'w+') as out_status_file:
+    json.dump(coverage_results, out_status_file)
+
+
+def main():
+  """Parses command line arguments and generates an EMMA coverage report."""
+  argparser = argparse.ArgumentParser()
+  argparser.add_argument('--out', required=True, type=str,
+                         help='Report output file path.')
+  argparser.add_argument('--emma-dir', required=True, type=str,
+                         help='EMMA HTML report directory.')
+  argparser.add_argument('--lines-for-coverage-file', required=True, type=str,
+                         help='File containing a JSON object. Should contain a '
+                         'dict mapping file names to lists of line numbers of '
+                         'code for which coverage information is desired.')
+  argparser.add_argument('-v', '--verbose', action='count',
+                         help='Print verbose log information.')
+  args = argparser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+  devil_chromium.Initialize()
+  GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/emma_coverage_stats_test.py b/src/build/android/emma_coverage_stats_test.py
new file mode 100755
index 0000000..d53292c
--- /dev/null
+++ b/src/build/android/emma_coverage_stats_test.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+from xml.etree import ElementTree
+
+import emma_coverage_stats
+
+import mock  # pylint: disable=import-error
+
+# Expected GetCoverageDict() result when no files have coverage data.
+EMPTY_COVERAGE_STATS_DICT = {
+  'files': {},
+  'patch': {
+    'incremental': {
+      'covered': 0, 'total': 0
+    }
+  }
+}
+
+
+class _EmmaHtmlParserTest(unittest.TestCase):
+  """Tests for _EmmaHtmlParser.
+
+  Uses modified EMMA report HTML that contains only the subset of tags needed
+  for test verification.
+  """
+
+  def setUp(self):
+    # The fixtures below mirror EMMA's uppercase-tag report markup; only the
+    # tags and attributes the parser's XPath selectors touch are included.
+    self.emma_dir = 'fake/dir/'
+    self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
+    self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
+    self.index_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CLASS="it" CELLSPACING="0">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="_files/0.html"'
+              '>org.chromium.chrome.browser</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="_files/1.html"'
+              '>org.chromium.chrome.browser.tabmodel</A></TD>'
+              '<TD CLASS="h">0%   (0/8)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_1_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+              '<TD CLASS="h">0%   (0/9)</TD>'
+              '<TD CLASS="h">0%   (0/97)</TD>'
+              '<TD CLASS="h">0%   (0/26)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_2_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
+              '<TD CLASS="h">0%   (0/1)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.partially_covered_tr_html = (
+      '<TR CLASS="p">'
+        '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
+        '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
+          'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
+      '</TR>'
+    )
+    self.covered_tr_html = (
+      '<TR CLASS="c">'
+        '<TD CLASS="l">110</TD>'
+        '<TD>        if (mSelectors.get(index) != null) {</TD>'
+      '</TR>'
+    )
+    self.not_executable_tr_html = (
+      '<TR>'
+        '<TD CLASS="l">109</TD>'
+        '<TD> </TD>'
+      '</TR>'
+    )
+    # Exercises the fallback where the line number is nested in an <A> tag.
+    self.tr_with_extra_a_tag = (
+      '<TR CLASS="z">'
+        '<TD CLASS="l">'
+          '<A name="1f">54</A>'
+        '</TD>'
+        '<TD>            }</TD>'
+      '</TR>'
+    )
+
+  def testInit(self):
+    emma_dir = self.emma_dir
+    parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
+    self.assertEqual(parser._base_dir, emma_dir)
+    self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
+    self.assertEqual(parser._index_path, 'fake/dir/index.html')
+
+  def testFindElements_basic(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TD')
+    self.assertIs(type(found), list)
+    self.assertIs(type(found[0]), ElementTree.Element)
+    self.assertEqual(found[0].text, 'Test HTML')
+
+  def testFindElements_multipleElements(self):
+    multiple_trs = self.not_executable_tr_html + self.covered_tr_html
+    read_values = ['<div>' + multiple_trs + '</div>']
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEquals(2, len(found))
+
+  def testFindElements_noMatch(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(found, [])
+
+  def testFindElements_badFilePath(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        self.parser._FindElements('fake', xpath_selector='//tr')
+
+  def testGetPackageNameToEmmaFileDict_basic(self):
+    expected_dict = {
+      'org.chromium.chrome.browser.AccessibilityUtil.java':
+      'fake/dir/_files/23.html',
+      'org.chromium.chrome.browser.ContextualMenuBar.java':
+      'fake/dir/_files/22.html',
+      'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
+      'fake/dir/_files/1e.html',
+      'org.chromium.chrome.browser.ContentSetting.java':
+      'fake/dir/_files/1f.html',
+      'org.chromium.chrome.browser.DevToolsServer.java':
+      'fake/dir/_files/20.html',
+      'org.chromium.chrome.browser.NavigationPopup.java':
+      'fake/dir/_files/24.html',
+      'org.chromium.chrome.browser.FileProviderHelper.java':
+      'fake/dir/_files/21.html'}
+
+    read_values = [self.index_html, self.package_1_class_list_html,
+                   self.package_2_class_list_html]
+    return_dict, mock_open = MockOpenForFunction(
+        self.parser.GetPackageNameToEmmaFileDict, read_values)
+
+    self.assertDictEqual(return_dict, expected_dict)
+    self.assertEqual(mock_open.call_count, 3)
+    calls = [mock.call('fake/dir/index.html'),
+             mock.call('fake/dir/_files/1.html'),
+             mock.call('fake/dir/_files/0.html')]
+    mock_open.assert_has_calls(calls)
+
+  def testGetPackageNameToEmmaFileDict_noPackageElements(self):
+    self.parser._FindElements = mock.Mock(return_value=[])
+    return_dict = self.parser.GetPackageNameToEmmaFileDict()
+    self.assertDictEqual({}, return_dict)
+
+  def testGetLineCoverage_status_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.COVERED)
+
+  def testGetLineCoverage_status_statusMissing(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.not_executable_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.NOT_EXECUTABLE)
+
+  def testGetLineCoverage_fractionalCoverage_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)
+
+  def testGetLineCoverage_fractionalCoverage_partial(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.partially_covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)
+
+  def testGetLineCoverage_lineno_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].lineno, 110)
+
+  def testGetLineCoverage_lineno_withAlternativeHtml(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.tr_with_extra_a_tag])
+    self.assertEqual(line_coverage[0].lineno, 54)
+
+  def testGetLineCoverage_source(self):
+    self.parser._FindElements = mock.Mock(
+        return_value=[ElementTree.fromstring(self.covered_tr_html)])
+    line_coverage = self.parser.GetLineCoverage('fake_path')
+    self.assertEqual(line_coverage[0].source,
+                     '        if (mSelectors.get(index) != null) {')
+
+  def testGetLineCoverage_multipleElements(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.covered_tr_html, self.partially_covered_tr_html,
+         self.tr_with_extra_a_tag])
+    self.assertEqual(len(line_coverage), 3)
+
+  def GetLineCoverageWithFakeElements(self, html_elements):
+    """Wraps GetLineCoverage so mock HTML can easily be used.
+
+    Args:
+      html_elements: List of strings each representing an HTML element.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    elements = [ElementTree.fromstring(string) for string in html_elements]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=elements):
+      return self.parser.GetLineCoverage('fake_path')
+
+
+class _EmmaCoverageStatsTest(unittest.TestCase):
+  """Tests for _EmmaCoverageStats."""
+
+  def setUp(self):
+    self.good_source_to_emma = {
+      '/path/to/1/File1.java': '/emma/1.html',
+      '/path/2/File2.java': '/emma/2.html',
+      '/path/2/File3.java': '/emma/3.html'
+    }
+    self.line_coverage = [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
+        emma_coverage_stats.LineCoverage(
+            6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
+    ]
+    self.lines_for_coverage = [1, 3, 5, 6]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=[]):
+      self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
+          'fake_dir', {})
+
+  def testInit(self):
+    coverage_stats = self.simple_coverage
+    self.assertIsInstance(coverage_stats._emma_parser,
+                          emma_coverage_stats._EmmaHtmlParser)
+    self.assertIsInstance(coverage_stats._source_to_emma, dict)
+
+  def testNeedsCoverage_withExistingJavaFile(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertTrue(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_withNonJavaFile(self):
+    test_file = '/path/to/file/File.c'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_fileDoesNotExist(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=False):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testGetPackageNameFromFile_basic(self):
+    test_file_text = """// Test Copyright
+    package org.chromium.chrome.browser;
+    import android.graphics.RectF;"""
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        [test_file_text], file_path='/path/to/file/File.java')
+    self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
+
+  def testGetPackageNameFromFile_noPackageStatement(self):
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        ['not a package statement'], file_path='/path/to/file/File.java')
+    self.assertIsNone(result_package)
+
+  def testGetSummaryStatsForLines_basic(self):
+    covered, total = self.simple_coverage.GetSummaryStatsForLines(
+        self.line_coverage)
+    self.assertEqual(covered, 3.05)
+    self.assertEqual(total, 5)
+
+  def testGetSourceFileToEmmaFileDict(self):
+    package_names = {
+      '/path/to/1/File1.java': 'org.fake.one.File1.java',
+      '/path/2/File2.java': 'org.fake.File2.java',
+      '/path/2/File3.java': 'org.fake.File3.java'
+    }
+    package_to_emma = {
+      'org.fake.one.File1.java': '/emma/1.html',
+      'org.fake.File2.java': '/emma/2.html',
+      'org.fake.File3.java': '/emma/3.html'
+    }
+    with mock.patch('os.path.exists', return_value=True):
+      coverage_stats = self.simple_coverage
+      coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
+          return_value=package_to_emma)
+      coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
+      result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
+          package_names.keys())
+    self.assertDictEqual(result_dict, self.good_source_to_emma)
+
+  def testGetCoverageDictForFile(self):
+    line_coverage = self.line_coverage
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
+    self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
+    lines = self.lines_for_coverage
+    expected_dict = {
+      'absolute': {
+        'covered': 3.05,
+        'total': 5
+      },
+      'incremental': {
+        'covered': 2.05,
+        'total': 3
+      },
+      'source': [
+        {
+          'line': line_coverage[0].source,
+          'coverage': line_coverage[0].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[0].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[1].source,
+          'coverage': line_coverage[1].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[1].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[2].source,
+          'coverage': line_coverage[2].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[2].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[3].source,
+          'coverage': line_coverage[3].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[3].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[4].source,
+          'coverage': line_coverage[4].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[4].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[5].source,
+          'coverage': line_coverage[5].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[5].fractional_line_coverage,
+        }
+      ]
+    }
+    result_dict = self.simple_coverage.GetCoverageDictForFile(
+        '/fake/src', lines)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_emptyCoverage(self):
+    expected_dict = {
+      'absolute': {'covered': 0, 'total': 0},
+      'incremental': {'covered': 0, 'total': 0},
+      'source': []
+    }
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
+    self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_missingCoverage(self):
+    self.simple_coverage._source_to_emma = {}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
+    self.assertIsNone(result_dict)
+
+  def testGetCoverageDict_basic(self):
+    files_for_coverage = {
+      '/path/to/1/File1.java': [1, 3, 4],
+      '/path/2/File2.java': [1, 2]
+    }
+    self.simple_coverage._source_to_emma = {
+      '/path/to/1/File1.java': 'emma_1',
+      '/path/2/File2.java': 'emma_2'
+    }
+    coverage_info = {
+      'emma_1': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.COVERED, 1.0)
+      ],
+      'emma_2': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0)
+      ]
+    }
+    expected_dict = {
+      'files': {
+        '/path/2/File2.java': {
+          'absolute': {'covered': 1, 'total': 2},
+          'incremental': {'covered': 1, 'total': 2},
+          'source': [{'changed': True, 'coverage': 0,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        },
+        '/path/to/1/File1.java': {
+          'absolute': {'covered': 2.5, 'total': 3},
+          'incremental': {'covered': 2, 'total': 2},
+          'source': [{'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': False, 'coverage': 2,
+                      'line': '', 'fractional_coverage': 0.5},
+                     {'changed': True, 'coverage': -1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        }
+      },
+      'patch': {'incremental': {'covered': 3, 'total': 4}}
+    }
+    # Return the relevant coverage info for each file.
+    self.simple_coverage._emma_parser.GetLineCoverage = (
+        lambda x: coverage_info[x])
+    result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDict_noCoverage(self):
+    result_dict = self.simple_coverage.GetCoverageDict({})
+    self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
+
+
+class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
+  """Tests for GenerateCoverageReport."""
+
+  def testGenerateCoverageReport_missingJsonFile(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        emma_coverage_stats.GenerateCoverageReport('', '', '')
+
+  def testGenerateCoverageReport_invalidJsonFile(self):
+    with self.assertRaises(ValueError):
+      with mock.patch('os.path.exists', return_value=True):
+        MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
+                            line_coverage_file='', out_file_path='',
+                            coverage_dir='')
+
+
+def MockOpenForFunction(func, side_effects, **kwargs):
+  """Allows easy mock open and read for callables that open multiple files.
+
+  Will mock the python open function in a way such that each time read() is
+  called on an open file, the next element in |side_effects| is returned. This
+  makes it easier to test functions that call open() multiple times.
+
+  Args:
+    func: The callable to invoke once mock files are setup.
+    side_effects: A list of return values for each file to return once read.
+      Length of list should be equal to the number of calls to open in |func|.
+    **kwargs: Keyword arguments to be passed to |func|.
+
+  Returns:
+    A tuple containing the return value of |func| and the MagicMock object used
+      to mock all calls to open respectively.
+  """
+  mock_open = mock.mock_open()
+  mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
+                           for side_effect in side_effects]
+  with mock.patch('__builtin__.open', mock_open):
+    return func(**kwargs), mock_open
+
+
+if __name__ == '__main__':
+  # Suppress logging messages.
+  unittest.main(buffer=True)
diff --git a/src/build/android/envsetup.sh b/src/build/android/envsetup.sh
new file mode 100755
index 0000000..7f549d9
--- /dev/null
+++ b/src/build/android/envsetup.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+  local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+
+  # Some tools expect these environmental variables.
+  export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+  # ANDROID_HOME is deprecated, but generally means the same thing as
+  # ANDROID_SDK_ROOT and shouldn't hurt to set it.
+  export ANDROID_HOME="$ANDROID_SDK_ROOT"
+
+  # Set up PATH to point to SDK-provided (and other) tools, such as 'adb'.
+  export PATH=${CHROME_SRC}/build/android:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/tools/:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/platform-tools:$PATH
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/src/build/android/fast_local_dev_server.py b/src/build/android/fast_local_dev_server.py
new file mode 100755
index 0000000..a35c500
--- /dev/null
+++ b/src/build/android/fast_local_dev_server.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an server to offload non-critical-path GN targets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import queue
+import shutil
+import socket
+import subprocess
+import sys
+import threading
+from typing import Callable, Dict, List, Optional, Tuple
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import server_utils
+
+
+def log(msg: str, *, end: str = ''):
+  # Shrink the message (keeping a 2-char prefix and using the rest of the room
+  # for the suffix) according to terminal size so it is always one line.
+  width = shutil.get_terminal_size().columns
+  prefix = f'[{TaskStats.prefix()}] '
+  max_msg_width = width - len(prefix)
+  if len(msg) > max_msg_width:
+    length_to_show = max_msg_width - 5  # Account for ellipsis and header.
+    msg = f'{msg[:2]}...{msg[-length_to_show:]}'
+  # \r to return the carriage to the beginning of line.
+  # \033[K to replace the normal \n to erase until the end of the line.
+  # Avoid the default line ending so the next \r overwrites the same line just
+  #     like ninja's output.
+  print(f'\r{prefix}{msg}\033[K', end=end, flush=True)
+
+
+class TaskStats:
+  """Class to keep track of aggregate stats for all tasks across threads."""
+  _num_processes = 0
+  _completed_tasks = 0
+  _total_tasks = 0
+  _lock = threading.Lock()
+
+  @classmethod
+  def no_running_processes(cls):
+    return cls._num_processes == 0
+
+  @classmethod
+  def add_task(cls):
+    # Only the main thread calls this, so there is no need for locking.
+    cls._total_tasks += 1
+
+  @classmethod
+  def add_process(cls):
+    with cls._lock:
+      cls._num_processes += 1
+
+  @classmethod
+  def remove_process(cls):
+    with cls._lock:
+      cls._num_processes -= 1
+
+  @classmethod
+  def complete_task(cls):
+    with cls._lock:
+      cls._completed_tasks += 1
+
+  @classmethod
+  def prefix(cls):
+    # Ninja's prefix is: [205 processes, 6/734 @ 6.5/s : 0.922s ]
+    # Time taken and task completion rate are not important for the build server
+    # since it is always running in the background and uses idle priority for
+    # its tasks.
+    with cls._lock:
+      word = 'process' if cls._num_processes == 1 else 'processes'
+      return (f'{cls._num_processes} {word}, '
+              f'{cls._completed_tasks}/{cls._total_tasks}')
+
+
+class TaskManager:
+  """Class to encapsulate a threadsafe queue and handle deactivating it."""
+
+  def __init__(self):
+    self._queue: queue.SimpleQueue[Task] = queue.SimpleQueue()
+    self._deactivated = False
+
+  def add_task(self, task: Task):
+    assert not self._deactivated
+    TaskStats.add_task()
+    self._queue.put(task)
+    log(f'QUEUED {task.name}')
+    self._maybe_start_tasks()
+
+  def deactivate(self):
+    self._deactivated = True
+    while not self._queue.empty():
+      try:
+        task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      task.terminate()
+
+  @staticmethod
+  def _num_running_processes():
+    with open('/proc/stat') as f:
+      for line in f:
+        if line.startswith('procs_running'):
+          return int(line.rstrip().split()[1])
+    assert False, 'Could not read /proc/stat'
+
+  def _maybe_start_tasks(self):
+    if self._deactivated:
+      return
+    # Include load avg so that a small dip in the number of currently running
+    # processes will not cause new tasks to be started while the overall load is
+    # heavy.
+    cur_load = max(self._num_running_processes(), os.getloadavg()[0])
+    num_started = 0
+    # Always start a task if we don't have any running, so that all tasks are
+    # eventually finished. Try starting up tasks when the overall load is light.
+    # Limit to at most 2 new tasks to prevent ramping up too fast. There is a
+    # chance where multiple threads call _maybe_start_tasks and each gets to
+    # spawn up to 2 new tasks, but since the only downside is some build tasks
+    # get worked on earlier rather than later, it is not worth mitigating.
+    while num_started < 2 and (TaskStats.no_running_processes()
+                               or num_started + cur_load < os.cpu_count()):
+      try:
+        next_task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      num_started += next_task.start(self._maybe_start_tasks)
+
+
+# TODO(wnwen): Break this into Request (encapsulating what ninja sends) and Task
+#              when a Request starts to be run. This would eliminate ambiguity
+#              about when and whether _proc/_thread are initialized.
+class Task:
+  """Class to represent one task and operations on it."""
+
+  def __init__(self, name: str, cwd: str, cmd: List[str], stamp_file: str):
+    self.name = name
+    self.cwd = cwd
+    self.cmd = cmd
+    self.stamp_file = stamp_file
+    self._terminated = False
+    self._lock = threading.Lock()
+    self._proc: Optional[subprocess.Popen] = None
+    self._thread: Optional[threading.Thread] = None
+    self._return_code: Optional[int] = None
+
+  @property
+  def key(self):
+    return (self.cwd, self.name)
+
+  def start(self, on_complete_callback: Callable[[], None]) -> int:
+    """Starts the task if it has not already been terminated.
+
+    Returns the number of processes that have been started. This is called at
+    most once when the task is popped off the task queue."""
+
+    # The environment variable forces the script to actually run in order to
+    # avoid infinite recursion.
+    env = os.environ.copy()
+    env[server_utils.BUILD_SERVER_ENV_VARIABLE] = '1'
+
+    with self._lock:
+      if self._terminated:
+        return 0
+      # Use os.nice(19) to ensure the lowest priority (idle) for these analysis
+      # tasks since we want to avoid slowing down the actual build.
+      # TODO(wnwen): Use ionice to reduce resource consumption.
+      TaskStats.add_process()
+      log(f'STARTING {self.name}')
+      self._proc = subprocess.Popen(
+          self.cmd,
+          stdout=subprocess.PIPE,
+          stderr=subprocess.STDOUT,
+          cwd=self.cwd,
+          env=env,
+          text=True,
+          preexec_fn=lambda: os.nice(19),
+      )
+      self._thread = threading.Thread(
+          target=self._complete_when_process_finishes,
+          args=(on_complete_callback, ))
+      self._thread.start()
+      return 1
+
+  def terminate(self):
+    """Can be called multiple times to cancel and ignore the task's output."""
+
+    with self._lock:
+      if self._terminated:
+        return
+      self._terminated = True
+    # It is safe to access _proc and _thread outside of _lock since they are
+    # only changed by self.start holding _lock when self._terminated is false.
+    # Since we have just set self._terminated to true inside of _lock, we know
+    # that neither _proc nor _thread will be changed from this point onwards.
+    if self._proc:
+      self._proc.terminate()
+      self._proc.wait()
+    # Ensure that self._complete is called either by the thread or by us.
+    if self._thread:
+      self._thread.join()
+    else:
+      self._complete()
+
+  def _complete_when_process_finishes(self,
+                                      on_complete_callback: Callable[[], None]):
+    assert self._proc
+    # We know Popen.communicate will return a str and not a byte since it is
+    # constructed with text=True.
+    stdout: str = self._proc.communicate()[0]
+    self._return_code = self._proc.returncode
+    TaskStats.remove_process()
+    self._complete(stdout)
+    on_complete_callback()
+
+  def _complete(self, stdout: str = ''):
+    """Update the user and ninja after the task has run or been terminated.
+
+    This method should only be run once per task. Avoid modifying the task so
+    that this method does not need locking."""
+
+    TaskStats.complete_task()
+    failed = False
+    if self._terminated:
+      log(f'TERMINATED {self.name}')
+      # Ignore stdout as it is now outdated.
+      failed = True
+    else:
+      log(f'FINISHED {self.name}')
+      if stdout or self._return_code != 0:
+        failed = True
+        # An extra new line is needed since we want to preserve the previous
+        # _log line. Use a single print so that it is threadsafe.
+        # TODO(wnwen): Improve stdout display by parsing over it and moving the
+        #              actual error to the bottom. Otherwise long command lines
+        #              in the Traceback section obscure the actual error(s).
+        print('\n' + '\n'.join([
+            f'FAILED: {self.name}',
+            f'Return code: {self._return_code}',
+            ' '.join(self.cmd),
+            stdout,
+        ]))
+
+    if failed:
+      # Force ninja to consider failed targets as dirty.
+      try:
+        os.unlink(os.path.join(self.cwd, self.stamp_file))
+      except FileNotFoundError:
+        pass
+    else:
+      # Ninja will rebuild targets when their inputs change even if their stamp
+      # file has a later modified time. Thus we do not need to worry about the
+      # script being run by the build server updating the mtime incorrectly.
+      pass
+
+
+def _listen_for_request_data(sock: socket.socket):
+  while True:
+    conn = sock.accept()[0]
+    received = []
+    with conn:
+      while True:
+        data = conn.recv(4096)
+        if not data:
+          break
+        received.append(data)
+    if received:
+      yield json.loads(b''.join(received))
+
+
+def _process_requests(sock: socket.socket):
+  # Since dicts in python can contain anything, explicitly type tasks to help
+  # make static type checking more useful.
+  tasks: Dict[Tuple[str, str], Task] = {}
+  task_manager = TaskManager()
+  try:
+    for data in _listen_for_request_data(sock):
+      task = Task(name=data['name'],
+                  cwd=data['cwd'],
+                  cmd=data['cmd'],
+                  stamp_file=data['stamp_file'])
+      existing_task = tasks.get(task.key)
+      if existing_task:
+        existing_task.terminate()
+      tasks[task.key] = task
+      task_manager.add_task(task)
+  except KeyboardInterrupt:
+    log('STOPPING SERVER...', end='\n')
+    # Gracefully shut down the task manager, terminating all queued tasks.
+    task_manager.deactivate()
+    # Terminate all currently running tasks.
+    for task in tasks.values():
+      task.terminate()
+    log('STOPPED', end='\n')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.parse_args()
+  with socket.socket(socket.AF_UNIX) as sock:
+    sock.bind(server_utils.SOCKET_ADDRESS)
+    sock.listen()
+    _process_requests(sock)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/generate_jacoco_report.py b/src/build/android/generate_jacoco_report.py
new file mode 100755
index 0000000..d0a9987
--- /dev/null
+++ b/src/build/android/generate_jacoco_report.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env vpython
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates Jacoco coverage files to produce output."""
+
+from __future__ import print_function
+
+import argparse
+import fnmatch
+import json
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+
+# Source paths should be passed to Jacoco in a way that the relative file paths
+# reflect the class package name.
+_PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium']
+
+# The sources_json_file is generated by jacoco_instr.py with source directories
+# and input path to non-instrumented jars.
+# e.g.
+# 'source_dirs': [
+#   "chrome/android/java/src/org/chromium/chrome/browser/toolbar/bottom",
+#   "chrome/android/java/src/org/chromium/chrome/browser/ui/system",
+# ...]
+# 'input_path':
+#   '$CHROMIUM_OUTPUT_DIR/\
+#    obj/chrome/android/features/tab_ui/java__process_prebuilt-filtered.jar'
+
+_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
+
+# These should match the jar class files generated in internal_rules.gni
+_DEVICE_CLASS_EXCLUDE_SUFFIX = 'host_filter.jar'
+_HOST_CLASS_EXCLUDE_SUFFIX = 'device_filter.jar'
+
+
+def _CreateClassfileArgs(class_files, exclude_suffix=None):
+  """Returns a list of files that don't have a given suffix.
+
+  Args:
+    class_files: A list of class files.
+    exclude_suffix: Suffix to look for to exclude.
+
+  Returns:
+    A list of files that don't use the suffix.
+  """
+  result_class_files = []
+  for f in class_files:
+    if exclude_suffix:
+      if not f.endswith(exclude_suffix):
+        result_class_files += ['--classfiles', f]
+    else:
+      result_class_files += ['--classfiles', f]
+
+  return result_class_files
+
+
+def _GenerateReportOutputArgs(args, class_files, report_type):
+  class_jar_exclude = None
+  if report_type == 'device':
+    class_jar_exclude = _DEVICE_CLASS_EXCLUDE_SUFFIX
+  elif report_type == 'host':
+    class_jar_exclude = _HOST_CLASS_EXCLUDE_SUFFIX
+
+  cmd = _CreateClassfileArgs(class_files, class_jar_exclude)
+  if args.format == 'html':
+    report_dir = os.path.join(args.output_dir, report_type)
+    if not os.path.exists(report_dir):
+      os.makedirs(report_dir)
+    cmd += ['--html', report_dir]
+  elif args.format == 'xml':
+    cmd += ['--xml', args.output_file]
+  elif args.format == 'csv':
+    cmd += ['--csv', args.output_file]
+
+  return cmd
+
+
+def _GetFilesWithSuffix(root_dir, suffix):
+  """Gets all files with a given suffix.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    suffix: Suffix to look for.
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*' + suffix)
+    files.extend([os.path.join(root, basename) for basename in basenames])
+
+  return files
+
+
+def _GetExecFiles(root_dir, exclude_substr=None):
+  """ Gets all .exec files
+
+  Args:
+    root_dir: Root directory in which to search for files.
+    exclude_substr: Substring which should be absent in filename. If None, all
+      files are selected.
+
+  Returns:
+    A list of absolute paths to .exec files
+
+  """
+  all_exec_files = _GetFilesWithSuffix(root_dir, ".exec")
+  valid_exec_files = []
+  for exec_file in all_exec_files:
+    if not exclude_substr or exclude_substr not in exec_file:
+      valid_exec_files.append(exec_file)
+  return valid_exec_files
+
+
+def _ParseArguments(parser):
+  """Parses the command line arguments.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    The parsed arguments.
+  """
+  parser.add_argument(
+      '--format',
+      required=True,
+      choices=['html', 'xml', 'csv'],
+      help='Output report format. Choose one from html, xml and csv.')
+  parser.add_argument(
+      '--device-or-host',
+      choices=['device', 'host'],
+      help='Selection on whether to use the device classpath files or the '
+      'host classpath files. Host would typically be used for junit tests '
+      ' and device for tests that run on the device. Only used for xml and csv'
+      ' reports.')
+  parser.add_argument('--output-dir', help='html report output directory.')
+  parser.add_argument('--output-file',
+                      help='xml file to write device coverage results.')
+  parser.add_argument(
+      '--coverage-dir',
+      required=True,
+      help='Root of the directory in which to search for '
+      'coverage data (.exec) files.')
+  parser.add_argument('--exec-filename-excludes',
+                      required=False,
+                      help='Excludes .exec files which contain a particular '
+                      'substring in their name')
+  parser.add_argument(
+      '--sources-json-dir',
+      help='Root of the directory in which to search for '
+      '*__jacoco_sources.json files.')
+  parser.add_argument(
+      '--class-files',
+      nargs='+',
+      help='Location of Java non-instrumented class files. '
+      'Use non-instrumented jars instead of instrumented jars. '
+      'e.g. use chrome_java__process_prebuilt_(host/device)_filter.jar instead'
+      'of chrome_java__process_prebuilt-instrumented.jar')
+  parser.add_argument(
+      '--sources',
+      nargs='+',
+      help='Location of the source files. '
+      'Specified source folders must be the direct parent of the folders '
+      'that define the Java packages.'
+      'e.g. <src_dir>/chrome/android/java/src/')
+  parser.add_argument(
+      '--cleanup',
+      action='store_true',
+      help='If set, removes coverage files generated at '
+      'runtime.')
+  args = parser.parse_args()
+
+  if args.format == 'html' and not args.output_dir:
+    parser.error('--output-dir needed for report.')
+  if args.format in ('csv', 'xml'):
+    if not args.output_file:
+      parser.error('--output-file needed for xml/csv reports.')
+    if not args.device_or_host and args.sources_json_dir:
+      parser.error('--device-or-host selection needed with --sources-json-dir')
+  if not (args.sources_json_dir or args.class_files):
+    parser.error('At least either --sources-json-dir or --class-files needed.')
+  return args
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  args = _ParseArguments(parser)
+
+  devil_chromium.Initialize()
+
+  coverage_files = _GetExecFiles(args.coverage_dir, args.exec_filename_excludes)
+  if not coverage_files:
+    parser.error('No coverage file found under %s' % args.coverage_dir)
+  print('Found coverage files: %s' % str(coverage_files))
+
+  class_files = []
+  source_dirs = []
+  if args.sources_json_dir:
+    sources_json_files = _GetFilesWithSuffix(args.sources_json_dir,
+                                             _SOURCES_JSON_FILES_SUFFIX)
+    for f in sources_json_files:
+      with open(f, 'r') as json_file:
+        data = json.load(json_file)
+        class_files.extend(data['input_path'])
+        source_dirs.extend(data['source_dirs'])
+
+  # Fix source directories as direct parent of Java packages.
+  fixed_source_dirs = set()
+  for path in source_dirs:
+    for partial in _PARTIAL_PACKAGE_NAMES:
+      if partial in path:
+        fixed_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 path[:path.index(partial)])
+        fixed_source_dirs.add(fixed_dir)
+        break
+
+  if args.class_files:
+    class_files += args.class_files
+  if args.sources:
+    fixed_source_dirs.update(args.sources)
+
+  cmd = [
+      'java', '-jar',
+      os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'jacoco', 'lib',
+                   'jacococli.jar'), 'report'
+  ] + coverage_files
+
+  for source in fixed_source_dirs:
+    cmd += ['--sourcefiles', source]
+
+  if args.format == 'html':
+    # Both reports are generated for html as the cq bot generates an html
+    # report and we wouldn't know which one a developer needed.
+    device_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'device')
+    host_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'host')
+    device_exit_code = cmd_helper.RunCmd(device_cmd)
+    host_exit_code = cmd_helper.RunCmd(host_cmd)
+    exit_code = device_exit_code or host_exit_code
+  else:
+    cmd = cmd + _GenerateReportOutputArgs(args, class_files,
+                                          args.device_or_host)
+    exit_code = cmd_helper.RunCmd(cmd)
+
+  if args.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  # Command tends to exit with status 0 when it actually failed.
+  if not exit_code:
+    if args.format == 'html':
+      if not os.path.isdir(args.output_dir) or not os.listdir(args.output_dir):
+        print('No report generated at %s' % args.output_dir)
+        exit_code = 1
+    elif not os.path.isfile(args.output_file):
+      print('No device coverage report generated at %s' % args.output_file)
+      exit_code = 1
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gradle/AndroidManifest.xml b/src/build/android/gradle/AndroidManifest.xml
new file mode 100644
index 0000000..f3e50e0
--- /dev/null
+++ b/src/build/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright 2018 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by Android Studio's _all target.
+  No <uses-sdk> is allowed due to https://crbug.com/841529.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy">
+</manifest>
diff --git a/src/build/android/gradle/android.jinja b/src/build/android/gradle/android.jinja
new file mode 100644
index 0000000..40d4506
--- /dev/null
+++ b/src/build/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+        {{ prefix }} {
+{% if variables.android_manifest is defined %}
+            manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+            java.srcDirs = [
+{% for path in variables.java_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+            java.filter.exclude([
+{% for path in variables.java_excludes %}
+                "{{ path }}",
+{% endfor %}
+            ])
+{% endif %}
+{% if variables.jni_libs is defined %}
+            jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+            res.srcDirs = [
+{% for path in variables.res_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+        }
+{% endif %}
+{% endmacro %}
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+    compileSdkVersion "{{ compile_sdk_version }}"
+
+    defaultConfig {
+        vectorDrawables.useSupportLibrary = true
+        minSdkVersion 21
+        targetSdkVersion {{ target_sdk_version }}
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
+{% if native is defined %}
+    externalNativeBuild {
+        cmake {
+            path "CMakeLists.txt"
+        }
+    }
+{% endif %}
+
+    sourceSets {
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+        {{ name }} {
+            aidl.srcDirs = []
+            assets.srcDirs = []
+            java.srcDirs = []
+            jni.srcDirs = []
+            renderscript.srcDirs = []
+            res.srcDirs = []
+            resources.srcDirs = []
+        }
+{% endfor %}
+
+{{ expand_sourceset(main, 'main') }}
+{{ expand_sourceset(test, 'test') }}
+{% if android_test is defined %}
+{% for t in android_test %}
+{{ expand_sourceset(t, 'androidTest') }}
+{% endfor %}
+{% endif %}
+    }
+}
+
+{% include 'dependencies.jinja' %}
+
+afterEvaluate {
+    def tasksToDisable = tasks.findAll {
+        return (it.name.equals('generateDebugSources')  // causes unwanted AndroidManifest.java
+                || it.name.equals('generateReleaseSources')
+                || it.name.endsWith('BuildConfig')  // causes unwanted BuildConfig.java
+                || it.name.equals('preDebugAndroidTestBuild')
+{% if not use_gradle_process_resources %}
+                || it.name.endsWith('Assets')
+                || it.name.endsWith('Resources')
+                || it.name.endsWith('ResValues')
+{% endif %}
+                || it.name.endsWith('Aidl')
+                || it.name.endsWith('Renderscript')
+                || it.name.endsWith('Shaders'))
+    }
+    tasksToDisable.each { Task task ->
+      task.enabled = false
+    }
+}
diff --git a/src/build/android/gradle/cmake.jinja b/src/build/android/gradle/cmake.jinja
new file mode 100644
index 0000000..b727388
--- /dev/null
+++ b/src/build/android/gradle/cmake.jinja
@@ -0,0 +1,25 @@
+{# Copyright 2018 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+# Generated by //build/android/generate_gradle.py
+
+cmake_minimum_required(VERSION 3.4.1)
+
+project(chrome C CXX)
+
+{% if native.includes is defined %}
+include_directories(
+{% for path in native.includes %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
+
+# Android studio will index faster when adding all sources into one library.
+{% if native.sources is defined %}
+add_library("chromium"
+{% for path in native.sources %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
diff --git a/src/build/android/gradle/dependencies.jinja b/src/build/android/gradle/dependencies.jinja
new file mode 100644
index 0000000..87bc312
--- /dev/null
+++ b/src/build/android/gradle/dependencies.jinja
@@ -0,0 +1,28 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_deps(variables, prefix) %}
+{% if variables is defined %}
+{% if variables.prebuilts is defined %}
+{% for path in variables.prebuilts %}
+    {{ prefix }} files("{{ path }}")
+{% endfor %}
+{% endif %}
+{% if variables.java_project_deps is defined %}
+{% for proj in variables.java_project_deps %}
+    {{ prefix }} project(":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% if variables.android_project_deps is defined %}
+{% for proj in variables.android_project_deps %}
+    {{ prefix }} project(path: ":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+dependencies {
+{{ expand_deps(main, 'implementation') }}
+{{ expand_deps(test, 'testImplementation') }}
+{{ expand_deps(android_test, 'androidTestImplementation') }}
+}
diff --git a/src/build/android/gradle/generate_gradle.py b/src/build/android/gradle/generate_gradle.py
new file mode 100755
index 0000000..80d0b0a
--- /dev/null
+++ b/src/build/android/gradle/generate_gradle.py
@@ -0,0 +1,932 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import collections
+import glob
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.dirname(_BUILD_ANDROID))
+import gn_helpers
+
+_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
+                                 'depot_tools')
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle',
+    'AndroidManifest.xml')
+_FILE_DIR = os.path.dirname(__file__)
+_GENERATED_JAVA_SUBDIR = 'generated_java'
+_JNI_LIBS_SUBDIR = 'symlinked-libs'
+_ARMEABI_SUBDIR = 'armeabi'
+_GRADLE_BUILD_FILE = 'build.gradle'
+_CMAKE_FILE = 'CMakeLists.txt'
+# This needs to come first alphabetically among all modules.
+_MODULE_ALL = '_all'
+_SRC_INTERNAL = os.path.join(
+    os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk'
+
+_DEFAULT_TARGETS = [
+    '//android_webview/test/embedded_test_server:aw_net_test_support_apk',
+    '//android_webview/test:webview_instrumentation_apk',
+    '//android_webview/test:webview_instrumentation_test_apk',
+    '//base:base_junit_tests',
+    '//chrome/android:chrome_junit_tests',
+    '//chrome/android:chrome_public_apk',
+    '//chrome/android:chrome_public_test_apk',
+    '//content/public/android:content_junit_tests',
+    '//content/shell/android:content_shell_apk',
+    # Below must be included even with --all since they are libraries.
+    '//base/android/jni_generator:jni_processor',
+    '//tools/android/errorprone_plugin:errorprone_plugin_java',
+]
+
+_EXCLUDED_PREBUILT_JARS = [
+    # Android Studio already provides Desugar runtime.
+    # Including it would cause linking error because of a duplicate class.
+    'lib.java/third_party/bazel/desugar/Desugar-runtime.jar'
+]
+
+
+def _TemplatePath(name):
+  return os.path.join(_FILE_DIR, '{}.jinja'.format(name))
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+  """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+  If new_cwd is not specified, absolute paths are returned.
+  If old_cwd is not specified, constants.GetOutDirectory() is assumed.
+
+  Args:
+    path_or_list: A single path string, an iterable of paths, or None.
+    new_cwd: Directory the returned path(s) should be relative to.
+    old_cwd: Directory the input path(s) are currently relative to.
+
+  Returns:
+    [] for None input; otherwise the rebased path or list of paths,
+    mirroring the shape of the input.
+  """
+  if path_or_list is None:
+    return []
+  # Recurse element-wise for non-string iterables (basestring: Python 2).
+  if not isinstance(path_or_list, basestring):
+    return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+  if old_cwd is None:
+    old_cwd = constants.GetOutDirectory()
+  old_cwd = os.path.abspath(old_cwd)
+  if new_cwd:
+    new_cwd = os.path.abspath(new_cwd)
+    return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+  return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
+
+def _IsSubpathOf(child, parent):
+  """Returns whether |child| is a subpath of |parent|."""
+  return not os.path.relpath(child, parent).startswith(os.pardir)
+
+
+def _WriteFile(path, data):
+  """Writes |data| to |path|, constucting parent directories if necessary."""
+  logging.info('Writing %s', path)
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    os.makedirs(dirname)
+  with codecs.open(path, 'w', 'utf-8') as output_file:
+    output_file.write(data)
+
+
+def _RunGnGen(output_dir, args=None):
+  """Runs `gn gen <output_dir>` using the depot_tools copy of gn.
+
+  Args:
+    output_dir: GN output directory to generate.
+    args: Optional list of extra command-line arguments for gn.
+
+  Raises:
+    subprocess.CalledProcessError: If gn exits non-zero.
+  """
+  cmd = [os.path.join(_DEPOT_TOOLS_PATH, 'gn'), 'gen', output_dir]
+  if args:
+    cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _RunNinja(output_dir, args):
+  """Builds the given targets in |output_dir| via autoninja from PATH.
+
+  Args:
+    output_dir: Build output directory passed to -C.
+    args: Additional ninja arguments (typically target names).
+
+  Raises:
+    subprocess.CalledProcessError: If the build fails.
+  """
+  # Don't use version within _DEPOT_TOOLS_PATH, since most devs don't use
+  # that one when building.
+  cmd = ['autoninja', '-C', output_dir]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _QueryForAllGnTargets(output_dir):
+  """Returns all Java-related GN labels as a list of strings.
+
+  Delegates to //build/android/list_java_targets.py; --build makes that
+  script also build the targets so their .build_config files exist.
+  """
+  cmd = [
+      os.path.join(_BUILD_ANDROID, 'list_java_targets.py'), '--gn-labels',
+      '--nested', '--build', '--output-directory', output_dir
+  ]
+  logging.info('Running: %r', cmd)
+  return subprocess.check_output(cmd).splitlines()
+
+
+class _ProjectEntry(object):
+  """Helper class for project entries."""
+
+  # Maps gn_target -> _ProjectEntry so equal targets share one instance
+  # (and therefore share the lazily-loaded state below).
+  _cached_entries = {}
+
+  def __init__(self, gn_target):
+    # Use _ProjectEntry.FromGnTarget instead for caching.
+    self._gn_target = gn_target
+    self._build_config = None  # Lazily-parsed .build_config JSON.
+    self._java_files = None  # Lazily-read list of .java sources.
+    self._all_entries = None  # Lazily-computed transitive dep entries.
+    self.android_test_entries = []
+
+  @classmethod
+  def FromGnTarget(cls, gn_target):
+    """Returns the cached entry for |gn_target| (e.g. //base:base_java)."""
+    assert gn_target.startswith('//'), gn_target
+    if ':' not in gn_target:
+      # Expand the //foo/bar shorthand to //foo/bar:bar.
+      gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+    if gn_target not in cls._cached_entries:
+      cls._cached_entries[gn_target] = cls(gn_target)
+    return cls._cached_entries[gn_target]
+
+  @classmethod
+  def FromBuildConfigPath(cls, path):
+    """Returns the entry for a 'gen/.../<name>.build_config' relative path."""
+    prefix = 'gen/'
+    suffix = '.build_config'
+    assert path.startswith(prefix) and path.endswith(suffix), path
+    subdir = path[len(prefix):-len(suffix)]
+    gn_target = '//%s:%s' % (os.path.split(subdir))
+    return cls.FromGnTarget(gn_target)
+
+  def __hash__(self):
+    return hash(self._gn_target)
+
+  def __eq__(self, other):
+    return self._gn_target == other.GnTarget()
+
+  def GnTarget(self):
+    return self._gn_target
+
+  def NinjaTarget(self):
+    # The GN label minus its leading '//'.
+    return self._gn_target[2:]
+
+  def GnBuildConfigTarget(self):
+    return '%s__build_config_crbug_908819' % self._gn_target
+
+  def GradleSubdir(self):
+    """Returns the output subdirectory."""
+    ninja_target = self.NinjaTarget()
+    # Support targets at the root level. e.g. //:foo
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    return ninja_target.replace(':', os.path.sep)
+
+  def GeneratedJavaSubdir(self):
+    """Returns abs path of the dir holding this target's generated .java."""
+    return _RebasePath(
+        os.path.join('gen', self.GradleSubdir(), _GENERATED_JAVA_SUBDIR))
+
+  def ProjectName(self):
+    """Returns the Gradle project name."""
+    return self.GradleSubdir().replace(os.path.sep, '.')
+
+  def BuildConfig(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+      with open(_RebasePath(path)) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def DepsInfo(self):
+    """Returns the 'deps_info' section of the .build_config."""
+    return self.BuildConfig()['deps_info']
+
+  def Gradle(self):
+    """Returns the 'gradle' section of the .build_config."""
+    return self.BuildConfig()['gradle']
+
+  def Javac(self):
+    """Returns the 'javac' section of the .build_config."""
+    return self.BuildConfig()['javac']
+
+  def GetType(self):
+    """Returns the target type from its .build_config."""
+    return self.DepsInfo()['type']
+
+  def IsValid(self):
+    """Returns whether this target type is one we generate projects for."""
+    return self.GetType() in (
+        'android_apk',
+        'android_app_bundle_module',
+        'java_library',
+        "java_annotation_processor",
+        'java_binary',
+        'junit_binary',
+    )
+
+  def ResSources(self):
+    """Returns files that each list this target's resource sources."""
+    return self.DepsInfo().get('lint_resource_sources', [])
+
+  def JavaFiles(self):
+    """Returns (and caches) the list of .java files for this target."""
+    if self._java_files is None:
+      java_sources_file = self.DepsInfo().get('java_sources_file')
+      java_files = []
+      if java_sources_file:
+        java_sources_file = _RebasePath(java_sources_file)
+        java_files = build_utils.ReadSourcesList(java_sources_file)
+      self._java_files = java_files
+    return self._java_files
+
+  def PrebuiltJars(self):
+    """Returns dependent prebuilt jars, minus the excluded ones."""
+    all_jars = self.Gradle().get('dependent_prebuilt_jars', [])
+    return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS]
+
+  def AllEntries(self):
+    """Returns a list of all entries that the current entry depends on.
+
+    This includes the entry itself to make iterating simpler."""
+    if self._all_entries is None:
+      logging.debug('Generating entries for %s', self.GnTarget())
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_android_projects']]
+      deps.extend(_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_java_projects'])
+      all_entries = set()
+      for dep in deps:
+        all_entries.update(dep.AllEntries())
+      all_entries.add(self)
+      self._all_entries = list(all_entries)
+    return self._all_entries
+
+
+class _ProjectContextGenerator(object):
+  """Helper class to generate gradle build files"""
+  def __init__(self, project_dir, build_vars, use_gradle_process_resources,
+               jinja_processor, split_projects, channel):
+    self.project_dir = project_dir
+    self.build_vars = build_vars
+    self.use_gradle_process_resources = use_gradle_process_resources
+    self.jinja_processor = jinja_processor
+    self.split_projects = split_projects
+    self.channel = channel
+    # Running union across all Generate() calls; consumed later when
+    # building the pseudo '_all' module.
+    self.processed_java_dirs = set()
+    self.processed_prebuilts = set()
+    self.processed_res_dirs = set()
+
+  def _GenJniLibs(self, root_entry):
+    """Returns dirs of symlinked .so files for |root_entry|'s native libs."""
+    libraries = []
+    for entry in self._GetEntries(root_entry):
+      libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
+    if libraries:
+      return _CreateJniLibsDir(constants.GetOutDirectory(),
+          self.EntryOutputDir(root_entry), libraries)
+    return []
+
+  def _GenJavaDirs(self, root_entry):
+    """Returns (java source dirs, exclude patterns) for |root_entry|."""
+    java_files = []
+    for entry in self._GetEntries(root_entry):
+      java_files += entry.JavaFiles()
+    java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes(
+        constants.GetOutDirectory(), java_files)
+    return java_dirs, excludes
+
+  def _GenCustomManifest(self, entry):
+    """Returns the path to the generated AndroidManifest.xml.
+
+    Gradle uses package id from manifest when generating R.class. So, we need
+    to generate a custom manifest if we let gradle process resources. We cannot
+    simply set android.defaultConfig.applicationId because it is not supported
+    for library targets."""
+    resource_packages = entry.Javac().get('resource_packages')
+    if not resource_packages:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'unknown package. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+    elif len(resource_packages) > 1:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'multiple packages. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+
+    variables = {'package': resource_packages[0]}
+    data = self.jinja_processor.Render(_TemplatePath('manifest'), variables)
+    output_file = os.path.join(
+        self.EntryOutputDir(entry), 'AndroidManifest.xml')
+    _WriteFile(output_file, data)
+
+    return output_file
+
+  def _Relativize(self, entry, paths):
+    """Rebases |paths| to be relative to |entry|'s gradle project dir."""
+    return _RebasePath(paths, self.EntryOutputDir(entry))
+
+  def _GetEntries(self, entry):
+    """Returns just |entry| when splitting projects, else its dep closure."""
+    if self.split_projects:
+      return [entry]
+    return entry.AllEntries()
+
+  def EntryOutputDir(self, entry):
+    """Returns the gradle project directory generated for |entry|."""
+    return os.path.join(self.project_dir, entry.GradleSubdir())
+
+  def GeneratedInputs(self, root_entry):
+    """Returns the set of prebuilt jars the generated project references."""
+    generated_inputs = set()
+    for entry in self._GetEntries(root_entry):
+      generated_inputs.update(entry.PrebuiltJars())
+    return generated_inputs
+
+  def GenerateManifest(self, root_entry):
+    """Returns the relative path of the manifest to use, generating a
+    custom one when the target does not provide its own."""
+    android_manifest = root_entry.DepsInfo().get('android_manifest')
+    if not android_manifest:
+      android_manifest = self._GenCustomManifest(root_entry)
+    return self._Relativize(root_entry, android_manifest)
+
+  def Generate(self, root_entry):
+    """Returns the template variables dict for |root_entry|'s build.gradle.
+
+    Also accumulates processed dirs/jars into the processed_* sets.
+    """
+    # TODO(agrieve): Add an option to use interface jars and see if that speeds
+    # things up at all.
+    variables = {}
+    java_dirs, excludes = self._GenJavaDirs(root_entry)
+    java_dirs.extend(
+        e.GeneratedJavaSubdir() for e in self._GetEntries(root_entry))
+    self.processed_java_dirs.update(java_dirs)
+    java_dirs.sort()
+    variables['java_dirs'] = self._Relativize(root_entry, java_dirs)
+    variables['java_excludes'] = excludes
+    variables['jni_libs'] = self._Relativize(
+        root_entry, set(self._GenJniLibs(root_entry)))
+    prebuilts = set(
+        p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
+    self.processed_prebuilts.update(prebuilts)
+    variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
+    res_sources_files = _RebasePath(
+        set(p for e in self._GetEntries(root_entry) for p in e.ResSources()))
+    res_sources = []
+    for res_sources_file in res_sources_files:
+      res_sources.extend(build_utils.ReadSourcesList(res_sources_file))
+    res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources)
+    # Do not add generated resources for the all module since it creates many
+    # duplicates, and currently resources are only used for editing.
+    self.processed_res_dirs.update(res_dirs)
+    variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
+    if self.split_projects:
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_android_projects']]
+      variables['android_project_deps'] = [d.ProjectName() for d in deps]
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_java_projects']]
+      variables['java_project_deps'] = [d.ProjectName() for d in deps]
+    return variables
+
+
+def _ComputeJavaSourceDirs(java_files):
+  """Returns a dictionary of source dirs with each given files in one."""
+  found_roots = {}
+  for path in java_files:
+    path_root = path
+    # Recognize these tokens as top-level.
+    while True:
+      path_root = os.path.dirname(path_root)
+      basename = os.path.basename(path_root)
+      assert basename, 'Failed to find source dir for ' + path
+      if basename in ('java', 'src'):
+        break
+      if basename in ('javax', 'org', 'com'):
+        path_root = os.path.dirname(path_root)
+        break
+    if path_root not in found_roots:
+      found_roots[path_root] = []
+    found_roots[path_root].append(path)
+  return found_roots
+
+
+def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
+  """Returns exclude patterns to exclude unwanted files but keep wanted files.
+
+  - Shortens exclude list by globbing if possible.
+  - Exclude patterns are relative paths from the parent directory.
+
+  Args:
+    wanted_files: Files that must remain visible.
+    unwanted_files: Files to hide.
+    parent_dir: Directory the returned patterns are relative to.
+  """
+  excludes = []
+  files_to_include = set(wanted_files)
+  files_to_exclude = set(unwanted_files)
+  while files_to_exclude:
+    unwanted_file = files_to_exclude.pop()
+    target_exclude = os.path.join(
+        os.path.dirname(unwanted_file), '*.java')
+    # If the directory glob would also hide wanted files, exclude only this
+    # one file; otherwise exclude the whole glob and drop every file it
+    # covers from the remaining work.
+    found_files = set(glob.glob(target_exclude))
+    valid_files = found_files & files_to_include
+    if valid_files:
+      excludes.append(os.path.relpath(unwanted_file, parent_dir))
+    else:
+      excludes.append(os.path.relpath(target_exclude, parent_dir))
+      files_to_exclude -= found_files
+  return excludes
+
+
+def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+  """Computes the list of java source directories and exclude patterns.
+
+  1. Computes the root java source directories from the list of files.
+  2. Compute exclude patterns that exclude all extra files only.
+  3. Returns the list of java source directories and exclude patterns.
+
+  Args:
+    output_dir: Build output dir; files under it are treated as generated
+        and not warned about when absent from disk.
+    java_files: List of .java paths.
+
+  Returns:
+    Tuple of (java_dirs, excludes).
+  """
+  java_dirs = []
+  excludes = []
+  if java_files:
+    java_files = _RebasePath(java_files)
+    computed_dirs = _ComputeJavaSourceDirs(java_files)
+    java_dirs = computed_dirs.keys()
+    all_found_java_files = set()
+
+    for directory, files in computed_dirs.iteritems():
+      found_java_files = build_utils.FindInDirectory(directory, '*.java')
+      all_found_java_files.update(found_java_files)
+      # Files present on disk but not part of the build need excludes.
+      unwanted_java_files = set(found_java_files) - set(files)
+      if unwanted_java_files:
+        logging.debug('Directory requires excludes: %s', directory)
+        excludes.extend(
+            _ComputeExcludeFilters(files, unwanted_java_files, directory))
+
+    missing_java_files = set(java_files) - all_found_java_files
+    # Warn only about non-generated files that are missing.
+    missing_java_files = [p for p in missing_java_files
+                          if not p.startswith(output_dir)]
+    if missing_java_files:
+      logging.warning(
+          'Some java files were not found: %s', missing_java_files)
+
+  return java_dirs, excludes
+
+
+def _CreateRelativeSymlink(target_path, link_path):
+  """Creates a symlink at |link_path| whose target is relative to its dir."""
+  link_dir = os.path.dirname(link_path)
+  relpath = os.path.relpath(target_path, link_dir)
+  logging.debug('Creating symlink %s -> %s', link_path, relpath)
+  os.symlink(relpath, link_path)
+
+
+def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
+  """Creates directory with symlinked .so files if necessary.
+
+  Returns list of JNI libs directories.
+
+  Args:
+    output_dir: Build output dir where the real .so files live.
+    entry_output_dir: Gradle project dir to place the symlink dir under.
+    so_files: Paths of .so files relative to |output_dir|.
+  """
+
+  if so_files:
+    symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR)
+    # Second arg is ignore_errors: the dir may not exist yet.
+    shutil.rmtree(symlink_dir, True)
+    abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR)
+    if not os.path.exists(abi_dir):
+      os.makedirs(abi_dir)
+    for so_file in so_files:
+      target_path = os.path.join(output_dir, so_file)
+      symlinked_path = os.path.join(abi_dir, so_file)
+      _CreateRelativeSymlink(target_path, symlinked_path)
+
+    return [symlink_dir]
+
+  return []
+
+
+def _GenerateLocalProperties(sdk_dir):
+  """Returns the data for local.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      'sdk.dir=%s' % sdk_dir,
+      '',
+  ])
+
+
+def _GenerateGradleWrapperPropertiesCanary():
+  """Returns the data for gradle-wrapper.properties as a string."""
+  # Before May 2020, this wasn't necessary. Might not be necessary at some point
+  # in the future?
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      ('distributionUrl=https\\://services.gradle.org/distributions/'
+       'gradle-6.5-rc-1-all.zip\n'),
+      '',
+  ])
+
+
+def _GenerateGradleProperties():
+  """Returns the data for gradle.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      '',
+      '# Tells Gradle to show warnings during project sync.',
+      'org.gradle.warning.mode=all',
+      '',
+  ])
+
+
+def _GenerateBaseVars(generator, build_vars):
+  variables = {}
+  variables['compile_sdk_version'] = (
+      'android-%s' % build_vars['compile_sdk_version'])
+  target_sdk_version = build_vars['android_sdk_version']
+  if target_sdk_version.isalpha():
+    target_sdk_version = '"{}"'.format(target_sdk_version)
+  variables['target_sdk_version'] = target_sdk_version
+  variables['use_gradle_process_resources'] = (
+      generator.use_gradle_process_resources)
+  variables['channel'] = generator.channel
+  return variables
+
+
+def _GenerateGradleFile(entry, generator, build_vars, jinja_processor):
+  """Returns the data for a project's build.gradle.
+
+  Returns None for target types that do not get their own module
+  (prebuilts, gradle_treat_as_prebuilt, and unrecognized types).
+  """
+  deps_info = entry.DepsInfo()
+  variables = _GenerateBaseVars(generator, build_vars)
+  sourceSetName = 'main'
+
+  if deps_info['type'] == 'android_apk':
+    target_type = 'android_apk'
+  elif deps_info['type'] in ('java_library', 'java_annotation_processor'):
+    is_prebuilt = deps_info.get('is_prebuilt', False)
+    gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False)
+    if is_prebuilt or gradle_treat_as_prebuilt:
+      # No module is generated for prebuilts.
+      return None
+    elif deps_info['requires_android']:
+      target_type = 'android_library'
+    else:
+      target_type = 'java_library'
+  elif deps_info['type'] == 'java_binary':
+    target_type = 'java_binary'
+    variables['main_class'] = deps_info.get('main_class')
+  elif deps_info['type'] == 'junit_binary':
+    target_type = 'android_junit'
+    # junit binaries go into the 'test' source set rather than 'main'.
+    sourceSetName = 'test'
+  else:
+    return None
+
+  variables['target_name'] = os.path.splitext(deps_info['name'])[0]
+  variables['template_type'] = target_type
+  variables['main'] = {}
+  variables[sourceSetName] = generator.Generate(entry)
+  variables['main']['android_manifest'] = generator.GenerateManifest(entry)
+
+  if entry.android_test_entries:
+    variables['android_test'] = []
+    for e in entry.android_test_entries:
+      test_entry = generator.Generate(e)
+      test_entry['android_manifest'] = generator.GenerateManifest(e)
+      variables['android_test'].append(test_entry)
+      # Drop list entries already present in the 'main' source set so the
+      # androidTest set only contains what is unique to the test apk.
+      for key, value in test_entry.iteritems():
+        if isinstance(value, list):
+          test_entry[key] = sorted(set(value) - set(variables['main'][key]))
+
+  return jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+
+
+# Example: //chrome/android:monochrome
+def _GetNative(relative_func, target_names):
+  """Returns an object containing native c++ sources list and its included path
+
+  Iterate through all target_names and their deps to get the list of included
+  paths and sources.
+
+  Args:
+    relative_func: Callable used to rebase the collected paths.
+    target_names: GN labels the traversal starts from.
+
+  Returns:
+    Dict with 'sources' and 'includes' lists of rebased paths.
+  """
+  out_dir = constants.GetOutDirectory()
+  # NOTE(review): presumably project.json is produced by a gn --ide=json
+  # generation step — confirm against the caller.
+  with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
+    projects = json.load(project_file)
+  project_targets = projects['targets']
+  root_dir = projects['build_settings']['root_path']
+  includes = set()
+  processed_target = set()
+  targets_stack = list(target_names)
+  sources = []
+
+  # Graph walk over deps; each target is visited at most once.
+  while targets_stack:
+    target_name = targets_stack.pop()
+    if target_name in processed_target:
+      continue
+    processed_target.add(target_name)
+    target = project_targets[target_name]
+    includes.update(target.get('include_dirs', []))
+    targets_stack.extend(target.get('deps', []))
+    # Ignore generated files
+    sources.extend(f for f in target.get('sources', [])
+                   if f.endswith('.cc') and not f.startswith('//out'))
+
+  def process_paths(paths):
+    # Ignores leading //
+    return relative_func(
+        sorted(os.path.join(root_dir, path[2:]) for path in paths))
+
+  return {
+      'sources': process_paths(sources),
+      'includes': process_paths(includes),
+  }
+
+
+def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
+                       jinja_processor, native_targets):
+  """Returns the data for a pseudo build.gradle of all dirs.
+
+  See //docs/android_studio.md for more details.
+
+  Writes the _all module's build.gradle (and CMakeLists.txt when
+  |native_targets| is non-empty) as a side effect.
+  """
+  variables = _GenerateBaseVars(generator, build_vars)
+  target_type = 'android_apk'
+  variables['target_name'] = _MODULE_ALL
+  variables['template_type'] = target_type
+  # Everything the generator has already processed, across all modules.
+  java_dirs = sorted(generator.processed_java_dirs)
+  prebuilts = sorted(generator.processed_prebuilts)
+  res_dirs = sorted(generator.processed_res_dirs)
+  def Relativize(paths):
+    # Paths in the generated files are relative to the _all module dir.
+    return _RebasePath(paths, os.path.join(gradle_output_dir, _MODULE_ALL))
+
+  # As after clank modularization, the java and javatests code will live side by
+  # side in the same module, we will list both of them in the main target here.
+  main_java_dirs = [d for d in java_dirs if 'junit/' not in d]
+  junit_test_java_dirs = [d for d in java_dirs if 'junit/' in d]
+  variables['main'] = {
+      'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH),
+      'java_dirs': Relativize(main_java_dirs),
+      'prebuilts': Relativize(prebuilts),
+      'java_excludes': ['**/*.java'],
+      'res_dirs': Relativize(res_dirs),
+  }
+  variables['android_test'] = [{
+      'java_dirs': Relativize(junit_test_java_dirs),
+      'java_excludes': ['**/*.java'],
+  }]
+  if native_targets:
+    variables['native'] = _GetNative(
+        relative_func=Relativize, target_names=native_targets)
+  data = jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+  _WriteFile(
+      os.path.join(gradle_output_dir, _MODULE_ALL, _GRADLE_BUILD_FILE), data)
+  if native_targets:
+    cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables)
+    _WriteFile(
+        os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data)
+
+
+def _GenerateRootGradle(jinja_processor, channel):
+  """Returns the data for the root project's build.gradle.
+
+  Renders root.jinja with only the channel as a template variable.
+  """
+  return jinja_processor.Render(_TemplatePath('root'), {'channel': channel})
+
+
+def _GenerateSettingsGradle(project_entries):
+  """Returns the data for settings.gradle."""
+  project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT))
+  lines = []
+  lines.append('// Generated by //build/android/gradle/generate_gradle.py')
+  lines.append('rootProject.name = "%s"' % project_name)
+  lines.append('rootProject.projectDir = settingsDir')
+  lines.append('')
+  for name, subdir in project_entries:
+    # Example target:
+    # android_webview:android_webview_java__build_config_crbug_908819
+    lines.append('include ":%s"' % name)
+    lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' %
+                 (name, subdir))
+  return '\n'.join(lines)
+
+
+def _FindAllProjectEntries(main_entries):
+  """Returns the list of all _ProjectEntry instances given the root project.
+
+  Performs a graph walk over each entry's deps_configs, so every reachable
+  entry appears exactly once.
+  """
+  found = set()
+  to_scan = list(main_entries)
+  while to_scan:
+    cur_entry = to_scan.pop()
+    if cur_entry in found:
+      continue
+    found.add(cur_entry)
+    sub_config_paths = cur_entry.DepsInfo()['deps_configs']
+    to_scan.extend(
+        _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths)
+  return list(found)
+
+
+def _CombineTestEntries(entries):
+  """Combines test apks into the androidTest source set of their target.
+
+  - Speeds up android studio
+  - Adds proper dependency between test and apk_under_test
+  - Doesn't work for junit yet due to resulting circular dependencies
+    - e.g. base_junit_tests > base_junit_test_support > base_java
+  """
+  combined_entries = []
+  # Partition entries into instrumentation tests (keyed by the apk they
+  # test) and everything else.
+  android_test_entries = collections.defaultdict(list)
+  for entry in entries:
+    target_name = entry.GnTarget()
+    if (target_name.endswith(_INSTRUMENTATION_TARGET_SUFFIX)
+        and 'apk_under_test' in entry.Gradle()):
+      apk_name = entry.Gradle()['apk_under_test']
+      android_test_entries[apk_name].append(entry)
+    else:
+      combined_entries.append(entry)
+  # Attach each group of tests to its apk-under-test's entry.
+  for entry in combined_entries:
+    target_name = entry.DepsInfo()['name']
+    if target_name in android_test_entries:
+      entry.android_test_entries = android_test_entries[target_name]
+      del android_test_entries[target_name]
+  # Add unmatched test entries as individual targets.
+  combined_entries.extend(e for l in android_test_entries.values() for e in l)
+  return combined_entries
+
+
def main():
  """Command-line entry point: generates an Android Studio gradle project.

  Parses arguments, runs gn/ninja as needed to discover targets, then writes
  build.gradle/settings.gradle plus supporting files under --project-dir.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--output-directory',
                      help='Path to the root build directory.')
  parser.add_argument('-v',
                      '--verbose',
                      dest='verbose_count',
                      default=0,
                      action='count',
                      help='Verbose level')
  parser.add_argument('--target',
                      dest='targets',
                      action='append',
                      help='GN target to generate project for. Replaces set of '
                           'default targets. May be repeated.')
  parser.add_argument('--extra-target',
                      dest='extra_targets',
                      action='append',
                      help='GN target to generate project for, in addition to '
                           'the default ones. May be repeated.')
  parser.add_argument('--project-dir',
                      help='Root of the output project.',
                      default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
  parser.add_argument('--all',
                      action='store_true',
                      help='Include all .java files reachable from any '
                           'apk/test/binary target. On by default unless '
                           '--split-projects is used (--split-projects can '
                           'slow down Studio given too many targets).')
  parser.add_argument('--use-gradle-process-resources',
                      action='store_true',
                      help='Have gradle generate R.java rather than ninja')
  parser.add_argument('--split-projects',
                      action='store_true',
                      help='Split projects by their gn deps rather than '
                           'combining all the dependencies of each target')
  parser.add_argument('--native-target',
                      dest='native_targets',
                      action='append',
                      help='GN native targets to generate for. May be '
                           'repeated.')
  parser.add_argument('--compile-sdk-version',
                      type=int,
                      default=0,
                      help='Override compileSdkVersion for android sdk docs. '
                           'Useful when sources for android_sdk_version is '
                           'not available in Android Studio.')
  parser.add_argument(
      '--sdk-path',
      default=os.path.expanduser('~/Android/Sdk'),
      help='The path to use as the SDK root, overrides the '
      'default at ~/Android/Sdk.')
  version_group = parser.add_mutually_exclusive_group()
  version_group.add_argument('--beta',
                      action='store_true',
                      help='Generate a project that is compatible with '
                           'Android Studio Beta.')
  version_group.add_argument('--canary',
                      action='store_true',
                      help='Generate a project that is compatible with '
                           'Android Studio Canary.')
  args = parser.parse_args()
  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)
  constants.CheckOutputDirectory()
  output_dir = constants.GetOutDirectory()
  devil_chromium.Initialize(output_directory=output_dir)
  run_tests_helper.SetLogLevel(args.verbose_count)

  if args.use_gradle_process_resources:
    assert args.split_projects, (
        'Gradle resources does not work without --split-projects.')

  _gradle_output_dir = os.path.abspath(
      args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
  logging.warning('Creating project at: %s', _gradle_output_dir)

  # Generate for "all targets" by default when not using --split-projects (too
  # slow), and when no --target has been explicitly set. "all targets" means all
  # java targets that are depended on by an apk or java_binary (leaf
  # java_library targets will not be included).
  args.all = args.all or (not args.split_projects and not args.targets)

  targets_from_args = set(args.targets or _DEFAULT_TARGETS)
  if args.extra_targets:
    targets_from_args.update(args.extra_targets)

  if args.all:
    if args.native_targets:
      # Native editing needs gn's JSON project description.
      _RunGnGen(output_dir, ['--ide=json'])
    elif not os.path.exists(os.path.join(output_dir, 'build.ninja')):
      _RunGnGen(output_dir)
    else:
      # Faster than running "gn gen" in the no-op case.
      _RunNinja(output_dir, ['build.ninja'])
    # Query ninja for all __build_config_crbug_908819 targets.
    targets = _QueryForAllGnTargets(output_dir)
  else:
    assert not args.native_targets, 'Native editing requires --all.'
    # Map explicit *_test_apk targets onto their instrumentation variant.
    targets = [
        re.sub(r'_test_apk$', _INSTRUMENTATION_TARGET_SUFFIX, t)
        for t in targets_from_args
    ]
    # Necessary after "gn clean"
    if not os.path.exists(
        os.path.join(output_dir, gn_helpers.BUILD_VARS_FILENAME)):
      _RunGnGen(output_dir)

  build_vars = gn_helpers.ReadBuildVars(output_dir)
  jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
  # Pick the Android Studio channel the generated project should target.
  if args.beta:
    channel = 'beta'
  elif args.canary:
    channel = 'canary'
  else:
    channel = 'stable'
  if args.compile_sdk_version:
    build_vars['compile_sdk_version'] = args.compile_sdk_version
  else:
    build_vars['compile_sdk_version'] = build_vars['android_sdk_version']
  generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
      args.use_gradle_process_resources, jinja_processor, args.split_projects,
      channel)

  main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]

  if args.all:
    # There are many unused libraries, so restrict to those that are actually
    # used by apks/bundles/binaries/tests or that are explicitly mentioned in
    # --targets.
    BASE_TYPES = ('android_apk', 'android_app_bundle_module', 'java_binary',
                  'junit_binary')
    main_entries = [
        e for e in main_entries
        if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args
            or e.GnTarget().endswith(_INSTRUMENTATION_TARGET_SUFFIX))
    ]

  if args.split_projects:
    main_entries = _FindAllProjectEntries(main_entries)

  logging.info('Generating for %d targets.', len(main_entries))

  entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
  logging.info('Creating %d projects for targets.', len(entries))

  logging.warning('Writing .gradle files...')
  project_entries = []
  # When only one entry will be generated we want it to have a valid
  # build.gradle file with its own AndroidManifest.
  for entry in entries:
    data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor)
    if data and not args.all:
      project_entries.append((entry.ProjectName(), entry.GradleSubdir()))
      _WriteFile(
          os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE),
          data)
  if args.all:
    # In --all mode everything is collapsed into a single module.
    project_entries.append((_MODULE_ALL, _MODULE_ALL))
    _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
                       jinja_processor, args.native_targets)

  _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
             _GenerateRootGradle(jinja_processor, channel))

  _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
             _GenerateSettingsGradle(project_entries))

  # Ensure the Android Studio sdk is correctly initialized.
  if not os.path.exists(args.sdk_path):
    # Help first-time users avoid Android Studio forcibly changing back to
    # the previous default due to not finding a valid sdk under this dir.
    shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path)
  _WriteFile(
      os.path.join(generator.project_dir, 'local.properties'),
      _GenerateLocalProperties(args.sdk_path))
  _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'),
             _GenerateGradleProperties())

  wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
                                    'gradle-wrapper.properties')
  # Remove any stale wrapper config; only the Canary flow writes one below.
  if os.path.exists(wrapper_properties):
    os.unlink(wrapper_properties)
  if args.canary:
    _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary())

  generated_inputs = set()
  for entry in entries:
    entries_to_gen = [entry]
    entries_to_gen.extend(entry.android_test_entries)
    for entry_to_gen in entries_to_gen:
      # Build all paths referenced by .gradle that exist within output_dir.
      generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
  if generated_inputs:
    targets = _RebasePath(generated_inputs, output_dir)
    _RunNinja(output_dir, targets)

  logging.warning('Generated files will only appear once you\'ve built them.')
  logging.warning('Generated projects for Android Studio %s', channel)
  logging.warning('For more tips: https://chromium.googlesource.com/chromium'
                  '/src.git/+/master/docs/android_studio.md')


if __name__ == '__main__':
  main()
diff --git a/src/build/android/gradle/gn_to_cmake.py b/src/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 0000000..d3e80ae
--- /dev/null
+++ b/src/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,689 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+from __future__ import print_function
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
def CMakeStringEscape(a):
  """Escapes the string 'a' for use inside a CMake string.

  This means escaping
  '\' otherwise it may be seen as modifying the next character
  '"' otherwise it will end the string
  ';' otherwise the string becomes a list

  The following do not need to be escaped
  '#' when the lexer is in string state, this does not start a comment
  """
  # Backslashes must be doubled first so later escapes are not re-escaped.
  escaped = a.replace('\\', '\\\\')
  escaped = escaped.replace(';', '\\;')
  return escaped.replace('"', '\\"')
+
+
def CMakeTargetEscape(a):
  """Escapes the string 'a' for use as a CMake target name.

  CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
  The ':' is only allowed for imported targets.
  """
  allowed = string.ascii_letters + string.digits + '_.+-'
  return ''.join(c if c in allowed else '__' for c in a)
+
+
def SetVariable(out, variable_name, value):
  """Sets a CMake variable."""
  out.write('set("%s" "%s")\n' % (CMakeStringEscape(variable_name),
                                  CMakeStringEscape(value)))
+
+
def SetVariableList(out, variable_name, values):
  """Sets a CMake variable to a list."""
  # Zero or one values degenerate to a plain set().
  if not values:
    return SetVariable(out, variable_name, "")
  if len(values) == 1:
    return SetVariable(out, variable_name, values[0])
  escaped = [CMakeStringEscape(value) for value in values]
  out.write('list(APPEND "%s"\n  "%s")\n' %
            (CMakeStringEscape(variable_name), '"\n  "'.join(escaped)))
+
+
def SetFilesProperty(output, variable, property_name, values, sep):
  """Given a set of source files, sets the given property on them."""
  output.write('set_source_files_properties(')
  WriteVariable(output, variable)
  output.write(' PROPERTIES %s "' % property_name)
  # Note: 'sep' is appended after every value, including the last one.
  for value in values:
    output.write(CMakeStringEscape(value) + sep)
  output.write('")\n')
+
+
def SetCurrentTargetProperty(out, property_name, values, sep=''):
  """Given a target, sets the given property."""
  # Each value keeps a trailing 'sep', matching the emitter's historic output.
  joined = ''.join(CMakeStringEscape(value) + sep for value in values)
  out.write('set_target_properties("${target}" PROPERTIES %s "%s")\n' %
            (property_name, joined))
+
+
def WriteVariable(output, variable_name, prepend=None):
  """Writes '${variable_name}' to output, preceded by 'prepend' if truthy."""
  text = '${%s}' % variable_name
  if prepend:
    text = prepend + text
  output.write(text)
+
+
# See GetSourceFileType in gn
# Maps a file extension to the bucket used when grouping a target's sources;
# extensions not listed here fall into the 'other' bucket (see
# WriteSourceVariables).
source_file_types = {
  '.cc': 'cxx',
  '.cpp': 'cxx',
  '.cxx': 'cxx',
  '.c': 'c',
  '.s': 'asm',
  '.S': 'asm',
  '.asm': 'asm',
  '.o': 'obj',
  '.obj': 'obj',
}
+
+
class CMakeTargetType(object):
  """Describes how a gn target type is realized as a CMake command."""

  def __init__(self, command, modifier, property_modifier, is_linkable):
    # CMake command used to declare the target, e.g. 'add_library'.
    self.command = command
    # Extra keyword passed to the command (e.g. 'SHARED'), or None.
    self.modifier = modifier
    # Output-property modifier (e.g. 'RUNTIME'); not read within this file.
    self.property_modifier = property_modifier
    # Whether other targets may link against this one.
    self.is_linkable = is_linkable


# Fallback descriptor for gn types CMake cannot model directly.
CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
                                         None, False)
+
# See GetStringForOutputType in gn
# Maps each gn target type to its CMake realization; gn types CMake cannot
# express directly fall back to the 'custom' (add_custom_target) descriptor.
cmake_target_types = {
  'unknown': CMakeTargetType.custom,
  'group': CMakeTargetType.custom,
  'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
  'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
  'copy': CMakeTargetType.custom,
  'action': CMakeTargetType.custom,
  'action_foreach': CMakeTargetType.custom,
  'bundle_data': CMakeTargetType.custom,
  'create_bundle': CMakeTargetType.custom,
}
+
+
def FindFirstOf(s, a):
  """Returns the smallest index in 's' of any character from 'a'.

  NOTE(review): raises ValueError (min of empty sequence) when no character
  of 'a' occurs in 's' -- callers are expected to pass strings containing at
  least one of the separators.
  """
  return min([s.find(ch) for ch in a if ch in s])
+
+
def GetCMakeTargetName(gn_target_name):
  """Converts a gn label into an escaped CMake target name.

  Splits '//dir/path:name(//toolchain)' into location, name and toolchain
  components, joins them ('_' between location and name, '--' before the
  toolchain) and escapes the result via CMakeTargetEscape.
  """
  # See <chromium>/src/tools/gn/label.cc#Resolve
  # //base/test:test_support(//build/toolchain/win:msvc)
  path_separator = FindFirstOf(gn_target_name, (':', '('))
  location = None
  name = None
  toolchain = None
  # NOTE(review): a separator at index 0 is treated the same as "no
  # separator" here, and FindFirstOf raises ValueError when neither ':' nor
  # '(' occurs at all; in the no-separator branch 'name' stays None, which
  # would crash in the endswith() below. Both look like latent edge cases --
  # confirm labels always carry a separator past position 0.
  if not path_separator:
    location = gn_target_name[2:]
  else:
    location = gn_target_name[2:path_separator]
    toolchain_separator = gn_target_name.find('(', path_separator)
    if toolchain_separator == -1:
      # Plain '//location:name' label with no toolchain suffix.
      name = gn_target_name[path_separator + 1:]
    else:
      if toolchain_separator > path_separator:
        name = gn_target_name[path_separator + 1:toolchain_separator]
      assert gn_target_name.endswith(')')
      toolchain = gn_target_name[toolchain_separator + 1:-1]
  assert location or name

  cmake_target_name = None
  if location.endswith('/' + name):
    # '//foo/bar:bar' collapses to just 'foo/bar'.
    cmake_target_name = location
  elif location:
    cmake_target_name = location + '_' + name
  else:
    cmake_target_name = name
  if toolchain:
    cmake_target_name += '--' + toolchain
  return CMakeTargetEscape(cmake_target_name)
+
+
class Project(object):
  """Wraps the parsed project.json and resolves paths and dependencies."""

  def __init__(self, project_json):
    # Mapping of gn target label -> target properties dict.
    self.targets = project_json['targets']
    build_settings = project_json['build_settings']
    self.root_path = build_settings['root_path']
    # build_dir is source-absolute ('//out/...'); strip the '//' prefix.
    self.build_path = posixpath.join(self.root_path,
                                     build_settings['build_dir'][2:])
    # Memoization cache for GetObjectSourceDependencies.
    self.object_source_deps = {}

  def GetAbsolutePath(self, path):
    """Converts a source-absolute ('//...') path to a filesystem path."""
    if path.startswith("//"):
      return self.root_path + "/" + path[2:]
    return path

  def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
    """All OBJECT libraries whose sources have not been absorbed."""
    cached = self.object_source_deps.get(gn_target_name)
    if cached is not None:
      object_dependencies.update(cached)
      return
    collected = set()
    for dep in self.targets[gn_target_name].get('deps', []):
      dep_type = self.targets[dep].get('type', None)
      if dep_type == 'source_set':
        collected.add(dep)
      if dep_type not in gn_target_types_that_absorb_objects:
        # The dependency forwards its object sources upward; keep walking.
        self.GetObjectSourceDependencies(dep, collected)
    self.object_source_deps[gn_target_name] = collected
    object_dependencies.update(collected)

  def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
    """All OBJECT libraries whose libraries have not been absorbed."""
    for dep in self.targets[gn_target_name].get('deps', []):
      if self.targets[dep].get('type', None) == 'source_set':
        object_dependencies.add(dep)
        self.GetObjectLibraryDependencies(dep, object_dependencies)
+
+
class Target(object):
  """Couples a gn target's JSON properties with its CMake identity."""

  def __init__(self, gn_target_name, project):
    # Raw gn label, e.g. '//base:base'.
    self.gn_name = gn_target_name
    self.properties = project.targets[gn_target_name]
    self.gn_type = self.properties.get('type', None)
    # CMake-side identity: escaped name plus the CMakeTargetType descriptor
    # (None when this generator has no mapping for the gn type).
    self.cmake_name = GetCMakeTargetName(gn_target_name)
    self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
def WriteAction(out, target, project, sources, synthetic_dependencies):
  """Emits an add_custom_command invoking the gn 'action' script.

  Registers the action's outputs variable in synthetic_dependencies so the
  owning add_custom_target can depend on the command's outputs.
  """
  outputs = []
  output_directories = set()
  for output in target.properties.get('outputs', []):
    output_abs_path = project.GetAbsolutePath(output)
    outputs.append(output_abs_path)
    output_directory = posixpath.dirname(output_abs_path)
    if output_directory:
      output_directories.add(output_directory)
  outputs_name = '${target}__output'
  SetVariableList(out, outputs_name, outputs)

  out.write('add_custom_command(OUTPUT ')
  WriteVariable(out, outputs_name)
  out.write('\n')

  # Make sure all output directories exist before the script runs.
  if output_directories:
    out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
    out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
    out.write('"\n')

  script = target.properties['script']
  arguments = target.properties['args']
  out.write('  COMMAND python "')
  out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
  out.write('"')
  if arguments:
    out.write('\n    "')
    out.write('"\n    "'.join([CMakeStringEscape(a) for a in arguments]))
    out.write('"')
  out.write('\n')

  # The command depends on every source bucket of this target.
  out.write('  DEPENDS ')
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  out.write('\n')

  #TODO: CMake 3.7 is introducing DEPFILE

  out.write('  WORKING_DIRECTORY "')
  out.write(CMakeStringEscape(project.build_path))
  out.write('"\n')

  out.write('  COMMENT "Action: ${target}"\n')

  out.write('  VERBATIM)\n')

  synthetic_dependencies.add(outputs_name)
+
+
def ExpandPlaceholders(source, a):
  """Expands gn {{source*}} placeholders in 'a' for the given source path."""
  source_dir, source_file_part = posixpath.split(source)
  source_name_part, _ = posixpath.splitext(source_file_part)
  #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
  replacements = (
      ('{{source}}', source),
      ('{{source_file_part}}', source_file_part),
      ('{{source_name_part}}', source_name_part),
      ('{{source_dir}}', source_dir),
      ('{{source_root_relative_dir}}', source_dir),
  )
  result = a
  for placeholder, value in replacements:
    result = result.replace(placeholder, value)
  return result
+
+
def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
  """Emits one add_custom_command per source of a gn 'action_foreach' target.

  Each input owns a consecutive block of the target's outputs; the per-input
  output variables are added to synthetic_dependencies so the enclosing
  add_custom_target depends on them.
  """
  all_outputs = target.properties.get('outputs', [])
  inputs = target.properties.get('sources', [])
  # TODO: consider expanding 'output_patterns' instead.
  # Use floor division: under Python 3, '/' is true division and produces a
  # float, which would make the slice bounds below raise TypeError. Assumes
  # len(all_outputs) is a whole multiple of len(inputs) -- TODO confirm gn
  # always expands output patterns uniformly per input.
  outputs_per_input = len(all_outputs) // len(inputs)
  for count, source in enumerate(inputs):
    source_abs_path = project.GetAbsolutePath(source)

    # Collect this input's slice of outputs and their parent directories.
    outputs = []
    output_directories = set()
    for output in all_outputs[outputs_per_input *  count:
                              outputs_per_input * (count+1)]:
      output_abs_path = project.GetAbsolutePath(output)
      outputs.append(output_abs_path)
      output_directory = posixpath.dirname(output_abs_path)
      if output_directory:
        output_directories.add(output_directory)
    outputs_name = '${target}__output_' + str(count)
    SetVariableList(out, outputs_name, outputs)

    out.write('add_custom_command(OUTPUT ')
    WriteVariable(out, outputs_name)
    out.write('\n')

    # Make sure all output directories exist before the script runs.
    if output_directories:
      out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
      out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
      out.write('"\n')

    script = target.properties['script']
    # TODO: need to expand {{xxx}} in arguments
    arguments = target.properties['args']
    out.write('  COMMAND python "')
    out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
    out.write('"')
    if arguments:
      out.write('\n    "')
      # Expand per-source {{source*}} placeholders in each argument.
      expand = functools.partial(ExpandPlaceholders, source_abs_path)
      out.write('"\n    "'.join(
          [CMakeStringEscape(expand(a)) for a in arguments]))
      out.write('"')
    out.write('\n')

    out.write('  DEPENDS')
    if 'input' in sources:
      WriteVariable(out, sources['input'], ' ')
    out.write(' "')
    out.write(CMakeStringEscape(source_abs_path))
    out.write('"\n')

    #TODO: CMake 3.7 is introducing DEPFILE

    out.write('  WORKING_DIRECTORY "')
    out.write(CMakeStringEscape(project.build_path))
    out.write('"\n')

    out.write('  COMMENT "Action ${target} on ')
    out.write(CMakeStringEscape(source_abs_path))
    out.write('"\n')

    out.write('  VERBATIM)\n')

    synthetic_dependencies.add(outputs_name)
+
+
def WriteCopy(out, target, project, sources, synthetic_dependencies):
  """Emits an add_custom_command mirroring a gn 'copy' target.

  Pairs each input with its positional output and registers the outputs
  variable in synthetic_dependencies.
  """
  inputs = target.properties.get('sources', [])
  raw_outputs = target.properties.get('outputs', [])

  # TODO: consider expanding 'output_patterns' instead.
  outputs = []
  for output in raw_outputs:
    output_abs_path = project.GetAbsolutePath(output)
    outputs.append(output_abs_path)
  outputs_name = '${target}__output'
  SetVariableList(out, outputs_name, outputs)

  out.write('add_custom_command(OUTPUT ')
  WriteVariable(out, outputs_name)
  out.write('\n')

  # NOTE(review): zip() pairs inputs and outputs positionally and silently
  # drops extras when the lists differ in length -- assumes gn emits exactly
  # one output per input here; confirm.
  for src, dst in zip(inputs, outputs):
    out.write('  COMMAND ${CMAKE_COMMAND} -E copy "')
    out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
    out.write('" "')
    out.write(CMakeStringEscape(dst))
    out.write('"\n')

  # The command depends on every source bucket of this target.
  out.write('  DEPENDS ')
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  out.write('\n')

  out.write('  WORKING_DIRECTORY "')
  out.write(CMakeStringEscape(project.build_path))
  out.write('"\n')

  out.write('  COMMENT "Copy ${target}"\n')

  out.write('  VERBATIM)\n')

  synthetic_dependencies.add(outputs_name)
+
+
def WriteCompilerFlags(out, target, project, sources):
  """Emits include dirs, defines, compile flags and link flags for ${target}."""
  # Hack, set linker language to c if no c or cxx files present.
  if not 'c' in sources and not 'cxx' in sources:
    SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])

  # Mark uncompiled sources as uncompiled.
  if 'input' in sources:
    SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
  if 'other' in sources:
    SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')

  # Mark object sources as linkable.
  if 'obj' in sources:
    SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')

  # TODO: 'output_name', 'output_dir', 'output_extension'
  # This includes using 'source_outputs' to direct compiler output.

  # Includes
  includes = target.properties.get('include_dirs', [])
  if includes:
    out.write('set_property(TARGET "${target}" ')
    out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
    for include_dir in includes:
      out.write('\n  "')
      out.write(project.GetAbsolutePath(include_dir))
      out.write('"')
    out.write(')\n')

  # Defines
  defines = target.properties.get('defines', [])
  if defines:
    SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')

  # Compile flags
  # "arflags", "asmflags", "cflags",
  # "cflags_c", "cflags_cc", "cflags_objc", "cflags_objcc"
  # CMake does not have per target lang compile flags.
  # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
  #       http://public.kitware.com/Bug/view.php?id=14857
  flags = []
  flags.extend(target.properties.get('cflags', []))
  cflags_asm = target.properties.get('asmflags', [])
  cflags_c = target.properties.get('cflags_c', [])
  cflags_cxx = target.properties.get('cflags_cc', [])
  # Single-language targets can carry their language flags on the target
  # itself; mixed-language targets fall back to per-file properties below.
  if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
    flags.extend(cflags_c)
  elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
    flags.extend(cflags_cxx)
  else:
    # TODO: This is broken, one cannot generally set properties on files,
    # as other targets may require different properties on the same files.
    if 'asm' in sources and cflags_asm:
      SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
    if 'c' in sources and cflags_c:
      SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
    if 'cxx' in sources and cflags_cxx:
      SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
  if flags:
    SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')

  # Linker flags
  ldflags = target.properties.get('ldflags', [])
  if ldflags:
    SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
# gn target types that compile their (transitive) OBJECT-library sources
# directly into the produced artifact instead of forwarding them upward; see
# Project.GetObjectSourceDependencies and WriteSourceVariables.
gn_target_types_that_absorb_objects = (
  'executable',
  'loadable_module',
  'shared_library',
  'static_library'
)
+
+
def WriteSourceVariables(out, target, project):
  """Emits CMake list variables holding the target's sources, by bucket.

  Returns a dict mapping bucket name ('cxx', 'c', ...) to the CMake variable
  name holding the corresponding absolute paths; empty buckets are omitted.
  """
  # gn separates the sheep from the goats based on file extensions.
  # A full separation is done here because of flag handing (see Compile flags).
  buckets = {'cxx': [], 'c': [], 'asm': [],
             'obj': [], 'obj_target': [], 'input': [], 'other': []}

  # TODO .def files on Windows
  for source in target.properties.get('sources', []):
    _, ext = posixpath.splitext(source)
    bucket = source_file_types.get(ext, 'other')
    buckets[bucket].append(project.GetAbsolutePath(source))

  for input_path in target.properties.get('inputs', []):
    buckets['input'].append(project.GetAbsolutePath(input_path))

  # OBJECT library dependencies need to be listed as sources.
  # Only executables and non-OBJECT libraries may reference an OBJECT library.
  # https://gitlab.kitware.com/cmake/cmake/issues/14778
  if target.gn_type in gn_target_types_that_absorb_objects:
    object_dependencies = set()
    project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
    buckets['obj_target'].extend(
        '$<TARGET_OBJECTS:%s>' % GetCMakeTargetName(dep)
        for dep in object_dependencies)

  sources = {}
  for bucket, paths in buckets.items():
    if paths:
      variable = '${target}__' + bucket + '_srcs'
      SetVariableList(out, variable, paths)
      sources[bucket] = variable
  return sources
+
+
def WriteTarget(out, target, project):
  """Emits the full CMake definition for a single gn target.

  Writes the target declaration, its source variables, compiler flags and
  dependency wiring (add_dependencies / target_link_libraries) to 'out'.
  Targets with no CMake mapping are skipped with a message.
  """
  out.write('\n#')
  out.write(target.gn_name)
  out.write('\n')

  if target.cmake_type is None:
    print('Target {} has unknown target type {}, skipping.'.format(
        target.gn_name, target.gn_type))
    return

  SetVariable(out, 'target', target.cmake_name)

  sources = WriteSourceVariables(out, target, project)

  # Script-style gn types emit custom commands whose outputs the target
  # below must depend on.
  synthetic_dependencies = set()
  if target.gn_type == 'action':
    WriteAction(out, target, project, sources, synthetic_dependencies)
  if target.gn_type == 'action_foreach':
    WriteActionForEach(out, target, project, sources, synthetic_dependencies)
  if target.gn_type == 'copy':
    WriteCopy(out, target, project, sources, synthetic_dependencies)

  out.write(target.cmake_type.command)
  out.write('("${target}"')
  if target.cmake_type.modifier is not None:
    out.write(' ')
    out.write(target.cmake_type.modifier)
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  if synthetic_dependencies:
    out.write(' DEPENDS')
    for synthetic_dependencie in synthetic_dependencies:
      WriteVariable(out, synthetic_dependencie, ' ')
  out.write(')\n')

  if target.cmake_type.command != 'add_custom_target':
    WriteCompilerFlags(out, target, project, sources)

  libraries = set()
  nonlibraries = set()

  dependencies = set(target.properties.get('deps', []))
  # Transitive OBJECT libraries are in sources.
  # Those sources are dependent on the OBJECT library dependencies.
  # Those sources cannot bring in library dependencies.
  object_dependencies = set()
  if target.gn_type != 'source_set':
    project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
  for object_dependency in object_dependencies:
    dependencies.update(project.targets.get(object_dependency).get('deps', []))

  # Split dependencies into linkable libraries and order-only dependencies.
  for dependency in dependencies:
    gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
    cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
    # NOTE(review): cmake_dependency_type is None when gn_dependency_type is
    # not a key of cmake_target_types (e.g. a dependency missing from
    # project.targets); the attribute access below would then raise
    # AttributeError -- confirm all dependency types are covered.
    cmake_dependency_name = GetCMakeTargetName(dependency)
    if cmake_dependency_type.command != 'add_library':
      nonlibraries.add(cmake_dependency_name)
    elif cmake_dependency_type.modifier != 'OBJECT':
      if target.cmake_type.is_linkable:
        libraries.add(cmake_dependency_name)
      else:
        nonlibraries.add(cmake_dependency_name)

  # Non-library dependencies.
  if nonlibraries:
    out.write('add_dependencies("${target}"')
    for nonlibrary in nonlibraries:
      out.write('\n  "')
      out.write(nonlibrary)
      out.write('"')
    out.write(')\n')

  # Non-OBJECT library dependencies.
  external_libraries = target.properties.get('libs', [])
  if target.cmake_type.is_linkable and (external_libraries or libraries):
    library_dirs = target.properties.get('lib_dirs', [])
    if library_dirs:
      SetVariableList(out, '${target}__library_directories', library_dirs)

    system_libraries = []
    for external_library in external_libraries:
      if '/' in external_library:
        # Path-like libs are linked directly by absolute path.
        libraries.add(project.GetAbsolutePath(external_library))
      else:
        if external_library.endswith('.framework'):
          external_library = external_library[:-len('.framework')]
        system_library = 'library__' + external_library
        if library_dirs:
          # Scope the find_library cache entry per target when search paths
          # differ.
          system_library = system_library + '__for_${target}'
        out.write('find_library("')
        out.write(CMakeStringEscape(system_library))
        out.write('" "')
        out.write(CMakeStringEscape(external_library))
        out.write('"')
        if library_dirs:
          out.write(' PATHS "')
          WriteVariable(out, '${target}__library_directories')
          out.write('"')
        out.write(')\n')
        system_libraries.append(system_library)
    out.write('target_link_libraries("${target}"')
    for library in libraries:
      out.write('\n  "')
      out.write(CMakeStringEscape(library))
      out.write('"')
    for system_library in system_libraries:
      WriteVariable(out, system_library, '\n  "')
      out.write('"')
    out.write(')\n')
+
+
def WriteProject(project):
  """Writes CMakeLists.txt and CMakeLists.ext into the project build dir.

  CMakeLists.txt is a small trampoline that re-runs ninja (so gn can
  regenerate this config) and includes CMakeLists.ext, which holds the
  generated targets.
  """
  # Use context managers so both files are flushed and closed
  # deterministically (the CMakeLists.ext handle was previously never
  # closed, relying on interpreter shutdown to flush it).
  with open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+') as out:
    out.write('# Generated by gn_to_cmake.py.\n')
    out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
    out.write('cmake_policy(VERSION 2.8.8)\n\n')

    # Update the gn generated ninja build.
    # If a build file has changed, this will update CMakeLists.ext if
    # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
    # style was used to create this config.
    out.write('execute_process(COMMAND ninja -C "')
    out.write(CMakeStringEscape(project.build_path))
    out.write('" build.ninja)\n')

    out.write('include(CMakeLists.ext)\n')

  with open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+') as out:
    out.write('# Generated by gn_to_cmake.py.\n')
    out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
    out.write('cmake_policy(VERSION 2.8.8)\n')

    # The following appears to be as-yet undocumented.
    # http://public.kitware.com/Bug/view.php?id=8392
    out.write('enable_language(ASM)\n\n')
    # ASM-ATT does not support .S files.
    # output.write('enable_language(ASM-ATT)\n')

    # Current issues with automatic re-generation:
    # The gn generated build.ninja target uses build.ninja.d
    #   but build.ninja.d does not contain the ide or gn.
    # Currently the ide is not run if the project.json file is not changed
    #   but the ide needs to be run anyway if it has itself changed.
    #   This can be worked around by deleting the project.json file.
    out.write('file(READ "')
    gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
    out.write(CMakeStringEscape(gn_deps_file))
    out.write('" "gn_deps_string" OFFSET ')
    out.write(str(len('build.ninja: ')))
    out.write(')\n')
    # One would think this would need to worry about escaped spaces
    # but gn doesn't escape spaces here (it generates invalid .d files).
    out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
    out.write('foreach("gn_dep" ${gn_deps})\n')
    out.write('  configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
    out.write('endforeach("gn_dep")\n')

    for target_name in project.targets.keys():
      out.write('\n')
      WriteTarget(out, Target(target_name, project), project)
+
+
def main():
  """Entry point: converts the given project.json into CMakeLists files.

  Expects exactly one argument, the path to gn's --ide=json output.
  """
  if len(sys.argv) != 2:
    print('Usage: ' + sys.argv[0] + ' <json_file_name>')
    # Use sys.exit: the bare exit() builtin is injected by the site module
    # and is not guaranteed to exist (e.g. under python -S or embedding).
    sys.exit(1)

  json_path = sys.argv[1]
  # json.load streams the file instead of reading it fully into a string.
  with open(json_path, 'r') as json_file:
    project = json.load(json_file)

  WriteProject(Project(project))


if __name__ == "__main__":
  main()
diff --git a/src/build/android/gradle/java.jinja b/src/build/android/gradle/java.jinja
new file mode 100644
index 0000000..7626f61
--- /dev/null
+++ b/src/build/android/gradle/java.jinja
@@ -0,0 +1,41 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+apply plugin: "java"
+{% if template_type == 'java_binary' %}
+apply plugin: "application"
+{% endif %}
+
+sourceSets {
+    main {
+        java.srcDirs = [
+{% for path in main.java_dirs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% if main.java_excludes is defined %}
+        java.filter.exclude([
+{% for path in main.java_excludes %}
+            "{{ path }}",
+{% endfor %}
+        ])
+{% endif %}
+    }
+}
+
+sourceCompatibility = JavaVersion.VERSION_1_8
+targetCompatibility = JavaVersion.VERSION_1_8
+
+{% if template_type == 'java_binary' %}
+applicationName = "{{ target_name }}"
+{% if main_class %}
+mainClassName = "{{ main_class }}"
+{% endif %}
+{% endif %}
+{% if template_type in ('java_binary', 'java_library') %}
+archivesBaseName = "{{ target_name }}"
+{% endif %}
+
+{% include 'dependencies.jinja' %}
diff --git a/src/build/android/gradle/manifest.jinja b/src/build/android/gradle/manifest.jinja
new file mode 100644
index 0000000..dea7071
--- /dev/null
+++ b/src/build/android/gradle/manifest.jinja
@@ -0,0 +1,7 @@
+{# Copyright 2017 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="{{ package }}">
+</manifest>
diff --git a/src/build/android/gradle/root.jinja b/src/build/android/gradle/root.jinja
new file mode 100644
index 0000000..15b5e10
--- /dev/null
+++ b/src/build/android/gradle/root.jinja
@@ -0,0 +1,26 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+buildscript {
+    repositories {
+        google()
+        jcenter()
+{% if channel == 'canary' %}
+        // Workaround for http://b/144885480.
+        //maven() {
+        //  url "http://dl.bintray.com/kotlin/kotlin-eap"
+        //}
+{% endif %}
+    }
+    dependencies {
+{% if channel == 'canary' %}
+        classpath "com.android.tools.build:gradle:4.1.0-beta01"
+{% elif channel == 'beta' %}
+        classpath "com.android.tools.build:gradle:4.0.0-rc01"
+{% else %}
+        classpath "com.android.tools.build:gradle:4.0.1"
+{% endif %}
+    }
+}
diff --git a/src/build/android/gtest_apk/BUILD.gn b/src/build/android/gtest_apk/BUILD.gn
new file mode 100644
index 0000000..2a72bc4
--- /dev/null
+++ b/src/build/android/gtest_apk/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("native_test_instrumentation_test_runner_java") {
+  testonly = true
+  sources = [
+    "java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java",
+    "java/src/org/chromium/build/gtest_apk/NativeTestIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java",
+  ]
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
new file mode 100644
index 0000000..652333b
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
@@ -0,0 +1,281 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.ActivityManager;
+import android.app.Instrumentation;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.Process;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.SparseArray;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Queue;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ *  An Instrumentation that runs tests based on NativeTest.
+ */
+public class NativeTestInstrumentationTestRunner extends Instrumentation {
+    private static final String EXTRA_NATIVE_TEST_ACTIVITY =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.NativeTestActivity";
+    private static final String EXTRA_SHARD_NANO_TIMEOUT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardNanoTimeout";
+    private static final String EXTRA_SHARD_SIZE_LIMIT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardSizeLimit";
+    private static final String EXTRA_STDOUT_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.StdoutFile";
+    private static final String EXTRA_TEST_LIST_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.TestList";
+    private static final String EXTRA_TEST =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.Test";
+
+    private static final String TAG = "NativeTest";
+
+    // Default per-shard timeout: 60 seconds, expressed in nanoseconds.
+    private static final long DEFAULT_SHARD_NANO_TIMEOUT = 60 * 1000000000L;
+    // Default to no size limit.
+    private static final int DEFAULT_SHARD_SIZE_LIMIT = 0;
+
+    private Handler mHandler = new Handler();
+    private Bundle mLogBundle = new Bundle();
+    private SparseArray<ShardMonitor> mMonitors = new SparseArray<ShardMonitor>();
+    private String mNativeTestActivity;
+    private TestStatusReceiver mReceiver;
+    private Queue<String> mShards = new ArrayDeque<String>();
+    private long mShardNanoTimeout = DEFAULT_SHARD_NANO_TIMEOUT;
+    private int mShardSizeLimit = DEFAULT_SHARD_SIZE_LIMIT;
+    private File mStdoutFile;
+    private Bundle mTransparentArguments;
+
+    /**
+     * Reads the launch extras, sets up the stdout file, and builds the queue
+     * of gtest-filter shards to run. Extras consumed here are removed from
+     * mTransparentArguments; the remainder is forwarded to the test Activity.
+     */
+    @Override
+    public void onCreate(Bundle arguments) {
+        // NOTE(review): 'context' is unused; later code calls getContext() directly.
+        Context context = getContext();
+        mTransparentArguments = new Bundle(arguments);
+
+        mNativeTestActivity = arguments.getString(EXTRA_NATIVE_TEST_ACTIVITY);
+        if (mNativeTestActivity == null) {
+            Log.e(TAG,
+                    "Unable to find org.chromium.native_test.NativeUnitTestActivity extra on "
+                            + "NativeTestInstrumentationTestRunner launch intent.");
+            finish(Activity.RESULT_CANCELED, new Bundle());
+            return;
+        }
+        mTransparentArguments.remove(EXTRA_NATIVE_TEST_ACTIVITY);
+
+        String shardNanoTimeout = arguments.getString(EXTRA_SHARD_NANO_TIMEOUT);
+        if (shardNanoTimeout != null) mShardNanoTimeout = Long.parseLong(shardNanoTimeout);
+        mTransparentArguments.remove(EXTRA_SHARD_NANO_TIMEOUT);
+
+        String shardSizeLimit = arguments.getString(EXTRA_SHARD_SIZE_LIMIT);
+        if (shardSizeLimit != null) mShardSizeLimit = Integer.parseInt(shardSizeLimit);
+        mTransparentArguments.remove(EXTRA_SHARD_SIZE_LIMIT);
+
+        String stdoutFile = arguments.getString(EXTRA_STDOUT_FILE);
+        if (stdoutFile != null) {
+            mStdoutFile = new File(stdoutFile);
+        } else {
+            // No explicit stdout file: create a temporary one on external storage.
+            try {
+                mStdoutFile = File.createTempFile(
+                        ".temp_stdout_", ".txt", Environment.getExternalStorageDirectory());
+                Log.i(TAG, "stdout file created: " + mStdoutFile.getAbsolutePath());
+            } catch (IOException e) {
+                Log.e(TAG, "Unable to create temporary stdout file.", e);
+                finish(Activity.RESULT_CANCELED, new Bundle());
+                return;
+            }
+        }
+
+        mTransparentArguments.remove(EXTRA_STDOUT_FILE);
+
+        String singleTest = arguments.getString(EXTRA_TEST);
+        if (singleTest != null) {
+            mShards.add(singleTest);
+        }
+
+        String testListFilePath = arguments.getString(EXTRA_TEST_LIST_FILE);
+        if (testListFilePath != null) {
+            File testListFile = new File(testListFilePath);
+            try {
+                BufferedReader testListFileReader =
+                        new BufferedReader(new FileReader(testListFile));
+
+                String test;
+                ArrayList<String> workingShard = new ArrayList<String>();
+                while ((test = testListFileReader.readLine()) != null) {
+                    workingShard.add(test);
+                    // Emit a shard each time the size limit is reached.
+                    if (workingShard.size() == mShardSizeLimit) {
+                        mShards.add(TextUtils.join(":", workingShard));
+                        workingShard = new ArrayList<String>();
+                    }
+                }
+
+                if (!workingShard.isEmpty()) {
+                    mShards.add(TextUtils.join(":", workingShard));
+                }
+
+                testListFileReader.close();
+            } catch (IOException e) {
+                Log.e(TAG, "Error reading " + testListFile.getAbsolutePath(), e);
+            }
+        }
+        mTransparentArguments.remove(EXTRA_TEST_LIST_FILE);
+
+        start();
+    }
+
+    @Override
+    @SuppressLint("DefaultLocale")
+    public void onStart() {
+        super.onStart();
+
+        // Register for test-status broadcasts so each shard's process can be
+        // monitored for timeout/death and the next shard launched when done.
+        mReceiver = new TestStatusReceiver();
+        mReceiver.register(getContext());
+        mReceiver.registerCallback(new TestStatusReceiver.TestRunCallback() {
+            @Override
+            public void testRunStarted(int pid) {
+                if (pid != Process.myPid()) {
+                    ShardMonitor m = new ShardMonitor(pid, System.nanoTime() + mShardNanoTimeout);
+                    mMonitors.put(pid, m);
+                    mHandler.post(m);
+                }
+            }
+
+            @Override
+            public void testRunFinished(int pid) {
+                ShardMonitor m = mMonitors.get(pid);
+                if (m != null) {
+                    m.stopped();
+                    mMonitors.remove(pid);
+                }
+                mHandler.post(new ShardEnder(pid));
+            }
+
+            @Override
+            public void uncaughtException(int pid, String stackTrace) {
+                mLogBundle.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
+                        String.format("Uncaught exception in test process (pid: %d)%n%s%n", pid,
+                                stackTrace));
+                sendStatus(0, mLogBundle);
+            }
+        });
+
+        // Kick off the first shard.
+        mHandler.post(new ShardStarter());
+    }
+
+    /** Monitors a test shard's execution. */
+    private class ShardMonitor implements Runnable {
+        private static final int MONITOR_FREQUENCY_MS = 1000;
+
+        private long mExpirationNanoTime;
+        private int mPid;
+        private AtomicBoolean mStopped;
+
+        public ShardMonitor(int pid, long expirationNanoTime) {
+            mPid = pid;
+            mExpirationNanoTime = expirationNanoTime;
+            mStopped = new AtomicBoolean(false);
+        }
+
+        public void stopped() {
+            mStopped.set(true);
+        }
+
+        @Override
+        public void run() {
+            if (mStopped.get()) {
+                return;
+            }
+
+            if (isAppProcessAlive(getContext(), mPid)) {
+                if (System.nanoTime() > mExpirationNanoTime) {
+                    // Shard exceeded its timeout: force it to end.
+                    Log.e(TAG, String.format("Test process %d timed out.", mPid));
+                    mHandler.post(new ShardEnder(mPid));
+                    return;
+                } else {
+                    // Still alive and within budget; poll again later.
+                    mHandler.postDelayed(this, MONITOR_FREQUENCY_MS);
+                    return;
+                }
+            }
+
+            Log.e(TAG, String.format("Test process %d died unexpectedly.", mPid));
+            mHandler.post(new ShardEnder(mPid));
+        }
+    }
+
+    /** Returns whether the ActivityManager reports a running app process with this pid. */
+    private static boolean isAppProcessAlive(Context context, int pid) {
+        ActivityManager activityManager =
+                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        for (ActivityManager.RunningAppProcessInfo processInfo :
+                activityManager.getRunningAppProcesses()) {
+            if (processInfo.pid == pid) return true;
+        }
+        return false;
+    }
+
+    /** Creates the Intent that launches the test Activity for the next shard, if any. */
+    protected Intent createShardMainIntent() {
+        Intent i = new Intent(Intent.ACTION_MAIN);
+        i.setComponent(new ComponentName(getContext().getPackageName(), mNativeTestActivity));
+        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+        i.putExtras(mTransparentArguments);
+        if (mShards != null && !mShards.isEmpty()) {
+            String gtestFilter = mShards.remove();
+            i.putExtra(NativeTestIntent.EXTRA_GTEST_FILTER, gtestFilter);
+        }
+        i.putExtra(NativeTestIntent.EXTRA_STDOUT_FILE, mStdoutFile.getAbsolutePath());
+        return i;
+    }
+
+    /**
+     * Starts the NativeTest Activity.
+     */
+    private class ShardStarter implements Runnable {
+        @Override
+        public void run() {
+            getContext().startActivity(createShardMainIntent());
+        }
+    }
+
+    /**
+     * Kills the given test process (unless it is this one), waits for it to
+     * die, then starts the next shard or finishes the instrumentation.
+     */
+    private class ShardEnder implements Runnable {
+        private static final int WAIT_FOR_DEATH_MILLIS = 10;
+
+        private int mPid;
+
+        public ShardEnder(int pid) {
+            mPid = pid;
+        }
+
+        @Override
+        public void run() {
+            if (mPid != Process.myPid()) {
+                Process.killProcess(mPid);
+                try {
+                    while (isAppProcessAlive(getContext(), mPid)) {
+                        Thread.sleep(WAIT_FOR_DEATH_MILLIS);
+                    }
+                } catch (InterruptedException e) {
+                    Log.e(TAG, String.format("%d may still be alive.", mPid), e);
+                }
+            }
+            if (mShards != null && !mShards.isEmpty()) {
+                mHandler.post(new ShardStarter());
+            } else {
+                finish(Activity.RESULT_OK, new Bundle());
+            }
+        }
+    }
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
new file mode 100644
index 0000000..a875e97
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
@@ -0,0 +1,22 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Extras for intent sent by NativeTestInstrumentationTestRunner.
+ */
+public class NativeTestIntent {
+    // Extra names intentionally use the org.chromium.native_test namespace,
+    // matching the consumers of these intents.
+    public static final String EXTRA_COMMAND_LINE_FILE =
+            "org.chromium.native_test.NativeTest.CommandLineFile";
+    public static final String EXTRA_COMMAND_LINE_FLAGS =
+            "org.chromium.native_test.NativeTest.CommandLineFlags";
+    public static final String EXTRA_RUN_IN_SUB_THREAD =
+            "org.chromium.native_test.NativeTest.RunInSubThread";
+    public static final String EXTRA_GTEST_FILTER =
+            "org.chromium.native_test.NativeTest.GtestFilter";
+    public static final String EXTRA_STDOUT_FILE = "org.chromium.native_test.NativeTest.StdoutFile";
+    public static final String EXTRA_COVERAGE_DEVICE_FILE =
+            "org.chromium.native_test.NativeTest.CoverageDeviceFile";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
new file mode 100644
index 0000000..520b748
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
@@ -0,0 +1,21 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Intent action and extras of broadcasts intercepted by TestStatusReceiver.
+ */
+public class TestStatusIntent {
+    // Action names intentionally use the org.chromium.test.reporter namespace,
+    // matching the broadcasts this class describes.
+    public static final String ACTION_TEST_RUN_STARTED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_STARTED";
+    public static final String ACTION_TEST_RUN_FINISHED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_FINISHED";
+    public static final String ACTION_UNCAUGHT_EXCEPTION =
+            "org.chromium.test.reporter.TestStatusReporter.UNCAUGHT_EXCEPTION";
+    public static final String DATA_TYPE_RESULT = "org.chromium.test.reporter/result";
+    public static final String EXTRA_PID = "org.chromium.test.reporter.TestStatusReporter.PID";
+    public static final String EXTRA_STACK_TRACE =
+            "org.chromium.test.reporter.TestStatusReporter.STACK_TRACE";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
new file mode 100644
index 0000000..e539009
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
@@ -0,0 +1,89 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Receives test status broadcasts sent from
+ * {@link org.chromium.test.reporter.TestStatusReporter}.
+ */
+public class TestStatusReceiver extends BroadcastReceiver {
+    private static final String TAG = "test_reporter";
+
+    private final List<TestRunCallback> mTestRunCallbacks = new ArrayList<TestRunCallback>();
+
+    /** An IntentFilter that matches the intents that this class can receive. */
+    private static final IntentFilter INTENT_FILTER;
+    static {
+        IntentFilter filter = new IntentFilter();
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_STARTED);
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_FINISHED);
+        filter.addAction(TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION);
+        try {
+            filter.addDataType(TestStatusIntent.DATA_TYPE_RESULT);
+        } catch (IntentFilter.MalformedMimeTypeException e) {
+            // The MIME type is a compile-time constant, so a malformed type
+            // is a programming error; hence Log.wtf.
+            Log.wtf(TAG, "Invalid MIME type", e);
+        }
+        INTENT_FILTER = filter;
+    }
+
+    /** A callback used when a test run has started or finished. */
+    public interface TestRunCallback {
+        void testRunStarted(int pid);
+        void testRunFinished(int pid);
+        void uncaughtException(int pid, String stackTrace);
+    }
+
+    /** Register a callback for when a test run has started or finished. */
+    public void registerCallback(TestRunCallback c) {
+        mTestRunCallbacks.add(c);
+    }
+
+    /** Register this receiver using the provided context. */
+    public void register(Context c) {
+        c.registerReceiver(this, INTENT_FILTER);
+    }
+
+    /**
+     * Receive a broadcast intent.
+     *
+     * @param context The Context in which the receiver is running.
+     * @param intent The intent received.
+     */
+    @Override
+    public void onReceive(Context context, Intent intent) {
+        int pid = intent.getIntExtra(TestStatusIntent.EXTRA_PID, 0);
+        String stackTrace = intent.getStringExtra(TestStatusIntent.EXTRA_STACK_TRACE);
+
+        // Fan each recognized action out to every registered callback.
+        switch (intent.getAction()) {
+            case TestStatusIntent.ACTION_TEST_RUN_STARTED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunStarted(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_TEST_RUN_FINISHED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunFinished(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.uncaughtException(pid, stackTrace);
+                }
+                break;
+            default:
+                Log.e(TAG, "Unrecognized intent received: " + intent.toString());
+                break;
+        }
+    }
+}
diff --git a/src/build/android/gyp/aar.py b/src/build/android/gyp/aar.py
new file mode 100755
index 0000000..b157cd8
--- /dev/null
+++ b/src/build/android/gyp/aar.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             os.pardir, os.pardir)))
+import gn_helpers
+
+
+_PROGUARD_TXT = 'proguard.txt'
+
+
+def _GetManifestPackage(doc):
+  """Returns the package specified in the manifest.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    String representing the package name.
+
+  Raises:
+    KeyError: if the manifest's root element has no 'package' attribute.
+  """
+  return doc.attrib['package']
+
+
+def _IsManifestEmpty(doc):
+  """Decides whether the given manifest has merge-worthy elements.
+
+  E.g.: <activity>, <service>, etc.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    True if the manifest has NO merge-worthy elements, False otherwise.
+  """
+  for node in doc:
+    if node.tag == 'application':
+      # An <application> with any children (activities, services, ...) is
+      # merge-worthy.
+      if list(node):
+        return False
+    elif node.tag != 'uses-sdk':
+      # Any top-level element other than <application> or <uses-sdk> is
+      # merge-worthy.
+      return False
+
+  return True
+
+
+def _CreateInfo(aar_file):
+  """Extracts and returns .info data from an .aar file.
+
+  Args:
+    aar_file: Path to an input .aar file.
+
+  Returns:
+    A dict containing .info data.
+  """
+  data = {}
+  data['aidl'] = []
+  data['assets'] = []
+  data['resources'] = []
+  data['subjars'] = []
+  data['subjar_tuples'] = []
+  data['has_classes_jar'] = False
+  data['has_proguard_flags'] = False
+  data['has_native_libraries'] = False
+  data['has_r_text_file'] = False
+  with zipfile.ZipFile(aar_file) as z:
+    manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml'))
+    data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml)
+    manifest_package = _GetManifestPackage(manifest_xml)
+    if manifest_package:
+      data['manifest_package'] = manifest_package
+
+    # Classify each archive entry by its path prefix / name.
+    for name in z.namelist():
+      if name.endswith('/'):
+        # Skip directory entries.
+        continue
+      if name.startswith('aidl/'):
+        data['aidl'].append(name)
+      elif name.startswith('res/'):
+        data['resources'].append(name)
+      elif name.startswith('libs/') and name.endswith('.jar'):
+        # Derive a GN-safe label from the jar basename (non-alphanumeric
+        # characters become underscores).
+        label = posixpath.basename(name)[:-4]
+        label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
+        data['subjars'].append(name)
+        data['subjar_tuples'].append([label, name])
+      elif name.startswith('assets/'):
+        data['assets'].append(name)
+      elif name.startswith('jni/'):
+        data['has_native_libraries'] = True
+        if 'native_libraries' in data:
+          data['native_libraries'].append(name)
+        else:
+          data['native_libraries'] = [name]
+      elif name == 'classes.jar':
+        data['has_classes_jar'] = True
+      elif name == _PROGUARD_TXT:
+        data['has_proguard_flags'] = True
+      elif name == 'R.txt':
+        # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
+        # have no resources as well. We treat empty R.txt as having no R.txt.
+        data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+  return data
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist):
+  """Extracts the allowlisted entries of |aar_file| into |output_dir|.
+
+  Extraction happens into a temporary staging directory that replaces
+  |output_dir| at the end, so a failed extraction never leaves a
+  half-populated output directory behind.
+  """
+  with build_utils.TempDir() as tmp_dir:
+    tmp_dir = os.path.join(tmp_dir, 'staging')
+    os.mkdir(tmp_dir)
+    build_utils.ExtractAll(
+        aar_file, path=tmp_dir, predicate=name_allowlist.__contains__)
+    # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
+    with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
+      f.write('source={}\n'.format(aar_file))
+
+    shutil.rmtree(output_dir, ignore_errors=True)
+    shutil.move(tmp_dir, output_dir)
+
+
+def _AddCommonArgs(parser):
+  """Adds the positional aar_file argument shared by all subcommands."""
+  parser.add_argument(
+      'aar_file', help='Path to the AAR file.', type=os.path.normpath)
+
+
+def main():
+  """Parses arguments and dispatches to the 'list' or 'extract' command."""
+  parser = argparse.ArgumentParser(description=__doc__)
+  command_parsers = parser.add_subparsers(dest='command')
+  subp = command_parsers.add_parser(
+      'list', help='Output a GN scope describing the contents of the .aar.')
+  _AddCommonArgs(subp)
+  subp.add_argument('--output', help='Output file.', default='-')
+
+  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
+  _AddCommonArgs(subp)
+  subp.add_argument(
+      '--output-dir',
+      help='Output directory for the extracted files.',
+      required=True,
+      type=os.path.normpath)
+  subp.add_argument(
+      '--assert-info-file',
+      help='Path to .info file. Asserts that it matches what '
+      '"list" would output.',
+      type=argparse.FileType('r'))
+  subp.add_argument(
+      '--ignore-resources',
+      action='store_true',
+      help='Whether to skip extraction of res/')
+
+  args = parser.parse_args()
+
+  aar_info = _CreateInfo(args.aar_file)
+  formatted_info = """\
+# Generated by //build/android/gyp/aar.py
+# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
+
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
+
+  if args.command == 'extract':
+    if args.assert_info_file:
+      # Guard against stale .info files checked into the tree.
+      cached_info = args.assert_info_file.read()
+      if formatted_info != cached_info:
+        raise Exception('android_aar_prebuilt() cached .info file is '
+                        'out-of-date. Run gn gen with '
+                        'update_android_aar_prebuilts=true to update it.')
+
+    with zipfile.ZipFile(args.aar_file) as zf:
+      names = zf.namelist()
+      if args.ignore_resources:
+        names = [n for n in names if not n.startswith('res')]
+
+    _PerformExtract(args.aar_file, args.output_dir, set(names))
+
+  elif args.command == 'list':
+    aar_output_present = args.output != '-' and os.path.isfile(args.output)
+    if aar_output_present:
+      # Some .info files are read-only, for example the cipd-controlled ones
+      # under third_party/android_deps/repository. To deal with these, first
+      # check that its content is correct, and if it is, exit without touching
+      # the file system.
+      file_info = open(args.output, 'r').read()
+      if file_info == formatted_info:
+        return
+
+    # Try to write the file. This may fail for read-only ones that were
+    # not updated.
+    try:
+      with open(args.output, 'w') as f:
+        f.write(formatted_info)
+    except IOError as e:
+      if not aar_output_present:
+        raise e
+      raise Exception('Could not update output file: %s\n%s\n' %
+                      (args.output, e))
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/aar.pydeps b/src/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000..7e2924b
--- /dev/null
+++ b/src/build/android/gyp/aar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/aidl.py b/src/build/android/gyp/aidl.py
new file mode 100755
index 0000000..b8099aa
--- /dev/null
+++ b/src/build/android/gyp/aidl.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(argv):
+  """Runs Android's aidl over each input file and zips the results.
+
+  Positional args are the .aidl files to compile; the generated .java
+  sources are packaged into the --srcjar output, laid out by the Java
+  package declared in each generated file.
+  """
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  options.includes = build_utils.ParseGnList(options.includes)
+
+  with build_utils.TempDir() as temp_dir:
+    # Compile each .aidl file to a .java file in the temp directory.
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGnList(options.imports)
+      ]
+      aidl_cmd += ['-I' + s for s in options.includes]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    with build_utils.AtomicOutput(options.srcjar) as f:
+      with zipfile.ZipFile(f, 'w') as srcjar:
+        for path in build_utils.FindInDirectory(temp_dir, '*.java'):
+          with open(path) as fileobj:
+            data = fileobj.read()
+          # Place each file under its declared Java package directory.
+          pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
+          arcname = '%s/%s' % (
+              pkg_name.replace('.', '/'), os.path.basename(path))
+          build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+
+  if options.depfile:
+    # Depend on every .java in the include dirs so changes retrigger aidl.
+    include_files = []
+    for include_dir in options.includes:
+      include_files += build_utils.FindInDirectory(include_dir, '*.java')
+    build_utils.WriteDepfile(options.depfile, options.srcjar, include_files)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/aidl.pydeps b/src/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000..11c55ed
--- /dev/null
+++ b/src/build/android/gyp/aidl.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/allot_native_libraries.py b/src/build/android/gyp/allot_native_libraries.py
new file mode 100755
index 0000000..978b173
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Allots libraries to modules to be packaged into.
+
+All libraries that are depended on by a single module will be allotted to this
+module. All other libraries will be allotted to the closest ancestor.
+
+Example:
+  Given the module dependency structure
+
+        c
+       / \
+      b   d
+     /     \
+    a       e
+
+  and libraries assignment
+
+    a: ['lib1.so']
+    e: ['lib2.so', 'lib1.so']
+
+  will make the allotment decision
+
+    c: ['lib1.so']
+    e: ['lib2.so']
+
+  The above example is invoked via:
+
+    ./allot_native_libraries \
+      --libraries 'a,["1.so"]' \
+      --libraries 'e,["2.so", "1.so"]' \
+      --dep c:b \
+      --dep b:a \
+      --dep c:d \
+      --dep d:e \
+      --output <output JSON>
+"""
+
+import argparse
+import collections
+import json
+import sys
+
+from util import build_utils
+
+
+def _ModuleLibrariesPair(arg):
+  """Splits a 'module,<GN list>' argument into a (module, libraries) pair.
+
+  Only the first comma is significant; the remainder is the raw GN list,
+  parsed later in main() via build_utils.ParseGnList.
+  """
+  pos = arg.find(',')
+  assert pos > 0
+  return (arg[:pos], arg[pos + 1:])
+
+
+def _DepPair(arg):
+  """Splits a '<parent>:<child>' dependency argument into a (parent, child) pair."""
+  parent, child = arg.split(':')
+  return (parent, child)
+
+
+def _PathFromRoot(module_tree, module):
+  """Computes path from root to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    module: Module to which to compute the path.
+
+  Returns:
+    Path from the root to the module, as a list ordered root-first.
+  """
+  path = [module]
+  # Walk parent links until a module with no parent (the root) is reached.
+  while module_tree.get(module):
+    module = module_tree[module]
+    path = [module] + path
+  return path
+
+
+def _ClosestCommonAncestor(module_tree, modules):
+  """Computes the common ancestor of a set of modules.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    modules: Set of modules for which to find the closest common ancestor.
+
+  Returns:
+    The closest common ancestor, or None if the modules' paths diverge
+    already at the root level (no common ancestor exists).
+  """
+  paths = [_PathFromRoot(module_tree, m) for m in modules]
+  assert len(paths) > 0
+  ancestor = None
+  # Descend from the root one level at a time; the answer is the last level
+  # on which all paths agree. zip() truncates to the shortest path.
+  for level in zip(*paths):
+    if len(set(level)) != 1:
+      return ancestor
+    ancestor = level[0]
+  return ancestor
+
+
+def _AllotLibraries(module_tree, libraries_map):
+  """Allot all libraries to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent. Modules can map
+      to None, which is considered the root of the tree.
+    libraries_map: Dictionary mapping each library to a set of modules, which
+      depend on the library.
+
+  Returns:
+    A dictionary mapping each module name to a set of libraries allotted
+    to the module such that libraries with multiple dependees are allotted to
+    the closest ancestor.
+
+  Raises:
+    Exception if some libraries can only be allotted to the None root.
+  """
+  allotment_map = collections.defaultdict(set)
+  for library, modules in libraries_map.items():
+    ancestor = _ClosestCommonAncestor(module_tree, modules)
+    if not ancestor:
+      raise Exception('Cannot allot libraries for given dependency tree')
+    allotment_map[ancestor].add(library)
+  return allotment_map
+
+
+def main(args):
+  """Command-line entry point: parses args, allots libraries, writes JSON."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--libraries',
+      action='append',
+      type=_ModuleLibrariesPair,
+      required=True,
+      help='A pair of module name and GN list of libraries a module depends '
+      'on. Can be specified multiple times.')
+  parser.add_argument(
+      '--output',
+      required=True,
+      help='A JSON file with a key for each module mapping to a list of '
+      'libraries, which should be packaged into this module.')
+  parser.add_argument(
+      '--dep',
+      action='append',
+      type=_DepPair,
+      dest='deps',
+      default=[],
+      help='A pair of parent module name and child module name '
+      '(format: "<parent>:<child>"). Can be specified multiple times.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  options.libraries = [(m, build_utils.ParseGnList(l))
+                       for m, l in options.libraries]
+
+  # Parse input creating libraries and dependency tree.
+  libraries_map = collections.defaultdict(set)  # Maps each library to its
+  #                                               dependee modules.
+  module_tree = {}  # Maps each module name to its parent.
+  for module, libraries in options.libraries:
+    module_tree[module] = None
+    for library in libraries:
+      libraries_map[library].add(module)
+  for parent, child in options.deps:
+    if module_tree.get(child):
+      raise Exception('%s cannot have multiple parents' % child)
+    module_tree[child] = parent
+    # Ensure the parent exists as a key even if it declared no libraries.
+    module_tree[parent] = module_tree.get(parent)
+
+  # Allot all libraries to a module such that libraries with multiple dependees
+  # are allotted to the closest ancestor.
+  allotment_map = _AllotLibraries(module_tree, libraries_map)
+
+  # The build system expects there to be a set of libraries even for the modules
+  # that don't have any libraries allotted.
+  for module in module_tree:
+    # Creates missing sets because of defaultdict.
+    allotment_map[module] = allotment_map[module]
+
+  with open(options.output, 'w') as f:
+    # Write native libraries config and ensure the output is deterministic.
+    json.dump({m: sorted(l)
+               for m, l in allotment_map.items()},
+              f,
+              sort_keys=True,
+              indent=2)
diff --git a/src/build/android/gyp/allot_native_libraries.pydeps b/src/build/android/gyp/allot_native_libraries.pydeps
new file mode 100644
index 0000000..d8b10cd
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py
+../../gn_helpers.py
+allot_native_libraries.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/apkbuilder.py b/src/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000..f1e6563
--- /dev/null
+++ b/src/build/android/gyp/apkbuilder.py
@@ -0,0 +1,560 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import finalize_apk
+
+from util import build_utils
+from util import diff_utils
+from util import zipalign
+
+# Input dex.jar files are zipaligned.
+zipalign.ApplyZipFileZipAlignFix()
+
+
+# Taken from aapt's Package.cpp: asset file extensions that are stored in the
+# APK without compression.
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
+def _ParseArgs(args):
+  """Parses command-line args into an options object.
+
+  GN-list arguments are expanded into Python lists, and cross-argument
+  constraints (APK signing args, ABI/native-lib pairings) are validated.
+  """
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--assets',
+      help='GYP-list of files to add as assets in the form '
+      '"srcPath:zipPath", where ":zipPath" is optional.')
+  parser.add_argument(
+      '--java-resources', help='GYP-list of java_resources JARs to include.')
+  parser.add_argument('--write-asset-list',
+                      action='store_true',
+                      help='Whether to create an assets/assets_list file.')
+  parser.add_argument(
+      '--uncompressed-assets',
+      help='Same as --assets, except disables compression.')
+  parser.add_argument('--resource-apk',
+                      help='An .ap_ file built using aapt',
+                      required=True)
+  parser.add_argument('--output-apk',
+                      help='Path to the output file',
+                      required=True)
+  parser.add_argument('--format', choices=['apk', 'bundle-module'],
+                      default='apk', help='Specify output format.')
+  parser.add_argument('--dex-file',
+                      help='Path to the classes.dex to use')
+  parser.add_argument(
+      '--jdk-libs-dex-file',
+      help='Path to classes.dex created by dex_jdk_libs.py')
+  parser.add_argument('--uncompress-dex', action='store_true',
+                      help='Store .dex files uncompressed in the APK')
+  parser.add_argument('--native-libs',
+                      action='append',
+                      help='GYP-list of native libraries to include. '
+                           'Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--secondary-native-libs',
+                      action='append',
+                      help='GYP-list of native libraries for secondary '
+                           'android-abi. Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--android-abi',
+                      help='Android architecture to use for native libraries')
+  parser.add_argument('--secondary-android-abi',
+                      help='The secondary Android architecture to use for'
+                           'secondary native libraries')
+  parser.add_argument(
+      '--is-multi-abi',
+      action='store_true',
+      help='Will add a placeholder for the missing ABI if no native libs or '
+      'placeholders are set for either the primary or secondary ABI. Can only '
+      'be set if both --android-abi and --secondary-android-abi are set.')
+  parser.add_argument(
+      '--native-lib-placeholders',
+      help='GYP-list of native library placeholders to add.')
+  parser.add_argument(
+      '--secondary-native-lib-placeholders',
+      help='GYP-list of native library placeholders to add '
+      'for the secondary ABI')
+  parser.add_argument('--uncompress-shared-libraries', default='False',
+      choices=['true', 'True', 'false', 'False'],
+      help='Whether to uncompress native shared libraries. Argument must be '
+           'a boolean value.')
+  parser.add_argument(
+      '--apksigner-jar', help='Path to the apksigner executable.')
+  parser.add_argument('--zipalign-path',
+                      help='Path to the zipalign executable.')
+  parser.add_argument('--key-path',
+                      help='Path to keystore for signing.')
+  parser.add_argument('--key-passwd',
+                      help='Keystore password')
+  parser.add_argument('--key-name',
+                      help='Keystore name')
+  parser.add_argument(
+      '--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
+  parser.add_argument(
+      '--best-compression',
+      action='store_true',
+      help='Use zip -9 rather than zip -1')
+  parser.add_argument(
+      '--library-always-compress',
+      action='append',
+      help='The list of library files that we always compress.')
+  parser.add_argument(
+      '--library-renames',
+      action='append',
+      help='The list of library files that we prepend crazy. to their names.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+  # Expand all GN-list valued arguments into real Python lists.
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+  options.native_lib_placeholders = build_utils.ParseGnList(
+      options.native_lib_placeholders)
+  options.secondary_native_lib_placeholders = build_utils.ParseGnList(
+      options.secondary_native_lib_placeholders)
+  options.java_resources = build_utils.ParseGnList(options.java_resources)
+  options.native_libs = build_utils.ParseGnList(options.native_libs)
+  options.secondary_native_libs = build_utils.ParseGnList(
+      options.secondary_native_libs)
+  options.library_always_compress = build_utils.ParseGnList(
+      options.library_always_compress)
+  options.library_renames = build_utils.ParseGnList(options.library_renames)
+
+  # --apksigner-jar, --zipalign-path, --key-xxx arguments are
+  # required when building an APK, but not a bundle module.
+  if options.format == 'apk':
+    required_args = [
+        'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
+    ]
+    for required in required_args:
+      if not vars(options)[required]:
+        raise Exception('Argument --%s is required for APKs.' % (
+            required.replace('_', '-')))
+
+  # Convert the string-valued flag into a real bool.
+  options.uncompress_shared_libraries = \
+      options.uncompress_shared_libraries in [ 'true', 'True' ]
+
+  if not options.android_abi and (options.native_libs or
+                                  options.native_lib_placeholders):
+    raise Exception('Must specify --android-abi with --native-libs')
+  if not options.secondary_android_abi and (options.secondary_native_libs or
+      options.secondary_native_lib_placeholders):
+    raise Exception('Must specify --secondary-android-abi with'
+                    ' --secondary-native-libs')
+  if options.is_multi_abi and not (options.android_abi
+                                   and options.secondary_android_abi):
+    raise Exception('Must specify --is-multi-abi with both --android-abi '
+                    'and --secondary-android-abi.')
+  return options
+
+
+def _SplitAssetPath(path):
+  """Returns (src, dest) given an asset path in the form src[:dest].
+
+  When no dest is given, the src basename is used as the dest path.
+  NOTE(review): splits on ':', so src paths containing colons (e.g. Windows
+  drive letters) would be mis-split — assumed POSIX paths; confirm.
+  """
+  path_parts = path.split(':')
+  src_path = path_parts[0]
+  if len(path_parts) > 1:
+    dest_path = path_parts[1]
+  else:
+    dest_path = os.path.basename(src_path)
+  return src_path, dest_path
+
+
+def _ExpandPaths(paths):
+  """Converts src:dst into tuples and enumerates files within directories.
+
+  Args:
+    paths: Paths in the form "src_path:dest_path"
+
+  Returns:
+    A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+    ordering within output .apk).
+  """
+  ret = []
+  for path in paths:
+    src_path, dest_path = _SplitAssetPath(path)
+    if os.path.isdir(src_path):
+      # A directory source expands to every file under it, each keeping its
+      # path relative to src_path underneath dest_path.
+      for f in build_utils.FindInDirectory(src_path, '*'):
+        ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+    else:
+      ret.append((src_path, dest_path))
+  ret.sort(key=lambda t:t[1])
+  return ret
+
+
+def _GetAssetsToAdd(path_tuples,
+                    fast_align,
+                    disable_compression=False,
+                    allow_reads=True):
+  """Returns the list of file_detail tuples for assets in the apk.
+
+  Args:
+    path_tuples: List of src_path, dest_path tuples to add.
+    fast_align: Whether to perform alignment in python zipfile (alternatively
+                alignment can be done using the zipalign utility out of band).
+    disable_compression: Whether to disable compression.
+    allow_reads: If false, we do not try to read the files from disk (to find
+                 their size for example).
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuple
+  representing what and how assets are added.
+  """
+  assets_to_add = []
+
+  # Group all uncompressed assets together in the hope that it will increase
+  # locality of mmap'ed files.
+  for target_compress in (False, True):
+    for src_path, dest_path in path_tuples:
+      compress = not disable_compression and (
+          os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
+
+      if target_compress == compress:
+        # AddToZipHermetic() uses this logic to avoid growing small files.
+        # We need it here in order to set alignment correctly.
+        if allow_reads and compress and os.path.getsize(src_path) < 16:
+          compress = False
+
+        apk_path = 'assets/' + dest_path
+        alignment = 0 if compress and not fast_align else 4
+        assets_to_add.append((apk_path, src_path, compress, alignment))
+  return assets_to_add
+
+
+def _AddFiles(apk, details):
+  """Adds files to the apk.
+
+  Args:
+    apk: ZipFile of the APK to add to.
+    details: A list of file detail tuples (apk_path, src_path, compress,
+    alignment) representing what and how files are added to the APK.
+  """
+  for apk_path, src_path, compress, alignment in details:
+    # This check is only relevant for assets, but it should not matter if it is
+    # checked for the whole list of files.
+    try:
+      apk.getinfo(apk_path)
+      # Should never happen since write_build_config.py handles merging.
+      raise Exception(
+          'Multiple targets specified the asset path: %s' % apk_path)
+    except KeyError:
+      # getinfo() raising KeyError means the entry is not yet present: add it.
+      zipalign.AddToZipHermetic(
+          apk,
+          apk_path,
+          src_path=src_path,
+          compress=compress,
+          alignment=alignment)
+
+
+def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
+                             lib_always_compress, lib_renames):
+  """Returns the list of file_detail tuples for native libraries in the apk.
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuple
+  representing what and how native libraries are added.
+  """
+  libraries_to_add = []
+
+
+  for path in native_libs:
+    basename = os.path.basename(path)
+    # Libraries are compressed unless uncompress is requested; names matched
+    # by lib_always_compress are compressed regardless.
+    compress = not uncompress or any(lib_name in basename
+                                     for lib_name in lib_always_compress)
+    rename = any(lib_name in basename for lib_name in lib_renames)
+    if rename:
+      basename = 'crazy.' + basename
+
+    lib_android_abi = android_abi
+    if path.startswith('android_clang_arm64_hwasan/'):
+      lib_android_abi = 'arm64-v8a-hwasan'
+
+    apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
+    # Stored (or fast-aligned) libraries get page-size (0x1000) alignment.
+    alignment = 0 if compress and not fast_align else 0x1000
+    libraries_to_add.append((apk_path, path, compress, alignment))
+
+  return libraries_to_add
+
+
+def _CreateExpectationsData(native_libs, assets):
+  """Returns a deterministic text summary of native libraries and assets.
+
+  Each (apk_path, src_path, compress, alignment) tuple becomes one line;
+  inputs are sorted first so the output is stable across runs.
+  """
+  native_libs = sorted(native_libs)
+  assets = sorted(assets)
+
+  ret = []
+  for apk_path, _, compress, alignment in native_libs + assets:
+    ret.append('apk_path=%s, compress=%s, alignment=%s\n' %
+               (apk_path, compress, alignment))
+  return ''.join(ret)
+
+
+def main(args):
+  build_utils.InitLogging('APKBUILDER_DEBUG')
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Until Python 3.7, there's no better way to set compression level.
+  # The default is 6.
+  if options.best_compression:
+    # Compresses about twice as slow as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 9
+  else:
+    # Compresses about twice as fast as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 1
+
+  # Manually align only when alignment is necessary.
+  # Python's zip implementation duplicates file comments in the central
+  # directory, whereas zipalign does not, so use zipalign for official builds.
+  fast_align = options.format == 'apk' and not options.best_compression
+
+  native_libs = sorted(options.native_libs)
+
+  # Include native libs in the depfile_deps since GN doesn't know about the
+  # dependencies when is_component_build=true.
+  depfile_deps = list(native_libs)
+
+  # For targets that depend on static library APKs, dex paths are created by
+  # the static library's dexsplitter target and GN doesn't know about these
+  # paths.
+  if options.dex_file:
+    depfile_deps.append(options.dex_file)
+
+  secondary_native_libs = []
+  if options.secondary_native_libs:
+    secondary_native_libs = sorted(options.secondary_native_libs)
+    depfile_deps += secondary_native_libs
+
+  if options.java_resources:
+    # Included via .build_config, so need to write it to depfile.
+    depfile_deps.extend(options.java_resources)
+
+  assets = _ExpandPaths(options.assets)
+  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)
+
+  # Included via .build_config, so need to write it to depfile.
+  depfile_deps.extend(x[0] for x in assets)
+  depfile_deps.extend(x[0] for x in uncompressed_assets)
+
+  # Bundle modules have a structure similar to APKs, except that resources
+  # are compiled in protobuf format (instead of binary xml), and that some
+  # files are located into different top-level directories, e.g.:
+  #  AndroidManifest.xml -> manifest/AndroidManifest.xml
+  #  classes.dex -> dex/classes.dex
+  #  res/ -> res/  (unchanged)
+  #  assets/ -> assets/  (unchanged)
+  #  <other-file> -> root/<other-file>
+  #
+  # Hence, the following variables are used to control the location of files in
+  # the final archive.
+  if options.format == 'bundle-module':
+    apk_manifest_dir = 'manifest/'
+    apk_root_dir = 'root/'
+    apk_dex_dir = 'dex/'
+  else:
+    apk_manifest_dir = ''
+    apk_root_dir = ''
+    apk_dex_dir = ''
+
+  def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
+    ret = _GetAssetsToAdd(assets,
+                          fast_align,
+                          disable_compression=False,
+                          allow_reads=allow_reads)
+    ret.extend(
+        _GetAssetsToAdd(uncompressed_assets,
+                        fast_align,
+                        disable_compression=True,
+                        allow_reads=allow_reads))
+    return ret
+
+  libs_to_add = _GetNativeLibrariesToAdd(
+      native_libs, options.android_abi, options.uncompress_shared_libraries,
+      fast_align, options.library_always_compress, options.library_renames)
+  if options.secondary_android_abi:
+    libs_to_add.extend(
+        _GetNativeLibrariesToAdd(
+            secondary_native_libs, options.secondary_android_abi,
+            options.uncompress_shared_libraries, fast_align,
+            options.library_always_compress, options.library_renames))
+
+  if options.expected_file:
+    # We compute expectations without reading the files. This allows us to check
+    # expectations for different targets by just generating their build_configs
+    # and not have to first generate all the actual files and all their
+    # dependencies (for example by just passing --only-verify-expectations).
+    asset_details = _GetAssetDetails(assets,
+                                     uncompressed_assets,
+                                     fast_align,
+                                     allow_reads=False)
+
+    actual_data = _CreateExpectationsData(libs_to_add, asset_details)
+    diff_utils.CheckExpectations(actual_data, options)
+
+    if options.only_verify_expectations:
+      if options.depfile:
+        build_utils.WriteDepfile(options.depfile,
+                                 options.actual_file,
+                                 inputs=depfile_deps)
+      return
+
+  # If we are past this point, we are going to actually create the final apk so
+  # we should recompute asset details again but maybe perform some optimizations
+  # based on the size of the files on disk.
+  assets_to_add = _GetAssetDetails(
+      assets, uncompressed_assets, fast_align, allow_reads=True)
+
+  # Targets generally do not depend on apks, so no need for only_if_changed.
+  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
+    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
+         zipfile.ZipFile(f, 'w') as out_apk:
+
+      def add_to_zip(zip_path, data, compress=True, alignment=4):
+        zipalign.AddToZipHermetic(
+            out_apk,
+            zip_path,
+            data=data,
+            compress=compress,
+            alignment=0 if compress and not fast_align else alignment)
+
+      def copy_resource(zipinfo, out_dir=''):
+        add_to_zip(
+            out_dir + zipinfo.filename,
+            resource_apk.read(zipinfo.filename),
+            compress=zipinfo.compress_type != zipfile.ZIP_STORED)
+
+      # Make assets come before resources in order to maintain the same file
+      # ordering as GYP / aapt. http://crbug.com/561862
+      resource_infos = resource_apk.infolist()
+
+      # 1. AndroidManifest.xml
+      logging.debug('Adding AndroidManifest.xml')
+      copy_resource(
+          resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)
+
+      # 2. Assets
+      logging.debug('Adding assets/')
+      _AddFiles(out_apk, assets_to_add)
+
+      # 3. Dex files
+      logging.debug('Adding classes.dex')
+      if options.dex_file:
+        with open(options.dex_file, 'rb') as dex_file_obj:
+          if options.dex_file.endswith('.dex'):
+            max_dex_number = 1
+            # This is the case for incremental_install=true.
+            add_to_zip(
+                apk_dex_dir + 'classes.dex',
+                dex_file_obj.read(),
+                compress=not options.uncompress_dex)
+          else:
+            max_dex_number = 0
+            with zipfile.ZipFile(dex_file_obj) as dex_zip:
+              for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+                max_dex_number += 1
+                add_to_zip(
+                    apk_dex_dir + dex,
+                    dex_zip.read(dex),
+                    compress=not options.uncompress_dex)
+
+      if options.jdk_libs_dex_file:
+        with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
+          add_to_zip(
+              apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
+              dex_file_obj.read(),
+              compress=not options.uncompress_dex)
+
+      # 4. Native libraries.
+      logging.debug('Adding lib/')
+      _AddFiles(out_apk, libs_to_add)
+
+      # Add a placeholder lib if the APK should be multi ABI but is missing libs
+      # for one of the ABIs.
+      native_lib_placeholders = options.native_lib_placeholders
+      secondary_native_lib_placeholders = (
+          options.secondary_native_lib_placeholders)
+      if options.is_multi_abi:
+        if ((secondary_native_libs or secondary_native_lib_placeholders)
+            and not native_libs and not native_lib_placeholders):
+          native_lib_placeholders += ['libplaceholder.so']
+        if ((native_libs or native_lib_placeholders)
+            and not secondary_native_libs
+            and not secondary_native_lib_placeholders):
+          secondary_native_lib_placeholders += ['libplaceholder.so']
+
+      # Add placeholder libs.
+      for name in sorted(native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.android_abi, name)
+        add_to_zip(apk_path, '', alignment=0x1000)
+
+      for name in sorted(secondary_native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
+        add_to_zip(apk_path,